Tutorial contents
Preparation
1. Vagrant and VirtualBox
2. Apache
3. MySQL
4. Python
5. Python Hello World
Deep learning
1. Install Keras and test deep learning
2. Save and load the parameters
3. Save and load the parameters at the same time
4. Use your own dataset
Use your own dataset
We are going to teach the machine what cats and dogs look like.
Download the cat and dog photos from: https://www.kaggle.com/c/dogs-vs-cats-redux-kernels-edition
These photos of cats and dogs form our dataset.
Create a "data" folder.
Create a "train" folder and a "validation" folder inside the data folder.
Open the train folder and create a "cats" folder and a "dogs" folder. Move 200 cat photos into the cats folder and 200 dog photos into the dogs folder.
Now open the validation folder and create a cats folder and a dogs folder. Move 200 cat photos into the cats folder and 200 dog photos into the dogs folder. (The sketch below shows one way to create this layout with a few lines of Python.)
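If you prefer to script the folder creation instead of doing it by hand, a minimal Python sketch like this one produces the same layout (moving the photos into each folder is still up to you):
# Minimal sketch: create data/train/{cats,dogs} and data/validation/{cats,dogs}.
# Moving the 200 photos into each folder is still done by hand (or with shutil).
import os

for split in ('train', 'validation'):
    for label in ('cats', 'dogs'):
        path = os.path.join('data', split, label)
        if not os.path.isdir(path):
            os.makedirs(path)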
Créer "dogvscat.py" et écrire comme ça dedans:
from __future__ import print_function
import os
import os.path

import numpy as np
import keras
import tensorflow as tf
import keras.backend.tensorflow_backend as KTF
from keras.models import Sequential, model_from_json
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator

f_log = './log'
f_model = './model/dogvscat'
model_yaml = 'dogvscat_model.yaml'
model_filename = 'dogvscat_model.json'
weights_filename = 'dogvscat_model_weights.hdf5'

batch_size = 32
epochs = 5
nb_train_samples = 400        # 200 cat photos + 200 dog photos in data/train
nb_validation_samples = 100   # how many validation images to evaluate on

# Make sure the model folder exists before anything tries to write into it
if not os.path.isdir(f_model):
    os.makedirs(f_model)

# Keep the current session so it can be restored at the end
old_session = KTF.get_session()

print('Building model...')
session = tf.Session('')
KTF.set_session(session)

if os.path.isfile(os.path.join(f_model, model_filename)):
    # A saved architecture exists: rebuild the model and reload its weights
    print('Saved parameters found. I will use this file...')
    json_string = open(os.path.join(f_model, model_filename)).read()
    model = model_from_json(json_string)
    model.summary()
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    model.load_weights(os.path.join(f_model, weights_filename))
else:
    # No saved parameters: build a small CNN from scratch
    print('Saved parameters not found. Creating new model...')
    model = Sequential()
    model.add(Conv2D(32, (3, 3), input_shape=(128, 128, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(64, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    model.add(Dense(64))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(2))            # two classes: cats and dogs
    model.add(Activation('softmax'))
    model.summary()
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])

# Augment the training images; only rescale the validation images
train_datagen = ImageDataGenerator(
    rescale=1.0 / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1.0 / 255)

train_generator = train_datagen.flow_from_directory(
    'data/train',
    target_size=(128, 128),
    batch_size=batch_size,
    class_mode='categorical')
validation_generator = test_datagen.flow_from_directory(
    'data/validation',
    target_size=(128, 128),
    batch_size=batch_size,
    class_mode='categorical')

# TensorBoard logging plus a checkpoint that keeps only the best weights
tb_cb = keras.callbacks.TensorBoard(log_dir=f_log, histogram_freq=0)
cp_cb = keras.callbacks.ModelCheckpoint(
    filepath=os.path.join(f_model, weights_filename),
    monitor='val_loss', verbose=1, save_best_only=True, mode='auto')
cbks = [tb_cb, cp_cb]

history = model.fit_generator(
    train_generator,
    steps_per_epoch=int(np.ceil(nb_train_samples / batch_size)),
    epochs=epochs,
    validation_data=validation_generator,
    validation_steps=int(np.ceil(nb_validation_samples / batch_size)),
    callbacks=cbks)

score = model.evaluate_generator(
    validation_generator, int(np.ceil(nb_validation_samples / batch_size)))
print('')
print('Test score:', score[0])
print('Test accuracy:', score[1])

# Save the architecture (JSON and YAML) and the final weights
json_string = model.to_json()
open(os.path.join(f_model, model_filename), 'w').write(json_string)
yaml_string = model.to_yaml()
open(os.path.join(f_model, model_yaml), 'w').write(yaml_string)
print('save weights')
model.save_weights(os.path.join(f_model, weights_filename))

KTF.set_session(old_session)
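Once the script has saved its parameters, the model can be reused for a quick prediction. The sketch below is not part of the tutorial's code: 'my_photo.jpg' is a placeholder for any photo of yours, and the cat/dog order follows from flow_from_directory sorting the class folders alphabetically (cats = 0, dogs = 1).
# Hypothetical usage sketch: reload the saved model and classify one photo.
# 'my_photo.jpg' is a placeholder; adjust the paths to your own files.
import numpy as np
from keras.models import model_from_json
from keras.preprocessing import image

model = model_from_json(open('./model/dogvscat/dogvscat_model.json').read())
model.load_weights('./model/dogvscat/dogvscat_model_weights.hdf5')

img = image.load_img('my_photo.jpg', target_size=(128, 128))
x = image.img_to_array(img) / 255.0    # same 1/255 rescaling as in training
x = np.expand_dims(x, axis=0)          # batch of one: shape (1, 128, 128, 3)
probs = model.predict(x)[0]
print('cat' if probs[0] > probs[1] else 'dog', probs)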
Run these commands (sudo has no effect on cd, so it is dropped there):
$ cd /vagrant
$ sudo python3 dogvscat.py
Deep learning on the dataset will start. Run the script a second time to check that it finds and reloads the saved parameters instead of building a new model:
$ sudo python3 dogvscat.py
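The script also writes TensorBoard logs to ./log. If you want to watch the training curves, you can point TensorBoard at that folder (assuming TensorBoard was installed together with TensorFlow):
$ tensorboard --logdir=./log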