Initial commit
This commit is contained in:
5
Tensorflow/tutoriel33/README.md
Normal file
5
Tensorflow/tutoriel33/README.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Tutoriel Keras
|
||||
## Utilisation des Callbacks
|
||||
|
||||
La vidéo de ce tutoriel est disponible à l'adresse suivante: https://www.youtube.com/watch?v=_cBmcp5zi2w
|
||||
|
||||
68
Tensorflow/tutoriel33/graph.py
Normal file
68
Tensorflow/tutoriel33/graph.py
Normal file
@@ -0,0 +1,68 @@
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.ticker as mtick
|
||||
import csv
|
||||
import sys
|
||||
import numpy as np
|
||||
|
||||
# Command-line interface: the script takes exactly one argument,
# the path of the CSV training log to plot.
nb_args = len(sys.argv)
if nb_args != 2:
    print("Usage:", sys.argv[0], "<fichier csv>")
    quit()

fichier = sys.argv[1]
|
||||
|
||||
def calc(tab_data, fenetre):
    """Return the moving average of tab_data over a sliding window.

    fenetre is the window size; the result has len(tab_data) - fenetre
    entries (empty when the data is shorter than the window).
    """
    return [np.mean(tab_data[start:start + fenetre])
            for start in range(len(tab_data) - fenetre)]
|
||||
|
||||
# Series parsed from the Keras CSVLogger output.
x, accuracy, loss = [], [], []
val_accuracy, val_loss = [], []
fenetre = 50  # smoothing window for calc() — currently not applied here
val = 0  # set to 1 when the log also carries validation columns

with open(fichier, 'r') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    next(reader)  # skip the header row
    for row in reader:
        x.append(float(row[0]))
        accuracy.append(float(row[1]))
        loss.append(float(row[2]))
        # A 5-column row means validation metrics were logged too.
        if len(row) == 5:
            val_accuracy.append(float(row[3]))
            val_loss.append(float(row[4]))
            val = 1
|
||||
|
||||
|
||||
# Two stacked subplots: training metrics on top, validation metrics below.
# Each subplot uses a twin y-axis so accuracy (0-100%) and loss (0-2) can
# share the same x-axis (epochs).
fig, (ax1, ax2)=plt.subplots(2)
fig.set_size_inches(9, 7, forward=True)

ax1.set_ylim([0, 1.0])
ax1.grid(which='both')
ax1.yaxis.set_major_formatter(mtick.PercentFormatter(1.0))
ln=ax1.plot(x, accuracy, label='Accuracy')

ax1_=ax1.twinx()
ax1_.set_ylim([0.0, 2.0])
ln_=ax1_.plot(x, loss, label='Loss', color='red')

lns=ln+ln_
labs=[l.get_label() for l in lns]
# BUG FIX: lns/labs were computed but never passed to a legend call, so the
# top subplot had no legend (unlike ax2 below). Combine both twin-axis lines
# into one legend, as done for ax2.
ax1.legend(lns, labs, loc='upper right')

ax2.set_ylim([0, 1.0])
ax2.grid(which='both')
ax2.yaxis.set_major_formatter(mtick.PercentFormatter(1.0))
ln=ax2.plot(x, val_accuracy, label='Val accuracy')

ax2_=ax2.twinx()
ax2_.set_ylim([0.0, 2.0])
ln_=ax2_.plot(x, val_loss, label='Val loss', color='red')

lns=ln+ln_
labs=[l.get_label() for l in lns]
ax2.legend(lns, labs, loc='upper center', bbox_to_anchor=(0.5, -0.1), fancybox=True, shadow=True, ncol=5)

plt.show()
|
||||
78
Tensorflow/tutoriel33/model.py
Normal file
78
Tensorflow/tutoriel33/model.py
Normal file
@@ -0,0 +1,78 @@
|
||||
from tensorflow.keras import layers, models
|
||||
|
||||
# Fonction d'activation à tester: sigmoid, tanh, relu,
|
||||
|
||||
def model(nbr_sortie, nbr_cc):
    """Build a VGG-style CNN for 75x100 RGB inputs.

    nbr_sortie: number of output classes (width of the softmax layer).
    nbr_cc: base number of convolution channels, doubled/quadrupled in
    the deeper stages.
    """
    def double_conv(tensor, filters, kernel):
        """Two same-padded ReLU convolutions followed by batch norm."""
        tensor = layers.Conv2D(filters, kernel, activation='relu', padding='same')(tensor)
        tensor = layers.Conv2D(filters, kernel, activation='relu', padding='same')(tensor)
        return layers.BatchNormalization()(tensor)

    entree = layers.Input(shape=(75, 100, 3), dtype='float32')

    # Stage 1: one double-conv block (5x5 kernels), then downsample.
    net = double_conv(entree, nbr_cc, 5)
    net = layers.MaxPool2D()(net)

    # Stage 2: two double-conv blocks (3x3 kernels), then downsample.
    for _ in range(2):
        net = double_conv(net, 2 * nbr_cc, 3)
    net = layers.MaxPool2D()(net)

    # Stage 3: four double-conv blocks, then downsample.
    for _ in range(4):
        net = double_conv(net, 4 * nbr_cc, 3)
    net = layers.MaxPool2D()(net)

    # Classifier head.
    net = layers.Flatten()(net)
    net = layers.Dense(1024, activation='relu')(net)
    net = layers.Dense(1024, activation='relu')(net)
    net = layers.BatchNormalization()(net)
    sortie = layers.Dense(nbr_sortie, activation='softmax')(net)

    return models.Model(inputs=entree, outputs=sortie)
|
||||
|
||||
def model2(nbr_sortie, nbr_cc):
    """Variant of model(): strided convolutions replace max-pooling.

    Same stage layout and classifier head as model(), but each stage ends
    with a stride-2 convolution instead of a MaxPool2D layer.
    """
    def conv_block(tensor, filters, kernel, last_strides=1):
        """Two same-padded ReLU convolutions (second optionally strided) + BN."""
        tensor = layers.Conv2D(filters, kernel, activation='relu', padding='same')(tensor)
        tensor = layers.Conv2D(filters, kernel, activation='relu', padding='same',
                               strides=last_strides)(tensor)
        return layers.BatchNormalization()(tensor)

    entree = layers.Input(shape=(75, 100, 3), dtype='float32')

    # Stage 1: 5x5 kernels, stride-2 downsampling in the second conv.
    net = conv_block(entree, nbr_cc, 5, last_strides=2)

    # Stage 2: two blocks; the second one downsamples.
    net = conv_block(net, 2 * nbr_cc, 3)
    net = conv_block(net, 2 * nbr_cc, 3, last_strides=2)

    # Stage 3: four blocks; only the last one downsamples.
    for _ in range(3):
        net = conv_block(net, 4 * nbr_cc, 3)
    net = conv_block(net, 4 * nbr_cc, 3, last_strides=2)

    # Classifier head.
    net = layers.Flatten()(net)
    net = layers.Dense(1024, activation='relu')(net)
    net = layers.Dense(1024, activation='relu')(net)
    net = layers.BatchNormalization()(net)
    sortie = layers.Dense(nbr_sortie, activation='softmax')(net)

    return models.Model(inputs=entree, outputs=sortie)
|
||||
93
Tensorflow/tutoriel33/predict.py
Normal file
93
Tensorflow/tutoriel33/predict.py
Normal file
@@ -0,0 +1,93 @@
|
||||
import random
|
||||
import tensorflow as tf
|
||||
import csv
|
||||
import numpy as np
|
||||
import cv2
|
||||
import model
|
||||
|
||||
# Paths to the ISIC 2018 Task 3 dataset: ground-truth CSV and image folder.
fichier='ISIC2018_Task3_Training_GroundTruth/ISIC2018_Task3_Training_GroundTruth.csv'
dir_images='ISIC2018_Task3_Training_Input/'

# Human-readable class names, in the same column order as the CSV
# one-hot labels (columns 1..7 of each row).
labels=['Melanoma',
        'Melanocytic nevus',
        'Basal cell carcinoma',
        'Actinic keratosis',
        'Benign keratosis',
        'Dermatofibroma',
        'Vascular lesion']

# Filled while reading the CSV: one image and one one-hot label per entry.
tab_images=[]
tab_labels=[]
|
||||
|
||||
def rotateImage(image, angle):
    """Rotate image by angle degrees around its centre, keeping its size."""
    size_wh = image.shape[1::-1]  # (width, height), the order OpenCV expects
    centre = tuple(np.array(size_wh) / 2)
    matrix = cv2.getRotationMatrix2D(centre, angle, 1.0)
    return cv2.warpAffine(image, matrix, size_wh, flags=cv2.INTER_LINEAR)
|
||||
|
||||
def bruit(image):
    """Add Gaussian noise of random amplitude (5-30) to an 8-bit image.

    Returns a uint8 copy clipped to [0, 255]; the input is not modified.
    """
    h, w, c = image.shape
    amplitude = random.randint(5, 30)
    noisy = image + np.random.randn(h, w, c) * amplitude
    return np.clip(noisy, 0, 255).astype(np.uint8)
|
||||
|
||||
# Load every labelled image and build the same augmented dataset as train.py,
# so predictions can be reviewed against the ground truth.
with open(fichier, newline='') as csvfile:
    lignes=csv.reader(csvfile, delimiter=',')
    next(lignes, None)  # skip the CSV header
    for ligne in lignes:
        # Columns 1.. are a one-hot encoding of the 7 diagnosis classes.
        label=np.array(ligne[1:], dtype=np.float32)
        img=cv2.imread(dir_images+ligne[0]+'.jpg')
        # BUG FIX: cv2.imread returns None for a missing file, so the None
        # check must run BEFORE resize — resizing None raises and made the
        # diagnostic unreachable. Message aligned with train.py.
        if img is None:
            print("Image absente", dir_images+ligne[0]+'.jpg')
            quit()
        img=cv2.resize(img, (100, 75))
        tab_labels.append(label)
        tab_images.append(img)

        # Class 1 (melanocytic nevus) gets no augmentation — presumably it
        # already dominates the dataset (check the printed class counts).
        if label[1]:
            continue

        flag=0
        for angle in range(0, 360, 30):
            img_r=rotateImage(img, angle)

            # Classes 2, 3, 5, 6: vertical flip of every rotation.
            if label[2] or label[3] or label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, 0)
                tab_images.append(i)

            # Classes 0 and 4: vertical flip for one rotation in three.
            if not flag%3 and (label[0] or label[4]):
                tab_labels.append(label)
                i=cv2.flip(img_r, 0)
                tab_images.append(i)
            flag+=1

            # Classes 2, 3, 5, 6: also a horizontal flip.
            if label[2] or label[3] or label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, 1)
                tab_images.append(i)

            # Classes 5 and 6 (the rarest): also a both-axes flip.
            if label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, -1)
                tab_images.append(i)

# Convert to float arrays; pixels scaled to [0, 1].
tab_labels=np.array(tab_labels, dtype=np.float32)
tab_images=np.array(tab_images, dtype=np.float32)/255

# Shuffle images and labels with one shared permutation.
indices=np.random.permutation(len(tab_labels))
tab_labels=tab_labels[indices]
tab_images=tab_images[indices]

# Per-class sample counts after augmentation.
print("SOMME", np.sum(tab_labels, axis=0))
|
||||
|
||||
# Load the trained network and review its predictions one image at a time.
model = tf.keras.models.load_model('my_model/')

for image, label in zip(tab_images, tab_labels):
    cv2.imshow("image", image)
    prediction = model.predict(np.array([image], dtype=np.float32))
    attendu = labels[np.argmax(label)]
    predit = labels[np.argmax(prediction[0])]
    print("Bonne reponse:{}, Reponse du réseau:{}".format(attendu, predit))
    # Any key advances to the next image; 'q' stops the review.
    if cv2.waitKey() & 0xFF == ord('q'):
        break

cv2.destroyAllWindows()
|
||||
87
Tensorflow/tutoriel33/train.py
Normal file
87
Tensorflow/tutoriel33/train.py
Normal file
@@ -0,0 +1,87 @@
|
||||
import random
|
||||
import tensorflow as tf
|
||||
import csv
|
||||
import numpy as np
|
||||
import cv2
|
||||
import model
|
||||
|
||||
# Paths to the ISIC 2018 Task 3 dataset: ground-truth CSV and image folder.
fichier='ISIC2018_Task3_Training_GroundTruth/ISIC2018_Task3_Training_GroundTruth.csv'
dir_images='ISIC2018_Task3_Training_Input/'

# Filled while reading the CSV: one image and one one-hot label per entry.
tab_images=[]
tab_labels=[]
|
||||
|
||||
def rotateImage(image, angle):
    """Rotate image by angle degrees around its centre, keeping its size."""
    size_wh = image.shape[1::-1]  # (width, height), the order OpenCV expects
    centre = tuple(np.array(size_wh) / 2)
    matrix = cv2.getRotationMatrix2D(centre, angle, 1.0)
    return cv2.warpAffine(image, matrix, size_wh, flags=cv2.INTER_LINEAR)
|
||||
|
||||
# Read the ground truth, load each image, and augment under-represented
# classes so the class distribution is less skewed.
with open(fichier, newline='') as csvfile:
    lignes=csv.reader(csvfile, delimiter=',')
    next(lignes, None)  # skip the CSV header
    for ligne in lignes:
        # Columns 1.. are a one-hot encoding of the 7 diagnosis classes.
        label=np.array(ligne[1:], dtype=np.float32)
        img=cv2.imread(dir_images+ligne[0]+'.jpg')
        # cv2.imread returns None for a missing file: abort with the path.
        if img is None:
            print("Image absente", dir_images+ligne[0]+'.jpg')
            quit()
        img=cv2.resize(img, (100, 75))
        tab_labels.append(label)
        tab_images.append(img)

        # Class 1 gets no augmentation — presumably it already dominates
        # the dataset (verify against the printed class counts below).
        if label[1]:
            continue

        flag=0
        for angle in range(0, 360, 30):
            img_r=rotateImage(img, angle)

            # Classes 2, 3, 5, 6: vertical flip of every rotation.
            if label[2] or label[3] or label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, 0)
                tab_images.append(i)

            # Classes 0 and 4: vertical flip for one rotation in three.
            if not flag%3 and (label[0] or label[4]):
                tab_labels.append(label)
                i=cv2.flip(img_r, 0)
                tab_images.append(i)
            flag+=1

            # Classes 2, 3, 5, 6: also a horizontal flip.
            if label[2] or label[3] or label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, 1)
                tab_images.append(i)

            # Classes 5 and 6 (the rarest): also a both-axes flip.
            if label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, -1)
                tab_images.append(i)

# Convert to float arrays; pixels scaled to [0, 1].
tab_labels=np.array(tab_labels, dtype=np.float32)
tab_images=np.array(tab_images, dtype=np.float32)/255

# Shuffle images and labels with one shared permutation.
indices=np.random.permutation(len(tab_labels))
tab_labels=tab_labels[indices]
tab_images=tab_images[indices]

# Per-class sample counts after augmentation.
print("SOMME", np.sum(tab_labels, axis=0))
|
||||
|
||||
# Build the CNN: 7 output classes, 8 base convolution channels.
# NOTE(review): this rebinds the name `model` from the imported module to
# the Keras model instance; module functions are unreachable afterwards.
model=model.model(7, 8)
# Alternative optimizers kept for experimentation (see the tutorial README).
#optimizer=tf.keras.optimizers.RMSprop(learning_rate=1E-4)
#optimizer=tf.keras.optimizers.SGD(learning_rate=1E-4)
optimizer=tf.keras.optimizers.Adam(learning_rate=1E-4)
# Append per-epoch metrics to training.log (plotted by graph.py).
csv_logger=tf.keras.callbacks.CSVLogger('training.log')

model.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

model.fit(tab_images,
          tab_labels,
          validation_split=0.05,  # last 5% of the shuffled data is validation
          batch_size=64,
          epochs=300,
          callbacks=[csv_logger])
|
||||
|
||||
|
||||
90
Tensorflow/tutoriel33/train2.py
Normal file
90
Tensorflow/tutoriel33/train2.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import random
|
||||
import tensorflow as tf
|
||||
import csv
|
||||
import numpy as np
|
||||
import cv2
|
||||
import model
|
||||
|
||||
# Paths to the ISIC 2018 Task 3 dataset: ground-truth CSV and image folder.
fichier='ISIC2018_Task3_Training_GroundTruth/ISIC2018_Task3_Training_GroundTruth.csv'
dir_images='ISIC2018_Task3_Training_Input/'

# Filled while reading the CSV: one image and one one-hot label per entry.
tab_images=[]
tab_labels=[]
|
||||
|
||||
def rotateImage(image, angle):
    """Rotate image by angle degrees around its centre, keeping its size."""
    size_wh = image.shape[1::-1]  # (width, height), the order OpenCV expects
    centre = tuple(np.array(size_wh) / 2)
    matrix = cv2.getRotationMatrix2D(centre, angle, 1.0)
    return cv2.warpAffine(image, matrix, size_wh, flags=cv2.INTER_LINEAR)
|
||||
|
||||
# Read the ground truth, load each image, and augment under-represented
# classes so the class distribution is less skewed.
with open(fichier, newline='') as csvfile:
    lignes=csv.reader(csvfile, delimiter=',')
    next(lignes, None)  # skip the CSV header
    for ligne in lignes:
        # Columns 1.. are a one-hot encoding of the 7 diagnosis classes.
        label=np.array(ligne[1:], dtype=np.float32)
        img=cv2.imread(dir_images+ligne[0]+'.jpg')
        # cv2.imread returns None for a missing file: abort with the path.
        if img is None:
            print("Image absente", dir_images+ligne[0]+'.jpg')
            quit()
        img=cv2.resize(img, (100, 75))
        tab_labels.append(label)
        tab_images.append(img)

        # Class 1 gets no augmentation — presumably it already dominates
        # the dataset (verify against the printed class counts below).
        if label[1]:
            continue

        flag=0
        for angle in range(0, 360, 30):
            img_r=rotateImage(img, angle)

            # Classes 2, 3, 5, 6: vertical flip of every rotation.
            if label[2] or label[3] or label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, 0)
                tab_images.append(i)

            # Classes 0 and 4: vertical flip for one rotation in three.
            if not flag%3 and (label[0] or label[4]):
                tab_labels.append(label)
                i=cv2.flip(img_r, 0)
                tab_images.append(i)
            flag+=1

            # Classes 2, 3, 5, 6: also a horizontal flip.
            if label[2] or label[3] or label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, 1)
                tab_images.append(i)

            # Classes 5 and 6 (the rarest): also a both-axes flip.
            if label[5] or label[6]:
                tab_labels.append(label)
                i=cv2.flip(img_r, -1)
                tab_images.append(i)

# Convert to float arrays; pixels scaled to [0, 1].
tab_labels=np.array(tab_labels, dtype=np.float32)
tab_images=np.array(tab_images, dtype=np.float32)/255

# Shuffle images and labels with one shared permutation.
indices=np.random.permutation(len(tab_labels))
tab_labels=tab_labels[indices]
tab_images=tab_images[indices]

# Per-class sample counts after augmentation.
print("SOMME", np.sum(tab_labels, axis=0))
|
||||
|
||||
# Build the CNN: 7 output classes, 8 base convolution channels.
# NOTE(review): rebinds `model` from the imported module to the instance.
model = model.model(7, 8)
optimizer = tf.keras.optimizers.RMSprop(learning_rate=1E-4)
# Append per-epoch metrics to training.log (plotted by graph.py).
csv_logger = tf.keras.callbacks.CSVLogger('training.log')


class my_callback(tf.keras.callbacks.Callback):
    """Checkpoint the network every 10th epoch once epoch 30 is reached."""

    def on_epoch_end(self, epoch, logs=None):
        if epoch >= 30 and epoch % 10 == 0:
            # One saved model per checkpoint epoch, e.g. my_model/30.
            model.save('my_model/{:d}'.format(epoch))


model.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

model.fit(tab_images,
          tab_labels,
          validation_split=0.05,  # last 5% of the shuffled data is validation
          batch_size=64,
          epochs=300,
          callbacks=[csv_logger, my_callback()])
|
||||
|
||||
|
||||
Reference in New Issue
Block a user