Initial commit
This commit is contained in:
94
Tensorflow/tutoriel2/MNIST_convolution.py
Normal file
94
Tensorflow/tutoriel2/MNIST_convolution.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import tensorflow as tf
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plot
|
||||
import cv2
|
||||
|
||||
def convolution(couche_prec, taille_noyau, nbr_noyau):
    """Append a 2-D convolution layer (stride 1, SAME padding) to couche_prec.

    Args:
        couche_prec: 4-D input tensor, NHWC layout (batch, height, width, channels).
        taille_noyau: height/width of the square convolution kernel.
        nbr_noyau: number of output feature maps.

    Returns:
        The convolved tensor; SAME padding keeps the spatial dimensions.
    """
    w = tf.Variable(tf.random.truncated_normal(shape=(taille_noyau, taille_noyau, int(couche_prec.get_shape()[-1]), nbr_noyau)))
    # Fix: the bias must be a trainable float32 tf.Variable, consistent with fc().
    # The original used a bare np.zeros(nbr_noyau) — a frozen float64 constant that
    # the optimizer never updates, so conv layers trained without a learnable bias.
    b = tf.Variable(np.zeros(shape=(nbr_noyau)), dtype=tf.float32)
    result = tf.nn.conv2d(couche_prec, w, strides=[1, 1, 1, 1], padding='SAME') + b
    return result
|
||||
|
||||
def fc(couche_prec, nbr_neurone):
    """Fully-connected layer: returns couche_prec @ W + b with nbr_neurone outputs."""
    # Input width is the last dimension of the previous layer's static shape.
    nbr_entrees = int(couche_prec.get_shape()[-1])
    poids = tf.Variable(tf.random.truncated_normal(shape=(nbr_entrees, nbr_neurone), dtype=tf.float32))
    biais = tf.Variable(np.zeros(shape=(nbr_neurone)), dtype=tf.float32)
    return tf.matmul(couche_prec, poids) + biais
|
||||
|
||||
# --- Hyperparameters ---
taille_batch=100        # mini-batch size
nbr_entrainement=3      # number of training epochs
learning_rate=0.001     # Adam learning rate

# --- Load the raw MNIST IDX files from ./mnist ---
# Image files carry a 16-byte header, label files an 8-byte header, hence the
# [16:] / [8:] slices. Pixels are reshaped to (N, 28, 28, 1) and scaled to
# [0, 1]; labels are one-hot encoded by indexing the 10x10 identity matrix.
mnist_train_images=np.fromfile("mnist/train-images-idx3-ubyte", dtype=np.uint8)[16:].reshape(-1, 28, 28, 1)/255
mnist_train_labels=np.eye(10)[np.fromfile("mnist/train-labels-idx1-ubyte", dtype=np.uint8)[8:]]
mnist_test_images=np.fromfile("mnist/t10k-images-idx3-ubyte", dtype=np.uint8)[16:].reshape(-1, 28, 28, 1)/255
mnist_test_labels=np.eye(10)[np.fromfile("mnist/t10k-labels-idx1-ubyte", dtype=np.uint8)[8:]]
|
||||
|
||||
# --- Build the TF1 computation graph ---
# Placeholders for one batch of images and their one-hot labels.
ph_images=tf.placeholder(shape=(None, 28, 28, 1), dtype=tf.float32)
ph_labels=tf.placeholder(shape=(None, 10), dtype=tf.float32)

# First convolutional block: two 5x5 convolutions with 32 maps, then 2x2 max-pool.
# NOTE(review): there is no non-linearity between the convolution layers here —
# confirm this matches the tutorial's intent.
result=convolution(ph_images, 5, 32)
result=convolution(result, 5, 32)
result=tf.nn.max_pool(result, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

# Second convolutional block: two 5x5 convolutions with 128 maps, then 2x2 max-pool.
result=convolution(result, 5, 128)
result=convolution(result, 5, 128)
result=tf.nn.max_pool(result, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

# Flatten the feature maps to a 2-D (batch, features) tensor for the dense head.
result=tf.contrib.layers.flatten(result)

# Classifier head: 512-unit sigmoid layer, then a 10-way output layer.
result=fc(result, 512)
result=tf.nn.sigmoid(result)
result=fc(result, 10)
scso=tf.nn.softmax(result)        # class probabilities, used for accuracy/display

# Loss takes the raw logits (softmax is applied internally); train with Adam.
loss=tf.nn.softmax_cross_entropy_with_logits_v2(labels=ph_labels, logits=result)
train=tf.train.AdamOptimizer(learning_rate).minimize(loss)
# Fraction of samples whose argmax prediction matches the one-hot label.
accuracy=tf.reduce_mean(tf.cast(tf.equal(tf.argmax(scso, 1), tf.argmax(ph_labels, 1)), tf.float32))
|
||||
|
||||
# --- Train, evaluate, and visualize inside a TF1 session ---
with tf.Session() as s:
    s.run(tf.global_variables_initializer())
    tab_train=[]    # per-epoch training error (1 - accuracy)
    tab_test=[]     # per-epoch test error (1 - accuracy)
    for id_entrainement in np.arange(nbr_entrainement):
        tab_accuracy_train=[]
        tab_accuracy_test=[]
        # One optimization step per mini-batch over the whole training set.
        for batch in np.arange(0, len(mnist_train_images), taille_batch):
            s.run(train, feed_dict={
                ph_images: mnist_train_images[batch:batch+taille_batch],
                ph_labels: mnist_train_labels[batch:batch+taille_batch]
            })
        # Re-measure accuracy over the full training set, batch by batch.
        for batch in np.arange(0, len(mnist_train_images), taille_batch):
            precision=s.run(accuracy, feed_dict={
                ph_images: mnist_train_images[batch:batch+taille_batch],
                ph_labels: mnist_train_labels[batch:batch+taille_batch]
            })
            tab_accuracy_train.append(precision)
        # Same measurement over the test set.
        for batch in np.arange(0, len(mnist_test_images), taille_batch):
            precision=s.run(accuracy, feed_dict={
                ph_images: mnist_test_images[batch:batch+taille_batch],
                ph_labels: mnist_test_labels[batch:batch+taille_batch]
            })
            tab_accuracy_test.append(precision)
        print("> Entrainement", id_entrainement)
        print(" train:", np.mean(tab_accuracy_train))
        tab_train.append(1-np.mean(tab_accuracy_train))
        print(" test :", np.mean(tab_accuracy_test))
        tab_test.append(1-np.mean(tab_accuracy_test))

    # Plot the train/test error curves (blocks until the window is closed).
    plot.ylim(0, 1)
    plot.grid()
    plot.plot(tab_train, label="Train error")
    plot.plot(tab_test, label="Test error")
    plot.legend(loc="upper right")
    plot.show()

    # Show the network's output on the first test batch, one image at a time.
    resulat=s.run(scso, feed_dict={ph_images: mnist_test_images[0:taille_batch]})
    np.set_printoptions(formatter={'float': '{:0.3f}'.format})
    for image in range(taille_batch):
        print("image", image)
        print("sortie du réseau:", resulat[image], np.argmax(resulat[image]))
        print("sortie attendue :", mnist_test_labels[image], np.argmax(mnist_test_labels[image]))
        cv2.imshow('image', mnist_test_images[image])
        # Press 'q' to stop browsing; any other key advances to the next image.
        if cv2.waitKey()&0xFF==ord('q'):
            break
|
||||
25
Tensorflow/tutoriel2/README.md
Normal file
25
Tensorflow/tutoriel2/README.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# Tutoriel tensorflow
|
||||
## Réalisation d'un réseau convolutif et utilisation sur la base MNIST
|
||||
|
||||
La vidéo du tutoriel se trouve à l'adresse suivante:
|
||||
https://www.youtube.com/watch?v=mUyRdiQRJBI
|
||||
|
||||
Si vous souhaitez me soutenir: <https://fr.tipeee.com/l42-project>
|
||||
|
||||
Le code de cette vidéo est écrit pour la version 1.X de tensorflow (je recommande la version 1.13.1), pour l'installer, il suffit de taper la commande suivante :
|
||||
|
||||
`# pip install tensorflow==1.13.1`
|
||||
|
||||
ou la version GPU:
|
||||
|
||||
`# pip install tensorflow-gpu==1.13.1`
|
||||
|
||||
Pour utiliser ce programme, vous devez récupérer les fichiers MNIST sur le site suivant:
|
||||
http://yann.lecun.com/exdb/mnist/
|
||||
et les placer dans le répertoire ./mnist
|
||||
|
||||
La courbe d'erreur après 200 cycles d'apprentissage est la suivante :
|
||||
|
||||

|
||||
|
||||
L'apprentissage prend environ 35 minutes sur une GeForce 1080
|
||||
BIN
Tensorflow/tutoriel2/graph_error.png
Normal file
BIN
Tensorflow/tutoriel2/graph_error.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 27 KiB |
200
Tensorflow/tutoriel2/log_error
Normal file
200
Tensorflow/tutoriel2/log_error
Normal file
@@ -0,0 +1,200 @@
|
||||
0:0.394567:0.402900
|
||||
1:0.487783:0.498400
|
||||
2:0.556483:0.562900
|
||||
3:0.666317:0.667800
|
||||
4:0.719817:0.723000
|
||||
5:0.665250:0.665400
|
||||
6:0.632883:0.637300
|
||||
7:0.645533:0.653300
|
||||
8:0.582667:0.595900
|
||||
9:0.682800:0.680900
|
||||
10:0.679067:0.680400
|
||||
11:0.689933:0.694900
|
||||
12:0.608850:0.616400
|
||||
13:0.646267:0.648700
|
||||
14:0.700233:0.705700
|
||||
15:0.604517:0.604800
|
||||
16:0.703417:0.706600
|
||||
17:0.744233:0.751700
|
||||
18:0.767233:0.770000
|
||||
19:0.664367:0.665900
|
||||
20:0.703983:0.703200
|
||||
21:0.711650:0.715200
|
||||
22:0.681983:0.687100
|
||||
23:0.729483:0.737800
|
||||
24:0.694517:0.694500
|
||||
25:0.729733:0.735300
|
||||
26:0.727667:0.736100
|
||||
27:0.727283:0.726800
|
||||
28:0.728900:0.734200
|
||||
29:0.673317:0.678600
|
||||
30:0.631750:0.637900
|
||||
31:0.695350:0.702800
|
||||
32:0.697967:0.706800
|
||||
33:0.688217:0.698100
|
||||
34:0.668683:0.669900
|
||||
35:0.712167:0.717200
|
||||
36:0.633217:0.635100
|
||||
37:0.729250:0.731600
|
||||
38:0.769667:0.775600
|
||||
39:0.787717:0.795900
|
||||
40:0.710100:0.717700
|
||||
41:0.752083:0.759900
|
||||
42:0.747650:0.753200
|
||||
43:0.770250:0.775600
|
||||
44:0.776183:0.783900
|
||||
45:0.776567:0.783000
|
||||
46:0.779767:0.786700
|
||||
47:0.777167:0.782000
|
||||
48:0.730217:0.735000
|
||||
49:0.757633:0.759300
|
||||
50:0.732167:0.741700
|
||||
51:0.744833:0.751500
|
||||
52:0.764850:0.766600
|
||||
53:0.767033:0.773100
|
||||
54:0.776567:0.783200
|
||||
55:0.788350:0.794400
|
||||
56:0.736117:0.738100
|
||||
57:0.767867:0.771600
|
||||
58:0.751817:0.759400
|
||||
59:0.750967:0.761000
|
||||
60:0.727200:0.727100
|
||||
61:0.746750:0.743000
|
||||
62:0.760017:0.756500
|
||||
63:0.768400:0.764600
|
||||
64:0.774700:0.773100
|
||||
65:0.779467:0.775300
|
||||
66:0.782733:0.779900
|
||||
67:0.785433:0.782000
|
||||
68:0.787467:0.784300
|
||||
69:0.789333:0.786200
|
||||
70:0.790650:0.787000
|
||||
71:0.791417:0.788500
|
||||
72:0.792683:0.789700
|
||||
73:0.793350:0.789700
|
||||
74:0.794217:0.790800
|
||||
75:0.794433:0.791200
|
||||
76:0.795067:0.791600
|
||||
77:0.795550:0.792600
|
||||
78:0.795933:0.792400
|
||||
79:0.796700:0.792300
|
||||
80:0.797150:0.792600
|
||||
81:0.797333:0.792700
|
||||
82:0.797817:0.793100
|
||||
83:0.798417:0.793000
|
||||
84:0.798617:0.793400
|
||||
85:0.798833:0.793800
|
||||
86:0.799083:0.794000
|
||||
87:0.799467:0.794100
|
||||
88:0.799567:0.794500
|
||||
89:0.799817:0.795200
|
||||
90:0.799850:0.795400
|
||||
91:0.800100:0.795300
|
||||
92:0.800283:0.795200
|
||||
93:0.800467:0.794800
|
||||
94:0.800600:0.795000
|
||||
95:0.800750:0.795300
|
||||
96:0.800850:0.795400
|
||||
97:0.801067:0.795900
|
||||
98:0.801067:0.795600
|
||||
99:0.801317:0.795500
|
||||
100:0.801300:0.795600
|
||||
101:0.801617:0.795700
|
||||
102:0.801717:0.796000
|
||||
103:0.801783:0.796200
|
||||
104:0.801867:0.796100
|
||||
105:0.802117:0.796300
|
||||
106:0.802133:0.796600
|
||||
107:0.802167:0.796500
|
||||
108:0.802300:0.796500
|
||||
109:0.802383:0.796300
|
||||
110:0.802400:0.796300
|
||||
111:0.802450:0.796200
|
||||
112:0.802517:0.796300
|
||||
113:0.802483:0.796300
|
||||
114:0.802533:0.796500
|
||||
115:0.802700:0.796400
|
||||
116:0.802583:0.796600
|
||||
117:0.802533:0.796500
|
||||
118:0.802633:0.796500
|
||||
119:0.802650:0.796600
|
||||
120:0.802483:0.796600
|
||||
121:0.802517:0.796600
|
||||
122:0.802567:0.796600
|
||||
123:0.802667:0.796600
|
||||
124:0.802700:0.796600
|
||||
125:0.802850:0.796800
|
||||
126:0.802767:0.796800
|
||||
127:0.802750:0.796700
|
||||
128:0.802833:0.796700
|
||||
129:0.802883:0.796600
|
||||
130:0.802917:0.796600
|
||||
131:0.802883:0.796700
|
||||
132:0.802967:0.797000
|
||||
133:0.802967:0.797100
|
||||
134:0.803083:0.797000
|
||||
135:0.803083:0.797000
|
||||
136:0.803050:0.797200
|
||||
137:0.803150:0.797300
|
||||
138:0.803200:0.797300
|
||||
139:0.803300:0.797400
|
||||
140:0.803283:0.797400
|
||||
141:0.803350:0.797300
|
||||
142:0.803400:0.797300
|
||||
143:0.803417:0.797100
|
||||
144:0.803417:0.797200
|
||||
145:0.803400:0.797200
|
||||
146:0.803433:0.797100
|
||||
147:0.803600:0.797100
|
||||
148:0.803650:0.797200
|
||||
149:0.803667:0.797200
|
||||
150:0.803717:0.797200
|
||||
151:0.803750:0.797200
|
||||
152:0.803750:0.797100
|
||||
153:0.803783:0.797100
|
||||
154:0.803717:0.797100
|
||||
155:0.803750:0.797000
|
||||
156:0.803733:0.797000
|
||||
157:0.803783:0.797000
|
||||
158:0.803817:0.797100
|
||||
159:0.803950:0.797200
|
||||
160:0.804083:0.797000
|
||||
161:0.804183:0.797000
|
||||
162:0.804183:0.797200
|
||||
163:0.804350:0.797300
|
||||
164:0.804400:0.797300
|
||||
165:0.804400:0.797200
|
||||
166:0.804400:0.797100
|
||||
167:0.804467:0.797100
|
||||
168:0.804417:0.797000
|
||||
169:0.804383:0.797000
|
||||
170:0.804400:0.797000
|
||||
171:0.804433:0.796900
|
||||
172:0.804433:0.796800
|
||||
173:0.804483:0.796800
|
||||
174:0.804467:0.796800
|
||||
175:0.804450:0.796900
|
||||
176:0.804483:0.796800
|
||||
177:0.804467:0.796700
|
||||
178:0.804517:0.796400
|
||||
179:0.804533:0.796500
|
||||
180:0.804600:0.796500
|
||||
181:0.804550:0.796400
|
||||
182:0.804517:0.796300
|
||||
183:0.804583:0.796300
|
||||
184:0.804617:0.796400
|
||||
185:0.804633:0.796400
|
||||
186:0.804633:0.796500
|
||||
187:0.804633:0.796600
|
||||
188:0.804717:0.796800
|
||||
189:0.804683:0.796900
|
||||
190:0.804783:0.796900
|
||||
191:0.804850:0.796900
|
||||
192:0.804950:0.796900
|
||||
193:0.804983:0.796800
|
||||
194:0.804967:0.796800
|
||||
195:0.804967:0.796900
|
||||
196:0.804967:0.797000
|
||||
197:0.804967:0.797000
|
||||
198:0.805000:0.797000
|
||||
199:0.805000:0.797200
|
||||
Reference in New Issue
Block a user