cours-ai-tutorials/Tensorflow/tutoriel33/model.py
from tensorflow.keras import layers, models

# Activation functions to try: sigmoid, tanh, relu
def model(nbr_sortie, nbr_cc):
    entree = layers.Input(shape=(75, 100, 3), dtype='float32')

    # Block 1: nbr_cc filters, 5x5 kernels, downsampled with max pooling
    result = layers.Conv2D(nbr_cc, 5, activation='relu', padding='same')(entree)
    result = layers.Conv2D(nbr_cc, 5, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.MaxPool2D()(result)

    # Block 2: 2*nbr_cc filters, 3x3 kernels
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.MaxPool2D()(result)

    # Block 3: 4*nbr_cc filters, 3x3 kernels
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.MaxPool2D()(result)

    # Classifier head
    result = layers.Flatten()(result)
    result = layers.Dense(1024, activation='relu')(result)
    result = layers.Dense(1024, activation='relu')(result)
    result = layers.BatchNormalization()(result)
    sortie = layers.Dense(nbr_sortie, activation='softmax')(result)

    model = models.Model(inputs=entree, outputs=sortie)
    return model
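
# A minimal usage sketch (illustrative, not part of the tutorial; the 10
# classes and 32 base filters below are placeholder values to adapt to your
# dataset):
#
#   cnn = model(nbr_sortie=10, nbr_cc=32)
#   cnn.compile(optimizer='adam',
#               loss='categorical_crossentropy',
#               metrics=['accuracy'])
#   cnn.summary()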
def model2(nbr_sortie, nbr_cc):
    # Same convolutional stack as model(), but each MaxPool2D is replaced by
    # a strided (strides=2) convolution at the end of the block.
    entree = layers.Input(shape=(75, 100, 3), dtype='float32')

    # Block 1: nbr_cc filters, 5x5 kernels, strided downsampling
    result = layers.Conv2D(nbr_cc, 5, activation='relu', padding='same')(entree)
    result = layers.Conv2D(nbr_cc, 5, activation='relu', padding='same', strides=2)(result)
    result = layers.BatchNormalization()(result)

    # Block 2: 2*nbr_cc filters, 3x3 kernels
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(2*nbr_cc, 3, activation='relu', padding='same', strides=2)(result)
    result = layers.BatchNormalization()(result)

    # Block 3: 4*nbr_cc filters, 3x3 kernels
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.BatchNormalization()(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same')(result)
    result = layers.Conv2D(4*nbr_cc, 3, activation='relu', padding='same', strides=2)(result)
    result = layers.BatchNormalization()(result)

    # Classifier head
    result = layers.Flatten()(result)
    result = layers.Dense(1024, activation='relu')(result)
    result = layers.Dense(1024, activation='relu')(result)
    result = layers.BatchNormalization()(result)
    sortie = layers.Dense(nbr_sortie, activation='softmax')(result)

    model = models.Model(inputs=entree, outputs=sortie)
    return model
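
# Quick comparison of the two variants (an illustrative sketch, not part of
# the original tutorial). model2 swaps each MaxPool2D for a strided
# convolution, so the downsampling steps themselves carry trainable weights;
# the class and filter counts below are placeholders.
if __name__ == '__main__':
    a = model(nbr_sortie=10, nbr_cc=32)
    b = model2(nbr_sortie=10, nbr_cc=32)
    print('model :', a.count_params(), 'parameters')
    print('model2:', b.count_params(), 'parameters')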