
Merge branch 'Emilien'

Emile2
Emile Siboulet, 4 years ago
parent commit 33a6a23bb5
2 files changed, 219 additions and 0 deletions
  1. code/resnet18/resnet18.py (+92, -0)
  2. code/resnet18/resnet18_snake.py (+127, -0)

code/resnet18/resnet18.py (+92, -0)

from keras.callbacks import EarlyStopping
from keras.layers import Dense, Conv2D, MaxPool2D, Flatten, GlobalAveragePooling2D, BatchNormalization, Layer, Add
from keras.models import Sequential
from keras.models import Model
import tensorflow as tf

class ResnetBlock(Model):
    """
    A standard ResNet block.
    """

    def __init__(self, channels: int, down_sample=False):
        """
        channels: same as the number of convolution kernels
        """
        super().__init__()

        self.__channels = channels
        self.__down_sample = down_sample
        self.__strides = [2, 1] if down_sample else [1, 1]

        KERNEL_SIZE = (3, 3)
        # use He initialization instead of Xavier (a.k.a. 'glorot_uniform' in Keras), as suggested in [2]
        INIT_SCHEME = "he_normal"

        self.conv_1 = Conv2D(self.__channels, strides=self.__strides[0],
                             kernel_size=KERNEL_SIZE, padding="same", kernel_initializer=INIT_SCHEME)
        self.bn_1 = BatchNormalization()
        self.conv_2 = Conv2D(self.__channels, strides=self.__strides[1],
                             kernel_size=KERNEL_SIZE, padding="same", kernel_initializer=INIT_SCHEME)
        self.bn_2 = BatchNormalization()
        self.merge = Add()

        if self.__down_sample:
            # down-sample the shortcut with a strided 1x1 convolution, according to [1]
            self.res_conv = Conv2D(
                self.__channels, strides=2, kernel_size=(1, 1), kernel_initializer=INIT_SCHEME, padding="same")
            self.res_bn = BatchNormalization()

    def call(self, inputs):
        res = inputs

        x = self.conv_1(inputs)
        x = self.bn_1(x)
        x = tf.nn.relu(x)
        x = self.conv_2(x)
        x = self.bn_2(x)

        if self.__down_sample:
            res = self.res_conv(res)
            res = self.res_bn(res)

        # when not down-sampling, the identity shortcut is added directly
        x = self.merge([x, res])
        out = tf.nn.relu(x)
        return out
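
# Shape note (illustrative, assuming NHWC inputs): with down_sample=True both the main
# path and the 1x1 shortcut use stride 2, so e.g. a (None, 16, 16, 64) tensor passed
# through ResnetBlock(128, down_sample=True) comes out as (None, 8, 8, 128); without
# down-sampling (and with channels equal to the input depth, as used below), the shape
# is unchanged, so the residual addition is always well defined.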


class ResNet18(Model):

    def __init__(self, num_classes, **kwargs):
        """
        num_classes: number of classes in the specific classification task.
        """
        super().__init__(**kwargs)
        self.conv_1 = Conv2D(64, (7, 7), strides=2,
                             padding="same", kernel_initializer="he_normal")
        self.init_bn = BatchNormalization()
        self.pool_2 = MaxPool2D(pool_size=(2, 2), strides=2, padding="same")
        self.res_1_1 = ResnetBlock(64)
        self.res_1_2 = ResnetBlock(64)
        self.res_2_1 = ResnetBlock(128, down_sample=True)
        self.res_2_2 = ResnetBlock(128)
        self.res_3_1 = ResnetBlock(256, down_sample=True)
        self.res_3_2 = ResnetBlock(256)
        self.res_4_1 = ResnetBlock(512, down_sample=True)
        self.res_4_2 = ResnetBlock(512)
        self.avg_pool = GlobalAveragePooling2D()
        self.flat = Flatten()
        self.fc = Dense(num_classes, activation="softmax")

    def call(self, inputs):
        out = self.conv_1(inputs)
        out = self.init_bn(out)
        out = tf.nn.relu(out)
        out = self.pool_2(out)
        for res_block in [self.res_1_1, self.res_1_2, self.res_2_1, self.res_2_2,
                          self.res_3_1, self.res_3_2, self.res_4_1, self.res_4_2]:
            out = res_block(out)
        out = self.avg_pool(out)
        out = self.flat(out)
        out = self.fc(out)
        return out
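

# Minimal usage sketch, assuming CIFAR-10-sized 32x32 RGB inputs; dummy_batch is an
# illustrative name, not part of the training scripts. It only checks that the model
# builds and returns one softmax vector of length num_classes per image.
if __name__ == "__main__":
    model = ResNet18(num_classes=10)
    model.build(input_shape=(None, 32, 32, 3))
    model.summary()
    dummy_batch = tf.zeros((2, 32, 32, 3))  # two all-zero fake images
    predictions = model(dummy_batch)
    print(predictions.shape)  # expected: (2, 10)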

code/resnet18/resnet18_snake.py (+127, -0)

# Network inspired by http://yann.lecun.com/exdb/publis/pdf/lecun-98.pdf


from keras.callbacks import History
from resnet18 import ResNet18
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# For macOS users (to use plt & keras at the same time)
import os
#os.environ['KMP_DUPLICATE_LIB_OK']='True'


def displayConvFilers(model, layer_name, num_filter=4, filter_size=(3,3), num_channel=0, fig_size=(2,2)):
    """Display the first num_filter kernels of the convolution layer layer_name, for one input channel."""
    layer_dict = dict([(layer.name, layer) for layer in model.layers])
    weight, biais = layer_dict[layer_name].get_weights()
    print(weight.shape)
    plt.figure(figsize=fig_size)
    for i in range(num_filter):
        plt.subplot(fig_size[0], fig_size[1], i + 1)
        plt.xticks([])
        plt.yticks([])
        plt.grid(False)
        vis = np.reshape(weight[:, :, num_channel, i], filter_size)
        plt.imshow(vis, cmap=plt.cm.binary)
    plt.show()


def snake(x):
return x + tf.sin(x)**2
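
# Note on snake: it adds a bounded periodic term sin(x)**2 to the identity, so the
# activation keeps a monotone trend (its derivative 1 + sin(2x) is never negative)
# while letting the network represent periodic structure. A quick illustrative check,
# kept commented out so it does not run during training:
# print(snake(tf.constant([0.0, np.pi / 2])).numpy())  # approx. [0.0, 2.5708]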


## Loading and normalizing the data
cifar10 = tf.keras.datasets.cifar10
(train_images, train_labels), (test_images, test_labels) = cifar10.load_data()

val_images = train_images[40000:]
val_labels = train_labels[40000:]

train_images = train_images[:40000]
train_labels = train_labels[:40000]

train_images = train_images / 255.0
val_images = val_images / 255.0
test_images = test_images / 255.0

# FOR CNNs: make the shape explicit as (N, 32, 32, 3); CIFAR-10 images are RGB, so no extra grayscale channel needs to be added here
train_images = train_images.reshape(40000,32,32,3)
val_images = val_images.reshape(10000,32,32,3)
test_images = test_images.reshape(10000,32,32,3)


# One hot encoding
train_labels = tf.keras.utils.to_categorical(train_labels)
val_labels = tf.keras.utils.to_categorical(val_labels)
test_labels = tf.keras.utils.to_categorical(test_labels)
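
# For reference: to_categorical maps an integer label to a length-10 indicator vector,
# e.g. tf.keras.utils.to_categorical(3, 10) -> [0., 0., 0., 1., 0., 0., 0., 0., 0., 0.],
# which is what the 'categorical_crossentropy' loss used below expects.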

# Kernel size of the first convolution; only used by the commented-out LeNet-style
# model and the filter-visualization call at the end of this file.
filter_size_conv1 = (5, 5)

model = ResNet18(10)
model.build(input_shape=(None, 32, 32, 3))

'''
## Definition of the model architecture
model = tf.keras.models.Sequential()
# Explain what the numerical values defining the network layers correspond to
model.add(tf.keras.layers.Conv2D(filters=6,kernel_size=filter_size_conv1,padding="same", activation=snake, input_shape=(32, 32, 3)))
model.add(tf.keras.layers.AveragePooling2D())
model.add(tf.keras.layers.Conv2D(filters=16,kernel_size=(5,5),padding="valid", activation=snake))
model.add(tf.keras.layers.AveragePooling2D())
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(120 , activation=snake))
#mnist_model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(84 , activation=snake))
model.add(tf.keras.layers.Dense(10 , activation='softmax'))
'''

# explain the number of parameters of this network
model.summary()


optimizer = tf.keras.optimizers.Adam()

model.compile(optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
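
# Optional sketch (an assumption, not wired into this script): early stopping on the
# validation loss could be added with a Keras callback, e.g.
# early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=2,
#                                               restore_best_weights=True)
# and then passed to model.fit(..., callbacks=[early_stop]) below.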

# Visualize before training



history = model.fit(train_images,
                    train_labels,
                    batch_size=64,
                    epochs=4,
                    validation_data=(val_images, val_labels),
                    )

## Model evaluation
test_loss, test_acc = model.evaluate(test_images, test_labels)
print('Test accuracy:', test_acc)

fig, axs = plt.subplots(2, 1, figsize=(15,15))

axs[0].plot(history.history['loss'])
axs[0].plot(history.history['val_loss'])
axs[0].title.set_text('Training Loss vs Validation Loss')
axs[0].legend(['Train', 'Val'])

axs[1].plot(history.history['accuracy'])
axs[1].plot(history.history['val_accuracy'])
axs[1].title.set_text('Training Accuracy vs Validation Accuracy')
axs[1].legend(['Train', 'Val'])
plt.savefig('./resnet18snake.png')


'''
displayConvFilers(mnist_model,
                  'conv2d',
                  num_filter=6,
                  filter_size=filter_size_conv1,
                  num_channel=0,
                  fig_size=(2,3)
                  )
'''

