Browse files

progress on the activation functions

Virgile
Virgile 4 years ago
parent
commit
30d75620f7

+ 22
- 0
code/fonctions_activations_classiques/fonction_activation.py View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 14:48:00 2021

@author: virgi
"""
import tensorflow as tf



def snake(x, alpha=1.0):
    # Snake activation. Note: the canonical form (Ziyin et al., 2020) is
    # x + sin(alpha * x)**2 / alpha; this variant applies sin to x directly,
    # so it matches the paper only when alpha = 1.
    return x + tf.sin(x) ** 2 / alpha


def x_sin(x, alpha=1.0):
    # identity plus a scaled sine oscillation
    return x + tf.sin(x) / alpha


def sin(x, alpha=1.0):
    # scaled sine; note this name shadows any star-imported sin
    return tf.sin(x) / alpha
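# --- Added sketch (not part of the original commit): a quick visual check of
# the three activations above. Assumes TF 2.x eager mode plus NumPy and
# matplotlib, which the rest of this repo already uses.
if __name__ == '__main__':
    import numpy as np
    import matplotlib.pyplot as plt

    xs = np.linspace(-10.0, 10.0, 500).astype('float32')
    plt.figure()
    plt.plot(xs, snake(tf.constant(xs)).numpy(), label='snake')
    plt.plot(xs, x_sin(tf.constant(xs)).numpy(), label='x_sin')
    plt.plot(xs, sin(tf.constant(xs)).numpy(), label='sin')
    plt.legend()
    plt.show()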




+ 3
- 3
code/fonctions_activations_classiques/premier_reseau.py View File

@@ -4,7 +4,7 @@ Created on Wed Nov 10 10:30:04 2021

@author: virgi
"""
from Creation_donnee import *
import numpy as np
import math as ma
import matplotlib.pyplot as plt
@@ -20,13 +20,13 @@ Xv,Yv=creation_sin(tmin,tmax,n,1,a=1,c=0)
model = tf.keras.models.Sequential()

model.add(tf.keras.Input(shape=(1,)))
# hidden layers: stacked Dense layers with ReLU activation
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(128, activation='relu'))
# output layer: a single neuron with linear activation

model.add(tf.keras.layers.Dense(1))
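# Worked parameter count for the widened network above (added for clarity;
# these are the numbers model.summary() should report, assuming no layers
# outside this hunk):
#   Dense(64) on the 1-D input:   1*64   + 64  =   128
#   Dense(64) after Dense(64):    64*64  + 64  =  4160   (three such layers)
#   Dense(128) after Dense(64):   64*128 + 128 =  8320
#   Dense(1) after Dense(128):    128*1  + 1   =   129
# total: 128 + 3*4160 + 8320 + 129 = 21057 trainable parameters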


+ 77
- 0
code/fonctions_activations_classiques/sin.py View File

@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 16:58:44 2021

@author: virgi
"""





import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

n = 20
# build the training set from two disjoint intervals; the validation set spans the gap in between
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)
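# Note: the Creation_donnee module is not part of this commit. For reference
# only, a plausible minimal creation_sin matching the calls above (an
# assumption, not the author's actual implementation) would be:
#
#     def creation_sin(tmin, tmax, n, f, a=1, c=0):
#         """n samples of a*sin(f*t) + c, t uniform on [tmin, tmax]."""
#         X = np.linspace(tmin, tmax, n)
#         return X, a * np.sin(f * X) + c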




model_sin=tf.keras.models.Sequential()

model_sin.add(tf.keras.Input(shape=(1,)))

model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))


model_sin.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

# accuracy is not a meaningful metric for regression; track MAE instead
model_sin.compile(opti, loss='mse', metrics=['mae'])

model_sin.summary()

model_sin.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))




Y_predis_sin=model_sin.predict(X)
Y_predis_validation_sin=model_sin.predict(Xv)




plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_sin, 'o', label='prediction on training data (sin activation)')
plt.plot(Xv, Y_predis_validation_sin, label='prediction on validation (sin activation)')
plt.legend()
plt.show()


"""
Created on Wed Nov 24 16:53:37 2021

@author: virgi
"""


+ 108
- 0
code/fonctions_activations_classiques/snake_vs_ReLU.py View File

@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 15:44:12 2021

@author: virgi
"""

import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

n = 20
# build the training set from two disjoint intervals; the validation set spans the gap in between
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)


model_ReLU=tf.keras.models.Sequential()


model_ReLU.add(tf.keras.Input(shape=(1,)))

model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))


model_ReLU.add(tf.keras.layers.Dense(1))

# choose the optimization method
opti = tf.keras.optimizers.Adam()
# compile the model and choose the loss function
# (accuracy is not a meaningful metric for regression; track MAE instead)
model_ReLU.compile(opti, loss='mse', metrics=['mae'])

model_ReLU.summary()

model_ReLU.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))



model_snake=tf.keras.models.Sequential()

model_snake.add(tf.keras.Input(shape=(1,)))

model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))


model_snake.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

model_snake.compile(opti, loss='mse', metrics=['mae'])

model_snake.summary()

model_snake.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))









Y_predis_snake=model_snake.predict(X)
Y_predis_validation_snake=model_snake.predict(Xv)

Y_predis_ReLU=model_ReLU.predict(X)
Y_predis_validation_ReLU=model_ReLU.predict(Xv)





plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_ReLU, 'o', label='prediction on training data (ReLU)')
plt.plot(Xv, Y_predis_validation_ReLU, label='prediction on validation (ReLU)')
plt.legend()
plt.show()

plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_snake, 'o', label='prediction on training data (snake)')
plt.plot(Xv, Y_predis_validation_snake, label='prediction on validation (snake)')
plt.legend()
plt.show()
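# Added sketch: a numeric comparison to back up the plots. evaluate() returns
# [loss, metrics...] for a compiled model, and the loss here is the MSE.
mse_relu = model_ReLU.evaluate(Xv, Yv, verbose=0)[0]
mse_snake = model_snake.evaluate(Xv, Yv, verbose=0)[0]
print(f'validation MSE -- ReLU: {mse_relu:.4f}, snake: {mse_snake:.4f}')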


+ 75
- 0
code/fonctions_activations_classiques/swish.py View File

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 16:48:52 2021

@author: virgi
"""



import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

n = 20
# build the training set from two disjoint intervals; the validation set spans the gap in between
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)




model_swish=tf.keras.models.Sequential()

model_swish.add(tf.keras.Input(shape=(1,)))

model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))


model_swish.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

model_swish.compile(opti, loss='mse', metrics=['mae'])

model_swish.summary()

model_swish.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))




Y_predis_swish=model_swish.predict(X)
Y_predis_validation_swish=model_swish.predict(Xv)




plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_swish, 'o', label='prediction on training data (swish)')
plt.plot(Xv, Y_predis_validation_swish, label='prediction on validation (swish)')
plt.legend()
plt.show()


"""
Created on Wed Nov 24 16:53:37 2021

@author: virgi
"""


+ 110
- 0
code/fonctions_activations_classiques/tanh_vs_ReLU.py View File

@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 16:48:52 2021

@author: virgi
"""



import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

n = 20
# build the training set from two disjoint intervals; the validation set spans the gap in between
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)


model_ReLU=tf.keras.models.Sequential()


model_ReLU.add(tf.keras.Input(shape=(1,)))

model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))


model_ReLU.add(tf.keras.layers.Dense(1))

# choose the optimization method
opti = tf.keras.optimizers.Adam()
# compile the model and choose the loss function
# (accuracy is not a meaningful metric for regression; track MAE instead)
model_ReLU.compile(opti, loss='mse', metrics=['mae'])

model_ReLU.summary()

model_ReLU.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))



model_tanh=tf.keras.models.Sequential()

model_tanh.add(tf.keras.Input(shape=(1,)))

model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))


model_tanh.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

model_tanh.compile(opti, loss='mse', metrics=['mae'])

model_tanh.summary()

model_tanh.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))









Y_predis_tanh=model_tanh.predict(X)
Y_predis_validation_tanh=model_tanh.predict(Xv)

Y_predis_ReLU=model_ReLU.predict(X)
Y_predis_validation_ReLU=model_ReLU.predict(Xv)





plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_ReLU, 'o', label='prediction on training data (ReLU)')
plt.plot(Xv, Y_predis_validation_ReLU, label='prediction on validation (ReLU)')
plt.legend()
plt.show()

plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_tanh, 'o', label='prediction on training data (tanh)')
plt.plot(Xv, Y_predis_validation_tanh, label='prediction on validation (tanh)')
plt.legend()
plt.show()
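# Added sketch: to compare convergence, not just the final fit, capture the
# History objects returned by the fit() calls above (hist_relu and hist_tanh
# are hypothetical names) and plot the validation loss per epoch:
#
#     hist_relu = model_ReLU.fit(...)
#     hist_tanh = model_tanh.fit(...)
#     plt.figure()
#     plt.plot(hist_relu.history['val_loss'], label='ReLU')
#     plt.plot(hist_tanh.history['val_loss'], label='tanh')
#     plt.legend()
#     plt.show()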


+ 84
- 0
code/fonctions_activations_classiques/x_sin.py View File

@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 17:02:16 2021

@author: virgi
"""






import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

n = 20
# build the training set from two disjoint intervals; the validation set spans the gap in between
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)




model_xsin=tf.keras.models.Sequential()

model_xsin.add(tf.keras.Input(shape=(1,)))

model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))


model_xsin.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

model_xsin.compile(opti, loss='mse', metrics=['mae'])

model_xsin.summary()

model_xsin.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))




Y_predis_xsin=model_xsin.predict(X)
Y_predis_validation_xsin=model_xsin.predict(Xv)




plt.figure()
plt.plot(X, Y, 'x', label='training data')
plt.plot(Xv, Yv, label='ground truth (validation)')
plt.plot(X, Y_predis_xsin, 'o', label='prediction on training data (x + sin)')
plt.plot(Xv, Y_predis_validation_xsin, label='prediction on validation (x + sin)')
plt.legend()
plt.show()


"""
Created on Wed Nov 24 16:53:37 2021

@author: virgi
"""

