Преглед изворни кода

Merge branch 'Virgile'

Emile2
Emile Siboulet пре 4 година
родитељ
комит
078e1707db

+ 93
- 0
code/fonctions_activations_classiques/Creation_donnee.py Прегледај датотеку

@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 26 17:53:54 2021

@author: virgi
"""
import numpy as np
import math as ma
len_seq = 10  # NOTE(review): module-level constant, shadowed by the parameters below; kept for compatibility


def creation_sin_RNN(len_seq, tmin, tmax, n, w, a=1, b=0):
    """Build (input, target) sequence pairs from a sampled sine wave.

    Samples a*sin(2*pi*w*t) + b on n points of [tmin, tmax], then slides a
    window of len_seq consecutive samples over the signal; each target is
    the input window shifted forward by one step.

    Returns:
        (Datax, Datay): numpy arrays of shape (n - len_seq, 1, len_seq).
    """
    t = np.linspace(tmin, tmax, n)
    signal = a * np.sin(2 * ma.pi * w * t) + b
    n_windows = len(signal) - len_seq
    inputs = [[signal[k:k + len_seq]] for k in range(n_windows)]
    targets = [[signal[k + 1:k + len_seq + 1]] for k in range(n_windows)]
    return (np.array(inputs), np.array(targets))




def creation_x_sin_RNN(len_seq, tmin, tmax, n, w, a=1, b=1, c=0):
    """Sliding-window (input, target) pairs from a*t + b*sin(2*pi*w*t) + c.

    Same windowing scheme as creation_sin_RNN: inputs are windows of
    len_seq consecutive samples, targets the same windows shifted by one.

    Returns:
        (Datax, Datay): numpy arrays of shape (n - len_seq, 1, len_seq).
    """
    t = np.linspace(tmin, tmax, n)
    signal = a * t + b * np.sin(2 * ma.pi * w * t) + c
    n_windows = len(signal) - len_seq
    inputs = [[signal[k:k + len_seq]] for k in range(n_windows)]
    targets = [[signal[k + 1:k + len_seq + 1]] for k in range(n_windows)]
    return (np.array(inputs), np.array(targets))

def creation_x_sin2_RNN(len_seq, tmin, tmax, n, w, a=1, b=1, c=0):
    """Sliding-window (input, target) pairs from a*t + b*sin^2(2*pi*w*t) + c.

    Same windowing scheme as creation_sin_RNN, applied to a line plus a
    squared-sine ripple.

    Returns:
        (Datax, Datay): numpy arrays of shape (n - len_seq, 1, len_seq).
    """
    t = np.linspace(tmin, tmax, n)
    s = np.sin(2 * ma.pi * w * t)
    signal = a * t + b * s * s + c
    n_windows = len(signal) - len_seq
    inputs = [[signal[k:k + len_seq]] for k in range(n_windows)]
    targets = [[signal[k + 1:k + len_seq + 1]] for k in range(n_windows)]
    return (np.array(inputs), np.array(targets))




def creation_sin(tmin, tmax, n, w, a=1, c=0):
    """Sample a*sin(w*t) + c on n points of [tmin, tmax].

    NOTE: unlike the other generators in this module, w is used directly
    as the angular frequency (no 2*pi factor).

    Returns:
        (t, Lx): the sample grid and the sampled values, both numpy arrays.
    """
    t = np.linspace(tmin, tmax, n)
    values = a * np.sin(w * t) + c
    return (t, values)

def creation_x_sin(tmin, tmax, n, w, a=1, b=0, c=0):
    """Sample a*t + b*sin(2*pi*w*t) + c on n points of [tmin, tmax].

    NOTE: b defaults to 0 here (pure line), unlike creation_x_sin_RNN
    where it defaults to 1.

    Returns:
        (t, Lx): the sample grid and the sampled values, both numpy arrays.
    """
    t = np.linspace(tmin, tmax, n)
    values = a * t + b * np.sin(2 * ma.pi * w * t) + c
    return (t, values)

def creation_x_sin2(tmin, tmax, n, w, a=1, b=1, c=0):
    """Sample a*t + b*sin^2(2*pi*w*t) + c on n points of [tmin, tmax].

    Returns:
        (t, Lx): the sample grid and the sampled values, both numpy arrays.
    """
    t = np.linspace(tmin, tmax, n)
    s = np.sin(2 * ma.pi * w * t)
    values = a * t + b * s * s + c
    return (t, values)



+ 22
- 0
code/fonctions_activations_classiques/fonction_activation.py Прегледај датотеку

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 24 14:48:00 2021

@author: virgi
"""
import tensorflow as tf



def snake(x, alpha=1.0):
    """Snake-style activation: x + sin(x)^2 / alpha.

    Identity plus a non-negative periodic ripple, intended for learning
    periodic functions. NOTE(review): the published snake activation is
    x + sin(alpha*x)^2 / alpha (alpha also scales the sine argument);
    this version only divides by alpha. At the default alpha=1.0 — the
    only value used in this project — the two coincide; confirm intent
    before using alpha != 1.
    """
    return x + tf.square(tf.sin(x)) / alpha


def x_sin(x, alpha=1.0):
    """Activation x + sin(x)/alpha: identity plus a scaled sine ripple."""
    return x + tf.sin(x) / alpha

def sin(x, alpha=1.0):
    """Pure sine activation, scaled by 1/alpha."""
    return tf.sin(x) / alpha




+ 54
- 0
code/fonctions_activations_classiques/premier_reseau.py Прегледај датотеку

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
"""First dense network: fit a sine wave with ReLU activations.

Trains a small fully connected network on a*sin(t) sampled on [-20, 5]
and validates on the disjoint interval [5, 20] to probe extrapolation,
then plots the predictions.

Created on Wed Nov 10 10:30:04 2021

@author: virgi
"""
from Creation_donnee import *
import numpy as np
import math as ma
import matplotlib.pyplot as plt
import tensorflow as tf

# Training data: sine sampled densely on [-20, 5].
tmin = -20
tmax = 5
n = 100000
X, Y = creation_sin(tmin, tmax, n, 1, a=1, c=0)
# Validation data: a disjoint interval, so validation measures extrapolation.
tmin = 5
tmax = 20
Xv, Yv = creation_sin(tmin, tmax, n, 1, a=1, c=0)

model = tf.keras.models.Sequential()

model.add(tf.keras.Input(shape=(1,)))

# Five ReLU hidden layers; the last one is wider.
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(128, activation='relu'))

# Single linear output for scalar regression.
model.add(tf.keras.layers.Dense(1))

# Optimizer choice.
opti = tf.keras.optimizers.Adam()
# Compile with MSE loss. FIX: track MAE instead of 'accuracy' —
# accuracy is meaningless for continuous regression targets.
model.compile(opti, loss='mse', metrics=['mae'])

model.summary()

# FIX: shuffle expects a bool; the string 'True' only worked by being truthy.
model.fit(X, Y, batch_size=32, epochs=2, shuffle=True, validation_data=(Xv, Yv))

Y_predis = model.predict(X)
Y_predis_validation = model.predict(Xv)

plt.figure()
plt.plot(X, Y, label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis, label='prediction sur les donné')
plt.plot(Xv, Y_predis_validation, label='prediction sur la validation')
plt.legend()
plt.show()

+ 77
- 0
code/fonctions_activations_classiques/sin.py Прегледај датотеку

@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
"""Train a dense network using the custom sine activation.

Fits a sparse two-interval sine sample and validates on a dense grid
over [-20, 20], probing interpolation between and extrapolation beyond
the training intervals.

Created on Wed Nov 24 16:58:44 2021

@author: virgi
"""

import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

# Training set: 20 points on each of two disjoint intervals.
n = 20
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

# Dense validation grid covering (and extending past) the training range.
n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)

model_sin = tf.keras.models.Sequential()

model_sin.add(tf.keras.Input(shape=(1,)))

# Five narrow hidden layers with the custom sine activation.
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))
model_sin.add(tf.keras.layers.Dense(4, activation=sin))

model_sin.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

# FIX: track MAE instead of 'accuracy' (meaningless for regression).
model_sin.compile(opti, loss='mse', metrics=['mae'])

model_sin.summary()

# FIX: shuffle expects a bool, not the string 'True'.
model_sin.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

Y_predis_sin = model_sin.predict(X)
Y_predis_validation_sin = model_sin.predict(Xv)

plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_sin, 'o', label='prediction sur les donné avec sin comme activation')
plt.plot(Xv, Y_predis_validation_sin, label='prediction sur la validation avec sin comme activation')
plt.legend()
plt.show()

# FIX: removed a stray, unattached docstring left at end of file.


+ 108
- 0
code/fonctions_activations_classiques/snake_vs_ReLU.py Прегледај датотеку

@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
"""Compare ReLU and snake activations on a sparse sine-fitting task.

Trains two identically-sized dense networks — one with ReLU, one with
the custom snake activation — on two disjoint sine intervals, then
plots each model's predictions against a dense validation grid.

Created on Wed Nov 24 15:44:12 2021

@author: virgi
"""

import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

# Training set: 20 points on each of two disjoint intervals.
n = 20
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

# Dense validation grid covering (and extending past) the training range.
n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)

# --- Baseline model: ReLU activations ---
model_ReLU = tf.keras.models.Sequential()

model_ReLU.add(tf.keras.Input(shape=(1,)))

model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))

model_ReLU.add(tf.keras.layers.Dense(1))

# Optimizer choice.
opti = tf.keras.optimizers.Adam()
# FIX: track MAE instead of 'accuracy' (meaningless for regression).
model_ReLU.compile(opti, loss='mse', metrics=['mae'])

model_ReLU.summary()

# FIX: shuffle expects a bool, not the string 'True'.
model_ReLU.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

# --- Comparison model: same architecture with snake activation ---
model_snake = tf.keras.models.Sequential()

model_snake.add(tf.keras.Input(shape=(1,)))

model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))
model_snake.add(tf.keras.layers.Dense(64, activation=snake))

model_snake.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

model_snake.compile(opti, loss='mse', metrics=['mae'])

model_snake.summary()

model_snake.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

Y_predis_snake = model_snake.predict(X)
Y_predis_validation_snake = model_snake.predict(Xv)

Y_predis_ReLU = model_ReLU.predict(X)
Y_predis_validation_ReLU = model_ReLU.predict(Xv)

# One figure per model: training points, true curve, and predictions.
plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_ReLU, 'o', label='prediction sur les donné')
plt.plot(Xv, Y_predis_validation_ReLU, label='prediction sur la validation')
plt.legend()
plt.show()

plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_snake, 'o', label='prediction sur les donné avec snake comme activation')
plt.plot(Xv, Y_predis_validation_snake, label='prediction sur la validation avec snake comme activation')
plt.legend()
plt.show()


+ 75
- 0
code/fonctions_activations_classiques/swish.py Прегледај датотеку

@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
"""Train a dense network with the built-in swish activation.

Fits a sparse two-interval sine sample and validates on a dense grid
over [-20, 20], probing interpolation between and extrapolation beyond
the training intervals.

Created on Wed Nov 24 16:48:52 2021

@author: virgi
"""

import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

# Training set: 20 points on each of two disjoint intervals.
n = 20
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

# Dense validation grid covering (and extending past) the training range.
n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)

model_swish = tf.keras.models.Sequential()

model_swish.add(tf.keras.Input(shape=(1,)))

# Five hidden layers using Keras' built-in swish activation.
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
model_swish.add(tf.keras.layers.Dense(64, activation='swish'))

model_swish.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

# FIX: track MAE instead of 'accuracy' (meaningless for regression).
model_swish.compile(opti, loss='mse', metrics=['mae'])

model_swish.summary()

# FIX: shuffle expects a bool, not the string 'True'.
model_swish.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

Y_predis_swish = model_swish.predict(X)
Y_predis_validation_swish = model_swish.predict(Xv)

plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_swish, 'o', label='prediction sur les donné avec swish comme activation')
plt.plot(Xv, Y_predis_validation_swish, label='prediction sur la validation avec swish comme activation')
plt.legend()
plt.show()

# FIX: removed a stray, unattached docstring left at end of file.


+ 110
- 0
code/fonctions_activations_classiques/tanh_vs_ReLU.py Прегледај датотеку

@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
"""Compare ReLU and tanh activations on a sparse sine-fitting task.

Trains two identically-sized dense networks — one with ReLU, one with
tanh — on two disjoint sine intervals, then plots each model's
predictions against a dense validation grid.

Created on Wed Nov 24 16:48:52 2021

@author: virgi
"""

import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

# Training set: 20 points on each of two disjoint intervals.
n = 20
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

# Dense validation grid covering (and extending past) the training range.
n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)

# --- Baseline model: ReLU activations ---
model_ReLU = tf.keras.models.Sequential()

model_ReLU.add(tf.keras.Input(shape=(1,)))

model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))

model_ReLU.add(tf.keras.layers.Dense(1))

# Optimizer choice.
opti = tf.keras.optimizers.Adam()
# FIX: track MAE instead of 'accuracy' (meaningless for regression).
model_ReLU.compile(opti, loss='mse', metrics=['mae'])

model_ReLU.summary()

# FIX: shuffle expects a bool, not the string 'True'.
model_ReLU.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

# --- Comparison model: same architecture with tanh activation ---
model_tanh = tf.keras.models.Sequential()

model_tanh.add(tf.keras.Input(shape=(1,)))

model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))

model_tanh.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

model_tanh.compile(opti, loss='mse', metrics=['mae'])

model_tanh.summary()

model_tanh.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

Y_predis_tanh = model_tanh.predict(X)
Y_predis_validation_tanh = model_tanh.predict(Xv)

Y_predis_ReLU = model_ReLU.predict(X)
Y_predis_validation_ReLU = model_ReLU.predict(Xv)

# One figure per model: training points, true curve, and predictions.
plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_ReLU, 'o', label='prediction sur les donné')
plt.plot(Xv, Y_predis_validation_ReLU, label='prediction sur la validation')
plt.legend()
plt.show()

plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_tanh, 'o', label='prediction sur les donné avec tanh comme activation')
plt.plot(Xv, Y_predis_validation_tanh, label='prediction sur la validation avec tanh comme activation')
plt.legend()
plt.show()


+ 84
- 0
code/fonctions_activations_classiques/x_sin.py Прегледај датотеку

@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*-
"""Train a dense network using the custom x + sin(x) activation.

Fits a sparse two-interval sine sample and validates on a dense grid
over [-20, 20], probing interpolation between and extrapolation beyond
the training intervals.

Created on Wed Nov 24 17:02:16 2021

@author: virgi
"""
# FIX: collapsed two duplicated module docstrings into one.

import tensorflow as tf
import matplotlib.pyplot as plt
from fonction_activation import *

from Creation_donnee import *
import numpy as np

# Training set: 20 points on each of two disjoint intervals.
n = 20
X, Y = creation_sin(-15, -8, n, 1)
X2, Y2 = creation_sin(10, 18, n, 1)
X = np.concatenate([X, X2])
Y = np.concatenate([Y, Y2])

# Dense validation grid covering (and extending past) the training range.
n = 10000
Xv, Yv = creation_sin(-20, 20, n, 1)

model_xsin = tf.keras.models.Sequential()

model_xsin.add(tf.keras.Input(shape=(1,)))

# Five hidden layers using the custom x + sin(x) activation.
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))

model_xsin.add(tf.keras.layers.Dense(1))

opti = tf.keras.optimizers.Adam()

# FIX: track MAE instead of 'accuracy' (meaningless for regression).
model_xsin.compile(opti, loss='mse', metrics=['mae'])

model_xsin.summary()

# FIX: shuffle expects a bool, not the string 'True'.
model_xsin.fit(X, Y, batch_size=1, epochs=10, shuffle=True, validation_data=(Xv, Yv))

Y_predis_xsin = model_xsin.predict(X)
Y_predis_validation_xsin = model_xsin.predict(Xv)

plt.figure()
plt.plot(X, Y, 'x', label='donnée')
plt.plot(Xv, Yv, label="validation")
plt.plot(X, Y_predis_xsin, 'o', label='prediction sur les donné avec x+sin comme activation')
plt.plot(Xv, Y_predis_validation_xsin, label='prediction sur la validation avec x+sin comme activation')
plt.legend()
plt.show()

# FIX: removed a stray, unattached docstring left at end of file.


Loading…
Откажи
Сачувај