
addition of a new function

Virgile committed 4 years ago
parent commit 501ee9e4e1

code/fonctions_activations_classiques/Creation_donnee.py  +22 -0  View file

@@ -90,4 +90,26 @@ def creation_x_sin2(tmin,tmax,n,w,a=1,b=1,c=0):
    Lx=np.array(Lx)
    return(t,Lx)

+def creation_x(tmin,tmax,n):
+    Lx=[]
+    t= np.linspace(tmin,tmax,n)
+    for i in t:
+        Lx.append(i)
+    return(t,np.array(Lx))

+def creation_arctan(tmin,tmax,n):
+    Lx=[]
+    t= np.linspace(tmin,tmax,n)
+    for i in t:
+        Lx.append(np.arctan(i))
+    return(t,np.array(Lx))


+def creation_x2(tmin,tmax,n):
+    Lx=[]
+    t= np.linspace(tmin,tmax,n)
+    for i in t:
+        Lx.append(i**2)
+    return(t,np.array(Lx))

code/fonctions_activations_classiques/fonction_activation.py  +1 -1  View file

@@ -8,7 +8,7 @@ import tensorflow as tf



-def snake(x, alpha=1.0):
+def snake(x, alpha=10):
     return (x + tf.sin(x)**2/alpha)
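As defined here, snake(x) = x + sin(x)**2/alpha, so raising the default alpha from 1.0 to 10 divides the periodic ripple by ten and pulls the activation closer to the identity. A standalone sketch of the two defaults (not part of the commit):

import tensorflow as tf
import matplotlib.pyplot as plt

def snake(x, alpha=10):
    return (x + tf.sin(x)**2/alpha)

x = tf.linspace(-5.0, 5.0, 400)
plt.plot(x, snake(x, alpha=1.0), label="alpha=1.0 (old default)")
plt.plot(x, snake(x, alpha=10), label="alpha=10 (new default)")
plt.legend()
plt.show()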



code/fonctions_activations_classiques/sin.py  +2 -11  View file

@@ -33,11 +33,8 @@ model_sin=tf.keras.models.Sequential()

model_sin.add(tf.keras.Input(shape=(1,)))

-model_sin.add(tf.keras.layers.Dense(4, activation=sin))
-model_sin.add(tf.keras.layers.Dense(4, activation=sin))
-model_sin.add(tf.keras.layers.Dense(4, activation=sin))
-model_sin.add(tf.keras.layers.Dense(4, activation=sin))
-model_sin.add(tf.keras.layers.Dense(4, activation=sin))
+model_sin.add(tf.keras.layers.Dense(512, activation=sin))



model_sin.add(tf.keras.layers.Dense(1))
@@ -69,9 +66,3 @@ plt.legend()
plt.show()


"""
Created on Wed Nov 24 16:53:37 2021

@author: virgi
"""


code/fonctions_activations_classiques/snake_vs_ReLU.py  +9 -16  View file

@@ -11,16 +11,16 @@ from fonction_activation import *

from Creation_donnee import *
import numpy as np
+w=10
n=20
#create the dataset
-X,Y=creation_sin(-15,-8,n,1,)
-X2,Y2=creation_sin(10,18,n,1,)
+X,Y=creation_sin(-1.5,-1,n,w,)
+X2,Y2=creation_sin(1,1.5,n,w,)
X=np.concatenate([X,X2])
Y=np.concatenate([Y,Y2])

n=10000
-Xv,Yv=creation_sin(-20,20,n,1)
+Xv,Yv=creation_sin(-2,2,n,w)


model_ReLU=tf.keras.models.Sequential()
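The data change keeps the experiment's design, train on two disjoint windows and validate on a wider interval that forces both interpolation between the windows and extrapolation beyond them, but switches to a much higher frequency via the new w=10: each 0.5-wide training window now spans about 0.8 periods of sin(10t) (period 2*pi/10 ≈ 0.63). A sketch of the new data layout, assuming creation_sin(tmin,tmax,n,w) returns n samples of sin(w*t), which matches how it is called here:

import numpy as np
import matplotlib.pyplot as plt

def creation_sin(tmin, tmax, n, w):
    # assumed to match Creation_donnee.creation_sin
    t = np.linspace(tmin, tmax, n)
    return t, np.sin(w*t)

w, n = 10, 20
X,  Y  = creation_sin(-1.5, -1, n, w)
X2, Y2 = creation_sin(1, 1.5, n, w)
Xv, Yv = creation_sin(-2, 2, 10000, w)

plt.plot(Xv, Yv, linewidth=0.5, label="validation: sin(10t) on [-2,2]")
plt.plot(np.concatenate([X, X2]), np.concatenate([Y, Y2]), "o", label="training windows")
plt.legend()
plt.show()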
@@ -28,11 +28,8 @@ model_ReLU=tf.keras.models.Sequential()

model_ReLU.add(tf.keras.Input(shape=(1,)))

-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
+model_ReLU.add(tf.keras.layers.Dense(512, activation='relu'))



model_ReLU.add(tf.keras.layers.Dense(1))
@@ -46,7 +43,7 @@ model_ReLU.summary()



-model_ReLU.fit(X, Y, batch_size=1, epochs=10, shuffle='True',validation_data=(Xv, Yv))
+model_ReLU.fit(X, Y, batch_size=16, epochs=10, shuffle='True',validation_data=(Xv, Yv))



@@ -54,11 +51,7 @@ model_snake=tf.keras.models.Sequential()

model_snake.add(tf.keras.Input(shape=(1,)))

-model_snake.add(tf.keras.layers.Dense(64, activation=snake))
-model_snake.add(tf.keras.layers.Dense(64, activation=snake))
-model_snake.add(tf.keras.layers.Dense(64, activation=snake))
-model_snake.add(tf.keras.layers.Dense(64, activation=snake))
-model_snake.add(tf.keras.layers.Dense(64, activation=snake))
+model_snake.add(tf.keras.layers.Dense(512, activation=snake))


model_snake.add(tf.keras.layers.Dense(1))
@@ -70,7 +63,7 @@ model_snake.compile(opti, loss='mse', metrics=['accuracy'])

model_snake.summary()

-model_snake.fit(X, Y, batch_size=1, epochs=10, shuffle='True',validation_data=(Xv, Yv))
+model_snake.fit(X, Y, batch_size=16, epochs=100, shuffle='True',validation_data=(Xv, Yv))
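With 40 training samples (n=20 per window), going from batch_size=1 to 16 drops the steps per epoch from 40 to ceil(40/16)=3. The snake run compensates with epochs=100, roughly restoring the old update budget, while the ReLU run keeps epochs=10 and now trains on far fewer updates, which is worth keeping in mind when comparing the two. Back-of-envelope, using Keras's ceil(samples/batch_size) steps per epoch:

import math

samples = 2 * 20                       # two training windows of n=20 points each
steps = lambda bs: math.ceil(samples / bs)

print(steps(1) * 10)    # 400 updates: old setting for both models
print(steps(16) * 10)   # 30 updates:  new ReLU run
print(steps(16) * 100)  # 300 updates: new snake run

Incidentally, shuffle='True' passes a string rather than the boolean True; the string happens to be truthy, so Keras still shuffles, but the quoting looks unintended.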




code/fonctions_activations_classiques/swish.py  +2 -5  View file

@@ -31,11 +31,8 @@ model_swish=tf.keras.models.Sequential()

model_swish.add(tf.keras.Input(shape=(1,)))

-model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
-model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
-model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
-model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
-model_swish.add(tf.keras.layers.Dense(64, activation='swish'))
+model_swish.add(tf.keras.layers.Dense(512, activation='swish'))



model_swish.add(tf.keras.layers.Dense(1))

code/fonctions_activations_classiques/tanh_vs_ReLU.py  +4 -10  View file

@@ -30,11 +30,8 @@ model_ReLU=tf.keras.models.Sequential()

model_ReLU.add(tf.keras.Input(shape=(1,)))

-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
-model_ReLU.add(tf.keras.layers.Dense(64, activation='relu'))
+model_ReLU.add(tf.keras.layers.Dense(512, activation='relu'))



model_ReLU.add(tf.keras.layers.Dense(1))
@@ -56,11 +53,8 @@ model_tanh=tf.keras.models.Sequential()

model_tanh.add(tf.keras.Input(shape=(1,)))

-model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
-model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
-model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
-model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
-model_tanh.add(tf.keras.layers.Dense(64, activation='tanh'))
+model_tanh.add(tf.keras.layers.Dense(512, activation='tanh'))



model_tanh.add(tf.keras.layers.Dense(1))

code/fonctions_activations_classiques/x_sin.py  +2 -5  View file

@@ -40,11 +40,8 @@ model_xsin=tf.keras.models.Sequential()

model_xsin.add(tf.keras.Input(shape=(1,)))

-model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
-model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
-model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
-model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
-model_xsin.add(tf.keras.layers.Dense(64, activation=x_sin))
+model_xsin.add(tf.keras.layers.Dense(512, activation=x_sin))



model_xsin.add(tf.keras.layers.Dense(1))
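As in the sin and snake scripts, the custom activation here is passed to Dense as a plain Python callable; Keras accepts any element-wise tensor function in that slot. A self-contained illustration (the x*tf.sin(x) body is only a guess at x_sin; its real definition lives elsewhere in the repo):

import tensorflow as tf

def x_sin(x):
    # hypothetical body; the repository defines the actual x_sin
    return x * tf.sin(x)

model = tf.keras.models.Sequential()
model.add(tf.keras.Input(shape=(1,)))
model.add(tf.keras.layers.Dense(512, activation=x_sin))  # any tensor->tensor callable works
model.add(tf.keras.layers.Dense(1))
model.compile("adam", loss="mse")
model.summary()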
