import numpy as np
import math as ma

len_seq = 10
# The three *_RNN helpers were empty stubs in the original file. Their bodies
# are reconstructed here under one assumption: slide a window of len_seq
# consecutive values over the series, with the value just after the window as
# the target -- the usual (input sequence, next value) layout for an RNN.
def creation_sin_RNN(len_seq, tmin, tmax, n, w, a=1, b=0):
    t = np.linspace(tmin, tmax, n)
    serie = a * np.sin(w * t) + b
    Datax, Datay = [], []
    for i in range(n - len_seq):
        Datax.append(serie[i:i + len_seq])
        Datay.append(serie[i + len_seq])
    return np.array(Datax), np.array(Datay)

def creation_x_sin_RNN(len_seq, tmin, tmax, n, w, a=1, b=1, c=0):
    t = np.linspace(tmin, tmax, n)
    serie = a * t + b * np.sin(2 * ma.pi * w * t) + c
    Datax, Datay = [], []
    for i in range(n - len_seq):
        Datax.append(serie[i:i + len_seq])
        Datay.append(serie[i + len_seq])
    return np.array(Datax), np.array(Datay)

def creation_x_sin2_RNN(len_seq, tmin, tmax, n, w, a=1, b=1, c=0):
    t = np.linspace(tmin, tmax, n)
    serie = a * t + b * np.sin(2 * ma.pi * w * t) ** 2 + c
    Datax, Datay = [], []
    for i in range(n - len_seq):
        Datax.append(serie[i:i + len_seq])
        Datay.append(serie[i + len_seq])
    return np.array(Datax), np.array(Datay)
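# Sketch (added, not in the original file): how the windowed datasets above
# could feed a small Keras recurrent model. The layer sizes are illustrative,
# and the code assumes the reconstructed *_RNN bodies. Windows are reshaped to
# (samples, len_seq, 1), the (batch, timesteps, features) layout that Keras
# recurrent layers expect.
import tensorflow as tf

Xr, Yr = creation_sin_RNN(len_seq, -20, 5, 2000, 1)
Xr = Xr.reshape(-1, len_seq, 1)
rnn = tf.keras.models.Sequential()
rnn.add(tf.keras.Input(shape=(len_seq, 1)))
rnn.add(tf.keras.layers.SimpleRNN(32))
rnn.add(tf.keras.layers.Dense(1))
rnn.compile(tf.keras.optimizers.Adam(), loss='mse')
rnn.fit(Xr, Yr, batch_size=32, epochs=2)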
def creation_sin(tmin, tmax, n, w, a=1, c=0):
    # Samples a*sin(w*t)+c at n points of [tmin, tmax].
    Lx = []
    t = np.linspace(tmin, tmax, n)
    for i in t:
        Lx.append(a * np.sin(w * i) + c)
    Lx = np.array(Lx)
    return t, Lx

def creation_x_sin(tmin, tmax, n, w, a=1, b=0, c=0):
    # Samples the trend-plus-oscillation a*t + b*sin(2*pi*w*t) + c.
    Lx = []
    t = np.linspace(tmin, tmax, n)
    for i in t:
        Lx.append(a * i + b * np.sin(2 * ma.pi * w * i) + c)
    Lx = np.array(Lx)
    return t, Lx

def creation_x_sin2(tmin, tmax, n, w, a=1, b=1, c=0):
    # Same as creation_x_sin but with the sine squared: a*t + b*sin(2*pi*w*t)**2 + c.
    Lx = []
    t = np.linspace(tmin, tmax, n)
    for i in t:
        Lx.append(a * i + b * np.sin(2 * ma.pi * w * i) ** 2 + c)
    Lx = np.array(Lx)
    return t, Lx
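# Note (added): since t is a NumPy array, each loop above can be written as a
# single vectorised expression, e.g. for creation_x_sin2:
#     t = np.linspace(tmin, tmax, n)
#     Lx = a * t + b * np.sin(2 * ma.pi * w * t) ** 2 + c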
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 10 10:30:04 2021
@author: virgi
"""
import numpy as np
import math as ma
import matplotlib.pyplot as plt
import tensorflow as tf
# Training data: the sine sampled on [-20, 5].
tmin = -20
tmax = 5
n = 100000
X, Y = creation_sin(tmin, tmax, n, 1, a=1, c=0)
# Validation data: the disjoint interval [5, 20], so the network is tested on
# extrapolation outside the training range.
tmin = 5
tmax = 20
Xv, Yv = creation_sin(tmin, tmax, n, 1, a=1, c=0)
model = tf.keras.models.Sequential()
model.add(tf.keras.Input(shape=(1,)))
# Hidden layers: five fully connected layers with ReLU activation.
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(64, activation='relu'))
model.add(tf.keras.layers.Dense(128, activation='relu'))
# Output layer: a single neuron with linear activation (regression).
model.add(tf.keras.layers.Dense(1))
# Choice of optimisation method
opti = tf.keras.optimizers.Adam()
# Compile the model and choose the loss function. Accuracy is meaningless for
# a continuous regression target, so track mean absolute error instead.
model.compile(opti, loss='mse', metrics=['mae'])
model.summary()
model.fit(X, Y, batch_size=32, epochs=2, shuffle=True, validation_data=(Xv, Yv))
Y_predis = model.predict(X)
Y_predis_validation = model.predict(Xv)
plt.figure()
plt.plot(X, Y, label='training data')
plt.plot(Xv, Yv, label='validation data')
plt.plot(X, Y_predis, label='prediction on training data')
plt.plot(Xv, Y_predis_validation, label='prediction on validation data')
plt.legend()
plt.show()
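# Added check: quantify the gap between fit and extrapolation with
# model.evaluate (standard Keras API) instead of judging only from the plot.
print('train [mse, mae]:', model.evaluate(X, Y, verbose=0))
print('validation [mse, mae]:', model.evaluate(Xv, Yv, verbose=0))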