
optuna wilshire

Emile Siboulet committed 4 years ago
parent commit 11491091bd
41 changed files with 3523 additions and 71 deletions
  1. Binary  code/RNN_wilshire_5000/images/140_84_fig2.png
  2. Binary  code/RNN_wilshire_5000/images/140_84_fig3.png
  3. Binary  code/RNN_wilshire_5000/images/140_84_fig4.png
  4. Binary  code/RNN_wilshire_5000/images/149_117_fig2.png
  5. Binary  code/RNN_wilshire_5000/images/149_117_fig3.png
  6. Binary  code/RNN_wilshire_5000/images/149_117_fig4.png
  7. Binary  code/RNN_wilshire_5000/images/165_119_fig2.png
  8. Binary  code/RNN_wilshire_5000/images/165_119_fig3.png
  9. Binary  code/RNN_wilshire_5000/images/165_119_fig4.png
  10. Binary  code/RNN_wilshire_5000/images/197_45_fig2.png
  11. Binary  code/RNN_wilshire_5000/images/197_45_fig3.png
  12. Binary  code/RNN_wilshire_5000/images/197_45_fig4.png
  13. Binary  code/RNN_wilshire_5000/images/204_254_fig2.png
  14. Binary  code/RNN_wilshire_5000/images/204_254_fig3.png
  15. Binary  code/RNN_wilshire_5000/images/204_254_fig4.png
  16. Binary  code/RNN_wilshire_5000/images/211_133_fig2.png
  17. Binary  code/RNN_wilshire_5000/images/211_133_fig3.png
  18. Binary  code/RNN_wilshire_5000/images/211_133_fig4.png
  19. Binary  code/RNN_wilshire_5000/images/234_67_fig2.png
  20. Binary  code/RNN_wilshire_5000/images/234_67_fig3.png
  21. Binary  code/RNN_wilshire_5000/images/234_67_fig4.png
  22. Binary  code/RNN_wilshire_5000/images/238_208_fig2.png
  23. Binary  code/RNN_wilshire_5000/images/238_208_fig3.png
  24. Binary  code/RNN_wilshire_5000/images/238_208_fig4.png
  25. Binary  code/RNN_wilshire_5000/images/256_32_fig2.png
  26. Binary  code/RNN_wilshire_5000/images/256_32_fig3.png
  27. Binary  code/RNN_wilshire_5000/images/256_32_fig4.png
  28. Binary  code/RNN_wilshire_5000/images/70_234_fig2.png
  29. Binary  code/RNN_wilshire_5000/images/70_234_fig3.png
  30. Binary  code/RNN_wilshire_5000/images/70_234_fig4.png
  31. Binary  code/RNN_wilshire_5000/images/89_85_fig2.png
  32. Binary  code/RNN_wilshire_5000/images/89_85_fig3.png
  33. Binary  code/RNN_wilshire_5000/images/89_85_fig4.png
  34. Binary  code/RNN_wilshire_5000/images/97_196_fig2.png
  35. Binary  code/RNN_wilshire_5000/images/97_196_fig3.png
  36. Binary  code/RNN_wilshire_5000/images/97_196_fig4.png
  37. Binary  code/RNN_wilshire_5000/images/98_115_fig2.png
  38. Binary  code/RNN_wilshire_5000/images/98_115_fig3.png
  39. Binary  code/RNN_wilshire_5000/images/98_115_fig4.png
  40. +3430 -0  code/RNN_wilshire_5000/rep.txt
  41. +93 -71  code/RNN_wilshire_5000/training.py

+3430 -0  code/RNN_wilshire_5000/rep.txt
The diff for this file has been suppressed because it is too large.


+93 -71  code/RNN_wilshire_5000/training.py

@@ -2,74 +2,96 @@ import tensorflow as tf
from matplotlib import pyplot as plt
import numpy as np
from Model import MyModel


LEN_SEQ = 64
PRED = 0.027
HIDDEN = 128

model = MyModel(HIDDEN)

dataset = np.load('dataset.npy')
# min-max normalisation: scale the series by its range, then shift so the minimum is 0
scale = (np.max(dataset) - np.min(dataset))
data = dataset/scale
shift = np.min(data)
data = data - shift

annee = np.array(list(range(len(data))))/365
annee = annee - annee[-1]
start_pred = int(len(data)*(1-PRED))
print(len(data))
print(start_pred)
plt.figure(1)
plt.plot(annee[:start_pred],data[:start_pred], label="apprentissage")
plt.plot(annee[start_pred:],data[start_pred:], label="validation")
plt.legend()
plt.show()

# inputs and targets: the series and the same series shifted by one step (next-value prediction)
X_train_tot = [data[0:start_pred-1]]
Y_train_tot = [data[1:start_pred]]

X_train_tot = np.expand_dims(np.array(X_train_tot),2)
Y_train_tot = np.expand_dims(np.array(Y_train_tot),2)

X_train = X_train_tot[:,:LEN_SEQ,:]
Y_train = Y_train_tot[:,:LEN_SEQ,:]

# build overlapping training windows of length LEN_SEQ, sliding one step at a time
for i in range(len(X_train_tot[0]) - LEN_SEQ) :
    X_train = np.concatenate((X_train, X_train_tot[:,i:i+LEN_SEQ,:]),0)
    Y_train = np.concatenate((Y_train, Y_train_tot[:,i:i+LEN_SEQ,:]),0)
print(X_train_tot.shape)
print(X_train.shape)


model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['binary_crossentropy'])
import os
os.system("rm -rf log_dir")

model.fit(x=X_train, y=Y_train, batch_size=16, epochs=5, shuffle=True)

# autoregressive rollout: repeatedly append the last predicted value to the input sequence
Pred = X_train_tot.copy()
while len(Pred[0]) < len(data) :
    print(len(data) - len(Pred[0]))
    Pred = np.concatenate((Pred, np.array([[model.predict(Pred)[0][-1]]])),1)
# undo the min-max normalisation
Pred = Pred + shift
Pred = Pred * scale

data_Pred = np.squeeze(Pred)

plt.figure(2)
plt.plot(annee[:start_pred],dataset[:start_pred], label="apprentissage")
plt.plot(annee[start_pred:],dataset[start_pred:], label="validation")
plt.plot(annee, data_Pred, label="prediction")
plt.legend()

fig, axs = plt.subplots(2)
axs[0].plot(annee[:start_pred],dataset[:start_pred], label="données")
axs[0].plot(annee[start_pred:],dataset[start_pred:], label="validation")
axs[1].plot(annee[:start_pred],dataset[:start_pred], label="données")
axs[1].plot(annee[start_pred:], data_Pred[start_pred:], label="prediction")
plt.legend()
plt.show()
import optuna

def objectif(trial) :
    # hyperparameters sampled by Optuna for this trial
    LEN_SEQ = trial.suggest_int('len_seq', 64, 256)
    PRED = 0.1
    HIDDEN = trial.suggest_int('hidden', 32, 256)

    model = MyModel(HIDDEN)

    dataset = np.load('dataset.npy')
    # min-max normalisation of the series
    scale = (np.max(dataset) - np.min(dataset))
    data = dataset/scale
    shift = np.min(data)
    data = data - shift

    annee = np.array(list(range(len(data))))/365
    annee = annee - annee[-1]
    start_pred = int(len(data)*(1-PRED))
    #print(len(data))
    #print(start_pred)
    plt.figure(1)
    plt.plot(annee[:start_pred],data[:start_pred], label="apprentissage")
    plt.plot(annee[start_pred:],data[start_pred:], label="validation")
    plt.legend()
    #plt.show()

    X_train_tot = [data[0:start_pred-1]]
    Y_train_tot = [data[1:start_pred]]

    X_train_tot = np.expand_dims(np.array(X_train_tot),2)
    Y_train_tot = np.expand_dims(np.array(Y_train_tot),2)

    X_train = X_train_tot[:,:LEN_SEQ,:]
    Y_train = Y_train_tot[:,:LEN_SEQ,:]

    # overlapping training windows of length LEN_SEQ
    for i in range(len(X_train_tot[0]) - LEN_SEQ) :
        X_train = np.concatenate((X_train, X_train_tot[:,i:i+LEN_SEQ,:]),0)
        Y_train = np.concatenate((Y_train, Y_train_tot[:,i:i+LEN_SEQ,:]),0)
    #print(X_train_tot.shape)
    #print(X_train.shape)


    model.compile(optimizer='adam',
                  loss='binary_crossentropy',
                  metrics=['binary_crossentropy','mse'])
    import os
    os.system("rm -rf log_dir")

    model.fit(x=X_train, y=Y_train, batch_size=16, epochs=5, shuffle=True)

    # autoregressive rollout, extended 500 steps past the end of the data
    Pred = X_train_tot.copy()
    while len(Pred[0]) < len(data)+500 :
        print(len(data) - len(Pred[0]))
        Pred = np.concatenate((Pred, np.array([[model.predict(Pred)[0][-1]]])),1)
    # undo the min-max normalisation
    Pred = Pred + shift
    Pred = Pred * scale

    data_Pred = np.squeeze(Pred)

    plt.figure(2)
    plt.clf()
    plt.plot(annee[:start_pred],dataset[:start_pred], label="apprentissage")
    plt.plot(annee[start_pred:],dataset[start_pred:], label="validation")
    plt.plot(annee[start_pred:], data_Pred[start_pred:len(annee)], label="prediction")
    plt.legend()
    plt.savefig(f"images/{LEN_SEQ}_{HIDDEN}_fig2.png")

    plt.figure(3)
    plt.clf()
    fig, axs = plt.subplots(2)
    axs[0].plot(annee[:start_pred],dataset[:start_pred], label="apprentissage")
    axs[0].plot(annee[start_pred:],dataset[start_pred:], label="validation")
    axs[1].plot(annee[:start_pred],dataset[:start_pred], label="données")
    axs[1].plot(annee[start_pred:], data_Pred[start_pred:len(annee)], label="prediction")
    plt.legend()
    plt.savefig(f"images/{LEN_SEQ}_{HIDDEN}_fig3.png")

    annee_ex = np.array(list(range(len(data_Pred))))/365
    annee_ex = annee_ex + annee[0]
    plt.figure(4)
    plt.clf()
    plt.plot(annee[:start_pred],dataset[:start_pred], label="données")
    plt.plot(annee_ex[start_pred:], data_Pred[start_pred:], label="prediction")
    plt.legend()
    plt.savefig(f"images/{LEN_SEQ}_{HIDDEN}_fig4.png")

    # objective: norm of the validation error, normalised by the range of the prediction
    loss = np.linalg.norm(dataset[start_pred:] - data_Pred[start_pred:len(dataset)])/(np.max(data_Pred[start_pred:]) - np.min(data_Pred[start_pred:]))
    print(loss*100)
    return loss
    #plt.show()

study = optuna.create_study()
study.optimize(objectif, n_trials=10)
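
After the ten trials finish, the chosen hyperparameters live on the study object. A minimal sketch of how they could be read back, using only standard Optuna attributes; this inspection code is not part of the commit:

# sketch only: inspect the finished study (standard Optuna API, not in this commit)
print("best objective value:", study.best_value)    # smallest value returned by objectif()
print("best hyperparameters:", study.best_params)   # e.g. {'len_seq': ..., 'hidden': ...}
for t in study.trials:
    # each trial also saves its images/{len_seq}_{hidden}_figN.png plots via the savefig calls above
    print(t.number, t.params, t.value)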
