
New results and, above all, work on the graphs

Doriand Petit committed 4 years ago
commit 59e1ab96a0
4 changed files with 6656 additions and 6 deletions
  1. code/wilshire_5000/new_data.txt (+6639, -0)
  2. code/wilshire_5000/nn.py (+9, -6)
  3. code/wilshire_5000/resnet18snake.png (binary)
  4. code/wilshire_5000/results.txt (+8, -0)

code/wilshire_5000/new_data.txt (+6639, -0)
Diff too large to display.


code/wilshire_5000/nn.py (+9, -6)

@@ -14,8 +14,8 @@ def sinus_cosinus(x):
 def swish(x):
     return(x*tf.math.sigmoid(x))
 
-#activations = [tf.keras.activations.relu,swish,sinus_cosinus,sinus,snake]
-activations = [snake]
+activations = [tf.keras.activations.relu,swish,sinus_cosinus,sinus,snake]
+#activations = [snake]
 models = []
 errors_train,errors_test = [],[]
 mean_y_train,mean_y_test,std_y_test=[],[],[]
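
The commit switches back from the snake-only run to the full activation list (relu, swish, sinus_cosinus, sinus, snake). The repository's snake() is defined elsewhere in nn.py and is not shown in this hunk; as a point of reference, here is a minimal sketch assuming the usual Snake formulation x + sin²(ax)/a of Ziyin et al. (2020), next to the swish already defined above:

import tensorflow as tf

# Hypothetical sketch only: the repository's own snake() may differ.
# Snake adds a periodic term to the identity, which suits data with
# a long-term trend plus seasonal structure.
def snake(x, a=1.0):
    return x + tf.math.square(tf.math.sin(a * x)) / a

def swish(x):
    return x * tf.math.sigmoid(x)
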
@@ -42,7 +42,7 @@ for activation in activations :
     y_test_5=[]
     errors_train_5=[]
     errors_test_5=[]
-    for k in range(1):
+    for k in range(2):
 
         model = tf.keras.Sequential()
 
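The inner loop now trains each model twice instead of once, presumably so that a mean and a standard deviation per activation can be reported (results.txt below stores [mean, std] pairs). A minimal sketch of that aggregation, with a placeholder in place of the actual training and evaluation:

import numpy as np

errors_test_5 = []
for k in range(2):                       # two independent trainings per activation
    # placeholder for: build the Sequential model, fit it, then evaluate it
    test_error = 0.01 * (k + 1)          # stands in for model.evaluate(x_test, y_test)
    errors_test_5.append(test_error)

# one [mean, std] pair per activation, matching the shape of results.txt
mean_err, std_err = float(np.mean(errors_test_5)), float(np.std(errors_test_5))
print([mean_err, std_err])
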
@@ -73,7 +73,7 @@ for activation in activations :
 # y_preds_test.append(y_pred_test)
 
 
-x = np.arange(df_train.shape[0]+df_test.shape[0]+908)
+x = np.arange(9000)
 x = x / maximum
 future_preds = model.predict(x) ## Calculated with a website the number of working days between 01-06-2020 and 01-01-2024
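
The prediction grid changes from the length of the data plus 908 working days (up to 01-01-2024, per the comment) to a flat 9000 points, still normalised by the same maximum as the training inputs, so the fitted network is queried well past the observed range. A self-contained sketch of that extrapolation step, using a toy untrained model and an assumed value for maximum:

import numpy as np
import tensorflow as tf

maximum = 6629.0                                   # assumed: scale used to normalise the day index
model = tf.keras.Sequential([tf.keras.Input(shape=(1,)),
                             tf.keras.layers.Dense(1)])
model.compile(optimizer="adam", loss="mse")        # untrained stand-in for the fitted network

x = np.arange(9000, dtype=np.float32) / maximum    # extends well beyond the observed data
future_preds = model.predict(x.reshape(-1, 1), verbose=0)   # shape (9000, 1)
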
@@ -85,7 +85,7 @@ def plot_total(x_train,y_train,y_pred_train,x_test,y_test,y_pred_test):
     plt.figure()
     plt.plot(x,y_true,label="True data")
     plt.plot(x,y_pred,label="Predictions")
-    plt.vlines([5240,5326])
+    plt.vlines([6545,6629])
     plt.legend()
     plt.show()

@@ -99,6 +99,9 @@ x_cut = np.arange(df_train.shape[0]+df_test.shape[0])
 plt.figure()
 plt.plot(x_cut,y_true,label="True data")
 plt.plot(x,future_preds,label="Predictions")
-plt.vlines([5240,5326],ymin=0,ymax=1)
+plt.xticks(range(0, 9000, 250), range(1995, 2030, 1))
+plt.xlabel("Années")
+plt.ylabel("Index Willshire5000 normalisé")
+plt.vlines([6545,6629],ymin=0,ymax=1)
 plt.legend()
 plt.show()
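
The final plot now marks the 6545-6629 window instead of 5240-5326 and relabels the x axis in years (about 250 trading days per year from 1995 onward). A standalone sketch of that layout with placeholder predictions; note that plt.xticks needs as many labels as ticks, so the sketch uses range(1995, 2031) to match the 36 ticks from range(0, 9000, 250):

import numpy as np
import matplotlib.pyplot as plt

x = np.arange(9000)
future_preds = 0.5 + 0.4 * np.sin(x / 700.0)       # placeholder for the model's predictions

plt.figure()
plt.plot(x, future_preds, label="Predictions")
plt.vlines([6545, 6629], ymin=0, ymax=1)           # window highlighted by the commit
plt.xticks(range(0, 9000, 250), range(1995, 2031, 1))   # 36 ticks, 36 year labels
plt.xlabel("Années")
plt.ylabel("Index Willshire5000 normalisé")
plt.legend()
plt.show()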

code/wilshire_5000/resnet18snake.png (binary file)


code/wilshire_5000/results.txt (+8, -0)

@@ -0,0 +1,8 @@
+100 epochs adam mse 2_runs 4_batch_size
+
+[[0.5401026606559753, 0.0], [0.5401026606559753, 0.0], [0.18415303528308868, 0.009042516350746155],
+[0.1577790528535843, 0.0004640519618988037], [0.007161237532272935, 0.0007062254007905722]]
+
+
+[[0.0681375540792942, 0.11859790716458475], [0.0113261666148901, 0.002322317928070604], [nan, nan],
+[0.008264125511050224, 0.0013573866481290681], [0.008175850845873356, 0.001503577316727166]]
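
A hedged reading of these numbers: each nested list appears to hold one [mean, std] pair per activation, in the order of the activations list in nn.py (relu, swish, sinus_cosinus, sinus, snake), aggregated over the 2 runs; the [nan, nan] entry would then correspond to a diverged sinus_cosinus run. This interpretation is an assumption, as is the sketch below that prints such a list as a table:

import math

activation_names = ["relu", "swish", "sinus_cosinus", "sinus", "snake"]   # assumed order
stats = [[0.0681375540792942, 0.11859790716458475],
         [0.0113261666148901, 0.002322317928070604],
         [math.nan, math.nan],
         [0.008264125511050224, 0.0013573866481290681],
         [0.008175850845873356, 0.001503577316727166]]

for name, (mean, std) in zip(activation_names, stats):
    print(f"{name:>14s}: mean={mean:.4g}  std={std:.4g}")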
