t1-3 done

Claudio Maggioni (maggicl) 2021-04-26 12:39:29 +02:00
parent 4e5c75e52c
commit 8a4c98d0d1


@@ -7,6 +7,7 @@ from tensorflow.keras.layers import Dense
 from tensorflow.keras import losses
 from tensorflow import keras
 import tensorflow as tf
+from sklearn.metrics import mean_squared_error
 
 data = np.load("../data/data.npz")
 xs = data["x"] # 2000x2
@@ -60,8 +61,11 @@ X_val = X_val[:, 1:]
 mean = np.mean(X_train, axis=0)
 std = np.std(X_train, axis=0)
-y_mean = np.mean(y_train, axis=0)
-y_std = np.std(y_train, axis=0)
+#y_mean = np.mean(y_train, axis=0)
+#y_std = np.std(y_train, axis=0)
+y_mean = 0
+y_std = 1
 
 X_train -= mean
 X_train /= std
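
Note: this hunk leaves the targets on their original scale, since y_mean = 0 and y_std = 1 turn the target rescaling into a no-op while the inputs are still standardized with training-set statistics. A minimal sketch of the resulting preprocessing, using random stand-in arrays instead of the data loaded from ../data/data.npz (shapes are illustrative only):

import numpy as np

# Random stand-ins for the train/validation splits used in the script.
rng = np.random.default_rng(0)
X_train, X_val = rng.normal(size=(1500, 2)), rng.normal(size=(500, 2))
y_train, y_val = rng.normal(size=1500), rng.normal(size=500)

# Inputs are standardized with statistics computed on the training set only.
mean = np.mean(X_train, axis=0)
std = np.std(X_train, axis=0)
X_train = (X_train - mean) / std
X_val = (X_val - mean) / std

# With y_mean = 0 and y_std = 1 the target rescaling below does nothing,
# so the network is trained against y on its original scale.
y_mean, y_std = 0, 1
y_train = (y_train - y_mean) / y_std
y_val = (y_val - y_mean) / y_std
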
@@ -74,15 +78,17 @@ y_val -= y_mean
 y_val /= y_std
 
 network = Sequential()
-network.add(Dense(20, activation='relu'))
-network.add(Dense(20, activation='relu'))
-network.add(Dense(1, activation='sigmoid'))
+network.add(Dense(35, activation='relu'))
+network.add(Dense(10, activation='relu'))
+network.add(Dense(3, activation='relu'))
+network.add(Dense(1, activation='linear'))
 network.compile(optimizer='rmsprop', loss='mse', metrics=['mse'])
-callback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=20)
-network.fit(X_train, y_train, epochs=2000, verbose=1, batch_size=1000, validation_data=(X_val, y_val), callbacks=[callback])
+epochs = 10000
+callback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=1000)
+network.fit(X_train, y_train, epochs=epochs, verbose=1, batch_size=100, validation_data=(X_val, y_val), callbacks=[callback])
 
-msq = np.mean(((network.predict(X_val) - y_val) * y_std + y_mean) ** 2)
+msq = mean_squared_error(network.predict(X_val), y_val)
 print(msq)
 
 X_test = X_test[:, 1:]
@@ -90,5 +96,5 @@ X_test -= mean
 X_test /= std
 y_test -= y_mean
 y_test /= y_std
-msq = np.mean(((network.predict(X_test) - y_test) * y_std + y_mean) ** 2)
+msq = mean_squared_error(network.predict(X_test), y_test)
 print(msq)
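
Since y_mean is now 0 and y_std is now 1, the previous hand-rolled MSE expression and the new sklearn.metrics.mean_squared_error call compute the same number, and the squared error is symmetric in its two arguments, so passing the predictions first is harmless. A quick sanity-check sketch, using random stand-ins for the predictions and the targets:

import numpy as np
from sklearn.metrics import mean_squared_error

rng = np.random.default_rng(0)
pred = rng.normal(size=(500, 1))   # stand-in for network.predict(X_val)
y_val = rng.normal(size=(500, 1))  # stand-in for the (un-rescaled) validation targets

y_mean, y_std = 0, 1
manual = np.mean(((pred - y_val) * y_std + y_mean) ** 2)  # previous formulation
sk = mean_squared_error(pred, y_val)                      # new formulation
assert np.isclose(manual, sk)
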