diff --git a/assignment_1/src/t1.py b/assignment_1/src/t1.py
index 23e14e7..f980fb3 100644
--- a/assignment_1/src/t1.py
+++ b/assignment_1/src/t1.py
@@ -7,6 +7,7 @@ from tensorflow.keras.layers import Dense
 from tensorflow.keras import losses
 from tensorflow import keras
 import tensorflow as tf
+from sklearn.metrics import mean_squared_error
 
 data = np.load("../data/data.npz")
 xs = data["x"] # 2000x2
@@ -60,8 +61,11 @@ X_val = X_val[:, 1:]
 
 mean = np.mean(X_train, axis=0)
 std = np.std(X_train, axis=0)
-y_mean = np.mean(y_train, axis=0)
-y_std = np.std(y_train, axis=0)
+#y_mean = np.mean(y_train, axis=0)
+#y_std = np.std(y_train, axis=0)
+
+y_mean = 0
+y_std = 1
 
 X_train -= mean
 X_train /= std
@@ -74,15 +78,17 @@ y_val -= y_mean
 y_val /= y_std
 
 network = Sequential()
-network.add(Dense(20, activation='relu'))
-network.add(Dense(20, activation='relu'))
-network.add(Dense(1, activation='sigmoid'))
+network.add(Dense(35, activation='relu'))
+network.add(Dense(10, activation='relu'))
+network.add(Dense(3, activation='relu'))
+network.add(Dense(1, activation='linear'))
 network.compile(optimizer='rmsprop', loss='mse', metrics=['mse'])
 
-callback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=20)
-network.fit(X_train, y_train, epochs=2000, verbose=1, batch_size=1000, validation_data=(X_val, y_val), callbacks=[callback])
+epochs = 10000
+callback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=1000)
+network.fit(X_train, y_train, epochs=epochs, verbose=1, batch_size=100, validation_data=(X_val, y_val), callbacks=[callback])
 
-msq = np.mean(((network.predict(X_val) - y_val) * y_std + y_mean) ** 2)
+msq = mean_squared_error(network.predict(X_val), y_val)
 print(msq)
 
 X_test = X_test[:, 1:]
@@ -90,5 +96,5 @@ X_test -= mean
 X_test /= std
 y_test -= y_mean
 y_test /= y_std
-msq = np.mean(((network.predict(X_test) - y_test) * y_std + y_mean) ** 2)
+msq = mean_squared_error(network.predict(X_test), y_test)
 print(msq)