import sys
import warnings
if not sys.warnoptions:
    # suppress warning output (e.g. deprecation notices) so the notebook stays readable
    warnings.simplefilter("ignore")
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from sklearn.metrics import mean_squared_error, r2_score
%matplotlib inline
# Let X have two features (X1 and X2) where each feature is a random uniform variable distributed between 0 and 10
# Let y = 2*X1 + X2 + 1 + N(0,1)
X = np.random.uniform(0,10, size=(10000, 2))
y = X@[2,1] + 1 + np.random.normal(0, 1, size=(10000))
# note the same targets could also be written with np.dot:
# y = np.dot(X, [2, 1]) + 1 + np.random.normal(0, 1, size=(10000))
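# Optional sanity check (not in the original): each feature is uniform on [0, 10]
# with mean 5, so y should average near 2*5 + 1*5 + 1 = 16.
print('X shape:', X.shape, 'y shape:', y.shape, 'mean of y:', round(y.mean(), 2))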
model = Sequential()
# note that X.shape[1] is the number of columns (features) in X
model.add(Dense(1, input_shape=(X.shape[1],), activation='linear'))
model.compile(optimizer='adam', loss='mse')
model.summary()
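# The summary should report 3 trainable parameters: one weight per input feature
# plus a single bias, i.e. exactly the parameters of ordinary linear regression.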
history = model.fit(X, y, epochs=64, verbose=0, batch_size=32)
plt.plot(history.history['loss'])
plt.show()
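# Quick convergence check (not in the original): the added noise is N(0, 1), so the
# irreducible MSE is about 1; the final training loss should settle near that value.
print('final training loss (MSE):', history.history['loss'][-1])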
X_test = np.random.uniform(0,10, size=(10,2))
y_test = X_test@[2,1] + 1 + np.random.normal(0,1, size=(10))
y_pred = model.predict(X_test).flatten()
rmse = np.sqrt(mean_squared_error(y_true=y_test, y_pred=y_pred))
r2 = r2_score(y_true=y_test, y_pred=y_pred)
print('rmse:', rmse)
print('r^2:', r2)
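# Since the irreducible noise has standard deviation 1, an RMSE close to 1 means
# the model has essentially recovered the underlying linear relationship.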
# quick look at the noiseless linear term 2*X1 + X2, which is plotted (plus the intercept) below
X@[2,1]
# plot predictions
plt.scatter(y_test, y_pred, s=40, color='red')
# plot training data: noiseless target (2*X1 + X2 + 1) against the observed noisy y
plt.scatter(X@[2,1] + 1, y, color='black', s=0.005)
# plot line where error = 0
plt.plot([0,30], [0,30], lw=1, color='blue')
plt.xlabel('true value')
plt.ylabel('prediction')
plt.show()
weights, biases = model.layers[0].get_weights()
print(weights)
print(biases)
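# Hedged check (not in the original): compare the learned parameters with the
# true generating values, weights [2, 1] and bias 1; the differences should be small.
print('weight error:', weights.flatten() - np.array([2.0, 1.0]))
print('bias error:', biases - np.array([1.0]))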
# for input [0, 1] the fitted model should predict roughly 2*0 + 1*1 + 1 = 2
np.dot(np.asarray([[0, 1]]), weights) + biases
# equivalent computation using the @ operator
np.asarray([[0, 1]]) @ weights + biases
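# Cross-check the hand computations above against the model itself; all three
# results should agree for the same input [0, 1].
model.predict(np.asarray([[0, 1]]))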