# Lab 4
import numpy as np
class NeuralNetwork:
def __init__(self, input_size: int, hidden_size: int, output_size: int):
    """Set up a 2-layer (one hidden layer) network: input -> hidden -> output.

    Args:
        input_size: number of input features.
        hidden_size: number of hidden units.
        output_size: number of output units.
    """
    self.input_size = input_size
    self.hidden_size = hidden_size
    self.output_size = output_size
    # Fix: the backward-pass code below reads self.W2 (and a forward pass
    # needs W1/b1/W2/b2), but the visible __init__ never created them —
    # initialize here. Small random weights break symmetry; zero biases.
    # NOTE(review): the notebook extraction may have dropped an equivalent
    # initialization — confirm against the original notebook.
    self.W1 = np.random.randn(input_size, hidden_size) * 0.01
    self.b1 = np.zeros((1, hidden_size))
    self.W2 = np.random.randn(hidden_size, output_size) * 0.01
    self.b2 = np.zeros((1, output_size))
# Backpropagation fragment: gradients for a 2-layer network, averaged over
# the batch. NOTE(review): this looks like the body of a
# `backward(self, X, y, ...)` method whose `def` line was lost in the
# notebook extraction — `m` (batch size), `X`, `y`, and the cached
# activations self.a1/self.a2 must be set by the enclosing method and a
# prior forward pass; confirm against the full file.
# Compute gradients
dz2 = self.a2 - y  # output-layer error term (prediction minus target)
dW2 = (1 / m) * np.dot(self.a1.T, dz2)  # gradient of W2, mean over m samples
db2 = (1 / m) * np.sum(dz2, axis=0, keepdims=True)  # gradient of b2
# Propagate the error through W2, then through the hidden activation.
# NOTE(review): sigmoid_derivative is applied to the activation a1, i.e. the
# convention where the derivative takes the already-activated value — confirm.
dz1 = np.dot(dz2, self.W2.T) * self.sigmoid_derivative(self.a1)
dW1 = (1 / m) * np.dot(X.T, dz1)  # gradient of W1
db1 = (1 / m) * np.sum(dz1, axis=0, keepdims=True)  # gradient of b1
# Training-loop fragment: one epoch's parameter update plus periodic loss
# reporting. NOTE(review): the enclosing `for epoch in range(...)` header and
# the forward pass that produces `output` were lost in the notebook
# extraction — confirm against the full file.
# Backpropagation
self.backward(X, y, learning_rate)
# Print loss every 100 epochs
if epoch % 100 == 0:
    loss = np.mean(np.square(y - output))  # mean squared error over the batch
    print(f'Epoch {epoch}, Loss: {loss}')
# Example usage:
# Example configuration: a tiny 2-input, 3-hidden-unit, 1-output network.
input_size, hidden_size, output_size = 2, 3, 1