Building a Simple Neural Network for Odd-Even Classification Using Sigmoid Activation
import numpy as np

# Sigmoid activation: squashes any real number into (0, 1)
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# Derivative of the sigmoid, expressed in terms of the *activated* value:
# if a = sigmoid(x), then d(sigmoid)/dx = a * (1 - a), so this function
# expects the output of sigmoid(), not the raw pre-activation.
def sigmoid_derivative(x):
    return x * (1 - x)
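A quick way to see why sigmoid_derivative takes the activated value rather than the raw input is to compare the identity a * (1 - a) against a finite-difference estimate of the slope. The snippet below is a standalone sanity check, not part of the tutorial's network; the test point x = 0.5 is arbitrary.

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

x = 0.5
a = sigmoid(x)                                             # activated value
analytic = a * (1 - a)                                     # identity used by sigmoid_derivative
numeric = (sigmoid(x + 1e-6) - sigmoid(x - 1e-6)) / 2e-6   # central difference
print(analytic, numeric)                                   # both print ~0.23500371

The two values agree to several decimal places, which confirms the identity and explains why the training loop later passes final_output and hidden_output (activated values) into sigmoid_derivative.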
# Training data: the integers 1..100 as inputs, labeled 1 for odd and 0 for even
X = np.array([[i] for i in range(1, 101)])
y = np.array([[i % 2] for i in range(1, 101)])
# Network architecture: 1 input -> 2 hidden units -> 1 output
np.random.seed(42)
input_size = 1
hidden_size = 2
output_size = 1
learning_rate = 0.1

# Initialize both weight matrices with uniform random values in [0, 1)
weights_input_hidden = np.random.rand(input_size, hidden_size)
weights_hidden_output = np.random.rand(hidden_size, output_size)
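Note that this network has no bias terms, so every unit's sigmoid is pinned to 0.5 whenever its weighted input sum is zero. If you wanted biases, a minimal sketch of the extra parameters and the adjusted forward pass might look like the following (the names bias_hidden and bias_output are my own, not from the original code):

# Hypothetical bias terms, initialized to zero (one per unit in each layer)
bias_hidden = np.zeros((1, hidden_size))
bias_output = np.zeros((1, output_size))

# Forward pass with a bias added before each activation
hidden_output = sigmoid(np.dot(X, weights_input_hidden) + bias_hidden)
final_output = sigmoid(np.dot(hidden_output, weights_hidden_output) + bias_output)

# During backpropagation each bias would get its own update, e.g.:
# bias_output += output_error.sum(axis=0, keepdims=True) * learning_rate
# bias_hidden += hidden_error.sum(axis=0, keepdims=True) * learning_rate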
# Train with full-batch gradient descent
for epoch in range(10000):
    # Forward pass
    hidden_input = np.dot(X, weights_input_hidden)
    hidden_output = sigmoid(hidden_input)
    final_input = np.dot(hidden_output, weights_hidden_output)
    final_output = sigmoid(final_input)

    # Error and a periodic mean-squared-error report
    error = y - final_output
    if epoch % 1000 == 0:
        loss = np.mean(np.square(error))
        print(f'Epoch {epoch}, Loss: {loss}')

    # Backward pass: push the error back through the sigmoid derivatives
    output_error = error * sigmoid_derivative(final_output)
    hidden_error = output_error.dot(weights_hidden_output.T) * sigmoid_derivative(hidden_output)

    # Gradient-descent weight updates, summed over the whole batch
    weights_hidden_output += hidden_output.T.dot(output_error) * learning_rate
    weights_input_hidden += X.T.dot(hidden_error) * learning_rate
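One caveat worth flagging: the inputs 1..100 are fed to the sigmoid unscaled, so the hidden pre-activations quickly become large, the activations saturate near 0 or 1, and sigmoid_derivative returns values close to zero, which stalls learning. Scaling the inputs keeps the sigmoid in its responsive range, as in the sketch below (the scaling constant 100.0 is an assumption on my part, and even with scaling, parity is not a smooth function of a number's magnitude, so this architecture should not be expected to generalize):

# Hypothetical preprocessing: scale the inputs into (0, 1] before training
X_scaled = X / 100.0

# The training loop would then use X_scaled wherever X appears, e.g.:
hidden_output = sigmoid(np.dot(X_scaled, weights_input_hidden))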
# Evaluate on the numbers 1..10 with a forward pass through the trained weights
test_numbers = np.array([[i] for i in range(1, 11)])
hidden_input = np.dot(test_numbers, weights_input_hidden)
hidden_output = sigmoid(hidden_input)
final_input = np.dot(hidden_output, weights_hidden_output)
final_output = sigmoid(final_input)

# Threshold the network's output at 0.5: values above it predict "Odd" (label 1).
# (The original listing printed each number's true parity and never used the
# prediction; the loop below reports what the network actually predicts.)
for i, num in enumerate(test_numbers):
    prediction = final_output[i, 0]
    print(f"{num[0]}: {'Odd' if prediction >= 0.5 else 'Even'}")
OUTPUT:
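As a closing design note: a single scalar input gives the network no direct handle on parity, because odd and even numbers alternate along the number line. A representation under which parity becomes trivially learnable is the binary encoding of each integer, where the label is simply the least significant bit. The sketch below is one possible variant, not part of the original article; the helper to_bits and the bit width of 7 are my own choices:

import numpy as np

# Hypothetical feature map: encode each integer as its 7 low-order bits
def to_bits(n, width=7):
    return [(n >> k) & 1 for k in range(width)]

X_bits = np.array([to_bits(i) for i in range(1, 101)])
y = np.array([[i % 2] for i in range(1, 101)])

# With this encoding the target is literally the first input column,
# so even a single sigmoid unit could separate the classes perfectly.
print((X_bits[:, 0:1] == y).all())  # True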