Program 3

A simple two-layer feedforward neural network, implemented from scratch with NumPy, trained with gradient descent to classify the Iris dataset.

import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder, StandardScaler
from sklearn.metrics import accuracy_score

# Load the Iris dataset
iris = load_iris()
X = iris.data    # shape (150, 4): sepal/petal lengths and widths
y = iris.target  # shape (150,): integer class labels 0, 1, 2

# One-hot encode the integer target labels
# (sparse_output requires scikit-learn >= 1.2; older versions use sparse=False)
encoder = OneHotEncoder(sparse_output=False)
y = encoder.fit_transform(y.reshape(-1, 1))
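# For reference, each integer label becomes a 3-element indicator vector
# (the encoder orders categories in ascending order by default):
#   0 -> [1., 0., 0.]    1 -> [0., 1., 0.]    2 -> [0., 0., 1.]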

# Standardize the features to zero mean and unit variance
scaler = StandardScaler()
X = scaler.fit_transform(X)

# Split the dataset into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)

# Define the neural network structure
input_size = X.shape[1]   # 4 input features
hidden_size = 5           # neurons in the single hidden layer
output_size = y.shape[1]  # 3 classes
learning_rate = 0.01
epochs = 10000

# Initialize weights randomly and biases to zero
np.random.seed(42)
W1 = np.random.randn(input_size, hidden_size)
b1 = np.zeros((1, hidden_size))
W2 = np.random.randn(hidden_size, output_size)
b2 = np.zeros((1, output_size))

# Activation function (sigmoid) and its derivative
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(x):
    # x is expected to be an already-activated value a = sigmoid(z),
    # so this computes sigma'(z) = a * (1 - a)
    return x * (1 - x)
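# Quick sanity check (illustrative, not part of the original program):
# sigmoid(0) = 0.5, and since sigma'(z) = sigma(z) * (1 - sigma(z)),
# sigmoid_derivative(0.5) = 0.25.
assert abs(sigmoid(0.0) - 0.5) < 1e-12
assert abs(sigmoid_derivative(0.5) - 0.25) < 1e-12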

# Forward pass: input -> hidden (sigmoid) -> output (sigmoid)
def forward(X):
    z1 = np.dot(X, W1) + b1
    a1 = sigmoid(z1)
    z2 = np.dot(a1, W2) + b2
    a2 = sigmoid(z2)
    return a1, a2

# Backward pass: compute gradients and update the parameters in place
def backward(X, y, a1, a2):
    global W1, b1, W2, b2
    m = y.shape[0]
    dz2 = a2 - y  # output-layer error
    dW2 = np.dot(a1.T, dz2) / m
    db2 = np.sum(dz2, axis=0, keepdims=True) / m
    dz1 = np.dot(dz2, W2.T) * sigmoid_derivative(a1)  # error backpropagated to the hidden layer
    dW1 = np.dot(X.T, dz1) / m
    db1 = np.sum(dz1, axis=0, keepdims=True) / m
    # Gradient-descent parameter updates
    W1 -= learning_rate * dW1
    b1 -= learning_rate * db1
    W2 -= learning_rate * dW2
    b2 -= learning_rate * db2
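# Note on dz2 = a2 - y above: that is the exact output-layer gradient for a
# sigmoid output trained with cross-entropy loss. For the mean-squared-error
# loss reported during training, the exact gradient would carry an extra
# factor, dz2 = (a2 - y) * sigmoid_derivative(a2). Omitting it is a common
# simplification: the updates effectively follow the cross-entropy gradient,
# while MSE is only used for monitoring.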

# Train the neural network: one full-batch gradient step per epoch
for epoch in range(epochs):
    a1, a2 = forward(X_train)
    backward(X_train, y_train, a1, a2)
    if epoch % 1000 == 0:
        loss = np.mean(np.square(y_train - a2))  # mean squared error, for monitoring
        print(f'Epoch {epoch}, Loss: {loss:.4f}')

# Test the neural network: predict the class with the highest output activation
_, a2_test = forward(X_test)
predictions = np.argmax(a2_test, axis=1)
y_test_labels = np.argmax(y_test, axis=1)  # recover integer labels from one-hot rows
accuracy = accuracy_score(y_test_labels, predictions)
print(f'Accuracy: {accuracy * 100:.2f}%')
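# A possible extension (not part of the original program): a confusion matrix
# shows which species get mixed up with each other, which a single accuracy
# number hides.
from sklearn.metrics import confusion_matrix
print(confusion_matrix(y_test_labels, predictions))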

# Classify a new sample (it must be scaled with the same fitted scaler)
new_sample = np.array([[5.1, 3.5, 1.4, 0.2]])  # example input in original units
new_sample_scaled = scaler.transform(new_sample)
_, new_sample_output = forward(new_sample_scaled)
new_sample_prediction = np.argmax(new_sample_output, axis=1)
predicted_class = iris.target_names[new_sample_prediction[0]]
print(f'The new sample {new_sample} is classified as: {predicted_class}')
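# The three sigmoid outputs are independent scores in (0, 1), not probabilities
# that sum to 1. A rough way to compare them (an illustrative sketch, not part
# of the original program):
relative_scores = new_sample_output / new_sample_output.sum()
for name, score in zip(iris.target_names, relative_scores.ravel()):
    print(f'  {name}: {score:.3f}')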
