Practical Assignment 2
Solve the following questions.
1) Implementing Artificial Neural Network training process
→
import numpy as np

# Sigmoid activation and its derivative.
def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + e^-x)."""
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(x):
    # NOTE: expects x to already be sigmoid(z) (the activated value),
    # since sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)).
    return x * (1 - x)

# Neural Network Class
class SimpleANN:
    """A 2-layer (one hidden layer) feed-forward network trained with
    plain gradient descent on mean-squared error."""

    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.1):
        self.learning_rate = learning_rate
        # Initialize weights and biases with uniform [0, 1) values.
        self.weights_input_hidden = np.random.rand(input_size, hidden_size)
        self.weights_hidden_output = np.random.rand(hidden_size, output_size)
        self.bias_hidden = np.random.rand(1, hidden_size)
        self.bias_output = np.random.rand(1, output_size)

    def forward(self, X):
        """Forward propagation; caches activations for backprop.

        X is (n_samples, input_size); returns (n_samples, output_size)."""
        self.hidden_input = np.dot(X, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = sigmoid(self.hidden_input)
        self.output_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        self.output = sigmoid(self.output_input)
        return self.output

    def backward(self, X, y, output):
        """One backpropagation step; updates weights and biases in place."""
        output_error = y - output
        output_delta = output_error * sigmoid_derivative(output)
        hidden_error = output_delta.dot(self.weights_hidden_output.T)
        hidden_delta = hidden_error * sigmoid_derivative(self.hidden_output)
        # Update weights and biases (gradient ascent on -MSE == descent on MSE).
        self.weights_hidden_output += self.hidden_output.T.dot(output_delta) * self.learning_rate
        self.bias_output += np.sum(output_delta, axis=0, keepdims=True) * self.learning_rate
        self.weights_input_hidden += X.T.dot(hidden_delta) * self.learning_rate
        self.bias_hidden += np.sum(hidden_delta, axis=0, keepdims=True) * self.learning_rate

    def train(self, X, y, epochs=1000):
        """Run full-batch training for the given number of epochs,
        printing the MSE loss every 100 epochs."""
        for epoch in range(epochs):
            output = self.forward(X)
            self.backward(X, y, output)
            if epoch % 100 == 0:
                loss = np.mean((y - output) ** 2)
                print(f"Epoch {epoch}, Loss: {loss}")

# Example usage
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])  # XOR inputs
y = np.array([[0], [1], [1], [0]])  # XOR outputs
nn = SimpleANN(input_size=2, hidden_size=2, output_size=1)
nn.train(X, y)
print("Output after training:")
print(nn.forward(X))
2) Implement Back propagation.
→
import numpy as np

# Activation functions and their derivatives
def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + e^-x)."""
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(x):
    # NOTE: expects x to already be sigmoid(z) (the activated value).
    return x * (1 - x)

class NeuralNetworkWithBackprop:
    """One-hidden-layer network trained by explicit backpropagation
    (full-batch gradient descent on mean-squared error)."""

    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.1):
        self.learning_rate = learning_rate
        # Initialize weights and biases with uniform [0, 1) values.
        self.weights_input_hidden = np.random.rand(input_size, hidden_size)
        self.weights_hidden_output = np.random.rand(hidden_size, output_size)
        self.bias_hidden = np.random.rand(1, hidden_size)
        self.bias_output = np.random.rand(1, output_size)

    def forward(self, X):
        """Forward propagation; caches intermediate activations for backward()."""
        self.hidden_input = np.dot(X, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = sigmoid(self.hidden_input)
        self.output_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        self.output = sigmoid(self.output_input)
        return self.output

    def backward(self, X, y, output):
        """Backpropagate the error and update all parameters in place."""
        output_error = y - output
        output_delta = output_error * sigmoid_derivative(output)
        hidden_error = output_delta.dot(self.weights_hidden_output.T)
        hidden_delta = hidden_error * sigmoid_derivative(self.hidden_output)
        # Update weights and biases.
        self.weights_hidden_output += self.hidden_output.T.dot(output_delta) * self.learning_rate
        self.bias_output += np.sum(output_delta, axis=0, keepdims=True) * self.learning_rate
        self.weights_input_hidden += X.T.dot(hidden_delta) * self.learning_rate
        self.bias_hidden += np.sum(hidden_delta, axis=0, keepdims=True) * self.learning_rate

    def train(self, X, y, epochs=1000):
        """Train for `epochs` iterations, logging the MSE every 100 epochs."""
        for epoch in range(epochs):
            output = self.forward(X)
            self.backward(X, y, output)
            if epoch % 100 == 0:
                loss = np.mean((y - output) ** 2)
                print(f"Epoch {epoch}, Loss: {loss}")

# Example usage
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])  # XOR inputs
y = np.array([[0], [1], [1], [0]])  # XOR outputs
nn = NeuralNetworkWithBackprop(input_size=2, hidden_size=2, output_size=1)
nn.train(X, y)
print("Output after training:")
print(nn.forward(X))
3) Implement deep learning.
→
import numpy as np

def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + e^-x)."""
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(x):
    # NOTE: expects x to already be sigmoid(z) (the activated value).
    return x * (1 - x)

class DeepNN:
    """Fully-connected feed-forward network with an arbitrary number of
    layers, all using sigmoid activation, trained with full-batch
    gradient descent on mean-squared error."""

    def __init__(self, layer_sizes, learning_rate=0.1):
        self.learning_rate = learning_rate
        # One weight matrix / bias row per adjacent pair of layers.
        self.weights = [np.random.rand(layer_sizes[i], layer_sizes[i + 1])
                        for i in range(len(layer_sizes) - 1)]
        self.biases = [np.random.rand(1, layer_sizes[i + 1])
                       for i in range(len(layer_sizes) - 1)]

    def forward(self, X):
        """Forward pass; caches per-layer outputs for backward()."""
        self.layer_inputs = []
        self.layer_outputs = [X]  # index 0 is the raw input
        for i in range(len(self.weights)):
            X = sigmoid(np.dot(X, self.weights[i]) + self.biases[i])
            self.layer_inputs.append(X)
            self.layer_outputs.append(X)
        return X

    def backward(self, X, y):
        """Backpropagate from the cached forward pass and update parameters."""
        output_error = y - self.layer_outputs[-1]
        deltas = [output_error * sigmoid_derivative(self.layer_outputs[-1])]
        # Walk backwards through the hidden layers accumulating deltas.
        for i in range(len(self.weights) - 2, -1, -1):
            error = deltas[-1].dot(self.weights[i + 1].T)
            delta = error * sigmoid_derivative(self.layer_outputs[i + 1])
            deltas.append(delta)
        deltas.reverse()  # align deltas[i] with weights[i]
        for i in range(len(self.weights)):
            self.weights[i] += self.layer_outputs[i].T.dot(deltas[i]) * self.learning_rate
            self.biases[i] += np.sum(deltas[i], axis=0, keepdims=True) * self.learning_rate

    def train(self, X, y, epochs=1000):
        """Run `epochs` forward/backward iterations."""
        for epoch in range(epochs):
            self.forward(X)
            self.backward(X, y)

# Example usage (XOR data, as in the previous questions)
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
layer_sizes = [2, 4, 3, 1]  # input layer, two hidden layers, and output layer
deep_nn = DeepNN(layer_sizes)
deep_nn.train(X, y)
4) Implement Multilayer perceptron algorithm.
→
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# XOR dataset
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

# Building the MLP model: 2 -> 4 -> 4 -> 1 with ReLU hidden layers.
model = Sequential()
model.add(Dense(4, input_dim=2, activation='relu'))  # First hidden layer
model.add(Dense(4, activation='relu'))  # Second hidden layer
model.add(Dense(1, activation='sigmoid'))  # Output layer (probability)

# Compile the model: binary cross-entropy is the standard loss for a
# single sigmoid output.
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=['accuracy'])

# Train the model (verbose=0 suppresses per-epoch logging)
model.fit(X, y, epochs=1000, verbose=0)

# Evaluate the model
print("MLP output for XOR problem:")
print(model.predict(X))
5) Write program to create target string, starting from random string
using Genetic Algorithm.
→
import random

target = "hello world"
population_size = 100
mutation_rate = 0.01
generation_limit = 1000

# Generate random string
def random_string(length):
    """A random string of the given length drawn from a-z and space."""
    return ''.join(random.choice('abcdefghijklmnopqrstuvwxyz ')
                   for _ in range(length))

# Fitness function
def fitness(candidate):
    """Number of positions where candidate matches the target string."""
    return sum(1 for expected, actual in zip(target, candidate)
               if expected == actual)

# Mutate a string
def mutate(string):
    """Independently replace each character with probability mutation_rate."""
    return ''.join(random.choice('abcdefghijklmnopqrstuvwxyz ')
                   if random.random() < mutation_rate else char
                   for char in string)

# Crossover
def crossover(parent1, parent2):
    """Single-point crossover at a uniformly random midpoint."""
    midpoint = random.randint(0, len(target))
    return parent1[:midpoint] + parent2[midpoint:]

# Genetic Algorithm: elitist selection + crossover + mutation.
population = [random_string(len(target)) for _ in range(population_size)]
for generation in range(generation_limit):
    population = sorted(population, key=fitness, reverse=True)
    if fitness(population[0]) == len(target):
        print(f"Target string '{target}' created in generation {generation}")
        break
    next_population = population[:10]  # Keep top 10 (elitism)
    for _ in range(population_size - 10):
        parent1, parent2 = random.sample(next_population, 2)
        child = crossover(parent1, parent2)
        next_population.append(mutate(child))
    population = next_population
6) Write program to Implement travelling salesman problem using
genetic algorithm.
→
import random
import numpy as np

# Generate random cities as coordinates
num_cities = 10
cities = np.random.rand(num_cities, 2)  # Random 2D coordinates for cities

# Distance calculation
def distance(a, b):
    """Euclidean distance between two city coordinate vectors."""
    return np.linalg.norm(a - b)

# Precompute the pairwise distance matrix once: fitness() is evaluated
# tens of thousands of times during evolution, so per-call norm
# computations would dominate the runtime. Stored as nested lists for
# fast scalar indexing.
dist_matrix = [[float(distance(cities[i], cities[j])) for j in range(num_cities)]
               for i in range(num_cities)]

# Fitness function (inverse of total path length)
def fitness(route):
    """Inverse of the total closed-tour length; higher is better."""
    total_distance = sum(dist_matrix[route[i]][route[i + 1]]
                         for i in range(len(route) - 1))
    total_distance += dist_matrix[route[-1]][route[0]]  # Return to start
    return 1 / total_distance

# Mutation
def mutate(route):
    """Swap two random positions of the route in place."""
    a, b = random.sample(range(len(route)), 2)
    route[a], route[b] = route[b], route[a]

# Crossover
def crossover(parent1, parent2):
    """Ordered crossover: copy a random slice from parent1, then fill the
    remaining slots with parent2's genes in order (keeps a valid permutation)."""
    start, end = sorted(random.sample(range(len(parent1)), 2))
    child = [None] * len(parent1)
    child[start:end] = parent1[start:end]
    ptr = 0
    for gene in parent2:
        if gene not in child:
            while child[ptr] is not None:
                ptr += 1
            child[ptr] = gene
    return child

# Genetic Algorithm
population_size = 100
population = [random.sample(range(num_cities), num_cities)
              for _ in range(population_size)]
generations = 500
for generation in range(generations):
    population = sorted(population, key=fitness, reverse=True)
    next_generation = population[:10]  # Elitism
    for _ in range(population_size - 10):
        parent1, parent2 = random.sample(next_generation, 2)
        child = crossover(parent1, parent2)
        if random.random() < 0.1:  # 10% mutation probability
            mutate(child)
        next_generation.append(child)
    population = next_generation

best_route = max(population, key=fitness)
print("Best route found:", best_route)
print("Total distance:", 1 / fitness(best_route))
7) Write program to study and analyze genetic life cycle.
→
import random
import matplotlib.pyplot as plt

# Problem setup: target array of ones
target = [1] * 10
population_size = 100
mutation_rate = 0.01
generations = 100

# Randomly initialize population with 0s and 1s
def random_individual():
    """A random bit-list the same length as the target."""
    return [random.randint(0, 1) for _ in range(len(target))]

# Fitness function: count of matching elements
def fitness(individual):
    """Number of positions where the individual matches the target."""
    return sum(1 for i, j in zip(individual, target) if i == j)

# Mutation
def mutate(individual):
    """Flip each gene independently with probability mutation_rate."""
    return [gene if random.random() > mutation_rate else 1 - gene
            for gene in individual]

# Crossover
def crossover(parent1, parent2):
    """Single-point crossover at a random interior point."""
    point = random.randint(1, len(target) - 1)
    return parent1[:point] + parent2[point:]

# Genetic Algorithm: record the best fitness each generation to study
# the life cycle (selection -> crossover -> mutation -> replacement).
population = [random_individual() for _ in range(population_size)]
best_fitness_over_time = []
for generation in range(generations):
    population = sorted(population, key=fitness, reverse=True)
    best_fitness = fitness(population[0])
    best_fitness_over_time.append(best_fitness)
    if best_fitness == len(target):
        print(f"Target reached in generation {generation}")
        break
    next_population = population[:10]  # Elitism
    for _ in range(population_size - 10):
        parent1, parent2 = random.sample(next_population, 2)
        child = mutate(crossover(parent1, parent2))
        next_population.append(child)
    population = next_population

# Plot fitness over generations
plt.plot(best_fitness_over_time)
plt.xlabel('Generation')
plt.ylabel('Best Fitness Score')
plt.title('Genetic Algorithm Fitness Progression')
plt.show()