Practical No. 3
Aim:- Generate the OR function using linear separability.
def or_gate(x1, x2):
    """Compute the OR function for bipolar inputs using a linear decision boundary.

    Fixed weights w1 = w2 = 1 and bias b = 1 define the separating line
    x2 = -x1 - 1; inputs on or above the line map to 1, below it to 0.

    Args:
        x1, x2: bipolar input values (expected -1 or 1).

    Returns:
        int: 1 if w1*x1 + w2*x2 + b >= 0, else 0.
    """
    # NOTE(review): the original also computed a slope/intercept from two
    # hard-coded points and a `decision_boundary_x2` value, none of which
    # were ever used — that dead code is removed here.
    w1, w2, b = 1, 1, 1
    linear_combination = w1 * x1 + w2 * x2 + b
    output = 1 if linear_combination >= 0 else 0
    print(f"Decision Boundary: x2 = {-w1/w2:.2f} * x1 + {-b/w2:.2f}")
    print(f"Input: ({x1}, {x2}) => Linear Combination: {linear_combination:.2f} => Output: {output}")
    return output
# Exercise the OR gate on every bipolar input combination.
test_inputs = [(-1, -1), (-1, 1), (1, -1), (1, 1)]
print("OR Gate Output:")
for a, b in test_inputs:
    or_gate(a, b)
Output:-
Practical No. 4
Aim:- Generate the AND-NOT function using a Perceptron network.
import numpy as np
def perceptron(X, w, b):
    """Bipolar step activation: return 1 if w·X + b >= 0, otherwise -1.

    Args:
        X: input vector.
        w: weight vector (same length as X).
        b: scalar bias.
    """
    activation = np.dot(X, w) + b
    if activation >= 0:
        return 1
    return -1
def train_perceptron(X, y, learning_rate=1, epochs=100):
    """Train a bipolar perceptron with the classic error-correction rule.

    Starting from zero weights and bias, sweeps the training set `epochs`
    times; whenever the prediction's sign disagrees with the target, the
    weights and bias are nudged toward the target.

    Args:
        X: (n_samples, n_features) array of bipolar inputs.
        y: (n_samples,) array of bipolar targets (-1 or 1).
        learning_rate: step size for each correction.
        epochs: number of full passes over the data.

    Returns:
        tuple: (weights array, bias scalar) after training.
    """
    _, n_features = X.shape
    w = np.zeros(n_features)
    b = 0
    for _ in range(epochs):
        for sample, target in zip(X, y):
            # Update only on a misclassification (signs disagree).
            if target * perceptron(sample, w, b) < 0:
                w = w + learning_rate * target * sample
                b = b + learning_rate * target
    return w, b
# AND-NOT truth table in bipolar form: the output is +1 only for (1, -1),
# i.e. x1 AND (NOT x2).
X = np.array([[-1, -1], [-1, 1], [1, -1], [1, 1]])
y = np.array([-1, -1, 1, -1])

w, b = train_perceptron(X, y)
print("Trained weights:", w)
print("Trained bias:", b)
Output:-