Imthicode

The document presents a simple Python implementation of a perceptron trained on the logical AND function with two step activations, binary (0/1) and bipolar (-1/+1). It initializes the weights and bias, defines a matching truth table for each activation, and iteratively updates the weights from prediction errors for at most 30 epochs, stopping early once every training example is classified correctly.
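Concretely, on each training example the code applies the classic perceptron learning rule: it computes error = target - predicted and then updates w1 += lr*error*x1, w2 += lr*error*x2, and bias += lr*error, where lr is the learning rate (0.2 here).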

# ---------- Activation functions ----------

def binary_step(x):
    return 1 if x >= 0 else 0

def bipolar_step(x):
    return 1 if x >= 0 else -1

activations = {
    "Binary Step": binary_step,
    "Bipolar Step": bipolar_step
}
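
# Sanity check: a weighted sum of exactly 0 falls on the "fire" side
# of both step functions.
assert binary_step(0) == 1 and binary_step(-0.5) == 0
assert bipolar_step(0) == 1 and bipolar_step(-0.5) == -1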

# ---------- Training parameters ----------

w1_init, w2_init, bias_init = 0.3, 0.2, 0.4
learning_rate = 0.2
max_epochs = 30

# ---------- Training loop ----------

for name, activation_fn in activations.items():
    print(f"\n=== Activation Function: {name} ===")

    # Choose dataset based on activation function
    if name == "Binary Step":
        # Binary inputs/targets (0, 1)
        training_data = [
            {'inputs': [0, 0], 'output': 0},
            {'inputs': [0, 1], 'output': 0},
            {'inputs': [1, 0], 'output': 0},
            {'inputs': [1, 1], 'output': 1},
        ]
    else:
        # Bipolar inputs/targets (-1, 1)
        training_data = [
            {'inputs': [-1, -1], 'output': -1},
            {'inputs': [-1,  1], 'output': -1},
            {'inputs': [ 1, -1], 'output': -1},
            {'inputs': [ 1,  1], 'output': 1},
        ]
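
    # Both tables are the AND truth table, written in the coding that
    # the corresponding activation function expects.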

    # Reset weights before training with each activation function
    w1, w2, bias = w1_init, w2_init, bias_init

    for epoch in range(max_epochs):
        print(f"\nEpoch {epoch + 1}")
        all_correct = True

        for data in training_data:
            x1, x2 = data['inputs']
            target = data['output']

            # Weighted sum of inputs plus bias
            weighted_sum = x1*w1 + x2*w2 + bias
            raw_output = activation_fn(weighted_sum)

            # Predicted output is already discrete (0/1 or -1/+1)
            predicted = raw_output

            # Error
            error = target - predicted
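            # (binary coding: error is -1, 0, or 1; bipolar coding:
            #  error is -2, 0, or 2)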

            # Update weights (perceptron learning rule)
            w1 += learning_rate * error * x1
            w2 += learning_rate * error * x2
            bias += learning_rate * error

            print(f"Inputs: {(x1, x2)}, Target: {target}, RawOut: {raw_output}, "
                  f"Pred: {predicted}, w1={w1:.2f}, w2={w2:.2f}, Bias={bias:.2f}")

            if predicted != target:
                all_correct = False

        # Early stopping once every example is classified correctly
        if all_correct:
            print(f"\n✅ Target outputs achieved. Stopping at epoch {epoch + 1}.")
            break

    # ---------- Final testing (per activation function) ----------
    print("\nFinal Results:")
    for data in training_data:
        x1, x2 = data['inputs']
        weighted_sum = x1*w1 + x2*w2 + bias
        predicted = activation_fn(weighted_sum)
        print(f"{x1} AND {x2} = {predicted}")
