You are on page 1 of 4

11/15/23, 12:21 AM AND_Gate

import numpy as np


import matplotlib.pyplot as plt
from prettytable import PrettyTable

# ---------------------------------------------------------------------------
# Perceptron training for the AND gate.
# Learns weights w (one per input) and bias b with the perceptron learning
# rule and a step activation, logging every update into a PrettyTable.
# NOTE(review): lines below marked "reconstructed" were truncated in the
# extracted source and have been rebuilt from the printed output table.
# ---------------------------------------------------------------------------

# Perceptron initialization
np.random.seed(42)  # for reproducibility
w = np.random.rand(2)  # one weight per input
b = np.random.rand()

# Learning rate
alpha = 0.1

# Number of epochs
num_epochs = 10

# Training examples for the AND gate: ([x1, x2], target)
# (reconstructed: matches the 4 input rows printed per epoch in the output)
training_data_and = [
    (np.array([0, 0]), 0),
    (np.array([0, 1]), 0),
    (np.array([1, 0]), 0),
    (np.array([1, 1]), 1),
]

# Lists to store the weights and errors for plotting
epoch_weights = []
errors = []
output_data = PrettyTable()

# Set column names for the output table
# (reconstructed: matches the 7 column headers in the printed table)
output_data.field_names = ['Epoch', 'Input', 'Target', 'Output', 'Error',
                           'Updated weights', 'Updated bias']

# Perceptron learning rule
for epoch in range(num_epochs):
    # Record the weights at the start of each epoch (for the weights plot)
    epoch_weights.append(w.copy())

    for x, t in training_data_and:
        y = 1 if np.dot(w, x) + b > 0 else 0  # step activation
        error = t - y  # perceptron error: target minus prediction
        w = w + alpha * error * x
        b = b + alpha * error

        # One error entry per training example (per iteration, not per epoch)
        errors.append(error)

        # (last cell reconstructed: table prints bias to 3 decimals)
        output_data.add_row([epoch + 1, str(x), t, y, error,
                             str(np.round(w, 3)), f"{b:.3f}"])

# ---------------------------------------------------------------------------
# Report results: print the per-update table, the final parameters, and plot
# (1) each weight's trajectory over epochs and (2) the error per iteration.
# Depends on module-level state from the training loop above:
# output_data, w, b, num_epochs, epoch_weights, errors.
# ---------------------------------------------------------------------------

# Print output table
print(output_data)

# Final weights and bias
print("\nFinal weights and bias:")
print(f" w = {np.round(w, 3)}")
print(f" b = {b:.3f}")

# Plotting per epoch
plt.figure(figsize=(12, 4))

# Plot updated weights (one line per input weight)
plt.subplot(1, 2, 1)
for i in range(2):  # two weights for the two inputs
    # NOTE(review): line was truncated in the extracted source; marker/label
    # reconstructed — plt.legend() below requires labeled lines. Confirm
    # against the original notebook.
    plt.plot(range(1, num_epochs + 1),
             [weights[i] for weights in epoch_weights],
             marker='o', label=f'w{i + 1}')
plt.title('Updated Weights Over Epochs')
plt.xlabel('Epoch')
plt.ylabel('Weight Value')
plt.legend()
plt.grid(True)

# Plot errors (one point per training example seen)
plt.subplot(1, 2, 2)
plt.plot(range(1, len(errors) + 1), errors, marker='o', linestyle='-', color='red')
plt.title('Errors Over Iterations')
plt.xlabel('Iteration')
plt.ylabel('Error')
plt.grid(True)

plt.tight_layout()
plt.show()

file:///C:/Users/musta/Downloads/AND_Gate.html 2/4
11/15/23, 12:21 AM AND_Gate

+-------+-------+--------+--------+-------+-----------------+--------------+
| Epoch | Input | Target | Output | Error | Updated weights | Updated bias |
+-------+-------+--------+--------+-------+-----------------+--------------+
| 1 | [0 0] | 0 | 1 | -1 | [0.375 0.951] | 0.632 |
| 1 | [0 1] | 0 | 1 | -1 | [0.375 0.851] | 0.532 |
| 1 | [1 0] | 0 | 1 | -1 | [0.275 0.851] | 0.432 |
| 1 | [1 1] | 1 | 1 | 0 | [0.275 0.851] | 0.432 |
| 2 | [0 0] | 0 | 1 | -1 | [0.275 0.851] | 0.332 |
| 2 | [0 1] | 0 | 1 | -1 | [0.275 0.751] | 0.232 |
| 2 | [1 0] | 0 | 1 | -1 | [0.175 0.751] | 0.132 |
| 2 | [1 1] | 1 | 1 | 0 | [0.175 0.751] | 0.132 |
| 3 | [0 0] | 0 | 1 | -1 | [0.175 0.751] | 0.032 |
| 3 | [0 1] | 0 | 1 | -1 | [0.175 0.651] | -0.068 |
| 3 | [1 0] | 0 | 1 | -1 | [0.075 0.651] | -0.168 |
| 3 | [1 1] | 1 | 1 | 0 | [0.075 0.651] | -0.168 |
| 4 | [0 0] | 0 | 0 | 0 | [0.075 0.651] | -0.168 |
| 4 | [0 1] | 0 | 1 | -1 | [0.075 0.551] | -0.268 |
| 4 | [1 0] | 0 | 0 | 0 | [0.075 0.551] | -0.268 |
| 4 | [1 1] | 1 | 1 | 0 | [0.075 0.551] | -0.268 |
| 5 | [0 0] | 0 | 0 | 0 | [0.075 0.551] | -0.268 |
| 5 | [0 1] | 0 | 1 | -1 | [0.075 0.451] | -0.368 |
| 5 | [1 0] | 0 | 0 | 0 | [0.075 0.451] | -0.368 |
| 5 | [1 1] | 1 | 1 | 0 | [0.075 0.451] | -0.368 |
| 6 | [0 0] | 0 | 0 | 0 | [0.075 0.451] | -0.368 |
| 6 | [0 1] | 0 | 1 | -1 | [0.075 0.351] | -0.468 |
| 6 | [1 0] | 0 | 0 | 0 | [0.075 0.351] | -0.468 |
| 6 | [1 1] | 1 | 0 | 1 | [0.175 0.451] | -0.368 |
| 7 | [0 0] | 0 | 0 | 0 | [0.175 0.451] | -0.368 |
| 7 | [0 1] | 0 | 1 | -1 | [0.175 0.351] | -0.468 |
| 7 | [1 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 7 | [1 1] | 1 | 1 | 0 | [0.175 0.351] | -0.468 |
| 8 | [0 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 8 | [0 1] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 8 | [1 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 8 | [1 1] | 1 | 1 | 0 | [0.175 0.351] | -0.468 |
| 9 | [0 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 9 | [0 1] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 9 | [1 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 9 | [1 1] | 1 | 1 | 0 | [0.175 0.351] | -0.468 |
| 10 | [0 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 10 | [0 1] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 10 | [1 0] | 0 | 0 | 0 | [0.175 0.351] | -0.468 |
| 10 | [1 1] | 1 | 1 | 0 | [0.175 0.351] | -0.468 |
+-------+-------+--------+--------+-------+-----------------+--------------+

Final weights and bias:


w = [0.175 0.351]
b = -0.468

file:///C:/Users/musta/Downloads/AND_Gate.html 3/4
11/15/23, 12:21 AM AND_Gate

In [ ]:

file:///C:/Users/musta/Downloads/AND_Gate.html 4/4

You might also like