=========== PR 1 ===================
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm

# Generate synthetic 1-D data: 100 samples per class, Gaussian-distributed.
np.random.seed(42)  # fixed seed for reproducible samples
class_0 = np.random.normal(loc=0, scale=1, size=100)  # class 0 ~ N(0, 1)
class_1 = np.random.normal(loc=3, scale=1, size=100)  # class 1 ~ N(3, 1)

# Prior probabilities (equal priors for the two classes)
P_C0 = 0.5  # P(Class 0)
P_C1 = 0.5  # P(Class 1)
# Likelihoods
def likelihood(x, class_mean, class_std):
    """Return the Gaussian likelihood p(x | class) for mean/std of that class.

    Accepts scalars or NumPy arrays for ``x`` (norm.pdf broadcasts).
    """
    return norm.pdf(x, loc=class_mean, scale=class_std)
# Decision boundary
def decision_boundary(x):
    """Return unnormalized posteriors (likelihood * prior) for both classes.

    Uses the fixed class models from this script: class 0 ~ N(0, 1) and
    class 1 ~ N(3, 1), with the module-level priors P_C0 and P_C1.
    Returns a tuple (P_x_C0, P_x_C1); each has the same shape as ``x``.
    """
    likelihood_0 = likelihood(x, class_mean=0, class_std=1)
    likelihood_1 = likelihood(x, class_mean=3, class_std=1)
    P_x_C0 = likelihood_0 * P_C0
    P_x_C1 = likelihood_1 * P_C1
    return P_x_C0, P_x_C1
# Create a range of x values
x_values = np.linspace(-2, 5, 100)
P_x_C0_vals, P_x_C1_vals = decision_boundary(x_values)

# Make decisions based on posterior probabilities:
# pick class 0 wherever its (scaled) posterior dominates, else class 1.
decisions = np.where(P_x_C0_vals > P_x_C1_vals, 0, 1)

# Plotting
plt.figure(figsize=(10, 6))
plt.plot(x_values, P_x_C0_vals, label='P(x|C0) * P(C0)', color='blue')
plt.plot(x_values, P_x_C1_vals, label='P(x|C1) * P(C1)', color='orange')
# With equal priors and unit variances the boundary is the midpoint (0+3)/2 = 1.5.
plt.axvline(x=1.5, color='red', linestyle='--', label='Decision Boundary')
plt.title('Bayesian Decision Theory')
plt.xlabel('Feature Value')
plt.ylabel('Probability Density')
plt.legend()
plt.grid()
plt.show()
==================== Output ========================
============== PR 3 ===========================
class Parser:
    """Recursive-descent recognizer for the grammar S -> A | B,
    where A is a run of one or more 'a' and B a run of one or more 'b'.

    NOTE(review): S succeeds as soon as A or B matches a prefix — it does
    not require the whole input to be consumed (matches the sample output,
    where "aaabbb" is accepted with pattern A(aaa)).
    """

    def __init__(self, string):
        self.string = string
        self.pos = 0        # current scan position in ``string``
        self.pattern = []   # recognized pattern pieces, e.g. ["A(aaa)"]

    def parse(self):
        """Run the start symbol; return True if the input is recognized."""
        result = self.S()
        return result

    def S(self):
        # Try to parse as A
        pattern_a = self.A()
        if pattern_a:
            self.pattern.append(pattern_a)
            return True
        # Reset position before trying the B alternative
        self.pos = 0
        pattern_b = self.B()
        if pattern_b:
            self.pattern.append(pattern_b)
            return True
        return False

    def A(self):
        """Match one or more 'a'; return "A(<run>)" or None."""
        if self.pos < len(self.string) and self.string[self.pos] == 'a':
            start = self.pos
            while self.pos < len(self.string) and self.string[self.pos] == 'a':
                self.pos += 1
            return f"A({self.string[start:self.pos]})"  # Return the pattern
        return None

    def B(self):
        """Match one or more 'b'; return "B(<run>)" or None."""
        if self.pos < len(self.string) and self.string[self.pos] == 'b':
            start = self.pos
            while self.pos < len(self.string) and self.string[self.pos] == 'b':
                self.pos += 1
            return f"B({self.string[start:self.pos]})"  # Return the pattern
        return None
# Example usage
input_string = "aaabbb"  # Modify this string to test different patterns
parser = Parser(input_string)
if parser.parse():
    print(f"The string '{input_string}' is recognized by the grammar.")
    print("Generated Pattern:", " -> ".join(parser.pattern))
else:
    print(f"The string '{input_string}' is NOT recognized by the grammar.")
================== output ======================
The string 'aaabbb' is recognized by the grammar.
Generated Pattern: A(aaa)
============== PR 4, 5 ===============================
import numpy as np
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt

# Load MNIST dataset
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()

# Normalize the images to a range of 0 to 1
x_train = x_train.astype('float32') / 255.0
x_test = x_test.astype('float32') / 255.0

# Reshape the data to add a channel dimension: (28, 28) -> (28, 28, 1)
x_train = np.expand_dims(x_train, axis=-1)
x_test = np.expand_dims(x_test, axis=-1)
# Build the CNN model
def create_model():
    """Build a small CNN for 28x28x1 MNIST digits.

    Architecture: three Conv2D blocks (32/64/64 filters) with 2x2 max-pooling
    after the first two, then Flatten -> Dense(64) -> Dense(10, softmax).
    Returns an uncompiled keras model.
    """
    model = keras.Sequential([
        keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
        keras.layers.MaxPooling2D((2, 2)),
        keras.layers.Conv2D(64, (3, 3), activation='relu'),
        keras.layers.MaxPooling2D((2, 2)),
        keras.layers.Conv2D(64, (3, 3), activation='relu'),
        keras.layers.Flatten(),
        keras.layers.Dense(64, activation='relu'),
        keras.layers.Dense(10, activation='softmax')
    ])
    return model
model = create_model()
# Sparse loss: labels are integer class ids (0-9), not one-hot vectors.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])

# Train the model
model.fit(x_train, y_train, epochs=5, batch_size=64, validation_split=0.2)

# Evaluate the model on test data
test_loss, test_accuracy = model.evaluate(x_test, y_test)
print(f'Test accuracy: {test_accuracy:.4f}')

# Predict on the test dataset
predictions = model.predict(x_test)

# Display some predictions (one figure per sample)
for i in range(5):
    plt.imshow(x_test[i].reshape(28, 28), cmap='gray')
    plt.title(f'Predicted: {np.argmax(predictions[i])}, Actual: {y_test[i]}')
    plt.axis('off')
    plt.show()
========== output ==========================
Epoch 1/5
750/750 [==============================] - 9s 11ms/step - loss: 0.2160 - accuracy: 0.9355 - val_loss:
0.0696 - val_accuracy: 0.9793
Epoch 2/5
750/750 [==============================] - 9s 12ms/step - loss: 0.0601 - accuracy: 0.9810 - val_loss:
0.0573 - val_accuracy: 0.9820
Epoch 3/5
750/750 [==============================] - 9s 12ms/step - loss: 0.0412 - accuracy: 0.9870 - val_loss:
0.0631 - val_accuracy: 0.9809
Epoch 4/5
750/750 [==============================] - 9s 12ms/step - loss: 0.0300 - accuracy: 0.9909 - val_loss:
0.0401 - val_accuracy: 0.9889
Epoch 5/5
750/750 [==============================] - 9s 11ms/step - loss: 0.0258 - accuracy: 0.9917 - val_loss:
0.0450 - val_accuracy: 0.9879
313/313 [==============================] - 2s 7ms/step - loss: 0.0325 - accuracy: 0.9896
Test accuracy: 0.9896