Logistic Regression
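This program trains a logistic regression classifier on a synthetic two-feature dataset, reports accuracy and the confusion matrix on a held-out test set, and plots the learned decision boundary.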
Program:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.datasets import make_classification
# Generate sample data
X, y = make_classification(n_samples=1000, n_features=2, n_redundant=0,
                           n_informative=2, random_state=1, n_clusters_per_class=1)
# Split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
# Create and train the model
model = LogisticRegression()
model.fit(X_train, y_train)
# Make predictions
y_pred = model.predict(X_test)
# Evaluate the model
accuracy = accuracy_score(y_test, y_pred)
conf_matrix = confusion_matrix(y_test, y_pred)
print(f"Accuracy: {accuracy:.2f}")
print("Confusion Matrix:")
print(conf_matrix)
# Plot the decision boundary
x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                     np.arange(y_min, y_max, 0.02))
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.figure(figsize=(10, 8))
plt.contourf(xx, yy, Z, alpha=0.4)
plt.scatter(X[:, 0], X[:, 1], c=y, alpha=0.8)
plt.xlabel("Feature 1")
plt.ylabel("Feature 2")
plt.title("Logistic Regression Decision Boundary")
plt.show()
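The fitted model predicts class 1 wherever the sigmoid of the linear score w·x + b exceeds 0.5, which is exactly the boundary drawn in the contour plot. Below is a minimal sketch (continuing from the fitted model and X_test in the program above; the sigmoid helper is added here only for illustration) that reproduces scikit-learn's class-1 probabilities by hand:

# Continuation of the program above: verify predict_proba manually
w = model.coef_[0]         # learned weights, shape (2,)
b = model.intercept_[0]    # learned bias term

def sigmoid(z):
    # logistic function mapping a linear score to a probability
    return 1.0 / (1.0 + np.exp(-z))

scores = X_test @ w + b                           # linear scores for the test set
probs_manual = sigmoid(scores)                    # manually computed P(y=1 | x)
probs_sklearn = model.predict_proba(X_test)[:, 1] # scikit-learn's P(y=1 | x)
print(np.allclose(probs_manual, probs_sklearn))   # expected to print True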