# 1. Simple Linear Regression
import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
# Generate synthetic data
X = np.array([1, 2, 3, 4, 5]).reshape(-1, 1)
y = np.array([3, 5, 7, 9, 11])
# Train the model
model = LinearRegression()
model.fit(X, y)
# Make predictions
y_pred = model.predict(X)
# Plot the results
plt.scatter(X, y, color='blue', label='Actual')
plt.plot(X, y_pred, color='red', label='Predicted')
plt.title('Simple Linear Regression')
plt.legend()
plt.show()
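# As a quick sanity check (a minimal sketch): this data follows y = 2x + 1 exactly,
# so the learned slope and intercept should recover those values almost exactly
print('Slope:', model.coef_[0])
print('Intercept:', model.intercept_)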
# 2. Multivariate Linear Regression
from sklearn.model_selection import train_test_split
# Generate synthetic data
X_multi = np.array([[1, 2], [2, 3], [3, 4], [4, 5], [5, 6]])
y_multi = np.array([10, 15, 20, 25, 30])
# Train-test split
X_train, X_test, y_train, y_test = train_test_split(X_multi, y_multi, test_size=0.2, random_state=42)
# Train the model
model_multi = LinearRegression()
model_multi.fit(X_train, y_train)
# Inspect the learned coefficients and intercept
print('Multivariate Regression Coefficients:', model_multi.coef_)
print('Multivariate Regression Intercept:', model_multi.intercept_)
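# A minimal evaluation sketch: score the fitted model on the held-out test split.
# With only one test sample here, R² is undefined, so just the MSE is reported
from sklearn.metrics import mean_squared_error
y_test_pred = model_multi.predict(X_test)
print('Multivariate Test MSE:', mean_squared_error(y_test, y_test_pred))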
# 3. Polynomial Regression (Degree 2)
from sklearn.preprocessing import PolynomialFeatures
# Generate synthetic data
X_poly = np.array([1, 2, 3, 4, 5]).reshape(-1, 1)
y_poly = np.array([3, 5, 10, 15, 30])
# Transform to polynomial features
degree = 2
poly = PolynomialFeatures(degree=degree)
X_poly_transformed = poly.fit_transform(X_poly)
# Train the model
model_poly = LinearRegression()
model_poly.fit(X_poly_transformed, y_poly)
# Predictions
y_poly_pred = model_poly.predict(X_poly_transformed)
# Plot the results
plt.scatter(X_poly, y_poly, color='blue', label='Actual')
plt.plot(X_poly, y_poly_pred, color='red', label='Predicted')
plt.title(f'Polynomial Regression (Degree {degree})')
plt.legend()
plt.show()
# 4. Polynomial Regression (Degree 3)
degree = 3
poly = PolynomialFeatures(degree=degree)
X_poly_transformed = poly.fit_transform(X_poly)
# Train the model
model_poly = LinearRegression()
model_poly.fit(X_poly_transformed, y_poly)
# Predictions
y_poly_pred = model_poly.predict(X_poly_transformed)
# Plot the results
plt.scatter(X_poly, y_poly, color='blue', label='Actual')
plt.plot(X_poly, y_poly_pred, color='red', label='Predicted')
plt.title(f'Polynomial Regression (Degree {degree})')
plt.legend()
plt.show()
# 5. Ridge Regression (L2 Regularization)
from sklearn.linear_model import Ridge
ridge_model = Ridge(alpha=1.0)
ridge_model.fit(X_train, y_train)
print('Ridge Coefficients:', ridge_model.coef_)
# 6. Lasso Regression (L1 Regularization)
from sklearn.linear_model import Lasso
lasso_model = Lasso(alpha=0.1)
lasso_model.fit(X_train, y_train)
print('Lasso Coefficients:', lasso_model.coef_)
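# A small sketch of how the regularization strength alpha affects the weights:
# larger alpha shrinks Ridge coefficients toward zero, while Lasso can zero them out
for alpha in [0.01, 0.1, 1.0, 10.0]:
    print(f'alpha={alpha}: Ridge {Ridge(alpha=alpha).fit(X_train, y_train).coef_}, '
          f'Lasso {Lasso(alpha=alpha).fit(X_train, y_train).coef_}')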
# 7. Multivariate Polynomial Regression (Degree 2)
poly_multi = PolynomialFeatures(degree=2)
X_multi_poly = poly_multi.fit_transform(X_multi)
model_multi_poly = LinearRegression()
model_multi_poly.fit(X_multi_poly, y_multi)
print('Multivariate Polynomial Coefficients:', model_multi_poly.coef_)
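# In recent scikit-learn versions, get_feature_names_out maps each coefficient to its
# polynomial term (bias, linear, interaction, squared); x1 and x2 are placeholder names
print('Feature terms:', poly_multi.get_feature_names_out(['x1', 'x2']))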
# 8. Multiple Linear Regression with Feature Scaling
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
X_multi_scaled = scaler.fit_transform(X_multi)
model_multi_scaled = LinearRegression()
model_multi_scaled.fit(X_multi_scaled, y_multi)
print('Scaled Multivariate Coefficients:', model_multi_scaled.coef_)
# 9. Polynomial Regression with Feature Scaling
# Expand the input to degree-2 polynomial features first, then standardize them
# (scaling the raw input alone would never create the polynomial terms)
poly_features = PolynomialFeatures(degree=2, include_bias=False)
X_poly_scaled = scaler.fit_transform(poly_features.fit_transform(X_poly))
model_poly_scaled = LinearRegression()
model_poly_scaled.fit(X_poly_scaled, y_poly)
print('Scaled Polynomial Coefficients:', model_poly_scaled.coef_)
# 10. Regularized Polynomial Regression
from sklearn.pipeline import make_pipeline
model_regularized_poly = make_pipeline(PolynomialFeatures(degree=2), Ridge(alpha=0.5))
model_regularized_poly.fit(X_poly, y_poly)
print('Regularized Polynomial Coefficients:', model_regularized_poly[-1].coef_)
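# Because the pipeline bundles the polynomial transform with the Ridge step, new
# inputs can be scored directly; X_new below is just an illustrative input
X_new = np.array([[6], [7]])
print('Pipeline predictions on new inputs:', model_regularized_poly.predict(X_new))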
Practice Questions:
Linear Regression
1. Basic Linear Regression: Train a linear regression model to predict house prices based
on the number of rooms and the area of the house. Use a synthetic dataset with at least
100 samples (a starter sketch follows after question 2).
2. Performance Evaluation: Calculate the Mean Squared Error (MSE) and R² score for a
linear regression model trained on a real-world dataset such as the California Housing
dataset; note that the Boston Housing dataset has been removed from recent versions of
scikit-learn. A starter sketch follows below.
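A starter sketch for question 1, assuming an illustrative pricing rule (the coefficients and noise level below are made up for the example):

# Synthetic house data: rooms and area drive price, plus Gaussian noise
rng = np.random.default_rng(42)
rooms = rng.integers(1, 8, size=100)
area = rng.uniform(40, 200, size=100)
prices = 25000 * rooms + 1500 * area + rng.normal(0, 10000, size=100)  # illustrative rule
X_houses = np.column_stack([rooms, area])
house_model = LinearRegression().fit(X_houses, prices)
print('House price coefficients:', house_model.coef_)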
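A starter sketch for question 2, assuming the California Housing dataset that ships with scikit-learn (fetch_california_housing downloads it on first use):

from sklearn.datasets import fetch_california_housing
from sklearn.metrics import mean_squared_error, r2_score
housing = fetch_california_housing()
Xh_train, Xh_test, yh_train, yh_test = train_test_split(
    housing.data, housing.target, test_size=0.2, random_state=42)
housing_model = LinearRegression().fit(Xh_train, yh_train)
yh_pred = housing_model.predict(Xh_test)
print('MSE:', mean_squared_error(yh_test, yh_pred))
print('R² score:', r2_score(yh_test, yh_pred))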
Multivariate Regression
3. Predicting Car Prices: Use multivariate linear regression to predict car prices based on
features like horsepower, mileage, and engine size.
4. Feature Impact Analysis: Train a multivariate model on a synthetic dataset and analyze
the impact of each feature using the learned coefficients.
Polynomial Regression
5. Non-linear Relationship: Create a synthetic dataset that follows a quadratic or cubic
pattern and fit a polynomial regression model. Experiment with different degrees and
visualize the results.
6. Overfitting and Underfitting: Train polynomial regression models with degrees ranging
from 1 to 10. Plot the training and validation loss to visualize overfitting (see the
sketch below).
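One possible sketch for question 6, assuming a noisy quadratic dataset and a plain train/validation split:

from sklearn.metrics import mean_squared_error
# Noisy quadratic data: y = x^2 plus Gaussian noise
rng = np.random.default_rng(0)
X_curve = rng.uniform(-3, 3, size=(60, 1))
y_curve = X_curve.ravel() ** 2 + rng.normal(0, 1, size=60)
Xc_train, Xc_val, yc_train, yc_val = train_test_split(X_curve, y_curve, test_size=0.3, random_state=0)
# Fit one pipeline per degree and record train/validation MSE
train_errs, val_errs = [], []
degrees = range(1, 11)
for d in degrees:
    m = make_pipeline(PolynomialFeatures(degree=d), LinearRegression()).fit(Xc_train, yc_train)
    train_errs.append(mean_squared_error(yc_train, m.predict(Xc_train)))
    val_errs.append(mean_squared_error(yc_val, m.predict(Xc_val)))
plt.plot(degrees, train_errs, label='Training MSE')
plt.plot(degrees, val_errs, label='Validation MSE')
plt.xlabel('Polynomial degree')
plt.legend()
plt.show()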
Multivariate Polynomial Regression
7. Use a dataset with multiple features (e.g., stock prices, weather data) to fit a
second-degree polynomial regression model.