Python:
import numpy as np

# Sigmoid activation function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# Derivative of the sigmoid: expects the *activated* value a = sigmoid(z),
# since sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)) = a * (1 - a)
def sigmoid_derivative(x):
    return x * (1 - x)
# Multilayer perceptron with one hidden layer
class MLP:
    def __init__(self, input_size, hidden_size, output_size):
        # Random weight initialization and zero biases
        self.weights_input_hidden = np.random.randn(input_size, hidden_size)
        self.weights_hidden_output = np.random.randn(hidden_size, output_size)
        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))
    def forward(self, X):
        # Forward pass: input -> hidden layer -> output layer
        self.hidden_input = np.dot(X, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = sigmoid(self.hidden_input)
        self.final_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        self.final_output = sigmoid(self.final_input)
        return self.final_output
    def backward(self, X, y, learning_rate):
        # Error computation (backpropagation of the output error)
        output_error = y - self.final_output
        output_delta = output_error * sigmoid_derivative(self.final_output)
        hidden_error = output_delta.dot(self.weights_hidden_output.T)
        hidden_delta = hidden_error * sigmoid_derivative(self.hidden_output)
        # Gradient-descent update of weights and biases
        self.weights_input_hidden += X.T.dot(hidden_delta) * learning_rate
        self.weights_hidden_output += self.hidden_output.T.dot(output_delta) * learning_rate
        self.bias_hidden += np.sum(hidden_delta, axis=0, keepdims=True) * learning_rate
        self.bias_output += np.sum(output_delta, axis=0, keepdims=True) * learning_rate
    def train(self, X, y, epochs, learning_rate):
        # One forward + backward pass over the whole dataset per epoch
        for _ in range(epochs):
            self.forward(X)
            self.backward(X, y, learning_rate)
# Example inputs and outputs
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])  # Inputs (XOR logic gate)
y = np.array([[0], [1], [1], [0]])  # Expected outputs

# Create and train the model
model = MLP(input_size=2, hidden_size=4, output_size=1)
model.train(X, y, epochs=10000, learning_rate=0.1)
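
To watch the network converge, a minimal variation (the monitor_model name and the 1000-epoch print interval are illustrative choices, not part of the original listing) drives the forward and backward steps by hand and prints the mean squared error, the loss implied by the y - output error used in backward:

# Optional sketch: train a fresh model while logging the mean squared error
monitor_model = MLP(input_size=2, hidden_size=4, output_size=1)
for epoch in range(10000):
    out = monitor_model.forward(X)
    monitor_model.backward(X, y, learning_rate=0.1)
    if epoch % 1000 == 0:
        print(f"epoch {epoch}: MSE = {np.mean((y - out) ** 2):.4f}")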
# Test the model
predictions = model.forward(X)
print(f"Predictions:\n{predictions}")