import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, Flatten, Reshape
# 1. Load MNIST and preprocess in one pass per split:
#    scale pixel values into [0, 1] and flatten each 28x28 image
#    into a 784-dimensional float32 vector.
(x_train, _), (x_test, _) = mnist.load_data()
x_train = x_train.astype('float32').reshape(-1, 784) / 255.0
x_test = x_test.astype('float32').reshape(-1, 784) / 255.0
# 2. Define Autoencoder
input_dim = 784       # flattened 28x28 image
encoding_dim = 64     # size of the compressed representation

# Encoder: 784 -> 128 -> 64
input_img = Input(shape=(input_dim,))
h = Dense(128, activation='relu')(input_img)
encoded = Dense(encoding_dim, activation='relu')(h)

# Decoder: 64 -> 128 -> 784. Sigmoid keeps outputs in [0, 1],
# matching the normalized pixel targets.
h = Dense(128, activation='relu')(encoded)
decoded = Dense(input_dim, activation='sigmoid')(h)

# Full model: input -> reconstruction.
autoencoder = Model(input_img, decoded)
# Encoder-only model: input -> compressed code (for compression).
encoder = Model(input_img, encoded)

# 3. Compile. Binary cross-entropy treats each pixel as an
# independent [0, 1] target, a common choice for normalized MNIST.
autoencoder.compile(optimizer='adam', loss='binary_crossentropy')
# 4. Train: an autoencoder learns to reconstruct its own input,
# so x_train serves as both input and target. The test split is
# reused the same way for validation loss.
fit_options = dict(
    epochs=20,
    batch_size=256,
    shuffle=True,
    validation_data=(x_test, x_test),
)
history = autoencoder.fit(x_train, x_train, **fit_options)
# 5. Run the test set through the encoder (compressed codes) and
# the full autoencoder (reconstructions).
encoded_imgs = encoder.predict(x_test)
decoded_imgs = autoencoder.predict(x_test)

# 6. Visualize: originals on the top row, reconstructions below.
n = 10
plt.figure(figsize=(20, 4))
for col, (orig, recon) in enumerate(zip(x_test[:n], decoded_imgs[:n]), start=1):
    plt.subplot(2, n, col)
    plt.imshow(orig.reshape(28, 28), cmap='gray')
    plt.axis('off')
    plt.subplot(2, n, col + n)
    plt.imshow(recon.reshape(28, 28), cmap='gray')
    plt.axis('off')
plt.show()
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz
11490434/11490434 ━━━━━━━━━━━━━━━━━━━━ 1s 0us/step
Epoch 1/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 6s 18ms/step - loss: 0.3220 - val_loss: 0.1419
Epoch 2/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 19ms/step - loss: 0.1337 - val_loss: 0.1135
Epoch 3/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 17ms/step - loss: 0.1123 - val_loss: 0.1035
Epoch 4/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 6s 22ms/step - loss: 0.1035 - val_loss: 0.0976
Epoch 5/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 17ms/step - loss: 0.0978 - val_loss: 0.0938
Epoch 6/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 16ms/step - loss: 0.0945 - val_loss: 0.0912
Epoch 7/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 21ms/step - loss: 0.0914 - val_loss: 0.0893
Epoch 8/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 16ms/step - loss: 0.0892 - val_loss: 0.0868
Epoch 9/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 16ms/step - loss: 0.0875 - val_loss: 0.0854
Epoch 10/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 21ms/step - loss: 0.0862 - val_loss: 0.0843
Epoch 11/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 16ms/step - loss: 0.0849 - val_loss: 0.0837
Epoch 12/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 16ms/step - loss: 0.0841 - val_loss: 0.0823
Epoch 13/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 22ms/step - loss: 0.0826 - val_loss: 0.0814
Epoch 14/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 9s 17ms/step - loss: 0.0822 - val_loss: 0.0807
Epoch 15/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 21ms/step - loss: 0.0812 - val_loss: 0.0808
Epoch 16/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 4s 16ms/step - loss: 0.0807 - val_loss: 0.0796
Epoch 17/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 6s 19ms/step - loss: 0.0801 - val_loss: 0.0790
Epoch 18/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 18ms/step - loss: 0.0796 - val_loss: 0.0792
Epoch 19/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 5s 17ms/step - loss: 0.0792 - val_loss: 0.0782
Epoch 20/20
235/235 ━━━━━━━━━━━━━━━━━━━━ 6s 21ms/step - loss: 0.0788 - val_loss: 0.0777
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step
313/313 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step