import os
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.xception import Xception
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Dropout, GlobalAveragePooling2D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import ModelCheckpoint
from sklearn.metrics import classification_report, confusion_matrix
import numpy as np
# Define constants
IMAGE_SIZE = 224
BATCH_SIZE = 64
NUM_CLASSES = 5
EPOCHS = 50
# Define data directories
base_dir = os.path.join(os.getcwd(), 'The Gems Atlas')
train_dir = os.path.join(base_dir, 'train')
valid_dir = os.path.join(base_dir, 'valid')
test_dir = os.path.join(base_dir, 'test')
# Define data generators
train_datagen = ImageDataGenerator(
    rescale=1./255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    validation_split=0.2)
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(IMAGE_SIZE, IMAGE_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='training')
validation_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(IMAGE_SIZE, IMAGE_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='validation')
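# Note: valid_dir is defined above but not used here; the validation data is carved out of
# train_dir via validation_split=0.2.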
test_datagen = ImageDataGenerator(rescale=1./255)
test_generator = test_datagen.flow_from_directory(
    test_dir,
    target_size=(IMAGE_SIZE, IMAGE_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    shuffle=False)
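# Note: Xception was pre-trained with inputs scaled to [-1, 1], while the generators above
# rescale to [0, 1]. That still trains, but a sketch of an alternative (not used in this
# pipeline) is to let the model's own preprocessing function do the scaling; the same
# change would then apply to test_datagen as well:
#
#   from tensorflow.keras.applications.xception import preprocess_input
#   train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input,
#                                      shear_range=0.2, zoom_range=0.2,
#                                      horizontal_flip=True, validation_split=0.2)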
# Load the pre-trained Xception model (ImageNet weights, without the top classification layer)
base_model = Xception(include_top=False, weights='imagenet',
                      input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3))
# Freeze the layers in the base model
for layer in base_model.layers:
    layer.trainable = False
# Build the model
model = Sequential()
model.add(base_model)
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(NUM_CLASSES, activation='softmax'))
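# GlobalAveragePooling2D is imported above but not used. A lighter-weight head (a sketch,
# not the architecture used here) replaces Flatten with global pooling, which greatly
# reduces the number of parameters in the first Dense layer:
#
#   model = Sequential([
#       base_model,
#       GlobalAveragePooling2D(),
#       Dense(256, activation='relu'),
#       Dropout(0.5),
#       Dense(NUM_CLASSES, activation='softmax')])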
# Compile the model
model.compile(optimizer=Adam(learning_rate=0.001),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
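# ModelCheckpoint is imported above but never used. A sketch of saving the best weights
# during training (the filename is illustrative) would be to define a callback here and
# pass callbacks=[checkpoint] to model.fit below:
#
#   checkpoint = ModelCheckpoint('xception_gems_best.h5', monitor='val_accuracy',
#                                save_best_only=True)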
# Train the model
history = model.fit(
train_generator,
epochs=EPOCHS,
validation_data=validation_generator)
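# Optional fine-tuning phase (a sketch, assuming a second round of training is wanted):
# unfreeze the top of the base model and keep training with a much lower learning rate.
# The number of unfrozen layers and extra epochs below are illustrative choices.
#
#   for layer in base_model.layers[-20:]:
#       layer.trainable = True
#   model.compile(optimizer=Adam(learning_rate=1e-5),
#                 loss='categorical_crossentropy', metrics=['accuracy'])
#   model.fit(train_generator, epochs=10, validation_data=validation_generator)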
# Evaluate the model on the test data
loss, accuracy = model.evaluate(test_generator)
print('Test loss:', loss)
print('Test accuracy:', accuracy)
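# Generate class predictions for the test set; shuffle=False on the test generator keeps
# the prediction order aligned with test_generator.classes.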
y_pred = model.predict(test_generator)
y_pred_classes = np.argmax(y_pred, axis=1)
y_true = test_generator.classes
# Print the confusion matrix
print(confusion_matrix(y_true, y_pred_classes))
# Print the classification report
target_names = list(test_generator.class_indices.keys())
print(classification_report(y_true, y_pred_classes, target_names=target_names))
# Calculate the overall accuracy
accuracy = np.mean(y_pred_classes == y_true)
print('Accuracy:', accuracy)
# Calculate the precision, recall, and F1 score
report = classification_report(y_true, y_pred_classes, target_names=target_names, output_dict=True)
precision = report['weighted avg']['precision']
recall = report['weighted avg']['recall']
f1_score = report['weighted avg']['f1-score']
print('Precision:', precision)
print('Recall:', recall)
print('F1 score:', f1_score)