In [1]: dataset_path = 'Downloads/dataset/w/RealWaste'
In [3]: import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from tensorflow.keras.preprocessing.image import ImageDataGenerator
IMG_SIZE = 128
BATCH_SIZE = 32
EPOCHS = 50
In [5]: train_datagen = ImageDataGenerator(
rescale=1./255,
validation_split=0.2,
rotation_range=30,
zoom_range=0.3,
shear_range=0.2,
brightness_range=[0.8, 1.2],
horizontal_flip=True
)
In [6]: train_generator = train_datagen.flow_from_directory(
dataset_path,
target_size=(IMG_SIZE, IMG_SIZE),
batch_size=BATCH_SIZE,
class_mode='categorical',
subset='training'
)
Found 3808 images belonging to 9 classes.
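A quick sanity check that was not part of the original run: pulling one batch from the generator and plotting a few augmented images confirms the rotations, zooms, and flips look reasonable. A minimal sketch, assuming the cells above have run:

images, labels = next(train_generator)  # one batch of augmented, rescaled images
plt.figure(figsize=(8, 8))
for i in range(9):
    plt.subplot(3, 3, i + 1)
    plt.imshow(images[i])  # values already in [0, 1] after rescale
    plt.axis('off')
plt.show()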
In [10]: val_generator = train_datagen.flow_from_directory(
dataset_path,
target_size=(IMG_SIZE, IMG_SIZE),
batch_size=BATCH_SIZE,
class_mode='categorical',
subset='validation'
)
Found 949 images belonging to 9 classes.
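flow_from_directory assigns class indices alphabetically by folder name; it can help to print the mapping once to confirm it matches the label list used later. A one-line check, not in the original notebook:

print(train_generator.class_indices)  # maps each folder name to its index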
In [12]: model = tf.keras.models.Sequential([
tf.keras.layers.Conv2D(32, (3,3), activation='relu', input_shape=(IMG_SIZE, IMG_SIZE, 3)),
tf.keras.layers.MaxPooling2D(2,2),
tf.keras.layers.Conv2D(64, (3,3), activation='relu'),
tf.keras.layers.MaxPooling2D(2,2),
tf.keras.layers.Conv2D(128, (3,3), activation='relu'),
tf.keras.layers.MaxPooling2D(2,2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dense(9, activation='softmax')
])
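Dropout is imported above but never used. Given the gap between training and validation accuracy in the run below, a variant with dropout before the classifier head may be worth trying. A hedged sketch with a hypothetical rate of 0.5 — this is not the configuration that produced the results in this notebook:

model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(32, (3,3), activation='relu', input_shape=(IMG_SIZE, IMG_SIZE, 3)),
    tf.keras.layers.MaxPooling2D(2,2),
    tf.keras.layers.Conv2D(64, (3,3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2,2),
    tf.keras.layers.Conv2D(128, (3,3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2,2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dropout(0.5),  # hypothetical rate, not used in the original run
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(9, activation='softmax')
])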
In [13]: model.compile(optimizer='adam',
loss='categorical_crossentropy',
metrics=['accuracy'])
from sklearn.utils import class_weight
import numpy as np
# 🧮 Compute class weights
labels = train_generator.classes
class_weights = class_weight.compute_class_weight(
class_weight='balanced',
classes=np.unique(labels),
y=labels
)
class_weights = dict(enumerate(class_weights))
history = model.fit(
train_generator,
validation_data=val_generator,
epochs=EPOCHS,
class_weight=class_weights
)
/home/deepak/anaconda3/lib/python3.11/site-packages/keras/src/trainers/data_adapters/py_dataset_adapter.py:121:
UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its
constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`.
Do not pass these arguments to `fit()`, as they will be ignored.
self._warn_if_super_not_called()
Epoch 1/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 47s 384ms/step - accuracy: 0.1980 - loss: 2.1002 - val_accuracy: 0.3562 - val_loss: 1.7769
Epoch 2/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 373ms/step - accuracy: 0.4166 - loss: 1.5857 - val_accuracy: 0.3593 - val_loss: 1.8935
Epoch 3/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 45s 374ms/step - accuracy: 0.4758 - loss: 1.3874 - val_accuracy: 0.4067 - val_loss: 1.6604
Epoch 4/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 372ms/step - accuracy: 0.5394 - loss: 1.2275 - val_accuracy: 0.4352 - val_loss: 1.6514
Epoch 5/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 45s 374ms/step - accuracy: 0.5515 - loss: 1.1799 - val_accuracy: 0.4278 - val_loss: 1.7058
Epoch 6/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 362ms/step - accuracy: 0.5809 - loss: 1.0931 - val_accuracy: 0.4415 - val_loss: 1.7720
Epoch 7/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 366ms/step - accuracy: 0.5690 - loss: 1.1186 - val_accuracy: 0.4763 - val_loss: 1.6347
Epoch 8/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 365ms/step - accuracy: 0.6225 - loss: 1.0118 - val_accuracy: 0.4405 - val_loss: 1.6116
Epoch 9/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 365ms/step - accuracy: 0.6444 - loss: 0.9366 - val_accuracy: 0.4868 - val_loss: 1.6148
Epoch 10/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 362ms/step - accuracy: 0.6641 - loss: 0.8815 - val_accuracy: 0.4647 - val_loss: 1.5094
Epoch 11/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 45s 376ms/step - accuracy: 0.6772 - loss: 0.8548 - val_accuracy: 0.4805 - val_loss: 1.5553
Epoch 12/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 366ms/step - accuracy: 0.7020 - loss: 0.7620 - val_accuracy: 0.4974 - val_loss: 1.4464
Epoch 13/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 45s 378ms/step - accuracy: 0.6972 - loss: 0.7763 - val_accuracy: 0.5026 - val_loss: 1.4796
Epoch 14/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 45s 375ms/step - accuracy: 0.7265 - loss: 0.7294 - val_accuracy: 0.4805 - val_loss: 1.5560
Epoch 15/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 373ms/step - accuracy: 0.7264 - loss: 0.7062 - val_accuracy: 0.5142 - val_loss: 1.3812
Epoch 16/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 373ms/step - accuracy: 0.7461 - loss: 0.6724 - val_accuracy: 0.5184 - val_loss: 1.4871
Epoch 17/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 362ms/step - accuracy: 0.7563 - loss: 0.6589 - val_accuracy: 0.4984 - val_loss: 1.5867
Epoch 18/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 45s 375ms/step - accuracy: 0.7391 - loss: 0.6732 - val_accuracy: 0.5258 - val_loss: 1.3906
Epoch 19/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 366ms/step - accuracy: 0.7758 - loss: 0.6058 - val_accuracy: 0.5216 - val_loss: 1.3955
Epoch 20/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 356ms/step - accuracy: 0.7697 - loss: 0.6110 - val_accuracy: 0.5163 - val_loss: 1.5276
Epoch 21/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 359ms/step - accuracy: 0.7863 - loss: 0.5601 - val_accuracy: 0.5732 - val_loss: 1.4584
Epoch 22/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 358ms/step - accuracy: 0.7865 - loss: 0.5722 - val_accuracy: 0.5364 - val_loss: 1.5141
Epoch 23/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 356ms/step - accuracy: 0.7891 - loss: 0.5447 - val_accuracy: 0.5090 - val_loss: 1.4783
Epoch 24/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 41s 347ms/step - accuracy: 0.7952 - loss: 0.5163 - val_accuracy: 0.5690 - val_loss: 1.4772
Epoch 25/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 368ms/step - accuracy: 0.8126 - loss: 0.4984 - val_accuracy: 0.5353 - val_loss: 1.6028
Epoch 26/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 368ms/step - accuracy: 0.8199 - loss: 0.4624 - val_accuracy: 0.4984 - val_loss: 1.7680
Epoch 27/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 356ms/step - accuracy: 0.7916 - loss: 0.5427 - val_accuracy: 0.5427 - val_loss: 1.6988
Epoch 28/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 365ms/step - accuracy: 0.8148 - loss: 0.4979 - val_accuracy: 0.5321 - val_loss: 1.5234
Epoch 29/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 362ms/step - accuracy: 0.8163 - loss: 0.4998 - val_accuracy: 0.5195 - val_loss: 1.6409
Epoch 30/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 366ms/step - accuracy: 0.8337 - loss: 0.4305 - val_accuracy: 0.5806 - val_loss: 1.5390
Epoch 31/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 361ms/step - accuracy: 0.8176 - loss: 0.4710 - val_accuracy: 0.5553 - val_loss: 1.6207
Epoch 32/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 364ms/step - accuracy: 0.8453 - loss: 0.3617 - val_accuracy: 0.5385 - val_loss: 1.7018
Epoch 33/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 358ms/step - accuracy: 0.8463 - loss: 0.4066 - val_accuracy: 0.5648 - val_loss: 1.6122
Epoch 34/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 357ms/step - accuracy: 0.8423 - loss: 0.3671 - val_accuracy: 0.5859 - val_loss: 1.5194
Epoch 35/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 355ms/step - accuracy: 0.8517 - loss: 0.3751 - val_accuracy: 0.5395 - val_loss: 1.6473
Epoch 36/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 354ms/step - accuracy: 0.8516 - loss: 0.3738 - val_accuracy: 0.5785 - val_loss: 1.6949
Epoch 37/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 361ms/step - accuracy: 0.8576 - loss: 0.3565 - val_accuracy: 0.6027 - val_loss: 1.4751
Epoch 38/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 352ms/step - accuracy: 0.8376 - loss: 0.4231 - val_accuracy: 0.5901 - val_loss: 1.5955
Epoch 39/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 370ms/step - accuracy: 0.8515 - loss: 0.3626 - val_accuracy: 0.5353 - val_loss: 1.9740
Epoch 40/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 44s 365ms/step - accuracy: 0.8650 - loss: 0.3382 - val_accuracy: 0.5111 - val_loss: 1.9862
Epoch 41/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 353ms/step - accuracy: 0.8647 - loss: 0.3371 - val_accuracy: 0.5848 - val_loss: 1.7867
Epoch 42/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 359ms/step - accuracy: 0.8877 - loss: 0.3054 - val_accuracy: 0.5869 - val_loss: 1.6642
Epoch 43/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 360ms/step - accuracy: 0.8745 - loss: 0.3041 - val_accuracy: 0.5638 - val_loss: 1.8951
Epoch 44/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 361ms/step - accuracy: 0.8729 - loss: 0.3359 - val_accuracy: 0.5669 - val_loss: 1.7508
Epoch 45/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 352ms/step - accuracy: 0.8871 - loss: 0.3000 - val_accuracy: 0.5574 - val_loss: 1.8962
Epoch 46/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 362ms/step - accuracy: 0.8727 - loss: 0.3094 - val_accuracy: 0.6101 - val_loss: 1.7305
Epoch 47/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 360ms/step - accuracy: 0.8871 - loss: 0.3035 - val_accuracy: 0.5638 - val_loss: 1.8268
Epoch 48/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 42s 354ms/step - accuracy: 0.8802 - loss: 0.3261 - val_accuracy: 0.5933 - val_loss: 1.6941
Epoch 49/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 360ms/step - accuracy: 0.8904 - loss: 0.3066 - val_accuracy: 0.5911 - val_loss: 1.7019
Epoch 50/50
119/119 ━━━━━━━━━━━━━━━━━━━━ 43s 360ms/step - accuracy: 0.8981 - loss: 0.2600 - val_accuracy: 0.6217 - val_loss: 1.6943
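Validation loss bottoms out around epoch 15 (~1.38) and drifts upward afterwards while training accuracy keeps climbing past 0.89, which points to overfitting. One common remedy, sketched here but not used in this run, is early stopping with best-weights restore:

from tensorflow.keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
history = model.fit(
    train_generator,
    validation_data=val_generator,
    epochs=EPOCHS,
    class_weight=class_weights,
    callbacks=[early_stop]  # stops once val_loss stops improving
)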
In [14]: plt.plot(history.history['accuracy'], label='Training Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.grid(True)
plt.show()
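The same comparison for loss makes the divergence between training and validation easier to see; a companion plot, assuming the same history object is in scope:

plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.grid(True)
plt.show()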
In [15]: import os
# Check your current working directory
print(f"📂 Current working directory: {os.getcwd()}")
# Create a 'models' folder if it doesn't exist
os.makedirs('models', exist_ok=True)
# Save the model
model.save('models/waste_classifier_model.h5')
print("✅ Model saved successfully in the 'models' folder!")
WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or
`keras.saving.save_model(model)`. This file format is considered legacy. We
recommend using instead the native Keras format, e.g. `model.save('my_model.keras')`
or `keras.saving.save_model(model, 'my_model.keras')`.
📂 Current working directory: /home/deepak
✅ Model saved successfully in the 'models' folder!
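Following the warning above, the native Keras format avoids the legacy HDF5 path; a one-line alternative:

model.save('models/waste_classifier_model.keras')  # native format, as the warning recommends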
In [16]: # model.save('/content/drive/MyDrive/Project/waste_classifier_model.h5')
# print("✅ Model saved successfully!")
In [17]: import tensorflow as tf
# Load the model from the local 'models' folder
model = tf.keras.models.load_model('models/waste_classifier_model.h5')
print("✅ Model loaded successfully!")
WARNING:absl:Compiled the loaded model, but the compiled metrics have yet to
be built. `model.compile_metrics` will be empty until you train or evaluate
the model.
✅ Model loaded successfully!
In [22]: class_labels = ['Cardboard', 'Food Organics', 'Glass', 'Metal', 'Miscellaneous Trash',
                         'Paper', 'Plastic', 'Textile Trash', 'Vegetation']
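Hard-coding the label list only works if it matches the generator's alphabetical ordering; deriving it from the generator removes that risk. A sketch, assuming train_generator is still in scope:

class_labels = sorted(train_generator.class_indices, key=train_generator.class_indices.get)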
In [63]: from tensorflow.keras.preprocessing import image
import numpy as np
import matplotlib.pyplot as plt
# Step 1: Load and preprocess the image
img_path = 'Downloads/dataset/w/RealWaste/Vegetation/Vegetation_193.jpg' # sample image
img = image.load_img(img_path, target_size=(128, 128)) # use same size as training
img_array = image.img_to_array(img)
img_array = np.expand_dims(img_array, axis=0)
img_array = img_array / 255.0 # normalize
# Step 2: Predict
prediction = model.predict(img_array)
predicted_index = np.argmax(prediction)
predicted_class = class_labels[predicted_index]
print(" 🔢
Prediction Probabilities:")
for i, label in enumerate(class_labels):
print(f"{label}: {prediction[0][i]*100:.2f}%")
print(f" 🔍 Predicted Waste Type: {predicted_class}")
# Step 3: Show eco-friendly suggestion
eco_tips = {
'Plastic': 'Use reusable cloth or paper bags instead of plastic.',
'Glass': 'Switch to steel or reusable bottles.',
'Food Organics': 'Use compost bins to make fertilizer.',
'Metal': 'Recycle metal items at local scrap centers.',
'Cardboard': 'Reuse or recycle cardboard for packing.',
'Paper': 'Minimize paper use; go digital!',
'Textile Trash': 'Donate usable clothes; avoid fast fashion.',
'Vegetation': 'Compost garden waste.',
'Miscellaneous Trash': 'Avoid disposables; use durable alternatives.'
}
print(f" ♻️ Eco Alternative Tip: {eco_tips[predicted_class]}")
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 39ms/step
🔢 Prediction Probabilities:
Cardboard: 0.00%
Food Organics: 0.00%
Glass: 0.00%
Metal: 0.00%
Miscellaneous Trash: 0.00%
Paper: 0.00%
Plastic: 0.00%
Textile Trash: 0.00%
Vegetation: 100.00%
🔍 Predicted Waste Type: Vegetation
♻️ Eco Alternative Tip: Compost garden waste.
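Steps 1 and 2 above can be folded into a small helper for classifying arbitrary images. A sketch under the same preprocessing assumptions (128x128 input, rescale by 1/255); classify_image is a hypothetical name, not part of the original notebook:

def classify_image(path, model, class_labels, img_size=128):
    """Load an image, preprocess it like the training data, and return (label, probabilities)."""
    img = image.load_img(path, target_size=(img_size, img_size))
    arr = np.expand_dims(image.img_to_array(img), axis=0) / 255.0  # normalize as in training
    probs = model.predict(arr)[0]
    return class_labels[np.argmax(probs)], probs

label, probs = classify_image(img_path, model, class_labels)
print(label)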
In [64]: # Step 4: Map to biodegradability, compostability, recyclability
waste_category_map = {
'Plastic': ['Non-biodegradable', 'Recyclable'],
'Glass': ['Non-biodegradable', 'Recyclable'],
'Food Organics': ['Biodegradable', 'Compostable'],
'Metal': ['Non-biodegradable', 'Recyclable'],
'Cardboard': ['Biodegradable', 'Recyclable'],
'Paper': ['Biodegradable', 'Recyclable'],
'Textile Trash': ['Biodegradable'], # assuming natural fibers
'Vegetation': ['Biodegradable', 'Compostable'],
'Miscellaneous Trash': ['Non-biodegradable']
}
waste_categories = waste_category_map.get(predicted_class, ['Unknown'])
print(f"\n🧾 Waste Category: {', '.join(waste_categories)}")
# Step 5: General waste handling tips
category_tips = {
'Biodegradable': "You can compost this or let it naturally decompose.",
'Non-biodegradable': "Avoid single-use items. Try to reuse or recycle.",
'Compostable': "Put this in a compost bin or compost pile.",
'Recyclable': "Place in a designated recycling bin."
}
print("\n 📌
Waste Handling Tips:")
for cat in waste_categories:
print(f"- {cat}: {category_tips.get(cat, 'No suggestion available.')}")
🧾 Waste Category: Biodegradable, Compostable
📌 Waste Handling Tips:
- Biodegradable: You can compost this or let it naturally decompose.
- Compostable: Put this in a compost bin or compost pile.
In [65]: # Step 6: Display the input image with its predicted label
plt.imshow(img)
plt.axis('off')
plt.title(f"Predicted: {predicted_class}")
plt.show()