Diff of /xception.ipynb [000000] .. [2ed9d9]

--- a
+++ b/xception.ipynb
@@ -0,0 +1,259 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "6b8fc951",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Found 980 images belonging to 2 classes.\n",
+      "Found 140 images belonging to 2 classes.\n",
+      "Found 280 images belonging to 2 classes.\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "C:\\Users\\mohit\\AppData\\Local\\Temp\\ipykernel_33056\\3483214989.py:72: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
+      "  xception_history = xception_model.fit_generator(train_generator, steps_per_epoch=train_generator.n // train_generator.batch_size, epochs=5, validation_data=val_generator, validation_steps=val_generator.n // val_generator.batch_size, callbacks=[checkpoint])\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Epoch 1/5\n",
+      "30/30 [==============================] - 186s 6s/step - loss: 0.1842 - accuracy: 0.9462 - val_loss: 0.1311 - val_accuracy: 0.9219\n",
+      "Epoch 2/5\n",
+      "30/30 [==============================] - 193s 6s/step - loss: 0.0225 - accuracy: 0.9926 - val_loss: 0.1422 - val_accuracy: 0.9297\n",
+      "Epoch 3/5\n",
+      "30/30 [==============================] - 190s 6s/step - loss: 0.0069 - accuracy: 0.9989 - val_loss: 0.0426 - val_accuracy: 0.9922\n",
+      "Epoch 4/5\n",
+      "30/30 [==============================] - 192s 6s/step - loss: 0.0070 - accuracy: 0.9979 - val_loss: 0.0151 - val_accuracy: 0.9922\n",
+      "Epoch 5/5\n",
+      "30/30 [==============================] - 192s 6s/step - loss: 0.0016 - accuracy: 1.0000 - val_loss: 0.0074 - val_accuracy: 0.9922\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "C:\\Users\\mohit\\AppData\\Local\\Temp\\ipykernel_33056\\3483214989.py:74: UserWarning: `Model.evaluate_generator` is deprecated and will be removed in a future version. Please use `Model.evaluate`, which supports generators.\n",
+      "  xception_scores = xception_model.evaluate_generator(test_generator, steps=test_samples // BATCH_SIZE)\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Xception Test Loss: 0.012623521499335766\n",
+      "Xception Test Accuracy: 0.9921875\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "C:\\Users\\mohit\\AppData\\Local\\Temp\\ipykernel_33056\\3483214989.py:85: UserWarning: `Model.predict_generator` is deprecated and will be removed in a future version. Please use `Model.predict`, which supports generators.\n",
+      "  xception_predictions = xception_model.predict_generator(test_generator, steps=num_prediction_steps, verbose=1)\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "9/9 [==============================] - 11s 1s/step\n",
+      "Xception Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "           0       0.99      1.00      0.99       140\n",
+      "           1       1.00      0.99      0.99       140\n",
+      "\n",
+      "    accuracy                           0.99       280\n",
+      "   macro avg       0.99      0.99      0.99       280\n",
+      "weighted avg       0.99      0.99      0.99       280\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Importing necessary libraries\n",
+    "import os\n",
+    "import random\n",
+    "import shutil\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "from keras.preprocessing.image import ImageDataGenerator\n",
+    "from keras.applications import VGG16, VGG19, ResNet50, InceptionV3, Xception\n",
+    "from keras.models import Model\n",
+    "from keras.layers import Dense, GlobalAveragePooling2D\n",
+    "from keras.optimizers import Adam\n",
+    "from keras.callbacks import ModelCheckpoint\n",
+    "from sklearn.metrics import classification_report, confusion_matrix\n",
+    "\n",
+    "# Setting random seed for reproducibility\n",
+    "np.random.seed(42)\n",
+    "random.seed(42)\n",
+    "\n",
+    "# Define constants\n",
+    "NUM_CLASSES = 2\n",
+    "IMG_SIZE = (224, 224)\n",
+    "BATCH_SIZE = 32\n",
+    "TRAIN_DIR = 'train'\n",
+    "VAL_DIR = 'val'\n",
+    "TEST_DIR = 'test'\n",
+    "TRAIN_SPLIT = 0.7\n",
+    "VAL_SPLIT = 0.1\n",
+    "TEST_SPLIT = 0.2\n",
+    "test_samples = 280\n",
+    "\n",
+    "# Define the base directory where the image data is located\n",
+    "BASE_DIR = r\"C:\\Users\\mohit\\Downloads\\DIP Assignment 2\\Data\"\n",
+    "\n",
+    "# Set the paths for training, validation, and test data\n",
+    "TRAIN_DIR = os.path.join(BASE_DIR, 'train')\n",
+    "VAL_DIR = os.path.join(BASE_DIR, 'val')\n",
+    "TEST_DIR = os.path.join(BASE_DIR, 'test')\n",
+    "\n",
+    "# Load and preprocess the data\n",
+    "train_datagen = ImageDataGenerator(rescale=1./255,shear_range=0.2,zoom_range=0.2,horizontal_flip=True)\n",
+    "val_datagen = ImageDataGenerator(rescale=1./255)\n",
+    "test_datagen = ImageDataGenerator(rescale=1./255)\n",
+    "\n",
+    "train_generator = train_datagen.flow_from_directory(TRAIN_DIR,target_size=IMG_SIZE,batch_size=BATCH_SIZE,class_mode='categorical')\n",
+    "\n",
+    "val_generator = val_datagen.flow_from_directory(VAL_DIR,target_size=IMG_SIZE,batch_size=BATCH_SIZE,class_mode='categorical')\n",
+    "\n",
+    "test_generator = test_datagen.flow_from_directory(TEST_DIR,target_size=IMG_SIZE,batch_size=BATCH_SIZE,class_mode='categorical',shuffle=False)\n",
+    "\n",
+    "# Define function for creating transfer learning models\n",
+    "def create_transfer_model(base_model, num_classes):\n",
+    "    x = base_model.output\n",
+    "    x = GlobalAveragePooling2D()(x)\n",
+    "    x = Dense(1024, activation='relu')(x)\n",
+    "    predictions = Dense(num_classes, activation='softmax')(x)\n",
+    "    model = Model(inputs=base_model.input, outputs=predictions)\n",
+    "    return model\n",
+    "\n",
+    "# Define Xception model\n",
+    "xception_base = Xception(include_top=False, weights='imagenet', input_shape=(IMG_SIZE[0], IMG_SIZE[1], 3))\n",
+    "xception_model = create_transfer_model(xception_base, NUM_CLASSES)\n",
+    "\n",
+    "# Compile the models\n",
+    "optimizer = Adam(learning_rate=0.0001)\n",
+    "xception_model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])\n",
+    "\n",
+    "# Define checkpoint callback to save the best model during training\n",
+    "checkpoint = ModelCheckpoint('best_model.h5', monitor='val_loss', save_best_only=True)\n",
+    "\n",
+    "xception_model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])\n",
+    "\n",
+    "xception_history = xception_model.fit_generator(train_generator, steps_per_epoch=train_generator.n // train_generator.batch_size, epochs=5, validation_data=val_generator, validation_steps=val_generator.n // val_generator.batch_size, callbacks=[checkpoint])\n",
+    "\n",
+    "xception_scores = xception_model.evaluate_generator(test_generator, steps=test_samples // BATCH_SIZE)\n",
+    "print(\"Xception Test Loss:\", xception_scores[0])\n",
+    "print(\"Xception Test Accuracy:\", xception_scores[1])\n",
+    "\n",
+    "# Get the number of test samples\n",
+    "num_test_samples = test_generator.n\n",
+    "\n",
+    "# Calculate the number of steps for prediction\n",
+    "num_prediction_steps = num_test_samples // test_generator.batch_size + 1\n",
+    "\n",
+    "# Generate predictions for all test samples\n",
+    "xception_predictions = xception_model.predict_generator(test_generator, steps=num_prediction_steps, verbose=1)\n",
+    "\n",
+    "# Convert predictions to class labels\n",
+    "xception_predicted_labels = np.argmax(xception_predictions, axis=1)\n",
+    "\n",
+    "# Get true class labels\n",
+    "true_labels = test_generator.classes\n",
+    "\n",
+    "# Calculate classification report\n",
+    "xception_report = classification_report(true_labels, xception_predicted_labels)\n",
+    "\n",
+    "print(\"Xception Classification Report:\")\n",
+    "print(xception_report)"
+   ]
+  },
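+  {
+   "cell_type": "markdown",
+   "id": "b3d1a0f2",
+   "metadata": {},
+   "source": [
+    "The cell above uses the deprecated `fit_generator`, `evaluate_generator`, and `predict_generator` methods, which produce the deprecation warnings shown in its output. As a minimal sketch (reusing the variable names defined above), the modern `Model.fit`, `Model.evaluate`, and `Model.predict` accept the same generators directly with otherwise identical arguments:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c4e2b1a3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: modern Keras equivalents of the deprecated *_generator calls above.\n",
+    "# fit/evaluate/predict accept generators directly; the other arguments are unchanged.\n",
+    "xception_history = xception_model.fit(\n",
+    "    train_generator,\n",
+    "    steps_per_epoch=train_generator.n // train_generator.batch_size,\n",
+    "    epochs=5,\n",
+    "    validation_data=val_generator,\n",
+    "    validation_steps=val_generator.n // val_generator.batch_size,\n",
+    "    callbacks=[checkpoint])\n",
+    "\n",
+    "# steps could also be omitted below: the directory iterators are finite Sequences,\n",
+    "# so evaluate/predict would then cover every test sample automatically.\n",
+    "xception_scores = xception_model.evaluate(test_generator, steps=test_samples // BATCH_SIZE)\n",
+    "xception_predictions = xception_model.predict(test_generator, steps=num_prediction_steps, verbose=1)"
+   ]
+  },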
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "17422ece",
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "NameError",
+     "evalue": "name 'xception_model' is not defined",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mNameError\u001b[0m                                 Traceback (most recent call last)",
+      "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_10180\\1533048863.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mmatplotlib\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpyplot\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mplt\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      2\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 3\u001b[1;33m \u001b[0mxception_history\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mxception_model\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtrain_generator\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mtrain_generator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mn\u001b[0m \u001b[1;33m//\u001b[0m \u001b[0mtrain_generator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mepochs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m3\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      4\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      5\u001b[0m \u001b[1;31m# Plot the training loss curve\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
+      "\u001b[1;31mNameError\u001b[0m: name 'xception_model' is not defined"
+     ]
+    }
+   ],
+   "source": [
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "xception_history = xception_model.fit(train_generator, steps_per_epoch=train_generator.n // train_generator.batch_size, epochs=3)\n",
+    "\n",
+    "# Plot the training loss curve\n",
+    "plt.plot(xception_history.history['loss'])\n",
+    "plt.title('xception Training Loss')\n",
+    "plt.xlabel('Epoch')\n",
+    "plt.ylabel('Loss')\n",
+    "plt.show()"
+   ]
+  },
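+  {
+   "cell_type": "markdown",
+   "id": "d5f3c2b4",
+   "metadata": {},
+   "source": [
+    "A possible companion plot, assuming `xception_history` still refers to the run from the first cell (which passed `validation_data`): the validation curve stored in the same History object. The guard below skips the plot if the history has no validation entries, e.g. after the re-fit in the previous cell."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e6a4d3c5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: plot training vs. validation loss if the History object contains validation metrics\n",
+    "if 'val_loss' in xception_history.history:\n",
+    "    plt.plot(xception_history.history['loss'], label='train loss')\n",
+    "    plt.plot(xception_history.history['val_loss'], label='val loss')\n",
+    "    plt.title('Xception Training vs. Validation Loss')\n",
+    "    plt.xlabel('Epoch')\n",
+    "    plt.ylabel('Loss')\n",
+    "    plt.legend()\n",
+    "    plt.show()"
+   ]
+  },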
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6641997c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.plot(xception_history.history['accuracy'])\n",
+    "plt.title('xception Training Accuracy')\n",
+    "plt.xlabel('Epoch')\n",
+    "plt.ylabel('Accuracy')\n",
+    "plt.show()"
+   ]
+  },
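+  {
+   "cell_type": "markdown",
+   "id": "f7b5e4d6",
+   "metadata": {},
+   "source": [
+    "`confusion_matrix` is imported in the first cell but not used there; a small sketch of how it could complement the classification report, reusing `true_labels` and `xception_predicted_labels` from that cell:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a8c6f5e7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: confusion matrix for the Xception predictions computed in the first cell\n",
+    "xception_cm = confusion_matrix(true_labels, xception_predicted_labels)\n",
+    "print('Xception Confusion Matrix:')\n",
+    "print(xception_cm)"
+   ]
+  },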
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e503a76d",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.16"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}