--- a +++ b/Classification/EfficientNet.ipynb @@ -0,0 +1,1268 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "# training_images\n", + "import os\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "import matplotlib.pyplot as plt\n", + "import cv2\n", + "from tensorflow.keras.layers import Input, Flatten, Dense, Dropout\n", + "from tensorflow.keras.models import Model\n", + "from tensorflow.keras.optimizers import Adam\n", + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "from tensorflow.keras.applications import EfficientNetB0\n", + "from tensorflow.keras.preprocessing.image import load_img, img_to_array" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['bleeding', 'non_bleeding']" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import os\n", + "os.listdir(\"training_images\")" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "class_names = sorted(os.listdir(\"training_images\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "data = []\n", + "labels = []\n", + "for class_idx, class_name in enumerate(class_names):\n", + " class_dir = os.path.join(\"training_images\", class_name)\n", + " for image_name in os.listdir(class_dir):\n", + " image_path = os.path.join(class_dir, image_name)\n", + " img = load_img(image_path, target_size=(224, 224))\n", + " img_array = img_to_array(img)\n", + " data.append(img_array)\n", + " labels.append(class_idx)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "data = np.array(data, dtype=np.uint8)\n", + "labels = np.array(labels)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(2618, 224, 224, 3)" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "from tensorflow.keras.utils import to_categorical\n", + "num_classes = len(class_names)\n", + "labels_encoded = to_categorical(labels, num_classes)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn.model_selection import train_test_split\n", + "X_train, X_test, y_train, y_test = train_test_split(data, labels_encoded, test_size=0.3, random_state=42)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "input_shape = (224,224,3)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "base_model = EfficientNetB0(weights='imagenet', include_top=False, input_shape=input_shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "from tensorflow.keras.layers import GlobalAveragePooling2D\n", + "for layer in base_model.layers:\n", + " layer.trainable = False\n", + "x = base_model.output\n", + "x = GlobalAveragePooling2D()(x)\n", + "x = Dense(1024, activation='relu')(x)\n", + "x = Dense(512, activation='relu')(x)\n", + "x = Dense(256, activation='relu')(x)\n", + 
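"# Dropout between the Dense blocks (Dropout is already imported above) is a common way to limit\n", + "# overfitting, e.g. x = Dropout(0.3)(x); it is not used in the run logged below.\n", + "# For two mutually exclusive classes, softmax with categorical_crossentropy is the more\n", + "# conventional pairing; the sigmoid + binary_crossentropy setup used here also trains on one-hot labels.\n", + 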
"predictions = Dense(2, activation='sigmoid')(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "model = Model(inputs=base_model.input, outputs=predictions)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "model.compile(optimizer=Adam(learning_rate=1e-2), loss='binary_crossentropy', metrics=['accuracy'])" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/10\n", + "58/58 [==============================] - 172s 3s/step - loss: 0.4263 - accuracy: 0.8908 - val_loss: 0.0595 - val_accuracy: 0.9784\n", + "Epoch 2/10\n", + "58/58 [==============================] - 171s 3s/step - loss: 0.0874 - accuracy: 0.9689 - val_loss: 0.0345 - val_accuracy: 0.9835\n", + "Epoch 3/10\n", + "58/58 [==============================] - 178s 3s/step - loss: 0.0807 - accuracy: 0.9727 - val_loss: 0.0398 - val_accuracy: 0.9873\n", + "Epoch 4/10\n", + "58/58 [==============================] - 180s 3s/step - loss: 0.0538 - accuracy: 0.9831 - val_loss: 0.0371 - val_accuracy: 0.9885\n", + "Epoch 5/10\n", + "58/58 [==============================] - 183s 3s/step - loss: 0.0290 - accuracy: 0.9913 - val_loss: 0.0401 - val_accuracy: 0.9885\n", + "Epoch 6/10\n", + "58/58 [==============================] - 184s 3s/step - loss: 0.0214 - accuracy: 0.9945 - val_loss: 0.0371 - val_accuracy: 0.9860\n", + "Epoch 7/10\n", + "58/58 [==============================] - 153s 3s/step - loss: 0.0308 - accuracy: 0.9918 - val_loss: 0.0323 - val_accuracy: 0.9949\n", + "Epoch 8/10\n", + "58/58 [==============================] - 151s 3s/step - loss: 0.0400 - accuracy: 0.9858 - val_loss: 0.0215 - val_accuracy: 0.9885\n", + "Epoch 9/10\n", + "58/58 [==============================] - 148s 3s/step - loss: 0.0297 - accuracy: 0.9940 - val_loss: 0.1193 - val_accuracy: 0.9644\n", + "Epoch 10/10\n", + "58/58 [==============================] - 147s 3s/step - loss: 0.0401 - accuracy: 0.9902 - val_loss: 0.0795 - val_accuracy: 0.9758\n" + ] + }, + { + "data": { + "text/plain": [ + "<keras.callbacks.History at 0x20d1674bc70>" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.fit(X_train, y_train, batch_size = 32, epochs=10,validation_data=(X_test,y_test))" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25/25 [==============================] - 45s 2s/step - loss: 0.0795 - accuracy: 0.9758\n" + ] + }, + { + "data": { + "text/plain": [ + "[0.07951617985963821, 0.9758269786834717]" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.evaluate(X_test,y_test)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "model.save(\"efficientnet.h5\")" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "25/25 [==============================] - 48s 2s/step\n" + ] + } + ], + "source": [ + "y_pred = model.predict(X_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(786, 2)" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": 
"execute_result" + } + ], + "source": [ + "y_pred.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(y_pred.shape[0]):\n", + " max_value = np.max(y_pred[i])\n", + " y_pred[i] = np.where(y_pred[i] == max_value, 1, 0)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[0., 1.],\n", + " [1., 0.],\n", + " [1., 0.],\n", + " ...,\n", + " [1., 0.],\n", + " [0., 1.],\n", + " [1., 0.]], dtype=float32)" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "y_pred" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "list" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "type(class_names)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " precision recall f1-score support\n", + "\n", + " 0 1.00 0.95 0.98 393\n", + " 1 0.96 1.00 0.98 393\n", + "\n", + " micro avg 0.98 0.98 0.98 786\n", + " macro avg 0.98 0.98 0.98 786\n", + "weighted avg 0.98 0.98 0.98 786\n", + " samples avg 0.98 0.98 0.98 786\n", + "\n" + ] + } + ], + "source": [ + "from sklearn.metrics import classification_report\n", + "print(classification_report(y_test,y_pred))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[10], line 7\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[39mfor\u001b[39;00m image_name \u001b[39min\u001b[39;00m os\u001b[39m.\u001b[39mlistdir(class_dir):\n\u001b[0;32m 6\u001b[0m image_path \u001b[39m=\u001b[39m os\u001b[39m.\u001b[39mpath\u001b[39m.\u001b[39mjoin(class_dir, image_name)\n\u001b[1;32m----> 7\u001b[0m img \u001b[39m=\u001b[39m load_img(image_path, target_size\u001b[39m=\u001b[39;49m(\u001b[39m224\u001b[39;49m, \u001b[39m224\u001b[39;49m))\n\u001b[0;32m 8\u001b[0m img_array \u001b[39m=\u001b[39m img_to_array(img)\n\u001b[0;32m 9\u001b[0m data\u001b[39m.\u001b[39mappend(img_array)\n", + "File \u001b[1;32mc:\\Users\\User\\anaconda3\\lib\\site-packages\\keras\\utils\\image_utils.py:423\u001b[0m, in \u001b[0;36mload_img\u001b[1;34m(path, grayscale, color_mode, target_size, interpolation, keep_aspect_ratio)\u001b[0m\n\u001b[0;32m 421\u001b[0m path \u001b[39m=\u001b[39m \u001b[39mstr\u001b[39m(path\u001b[39m.\u001b[39mresolve())\n\u001b[0;32m 422\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mopen\u001b[39m(path, \u001b[39m\"\u001b[39m\u001b[39mrb\u001b[39m\u001b[39m\"\u001b[39m) \u001b[39mas\u001b[39;00m f:\n\u001b[1;32m--> 423\u001b[0m img \u001b[39m=\u001b[39m pil_image\u001b[39m.\u001b[39mopen(io\u001b[39m.\u001b[39mBytesIO(f\u001b[39m.\u001b[39;49mread()))\n\u001b[0;32m 424\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[0;32m 425\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mTypeError\u001b[39;00m(\n\u001b[0;32m 426\u001b[0m \u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mpath should be 
path-like or io.BytesIO, not \u001b[39m\u001b[39m{\u001b[39;00m\u001b[39mtype\u001b[39m(path)\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m\n\u001b[0;32m 427\u001b[0m )\n", + "\u001b[1;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "data = []\n", + "labels = []\n", + "for class_idx, class_name in enumerate(class_names):\n", + " class_dir = os.path.join(\"training_images\", class_name)\n", + " for image_name in os.listdir(class_dir):\n", + " image_path = os.path.join(class_dir, image_name)\n", + " img = load_img(image_path, target_size=(224, 224))\n", + " img_array = img_to_array(img)\n", + " data.append(img_array)\n", + " labels.append(class_idx)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "os.path.join(\"Auto-WCEBleedGen Challenge Test Dataset/Test_Dataset_2\", i)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "# E:/GI bleeding detection/Auto-WCEBleedGen Challenge Test Dataset_mrin/Testing_images\n", + "data = []\n", + "files = []\n", + "for i in os.listdir(\"E:/GI bleeding detection/Auto-WCEBleedGen Challenge Test Dataset_mrin/Testing_images\"):\n", + " img = load_img(os.path.join(\"E:/GI bleeding detection/Auto-WCEBleedGen Challenge Test Dataset_mrin/Testing_images\", i), target_size=(224,224))\n", + " img_array = img_to_array(img)\n", + " data.append(img_array)\n", + " files.append(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'A0008.png'" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "files[8]" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [], + "source": [ + "data = np.array(data, dtype=np.uint8)" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "18/18 [==============================] - 24s 1s/step\n" + ] + } + ], + "source": [ + "y_pred = model.predict(data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(y_pred.shape[0]):\n", + " max_value = np.max(y_pred[i])\n", + " y_pred[i] = np.where(y_pred[i] == max_value, 1, 0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import numpy as np\n", + "import shutil" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [], + "source": [ + "output_dir = \"E:/GI bleeding detection/Auto-WCEBleedGen Challenge Test Dataset_mrin/tested\"\n", + "bleeding_dir = os.path.join(output_dir, \"bleeding\")\n", + "non_bleeding_dir = os.path.join(output_dir, \"non_bleeding\")\n", + "\n", + "# Create the main output directory if it doesn't exist\n", + "if not os.path.exists(output_dir):\n", + " os.makedirs(output_dir)\n", + "\n", + "# Create subdirectories for bleeding and non-bleeding images\n", + "if not os.path.exists(bleeding_dir):\n", + " os.makedirs(bleeding_dir)\n", + "if not 
os.path.exists(non_bleeding_dir):\n", + " os.makedirs(non_bleeding_dir)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "df = pd.DataFrame()" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'A0000.png'" + ] + }, + "execution_count": 50, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "files[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [], + "source": [ + "lbl = []\n", + "for i, label in enumerate(y_pred):\n", + " file_path = files[i] # Get the file path for the current image\n", + "\n", + " # Determine the label and destination directory\n", + " \n", + " if label[0] == 1:\n", + " destination_dir = bleeding_dir\n", + " lbl.append(\"Bleeding\")\n", + " \n", + " elif label[1] == 1:\n", + " destination_dir = non_bleeding_dir\n", + " lbl.append(\"Non Bleeding\")\n", + " else:\n", + " continue # Skip rows without a valid label\n", + "\n", + " # Extract the image file name from the file path\n", + " file_path = os.path.join(\"E:/GI bleeding detection/Auto-WCEBleedGen Challenge Test Dataset_mrin/Testing_images\", files[i])\n", + " file_name = os.path.basename(file_path)\n", + "\n", + " # Construct the destination file path\n", + " destination_file_path = os.path.join(destination_dir, file_name)\n", + "\n", + " # Copy the image to the appropriate directory\n", + " shutil.copy(file_path, destination_file_path)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['A0000.png',\n", + " 'A0001.png',\n", + " 'A0002.png',\n", + " 'A0003.png',\n", + " 'A0004.png',\n", + " 'A0005.png',\n", + " 'A0006.png',\n", + " 'A0007.png',\n", + " 'A0008.png',\n", + " 'A0009.png',\n", + " 'A0010.png',\n", + " 'A0011.png',\n", + " 'A0012.png',\n", + " 'A0013.png',\n", + " 'A0014.png',\n", + " 'A0016.png',\n", + " 'A0017.png',\n", + " 'A0018.png',\n", + " 'A0019.png',\n", + " 'A0020.png',\n", + " 'A0021.png',\n", + " 'A0022.png',\n", + " 'A0023.png',\n", + " 'A0024.png',\n", + " 'A0025.png',\n", + " 'A0026.png',\n", + " 'A0027.png',\n", + " 'A0028.png',\n", + " 'A0029.png',\n", + " 'A0030.png',\n", + " 'A0031.png',\n", + " 'A0032.png',\n", + " 'A0033.png',\n", + " 'A0034.png',\n", + " 'A0035.png',\n", + " 'A0036.png',\n", + " 'A0037.png',\n", + " 'A0038.png',\n", + " 'A0039.png',\n", + " 'A0040.png',\n", + " 'A0041.png',\n", + " 'A0042.png',\n", + " 'A0043.png',\n", + " 'A0044.png',\n", + " 'A0045.png',\n", + " 'A0046.png',\n", + " 'A0047.png',\n", + " 'A0048.png',\n", + " 'A0049.png',\n", + " 'A0050.png',\n", + " 'A0051.png',\n", + " 'A0052.png',\n", + " 'A0053.png',\n", + " 'A0054.png',\n", + " 'A0055.png',\n", + " 'A0056.png',\n", + " 'A0057.png',\n", + " 'A0058.png',\n", + " 'A0059.png',\n", + " 'A0060.png',\n", + " 'A0061.png',\n", + " 'A0062.png',\n", + " 'A0063.png',\n", + " 'A0064.png',\n", + " 'A0065.png',\n", + " 'A0066.png',\n", + " 'A0067.png',\n", + " 'A0068.png',\n", + " 'A0069.png',\n", + " 'A0070.png',\n", + " 'A0071.png',\n", + " 'A0072.png',\n", + " 'A0073.png',\n", + " 'A0074.png',\n", + " 'A0075.png',\n", + " 'A0076.png',\n", + " 'A0077.png',\n", + " 'A0078.png',\n", + " 'A0079.png',\n", + " 'A0080.png',\n", + " 'A0081.png',\n", + " 'A0082.png',\n", + " 'A0083.png',\n", + " 'A0084.png',\n", + " 'A0085.png',\n", + " 'A0086.png',\n", + " 'A0087.png',\n", + " 
'A0088.png',\n", + " 'A0089.png',\n", + " 'A0090.png',\n", + " 'A0091.png',\n", + " 'A0092.png',\n", + " 'A0093.png',\n", + " 'A0094.png',\n", + " 'A0095.png',\n", + " 'A0096.png',\n", + " 'A0097.png',\n", + " 'A0098.png',\n", + " 'A0099.png',\n", + " 'A0100.png',\n", + " 'A0101.png',\n", + " 'A0102.png',\n", + " 'A0103.png',\n", + " 'A0104.png',\n", + " 'A0105.png',\n", + " 'A0106.png',\n", + " 'A0107.png',\n", + " 'A0108.png',\n", + " 'A0109.png',\n", + " 'A0110.png',\n", + " 'A0111.png',\n", + " 'A0112.png',\n", + " 'A0113.png',\n", + " 'A0114.png',\n", + " 'A0115.png',\n", + " 'A0116.png',\n", + " 'A0117.png',\n", + " 'A0118.png',\n", + " 'A0119.png',\n", + " 'A0120.png',\n", + " 'A0121.png',\n", + " 'A0122.png',\n", + " 'A0123.png',\n", + " 'A0124.png',\n", + " 'A0125.png',\n", + " 'A0126.png',\n", + " 'A0127.png',\n", + " 'A0128.png',\n", + " 'A0129.png',\n", + " 'A0130.png',\n", + " 'A0131.png',\n", + " 'A0132.png',\n", + " 'A0133.png',\n", + " 'A0134.png',\n", + " 'A0135.png',\n", + " 'A0136.png',\n", + " 'A0137.png',\n", + " 'A0138.png',\n", + " 'A0139.png',\n", + " 'A0140.png',\n", + " 'A0141.png',\n", + " 'A0142.png',\n", + " 'A0143.png',\n", + " 'A0144.png',\n", + " 'A0145.png',\n", + " 'A0146.png',\n", + " 'A0147.png',\n", + " 'A0148.png',\n", + " 'A0149.png',\n", + " 'A0150.png',\n", + " 'A0151.png',\n", + " 'A0152.png',\n", + " 'A0153.png',\n", + " 'A0154.png',\n", + " 'A0155.png',\n", + " 'A0156.png',\n", + " 'A0157.png',\n", + " 'A0158.png',\n", + " 'A0159.png',\n", + " 'A0160.png',\n", + " 'A0161.png',\n", + " 'A0162.png',\n", + " 'A0163.png',\n", + " 'A0164.png',\n", + " 'A0165.png',\n", + " 'A0166.png',\n", + " 'A0167.png',\n", + " 'A0168.png',\n", + " 'A0169.png',\n", + " 'A0170.png',\n", + " 'A0171.png',\n", + " 'A0172.png',\n", + " 'A0173.png',\n", + " 'A0174.png',\n", + " 'A0175.png',\n", + " 'A0176.png',\n", + " 'A0177.png',\n", + " 'A0178.png',\n", + " 'A0179.png',\n", + " 'A0180.png',\n", + " 'A0181.png',\n", + " 'A0182.png',\n", + " 'A0183.png',\n", + " 'A0184.png',\n", + " 'A0185.png',\n", + " 'A0186.png',\n", + " 'A0187.png',\n", + " 'A0188.png',\n", + " 'A0189.png',\n", + " 'A0190.png',\n", + " 'A0191.png',\n", + " 'A0192.png',\n", + " 'A0193.png',\n", + " 'A0194.png',\n", + " 'A0195.png',\n", + " 'A0196.png',\n", + " 'A0197.png',\n", + " 'A0198.png',\n", + " 'A0199.png',\n", + " 'A0200.png',\n", + " 'A0201.png',\n", + " 'A0202.png',\n", + " 'A0203.png',\n", + " 'A0204.png',\n", + " 'A0205.png',\n", + " 'A0206.png',\n", + " 'A0207.png',\n", + " 'A0208.png',\n", + " 'A0209.png',\n", + " 'A0210.png',\n", + " 'A0211.png',\n", + " 'A0212.png',\n", + " 'A0213.png',\n", + " 'A0214.png',\n", + " 'A0215.png',\n", + " 'A0216.png',\n", + " 'A0217.png',\n", + " 'A0218.png',\n", + " 'A0219.png',\n", + " 'A0220.png',\n", + " 'A0221.png',\n", + " 'A0222.png',\n", + " 'A0223.png',\n", + " 'A0224.png',\n", + " 'A0225.png',\n", + " 'A0226.png',\n", + " 'A0227.png',\n", + " 'A0228.png',\n", + " 'A0229.png',\n", + " 'A0230.png',\n", + " 'A0231.png',\n", + " 'A0232.png',\n", + " 'A0233.png',\n", + " 'A0234.png',\n", + " 'A0235.png',\n", + " 'A0236.png',\n", + " 'A0237.png',\n", + " 'A0238.png',\n", + " 'A0239.png',\n", + " 'A0240.png',\n", + " 'A0241.png',\n", + " 'A0242.png',\n", + " 'A0243.png',\n", + " 'A0244.png',\n", + " 'A0245.png',\n", + " 'A0246.png',\n", + " 'A0247.png',\n", + " 'A0248.png',\n", + " 'A0249.png',\n", + " 'A0250.png',\n", + " 'A0251.png',\n", + " 'A0252.png',\n", + " 'A0253.png',\n", + " 'A0254.png',\n", + " 'A0255.png',\n", + " 'A0256.png',\n", + " 
'A0257.png',\n", + " 'A0258.png',\n", + " 'A0259.png',\n", + " 'A0260.png',\n", + " 'A0261.png',\n", + " 'A0262.png',\n", + " 'A0263.png',\n", + " 'A0264.png',\n", + " 'A0265.png',\n", + " 'A0266.png',\n", + " 'A0267.png',\n", + " 'A0268.png',\n", + " 'A0269.png',\n", + " 'A0270.png',\n", + " 'A0271.png',\n", + " 'A0272.png',\n", + " 'A0273.png',\n", + " 'A0274.png',\n", + " 'A0275.png',\n", + " 'A0276.png',\n", + " 'A0277.png',\n", + " 'A0278.png',\n", + " 'A0279.png',\n", + " 'A0280.png',\n", + " 'A0281.png',\n", + " 'A0282.png',\n", + " 'A0283.png',\n", + " 'A0284.png',\n", + " 'A0285.png',\n", + " 'A0286.png',\n", + " 'A0287.png',\n", + " 'A0288.png',\n", + " 'A0289.png',\n", + " 'A0290.png',\n", + " 'A0291.png',\n", + " 'A0292.png',\n", + " 'A0293.png',\n", + " 'A0294.png',\n", + " 'A0295.png',\n", + " 'A0296.png',\n", + " 'A0297.png',\n", + " 'A0298.png',\n", + " 'A0299.png',\n", + " 'A0300.png',\n", + " 'A0301.png',\n", + " 'A0302.png',\n", + " 'A0303.png',\n", + " 'A0304.png',\n", + " 'A0305.png',\n", + " 'A0306.png',\n", + " 'A0307.png',\n", + " 'A0308.png',\n", + " 'A0309.png',\n", + " 'A0310.png',\n", + " 'A0311.png',\n", + " 'A0312.png',\n", + " 'A0313.png',\n", + " 'A0314.png',\n", + " 'A0315.png',\n", + " 'A0316.png',\n", + " 'A0317.png',\n", + " 'A0318.png',\n", + " 'A0319.png',\n", + " 'A0320.png',\n", + " 'A0321.png',\n", + " 'A0322.png',\n", + " 'A0323.png',\n", + " 'A0324.png',\n", + " 'A0325.png',\n", + " 'A0326.png',\n", + " 'A0327.png',\n", + " 'A0328.png',\n", + " 'A0329.png',\n", + " 'A0330.png',\n", + " 'A0331.png',\n", + " 'A0332.png',\n", + " 'A0333.png',\n", + " 'A0334.png',\n", + " 'A0335.png',\n", + " 'A0336.png',\n", + " 'A0337.png',\n", + " 'A0338.png',\n", + " 'A0339.png',\n", + " 'A0340.png',\n", + " 'A0341.png',\n", + " 'A0342.png',\n", + " 'A0343.png',\n", + " 'A0344.png',\n", + " 'A0345.png',\n", + " 'A0346.png',\n", + " 'A0347.png',\n", + " 'A0348.png',\n", + " 'A0349.png',\n", + " 'A0350.png',\n", + " 'A0351.png',\n", + " 'A0352.png',\n", + " 'A0353.png',\n", + " 'A0354.png',\n", + " 'A0355.png',\n", + " 'A0356.png',\n", + " 'A0357.png',\n", + " 'A0358.png',\n", + " 'A0359.png',\n", + " 'A0360.png',\n", + " 'A0361.png',\n", + " 'A0362.png',\n", + " 'A0363.png',\n", + " 'A0364.png',\n", + " 'A0365.png',\n", + " 'A0366.png',\n", + " 'A0367.png',\n", + " 'A0368.png',\n", + " 'A0369.png',\n", + " 'A0370.png',\n", + " 'A0371.png',\n", + " 'A0372.png',\n", + " 'A0373.png',\n", + " 'A0374.png',\n", + " 'A0375.png',\n", + " 'A0376.png',\n", + " 'A0377.png',\n", + " 'A0378.png',\n", + " 'A0379.png',\n", + " 'A0380.png',\n", + " 'A0381.png',\n", + " 'A0382.png',\n", + " 'A0383.png',\n", + " 'A0384.png',\n", + " 'A0385.png',\n", + " 'A0386.png',\n", + " 'A0387.png',\n", + " 'A0388.png',\n", + " 'A0389.png',\n", + " 'A0390.png',\n", + " 'A0391.png',\n", + " 'A0392.png',\n", + " 'A0393.png',\n", + " 'A0394.png',\n", + " 'A0395.png',\n", + " 'A0396.png',\n", + " 'A0397.png',\n", + " 'A0398.png',\n", + " 'A0399.png',\n", + " 'A0400.png',\n", + " 'A0401.png',\n", + " 'A0402.png',\n", + " 'A0403.png',\n", + " 'A0404.png',\n", + " 'A0405.png',\n", + " 'A0406.png',\n", + " 'A0407.png',\n", + " 'A0408.png',\n", + " 'A0409.png',\n", + " 'A0410.png',\n", + " 'A0411.png',\n", + " 'A0412.png',\n", + " 'A0413.png',\n", + " 'A0414.png',\n", + " 'A0415.png',\n", + " 'A0416.png',\n", + " 'A0417.png',\n", + " 'A0418.png',\n", + " 'A0419.png',\n", + " 'A0420.png',\n", + " 'A0421.png',\n", + " 'A0422.png',\n", + " 'A0423.png',\n", + " 'A0424.png',\n", + " 'A0425.png',\n", + " 
'A0426.png',\n", + " 'A0427.png',\n", + " 'A0428.png',\n", + " 'A0429.png',\n", + " 'A0430.png',\n", + " 'A0431.png',\n", + " 'A0432.png',\n", + " 'A0433.png',\n", + " 'A0434.png',\n", + " 'A0435.png',\n", + " 'A0436.png',\n", + " 'A0437.png',\n", + " 'A0438.png',\n", + " 'A0439.png',\n", + " 'A0440.png',\n", + " 'A0441.png',\n", + " 'A0442.png',\n", + " 'A0443.png',\n", + " 'A0444.png',\n", + " 'A0445.png',\n", + " 'A0446.png',\n", + " 'A0447.png',\n", + " 'A0448.png',\n", + " 'A0449.png',\n", + " 'A0450.png',\n", + " 'A0451.png',\n", + " 'A0452.png',\n", + " 'A0453.png',\n", + " 'A0454.png',\n", + " 'A0455.png',\n", + " 'A0456.png',\n", + " 'A0457.png',\n", + " 'A0458.png',\n", + " 'A0459.png',\n", + " 'A0460.png',\n", + " 'A0461.png',\n", + " 'A0462.png',\n", + " 'A0463.png',\n", + " 'A0464.png',\n", + " 'A0465.png',\n", + " 'A0466.png',\n", + " 'A0467.png',\n", + " 'A0468.png',\n", + " 'A0469.png',\n", + " 'A0470.png',\n", + " 'A0471.png',\n", + " 'A0472.png',\n", + " 'A0473.png',\n", + " 'A0474.png',\n", + " 'A0475.png',\n", + " 'A0476.png',\n", + " 'A0477.png',\n", + " 'A0478.png',\n", + " 'A0479.png',\n", + " 'A0480.png',\n", + " 'A0481.png',\n", + " 'A0482.png',\n", + " 'A0483.png',\n", + " 'A0484.png',\n", + " 'A0485.png',\n", + " 'A0486.png',\n", + " 'A0487.png',\n", + " 'A0488.png',\n", + " 'A0489.png',\n", + " 'A0490.png',\n", + " 'A0491.png',\n", + " 'A0492.png',\n", + " 'A0493.png',\n", + " 'A0494.png',\n", + " 'A0495.png',\n", + " 'A0496.png',\n", + " 'A0497.png',\n", + " 'A0498.png',\n", + " 'A0499.png',\n", + " 'A0500.png',\n", + " 'A0501.png',\n", + " 'A0502.png',\n", + " 'A0503.png',\n", + " 'A0504.png',\n", + " 'A0505.png',\n", + " 'A0506.png',\n", + " 'A0507.png',\n", + " 'A0508.png',\n", + " 'A0509.png',\n", + " 'A0510.png',\n", + " 'A0511.png',\n", + " 'A0512.png',\n", + " 'A0513.png',\n", + " 'A0514.png',\n", + " 'A0515.png',\n", + " 'A0516.png',\n", + " 'A0517.png',\n", + " 'A0518.png',\n", + " 'A0519.png',\n", + " 'A0520.png',\n", + " 'A0521.png',\n", + " 'A0522.png',\n", + " 'A0523.png',\n", + " 'A0524.png',\n", + " 'A0525.png',\n", + " 'A0526.png',\n", + " 'A0527.png',\n", + " 'A0528.png',\n", + " 'A0529.png',\n", + " 'A0530.png',\n", + " 'A0531.png',\n", + " 'A0532.png',\n", + " 'A0533.png',\n", + " 'A0534.png',\n", + " 'A0535.png',\n", + " 'A0536.png',\n", + " 'A0537.png',\n", + " 'A0538.png',\n", + " 'A0539.png',\n", + " 'A0540.png',\n", + " 'A0541.png',\n", + " 'A0542.png',\n", + " 'A0543.png',\n", + " 'A0544.png',\n", + " 'A0545.png',\n", + " 'A0546.png',\n", + " 'A0547.png',\n", + " 'A0548.png',\n", + " 'A0549.png',\n", + " 'A0550.png',\n", + " 'A0551.png',\n", + " 'A0552.png',\n", + " 'A0553.png',\n", + " 'A0554.png',\n", + " 'A0555.png',\n", + " 'A0556.png',\n", + " 'A0557.png',\n", + " 'A0558.png',\n", + " 'A0559.png',\n", + " 'A0560.png',\n", + " 'A0561.png',\n", + " 'A0562.png',\n", + " 'A0563.png',\n", + " 'A0564.png']" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "files" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": {}, + "outputs": [], + "source": [ + "df['files'] = files\n", + "df['Label'] = lbl" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Non Bleeding 380\n", + "Bleeding 184\n", + "Name: Label, dtype: int64" + ] + }, + "execution_count": 56, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df[\"Label\"].value_counts()" + ] + }, + { + 
"cell_type": "code", + "execution_count": 58, + "metadata": {}, + "outputs": [], + "source": [ + "df.to_csv(\"results.csv\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +}