
--- a
+++ b/Classification pCR/Classification_Dev.ipynb
@@ -0,0 +1,2083 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "b348fcda-1cd7-424f-aafb-f0fb5f4fe94d",
+   "metadata": {},
+   "source": [
+    "### Loading dataset"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "564c5a25-bc61-4d3d-acca-d54d998bdb4a",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "<div>\n",
+       "<style scoped>\n",
+       "    .dataframe tbody tr th:only-of-type {\n",
+       "        vertical-align: middle;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe tbody tr th {\n",
+       "        vertical-align: top;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe thead th {\n",
+       "        text-align: right;\n",
+       "    }\n",
+       "</style>\n",
+       "<table border=\"1\" class=\"dataframe\">\n",
+       "  <thead>\n",
+       "    <tr style=\"text-align: right;\">\n",
+       "      <th></th>\n",
+       "      <th>ID</th>\n",
+       "      <th>pCR (outcome)</th>\n",
+       "      <th>RelapseFreeSurvival (outcome)</th>\n",
+       "      <th>Age</th>\n",
+       "      <th>ER</th>\n",
+       "      <th>PgR</th>\n",
+       "      <th>HER2</th>\n",
+       "      <th>TrippleNegative</th>\n",
+       "      <th>ChemoGrade</th>\n",
+       "      <th>Proliferation</th>\n",
+       "      <th>...</th>\n",
+       "      <th>original_glszm_SmallAreaHighGrayLevelEmphasis</th>\n",
+       "      <th>original_glszm_SmallAreaLowGrayLevelEmphasis</th>\n",
+       "      <th>original_glszm_ZoneEntropy</th>\n",
+       "      <th>original_glszm_ZonePercentage</th>\n",
+       "      <th>original_glszm_ZoneVariance</th>\n",
+       "      <th>original_ngtdm_Busyness</th>\n",
+       "      <th>original_ngtdm_Coarseness</th>\n",
+       "      <th>original_ngtdm_Complexity</th>\n",
+       "      <th>original_ngtdm_Contrast</th>\n",
+       "      <th>original_ngtdm_Strength</th>\n",
+       "    </tr>\n",
+       "  </thead>\n",
+       "  <tbody>\n",
+       "    <tr>\n",
+       "      <th>0</th>\n",
+       "      <td>TRG002174</td>\n",
+       "      <td>1</td>\n",
+       "      <td>144.0</td>\n",
+       "      <td>41.0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>3</td>\n",
+       "      <td>3</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.517172</td>\n",
+       "      <td>0.375126</td>\n",
+       "      <td>3.325332</td>\n",
+       "      <td>0.002314</td>\n",
+       "      <td>3880771.500</td>\n",
+       "      <td>473.464852</td>\n",
+       "      <td>0.000768</td>\n",
+       "      <td>0.182615</td>\n",
+       "      <td>0.030508</td>\n",
+       "      <td>0.000758</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>1</th>\n",
+       "      <td>TRG002178</td>\n",
+       "      <td>0</td>\n",
+       "      <td>142.0</td>\n",
+       "      <td>39.0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>1</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>3</td>\n",
+       "      <td>3</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.444391</td>\n",
+       "      <td>0.444391</td>\n",
+       "      <td>3.032144</td>\n",
+       "      <td>0.005612</td>\n",
+       "      <td>2372009.744</td>\n",
+       "      <td>59.459710</td>\n",
+       "      <td>0.004383</td>\n",
+       "      <td>0.032012</td>\n",
+       "      <td>0.001006</td>\n",
+       "      <td>0.003685</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>2</th>\n",
+       "      <td>TRG002204</td>\n",
+       "      <td>1</td>\n",
+       "      <td>135.0</td>\n",
+       "      <td>31.0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>2</td>\n",
+       "      <td>1</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.534549</td>\n",
+       "      <td>0.534549</td>\n",
+       "      <td>2.485848</td>\n",
+       "      <td>0.006752</td>\n",
+       "      <td>1540027.421</td>\n",
+       "      <td>33.935384</td>\n",
+       "      <td>0.007584</td>\n",
+       "      <td>0.024062</td>\n",
+       "      <td>0.000529</td>\n",
+       "      <td>0.006447</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>3</th>\n",
+       "      <td>TRG002206</td>\n",
+       "      <td>0</td>\n",
+       "      <td>12.0</td>\n",
+       "      <td>35.0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>3</td>\n",
+       "      <td>3</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.506185</td>\n",
+       "      <td>0.506185</td>\n",
+       "      <td>2.606255</td>\n",
+       "      <td>0.003755</td>\n",
+       "      <td>6936740.794</td>\n",
+       "      <td>46.859265</td>\n",
+       "      <td>0.005424</td>\n",
+       "      <td>0.013707</td>\n",
+       "      <td>0.000178</td>\n",
+       "      <td>0.004543</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>4</th>\n",
+       "      <td>TRG002210</td>\n",
+       "      <td>0</td>\n",
+       "      <td>109.0</td>\n",
+       "      <td>61.0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>2</td>\n",
+       "      <td>1</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.462282</td>\n",
+       "      <td>0.462282</td>\n",
+       "      <td>2.809279</td>\n",
+       "      <td>0.006521</td>\n",
+       "      <td>1265399.054</td>\n",
+       "      <td>39.621023</td>\n",
+       "      <td>0.006585</td>\n",
+       "      <td>0.034148</td>\n",
+       "      <td>0.001083</td>\n",
+       "      <td>0.005626</td>\n",
+       "    </tr>\n",
+       "  </tbody>\n",
+       "</table>\n",
+       "<p>5 rows × 121 columns</p>\n",
+       "</div>"
+      ],
+      "text/plain": [
+       "          ID  pCR (outcome)  RelapseFreeSurvival (outcome)   Age  ER  PgR  \\\n",
+       "0  TRG002174              1                          144.0  41.0   0    0   \n",
+       "1  TRG002178              0                          142.0  39.0   1    1   \n",
+       "2  TRG002204              1                          135.0  31.0   0    0   \n",
+       "3  TRG002206              0                           12.0  35.0   0    0   \n",
+       "4  TRG002210              0                          109.0  61.0   1    0   \n",
+       "\n",
+       "   HER2  TrippleNegative  ChemoGrade  Proliferation  ...  \\\n",
+       "0     0                1           3              3  ...   \n",
+       "1     0                0           3              3  ...   \n",
+       "2     0                1           2              1  ...   \n",
+       "3     0                1           3              3  ...   \n",
+       "4     0                0           2              1  ...   \n",
+       "\n",
+       "   original_glszm_SmallAreaHighGrayLevelEmphasis  \\\n",
+       "0                                       0.517172   \n",
+       "1                                       0.444391   \n",
+       "2                                       0.534549   \n",
+       "3                                       0.506185   \n",
+       "4                                       0.462282   \n",
+       "\n",
+       "   original_glszm_SmallAreaLowGrayLevelEmphasis  original_glszm_ZoneEntropy  \\\n",
+       "0                                      0.375126                    3.325332   \n",
+       "1                                      0.444391                    3.032144   \n",
+       "2                                      0.534549                    2.485848   \n",
+       "3                                      0.506185                    2.606255   \n",
+       "4                                      0.462282                    2.809279   \n",
+       "\n",
+       "   original_glszm_ZonePercentage  original_glszm_ZoneVariance  \\\n",
+       "0                       0.002314                  3880771.500   \n",
+       "1                       0.005612                  2372009.744   \n",
+       "2                       0.006752                  1540027.421   \n",
+       "3                       0.003755                  6936740.794   \n",
+       "4                       0.006521                  1265399.054   \n",
+       "\n",
+       "   original_ngtdm_Busyness  original_ngtdm_Coarseness  \\\n",
+       "0               473.464852                   0.000768   \n",
+       "1                59.459710                   0.004383   \n",
+       "2                33.935384                   0.007584   \n",
+       "3                46.859265                   0.005424   \n",
+       "4                39.621023                   0.006585   \n",
+       "\n",
+       "   original_ngtdm_Complexity  original_ngtdm_Contrast  original_ngtdm_Strength  \n",
+       "0                   0.182615                 0.030508                 0.000758  \n",
+       "1                   0.032012                 0.001006                 0.003685  \n",
+       "2                   0.024062                 0.000529                 0.006447  \n",
+       "3                   0.013707                 0.000178                 0.004543  \n",
+       "4                   0.034148                 0.001083                 0.005626  \n",
+       "\n",
+       "[5 rows x 121 columns]"
+      ]
+     },
+     "execution_count": 1,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "import pandas as pd\n",
+    "\n",
+    "df= pd.read_excel(\"TrainDataset2024.xls\")\n",
+    "df.head()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "6cecd481-9f9e-45a6-9ece-6b43a71b2fb1",
+   "metadata": {},
+   "source": [
+    "## Data Preprocessing"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "52341c70-e6d4-4738-8817-eda5398912d8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "df = df.rename(columns={\"pCR (outcome)\": \"PCR\"})"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "8457af35-f78b-4595-b162-77fa6f62621d",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Number of null values:  105\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "df.replace(999, np.nan, inplace= True)\n",
+    "null = df.isna().sum().sum()\n",
+    "print(\"Number of null values: \",null)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "cf17629c-72ca-4fb7-a094-8c26cfd34dfa",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "=== Features with Missing Values ===\n",
+      "PCR                 5\n",
+      "PgR                 1\n",
+      "HER2                1\n",
+      "TrippleNegative     1\n",
+      "ChemoGrade          3\n",
+      "Proliferation       2\n",
+      "HistologyType       3\n",
+      "LNStatus            1\n",
+      "Gene               88\n",
+      "dtype: int64\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAABKUAAAJOCAYAAABm7rQwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/GU6VOAAAACXBIWXMAAA9hAAAPYQGoP6dpAACXMUlEQVR4nOzdd3yN9///8ecRkcSILWKEGLX3HlUjdo1SW41SarX2aG1qtqW1S6tqa6xWiV27au8ttiBIgjRB8v794ZfzlYZ+EuIcSR732y03zvu6znVe57zPdcbzvK/3ZTHGGAEAAAAAAAA2lMjeBQAAAAAAACDhIZQCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAgHvrzzz9lsVjk7e1t71Ki5datW/rwww+VNm1aWSwWTZ48OdZvo127dsqePXusb3f48OGyWCyxvl17+Pnnn2WxWHTp0iV7lxLFpUuXZLFY9PXXX9u7FAAAEEsIpQAAeEURX+CdnZ11/fr1KMsrV66sggUL2qGyuKdXr15av369Bg0apPnz56tWrVovXddischisahjx44vXP7ll19a1/H3939TJdvVkydPlC5dOlWsWPGl6xhjlDVrVhUvXtyGlcV9Efv1i/4GDhz4Rm5z9+7dGj58uAICAt7I9gEAeFsltncBAADEdaGhoRo3bpymTJli71LirC1btqhBgwbq27dvtNZ3dnbW8uXLNX36dCVJkiTSssWLF8vZ2VkhISGR2mfPnq3w8PBYqznC4MGD31hY8TKOjo5q0qSJZs2apcuXLytbtmxR1tm+fbuuXbumXr162bS2+GLkyJHy9PSM1PamQubdu3drxIgRateunVKlSvVGbgMAgLcRI6UAAHhNRYsW1ezZs3Xjxg17l2Jzjx49ipXt3L59O0ZfxmvVqqWgoCCtW7cuUvvu3bvl6+urunXrRrmOo6OjnJycXrfUKBInTixnZ+dY3+7/0qpVKxljtHjx4hcuX7RokRIlSqTmzZvbuLL4oXbt2mrdunWkv6JFi9q7rBiJrf0TAIA3hVAKAIDX9MUXXygsLEzjxo37z/Ui5sT5+eefoyyzWCwaPny49XLEPEVnz55V69atlTJlSqVPn15DhgyRMUZXr15VgwYN5OrqqowZM+qbb7554W2GhYXpiy++UMaMGZUsWTLVr19fV69ejbLe3r17VatWLaVMmVJJkybVe++9p127dkVaJ6KmkydPqmXLlkqdOvV/Hj4mSRcvXlSTJk2UJk0aJU2aVGXLltUff/xhXR5xqJQxRtOmTbMeJvW/ZM6cWZUqVdKiRYsitS9cuFCFChV64YiWF80ptWTJEpUoUUIpUqSQq6urChUqpO+++866/MmTJxoxYoRy584tZ2dnpU2bVhUrVtTGjRujPC7Ps1gs6t69u1atWqWCBQvKyclJBQoUkI+PT5S6/vzzT5UsWVLOzs7KmTOnZs2aFa15qipUqKDs2bNHeQwi6vb29laVKlWUKVMmHT16VO3atVOOHDnk7OysjBkz6uOPP9bdu3f/8zYi7svzz80I2bNnV7t27SK1BQQEqGfPnsqaNaucnJyUK1cujR8/PsoItf/1uP8vkyZNUrZs2eTi4qL33ntPx48fty6bO3euLBaLDh06FOV6Y8aMkYODwwsPt42pdevW6d1331WyZMmUIkUK1a1bVydOnIi0TnQe9+HDh6tfv36SJE9PT+s+cOnSpVd6zXjZ/rlgwQKVKFFCLi4uSpMmjZo3b/7C1wIAAGyJw/cAAHhNnp6eatOmjWbPnq2BAwcqU6ZMsbbtZs2aKV++fBo3bpz++OMPjR49WmnSpNGsWbNUtWpVjR8/XgsXLlTfvn1VqlQpVapUKdL1v/rqK1ksFg0YMEC3b9/W5MmT5eXlpcOHD8vFxUXSs0PnateurRIlSmjYsGFKlCiR5s6dq6pVq2rHjh0qXbp0pG02adJEuXPn1pgxY2SMeWntt27dUvny5RUcHKzPPvtMadOm1bx581S/fn15e3vrgw8+UKVKlTR//nx99NFHql69utq0aRPtx6Zly5b6/PPP9fDhQyVPnlxPnz7Vr7/+qt69e0c5dO9FNm7cqBYtWqhatWoaP368JOnUqVPatWuXPv/8c0nPvuiPHTtWHTt2VOnSpRUUFKT9+/fr4MGDql69+n9uf+fOnVqxYoW6du2qFClS6Pvvv1fjxo115coVpU2bVpJ06NAh1apVS+7u7hoxYoTCwsI0cuRIpU+f/n/Wb7FY1LJlS40ZM0YnTpxQgQIFrMt8fHx07949tWrVynpfL168qPbt2ytjxow6ceKEfvjhB504cUJ//fVXrEzUHhwcrPfee0/Xr19X586d5eHhod27d2vQoEG6efOmdfL66Dzu/+WXX37RgwcP1K1bN4WEhOi7775T1apVdezYMbm5uenDDz9Ut27dtHDhQhUrVizSdRcuXKjKlSsrc+bM//N2AgMDo8xJli5dOknS/Pnz1bZtW9WsWVPjx49XcHCwZsyYoYoVK+rQoUPW8DM6j3ujRo109uxZLV68WJMmTbLeRvr06XXnzp3/Wee/vWj//OqrrzRkyBA1bdpUHTt21J07dzRlyhRVqlRJhw4d4pBBAID9GAAA8Ermzp1rJJl9+/aZCxcumMSJE5vPPvvMuvy9994zBQoUsF729fU1kszcuXOjbEuSGTZsmPXysGHDjCTTqVMna9vTp09NlixZjMViMePGjbO2379/37i4uJi2bdta27Zu3WokmcyZM5ugoCBr+7Jly4wk89133xljjAkPDze5c+c2NWvWNOHh4db1goODjaenp6levXqUmlq0aBGtx6dnz55GktmxY4e17cGDB8bT09Nkz57dhIWFRbr/3bp1i9Z2I9a9d++eSZIkiZk/f74xxpg//vjDWCwWc+nSJWutd+7csV6vbdu2Jlu2bNbLn3/+uXF1dTVPnz596W0VKVLE1K1b9z/ribitf9eYJEkSc/78eWvbkSNHjCQzZcoUa1u9evVM0qRJzfXr161t586dM4kTJ46yzRc5ceKEkWQGDRoUqb158+bG2dnZBAYGGmOe9ee/LV682Egy27dvt7ZFPKd9fX0j3Zfnn5sRsmXLFuk5N2rUKJMsWTJz9uzZSOsNHDjQODg4mCtXrhhjove4v0jE/uPi4mKuXbtmbd+7d6+RZHr16mVta9GihcmUKVOk59jBgwdfuv89L+IxeNGfMc+ew6lSpTKffPJJpOv5+fmZlClTRmqP7uM+ceLEKI/78/c5Jq8Z/94/L126ZBwcHMxXX30Vqf3YsWMmceLEUdoBALAlDt8DACAW5MiRQx999JF++OEH3bx5M9a2+/wZ5hwcHFSyZEkZY9ShQwdre6pUqZQnTx5dvHgxyvXbtGmjFClSWC9/+OGHcnd319q1ayVJhw8f1rlz59SyZUvdvXtX/v7+8vf316NHj1StWjVt3749yq
FXn376abRqX7t2rUqXLh3pEKLkyZOrU6dOunTpkk6ePBm9B+ElUqdOrVq1alnnVFq0aJHKly//wkm/XyRVqlR69OhRpEPxXrTOiRMndO7cuRjX5+XlpZw5c1ovFy5cWK6urtZ+CgsL06ZNm9SwYcNIo+ty5cql2rVrR+s28ufPr2LFimnJkiXWtkePHum3337T+++/L1dXV0myjoqTpJCQEPn7+6ts2bKSpIMHD8b4vr3Ir7/+qnfffVepU6e2Po/8/f3l5eWlsLAwbd++XVL0Hvf/0rBhw0gjnUqXLq0yZcpYn9PSs+f9jRs3tHXrVmvbwoUL5eLiosaNG0frdqZNm6aNGzdG+pOejX4KCAhQixYtIt1PBwcHlSlTJtJt2uJx/7d/758rVqxQeHi4mjZtGqnejBkzKnfu3JHqBQDA1gilAACIJYMHD9bTp0//59xSMeHh4RHpcsqUKeXs7Gw9xOf59vv370e5fu7cuSNdtlgsypUrly5duiRJ1rClbdu2Sp8+faS/OXPmKDQ0VIGBgZG28e8zkr3M5cuXlSdPnijt+fLlsy5/XS1bttTGjRt15coVrVq1Si1btoz2dbt27ap33nlHtWvXVpYsWfTxxx9HmfNp5MiRCggI0DvvvKNChQqpX79+Onr0aLS2/+++k54FaRH9dPv2bf3zzz/KlStXlPVe1PYyrVq1kq+vr3bv3i1JWrVqlYKDg62H7knSvXv39Pnnn8vNzU0uLi5Knz69tR//3b+v6ty5c/Lx8YnyPPLy8pL07P5K0Xvc/8u/n9OS9M4771if05JUvXp1ubu7a+HChZKk8PBwLV68WA0aNIgU0v6X0qVLy8vLK9JfxP2UpKpVq0a5rxs2bLDeT8k2j/u//Xv/PHfunIwxyp07d5R6T506FaleAABsjTmlAACIJTly5FDr1q31ww8/aODAgVGWv2zenrCwsJdu08HBIVptkv5zfqeXiRgFNXHixJeeWSx58uSRLj8/+sPe6tevLycnJ7Vt21ahoaFq2rRptK+bIUMGHT58WOvXr9e6deu0bt06zZ07V23atNG8efMkSZUqVdKFCxe0evVqbdiwQXPmzNGkSZM0c+bMSKPYXiQ2++m/tGjRQv3797eOFFu0aJFSp06tOnXqWNdp2rSpdu/erX79+qlo0aJKnjy5wsPDVatWrSgj4aLr38/b8PBwVa9eXf3793/h+u+8846k6D3ur8vBwUEtW7bU7NmzNX36dO3atUs3btxQ69atX3vbEY/X/PnzlTFjxijLEyf+v4/Xr/u4v8prxr/3z/DwcFksFq1bt+6Fz8l/798AANgSoRQAALFo8ODBWrBggXUC5+elTp1a0rMzlD0vNkYMvcy/Dzszxuj8+fMqXLiwJFkPL3N1dbWOBIkt2bJl05kzZ6K0nz592rr8dbm4uKhhw4ZasGCBateuHWUE2f+SJEkS1atXT/Xq1VN4eLi6du2qWbNmaciQIdbRSmnSpFH79u3Vvn17PXz4UJUqVdLw4cP/Zyj1v2TIkEHOzs46f/58lGUvanuZTJkyqUqVKvr11181ZMgQbdy4Ue3atVOSJEkkSffv39fmzZs1YsQIDR061Hq96B6SmDp16ijP2cePH0c5TDVnzpx6+PBhtJ5H0XncX+ZFdZ89ezbKmRXbtGmjb775Rr///rvWrVun9OnTq2bNmv+ztv8lYp/JkCHDf97XmDzuLwufYuM1I2fOnDLGyNPT0xoMAgDwtuDwPQAAYlHOnDnVunVrzZo1S35+fpGWubq6Kl26dNa5dSJMnz79jdUTcaayCN7e3rp586Z1zqISJUooZ86c+vrrr/Xw4cMo13+Vs39FqFOnjv7++2/t2bPH2vbo0SP98MMPyp49u/Lnz//K235e3759NWzYMA0ZMiRG17t7926ky4kSJbKGdaGhoS9cJ3ny5MqVK5d1+etwcHCQl5eXVq1apRs3bljbz58/r3Xr1sVoW61atdLt27fVuXNnPXnyJNKhexGjY/49QivibHj/S86cOaM8Z3/44Ycoo3WaNm2qPXv2aP369VG2ERAQoKdPn0qK3uP+X1atWqXr169bL//999/au3dvlHm4ChcurMKFC2vOnDlavny5mjdvHmkU06uqWbOmXF1dNWbMGD158iTK8oh9JiaPe7JkySRFDZ9i4zWjUaNGcnBw0IgRI6LUYoyJ0h8AANgSI6UAAIhlX375pebPn68zZ86oQIECkZZ17NhR48aNU8eOHVWyZElt375dZ8+efWO1pEmTRhUrVlT79u1169YtTZ48Wbly5dInn3wi6VkgMGfOHNWuXVsFChRQ+/btlTlzZl2/fl1bt26Vq6urfv/991e67YEDB2rx4sWqXbu2PvvsM6VJk0bz5s2Tr6+vli9frkSJYue3sSJFiqhIkSIxvl7Hjh117949Va1aVVmyZNHly5c1ZcoUFS1a1DrvVf78+VW5cmWVKFFCadKk0f79++Xt7a3u3bvHSu3Dhw/Xhg0bVKFCBXXp0kVhYWGaOnWqChYsqMOHD0d7O40bN1bXrl21evVqZc2aVZUqVbIuc3V1VaVKlTRhwgQ9efJEmTNn1oYNG+Tr6xutbXfs2FGffvqpGjdurOrVq+vIkSNav359lFFp/fr1s06w3q5dO5UoUUKPHj3SsWPH5O3trUuXLildunTRetz/S65cuVSxYkV16dJFoaGhmjx5stKmTfvCwwbbtGmjvn37SlKsHLonPXs8Z8yYoY8++kjFixdX8+bNlT59el25ckV//PGHKlSooKlTp8bocS9RooSkZ68dzZs3l6Ojo+rVq6dkyZK99mtGzpw5NXr0aA0aNEiXLl1Sw4YNlSJFCvn6+mrlypXq1KmT9TECAMDWCKUAAIhluXLlUuvWrV84P87QoUN1584deXt7a9myZapdu7bWrVunDBkyvJFavvjiCx09elRjx47VgwcPVK1aNU2fPl1Jkya1rlO5cmXt2bNHo0aN0tSpU/Xw4UNlzJhRZcqUUefOnV/5tt3c3LR7924NGDBAU6ZMUUhIiAoXLqzff/9ddevWjY2791oi5v+aPn26AgIClDFjRjVr1kzDhw+3BmafffaZfvvtN23YsEGhoaHKli2bRo8erX79+sVKDSVKlNC6devUt29fDRkyRFmzZtXIkSN16tQp62GO0eHq6qp69erp119/VYsWLaIcDrZo0SL16NFD06ZNkzFGNWrU0Lp16yKd9e9lPvnkE/n6+urHH3+Uj4+P3n33XW3cuFHVqlWLtF7SpEm1bds2jRkzRr/++qt++eUXubq66p133tGIESOUMmVKSdF73P9LmzZtlChRIk2ePFm3b99W6dKlNXXqVLm7u0dZt1WrVhowYIBy5syp0qVL/89tR1fLli2VKVMmjRs3ThMnTlRoaKgyZ86sd999V+3bt7euF93HvVSpUho1apRmzpwpHx8fhYeHy9fXV8mSJYuV14yBAwfqnXfe0aRJkzRixAhJUtasWVWjRg3Vr18/dh4UAABegcXE9mybAAAAeC0NGzbUiRMnoj3vE17M399f7u7uG
jp0aIwP7wQAAG8ec0oBAADY0T///BPp8rlz57R27VpVrlzZPgXFIz///LPCwsL00Ucf2bsUAADwAoyUAgAAsCN3d3e1a9dOOXLk0OXLlzVjxgyFhobq0KFDyp07t73Li5O2bNmikydPasiQIapSpYpWrFhh75IAAMALEEoBAADYUfv27bV161b5+fnJyclJ5cqV05gxY1S8eHF7lxZnVa5cWbt371aFChW0YMECZc6c2d4lAQCAFyCUAgAAAAAAgM0xpxQAAAAAAABsjlAKAAAAAAAANpfY3gW8aeHh4bpx44ZSpEghi8Vi73IAAAAAAADiNWOMHjx4oEyZMilRopePh4r3odSNGzeUNWtWe5cBAAAAAACQoFy9elVZsmR56fJ4H0qlSJFC0rMHwtXV1c7VAAAAAAAAxG9BQUHKmjWrNZN5mXgfSkUcsufq6kooBQAAAAAAYCP/axolJjoHAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNJbZ3AQAAAAAAAG/KlStX5O/vb+8y7CJdunTy8PCwdxkvRSgFAAAAAADipStXrihvvnz6JzjY3qXYhUvSpDp96tRbG0wRSgEAAAAAgHjJ399f/wQHq+noGcrgmdve5djUbd9zWja4i/z9/QmlAAAAAAAA7CGDZ25lzlfE3mXgX5joHAAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwObuGUmFhYRoyZIg8PT3l4uKinDlzatSoUTLGWNcxxmjo0KFyd3eXi4uLvLy8dO7cOTtWDQAAAAAAgNdl11Bq/PjxmjFjhqZOnapTp05p/PjxmjBhgqZMmWJdZ8KECfr+++81c+ZM7d27V8mSJVPNmjUVEhJix8oBAAAAAADwOhLb88Z3796tBg0aqG7dupKk7Nmza/Hixfr7778lPRslNXnyZA0ePFgNGjSQJP3yyy9yc3PTqlWr1Lx5c7vVDgAAAAAAgFdn15FS5cuX1+bNm3X27FlJ0pEjR7Rz507Vrl1bkuTr6ys/Pz95eXlZr5MyZUqVKVNGe/bseeE2Q0NDFRQUFOkPAAAAAAAAbxe7jpQaOHCggoKClDdvXjk4OCgsLExfffWVWrVqJUny8/OTJLm5uUW6npubm3XZv40dO1YjRox4s4UDAAAAAADgtdh1pNSyZcu0cOFCLVq0SAcPHtS8efP09ddfa968ea+8zUGDBikwMND6d/Xq1VisGAAAAAAAALHBriOl+vXrp4EDB1rnhipUqJAuX76ssWPHqm3btsqYMaMk6datW3J3d7de79atWypatOgLt+nk5CQnJ6c3XjsAAAAAAABenV1HSgUHBytRosglODg4KDw8XJLk6empjBkzavPmzdblQUFB2rt3r8qVK2fTWgEAAAAAABB77DpSql69evrqq6/k4eGhAgUK6NChQ/r222/18ccfS5IsFot69uyp0aNHK3fu3PL09NSQIUOUKVMmNWzY0J6lAwAAAAAA4DXYNZSaMmWKhgwZoq5du+r27dvKlCmTOnfurKFDh1rX6d+/vx49eqROnTopICBAFStWlI+Pj5ydne1YOQAAAAAAAF6HXUOpFClSaPLkyZo8efJL17FYLBo5cqRGjhxpu8IAAAAAAADwRtl1TikAAAAAAAAkTIRSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAm4uVUCogICA2NgMAAAAAAIAEIsah1Pjx47V06VLr5aZNmypt2rTKnDmzjhw5EqvFAQAAAAAAIH6KcSg1c+ZMZc2aVZK0ceNGbdy4UevWrVPt2rXVr1+/WC8QAAAAAAAA8U/imF7Bz8/PGkqtWbNGTZs2VY0aNZQ9e3aVKVMm1gsEAAAAAABA/BPjkVKpU6fW1atXJUk+Pj7y8vKSJBljFBYWFrvVAQAAAAAAIF6K8UipRo0aqWXLlsqdO7fu3r2r2rVrS5IOHTqkXLlyxXqBAAAAAAAAiH9iHEpNmjRJ2bNn19WrVzVhwgQlT55cknTz5k117do11gsEAAAAAABA/BPjUMrR0VF9+/aN0t6rV69YKQgAAAAAAADxX4znlJKk+fPnq2LFisqUKZMuX74sSZo8ebJWr14dq8UBAAAAAAAgfopxKDVjxgz17t1btWvXVkBAgHVy81SpUmny
5MmxXR8AAAAAAADioRiHUlOmTNHs2bP15ZdfysHBwdpesmRJHTt2LFaLAwAAAAAAQPwU41DK19dXxYoVi9Lu5OSkR48exUpRAAAAAAAAiN9iHEp5enrq8OHDUdp9fHyUL1++GBdw/fp1tW7dWmnTppWLi4sKFSqk/fv3W5cbYzR06FC5u7vLxcVFXl5eOnfuXIxvBwAAAAAAAG+PGJ99r3fv3urWrZtCQkJkjNHff/+txYsXa+zYsZozZ06MtnX//n1VqFBBVapU0bp165Q+fXqdO3dOqVOntq4zYcIEff/995o3b548PT01ZMgQ1axZUydPnpSzs3NMywcAAAAAAMBbIMahVMeOHeXi4qLBgwcrODhYLVu2VKZMmfTdd9+pefPmMdrW+PHjlTVrVs2dO9fa5unpaf2/MUaTJ0/W4MGD1aBBA0nSL7/8Ijc3N61atSrGtwcAAAAAAIC3Q4wP35OkVq1a6dy5c3r48KH8/Px07do1dejQIcbb+e2331SyZEk1adJEGTJkULFixTR79mzrcl9fX/n5+cnLy8valjJlSpUpU0Z79ux5ldIBAAAAAADwFnilUCpC0qRJlSFDhle+/sWLFzVjxgzlzp1b69evV5cuXfTZZ59p3rx5kiQ/Pz9JkpubW6Trubm5WZf9W2hoqIKCgiL9AQAAAAAA4O0S48P3PD09ZbFYXrr84sWL0d5WeHi4SpYsqTFjxkiSihUrpuPHj2vmzJlq27ZtTEuTJI0dO1YjRox4pesCAAAAAADANmIcSvXs2TPS5SdPnujQoUPy8fFRv379YrQtd3d35c+fP1Jbvnz5tHz5cklSxowZJUm3bt2Su7u7dZ1bt26paNGiL9zmoEGD1Lt3b+vloKAgZc2aNUZ1AQAAAAAA4M2KcSj1+eefv7B92rRp2r9/f4y2VaFCBZ05cyZS29mzZ5UtWzZJz0ZlZcyYUZs3b7aGUEFBQdq7d6+6dOnywm06OTnJyckpRnUAAAAAAADAtl5rTqnn1a5d2zrCKbp69eqlv/76S2PGjNH58+e1aNEi/fDDD+rWrZskyWKxqGfPnho9erR+++03HTt2TG3atFGmTJnUsGHD2CodAAAAAAAANhbjkVIv4+3trTRp0sToOqVKldLKlSs1aNAgjRw5Up6enpo8ebJatWplXad///569OiROnXqpICAAFWsWFE+Pj5ydnaOrdIBAAAAAABgYzEOpYoVKxZponNjjPz8/HTnzh1Nnz49xgW8//77ev/991+63GKxaOTIkRo5cmSMtw0AAAAAAIC3U4xDqX8fNpcoUSKlT59elStXVt68eWOrLgAAAAAAAMRjMQ6lhg0b9ibqAAAAAAAAQAISrVAqKCgo2ht0dXV95WIAAAAAAACQMEQrlEqVKlWkeaRexBgji8WisLCwWCkMAAAAAAAA8Ve0QqmtW7e+6ToAAAAAAACQgEQrlHrvvffedB0AAAAAAABIQGI80XmE4OBgXblyRY8fP47UXrhw4dcuCgAAAAAAAPFbjEOpO3fuqH379lq3bt0LlzOnFAAAAAAAAP6XRDG9Qs+ePRUQEKC9e/fKxcVFPj4+mjdvnnLnzq3ffvvtTdQIAAAAAACAeCbGI6W2bNmi1atXq2TJkkqUKJGyZcum6tWry9XVVWPHjlXdunXfRJ0AAAAAAACIR2I8UurRo0fKkCGDJCl16tS6c+eOJKlQoUI6ePBg7FYHAAAAAACAeCnGoVSePHl05swZSVKRIkU0a9YsXb9+XTNnzpS7u3usFwgAAAAAAID4J8aH733++ee6efOmJGnYsGGqVauWFi5cqCRJkujnn3+O7foAAAAAAAAQD0U7lPrwww/VsWNHtWrVShaLRZJUokQJXb58WadPn5aHh4fSpUv3xgoFAAAAAABA/BHtw/fu37+vunXrysPDQ0OHDtXFixclSUmTJlXx4sUJpAAAAAAAABBt0Q6lNm/erIsXL6pDhw5asGCBcufOrapVq2rRokUKDQ19kzUCAAAAAAAgnonRROfZsmXT8OHDdfHiRW3cuFGZMmXSJ598Ind3d3Xr1k0HDhx4U3UCAAAAAAAgHonx2fciVK1aVQsWLJCfn5/Gjh2rJUuWqEyZMrFZGwAAAAAAAOKpGJ9973m+vr76+eef9fPPPyswMFBeXl6xVRcAAAAAAADisRiPlAoJCdGCBQtUtWpV5c6dW7/88os6dOggX19f+fj4vIkaAQAAAAAAEM9Ee6TU33//rZ9++klLly5VSEiIPvjgA/n4+KhatWqyWCxvskYAAAAAAADEM9EOpcqWLasiRYpo1KhRatWqlVKnTv0m6wIAAAAAAEA8Fu1Qav/+/SpevPibrAUAAAAAAAAJRLTnlCKQAgAAAAAAQGyJ8UTnAAAAAAAAwOsilAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHPRPvtehGLFislisURpt1gscnZ2Vq5cudSuXTtVqVIlVgoEAAAAAABA/BPjkVK1atXSxYsXlSxZMlWpUkVVqlRR8uTJdeHCBZUqVUo3b96Ul5eXVq9e/SbqBQAAAAAAQDwQ45FS/v7+6tOnj4YMGRKpffTo0bp8+bI2bNigYcOGadSoUWrQoEGsFQoAAAAAAID4I8YjpZYtW6YWLVpEaW/evLmWLVsmSWrRooXOnDnz+tUBAAAAAAAgXopxKOXs7Kzdu3dHad+9e7ecnZ0lSeHh4db/AwAAAAAAAP8W48P3evTooU8//VQHDhxQqVKlJEn79u3TnDlz9MUXX0iS1q9fr6JFi8ZqoQAAAAAAAIg/YhxKDR48WJ6enpo6darmz58vScqTJ49mz56tli1bSpI+/fRTdenSJXYrBQAAAAAAQLwR41BKklq1aqVWrVq9dLmLi8srFwQAAAAAAID475VCKUl6/Pixbt++rfDw8EjtHh4er10UAAAAAAAA4rcYh1Lnzp3Txx9/HGWyc2OMLBaLwsLCYq04AAAAAAAAxE8xDqXatWunxIkTa82aNXJ3d5fFYnkTdQEAAAAAACAei3EodfjwYR04cEB58+Z9E/UAAAAAAAAgAUgU0yvkz59f/v7+b6IWAAAAAAAAJBAxDqXGjx+v/v37688//9Tdu3cVFBQU6Q8AAAAAAAD4X2J8+J6Xl5ckqVq1apHamegcAAAAAAAA0RXjUGrr1q1vog4AAAAAAAAkIDEOpd577703UQcAAAAAAAASkGiFUkePHlXBggWVKFEiHT169D/XLVy4cKwUBgAAAAAAgPgrWqFU0aJF5efnpwwZMqho0aKyWCwyxkRZjzmlAAAAAAAAEB3RCqV8fX2VPn166/8BAAAAAACA1xGtUCpbtmwv/D8AAAAAAADwKhLF9Arz5s3TH3/8Yb3cv39/pUqVSuXLl9fly5djtTgAAAAAAADETzEOpcaMGSMXFxdJ0p49ezR16lRNmDBB6dKlU69evWK9QAAAAAAAAMQ/0Tp873lXr15Vrly5JEmrVq3Shx9+qE6dOqlChQqqXLlybNcHAAAAAACAeCjGI6WSJ0+uu3fvSpI2bNig6tWrS5K
cnZ31zz//xG51AAAAAAAAiJdiPFKqevXq6tixo4oVK6azZ8+qTp06kqQTJ04oe/bssV0fAAAAAAAA4qEYj5SaNm2aypUrpzt37mj58uVKmzatJOnAgQNq0aJFrBcIAAAAAACA+CfGI6VSpUqlqVOnRmkfMWJErBQEAAAAAACA+C/GI6V8fHy0c+dO6+Vp06apaNGiatmype7fvx+rxQEAAAAAACB+inEo1a9fPwUFBUmSjh07pj59+qhOnTry9fVV7969Y71AAAAAAAAAxD8xPnzP19dX+fPnlyQtX75c77//vsaMGaODBw9aJz0HAAAAAAAA/kuMR0olSZJEwcHBkqRNmzapRo0akqQ0adJYR1ABAAAAAAAA/yXGI6UqVqyo3r17q0KFCvr777+1dOlSSdLZs2eVJUuWWC8QAAAAAAAA8U+MR0pNnTpViRMnlre3t2bMmKHMmTNLktatW6datWrFeoEAAAAAAACIf2I8UsrDw0Nr1qyJ0j5p0qRYKQgAAAAAAADxX7RCqaCgILm6ulr//18i1gMAAAAAAABeJlqhVOrUqXXz5k1lyJBBqVKlksViibKOMUYWi0VhYWGxXiQAAAAAAADil2iFUlu2bFGaNGkkSVu3bn2jBQEAAAAAACD+i1Yo9d57773w/wAAAAAAAMCriPZE51euXInWeh4eHq9cDAAAAAAAABKGaIdSnp6e1v8bYyQp0txSzCkFAAAAAACA6Ip2KGWxWJQlSxa1a9dO9erVU+LE0b4qAAAAAAAAEEm0k6Vr165p3rx5mjt3rmbOnKnWrVurQ4cOypcv35usDwAAAAAAAPFQouiumDFjRg0YMECnT5+Wt7e37t+/rzJlyqhs2bKaPXu2wsPD32SdAAAAAAAAiEeiHUo9r2LFivrxxx917tw5JU2aVJ9++qkCAgJiuTQAAAAAAADEV68USu3evVsdO3bUO++8o4cPH2ratGlKlSpVLJcGAAAAAACA+Crac0rdvHlTv/zyi+bOnav79++rVatW2rVrlwoWLPgm6wMAAAAAAEA8FO1QysPDQ5kzZ1bbtm1Vv359OTo6Kjw8XEePHo20XuHChWO9SAAAAAAAAMQv0Q6lwsLCdOXKFY0aNUqjR4+WJBljIq1jsVgUFhYWuxUCAAAAAAAg3ol2KOXr6/sm6wAAAAAAAEACEu1QKlu2bG+yDgAAAAAAACQgr3T2PQAAAAAAAOB1EEoBAAAAAADA5gilAAAAAAAAYHPRCqV+++03PXny5E3XAgAAAAAAgAQiWqHUBx98oICAAEmSg4ODbt++HeuFjBs3ThaLRT179rS2hYSEqFu3bkqbNq2SJ0+uxo0b69atW7F+2wAAAAAAALCtaIVS6dOn119//SVJMsbIYrHEahH79u3TrFmzVLhw4UjtvXr10u+//65ff/1V27Zt040bN9SoUaNYvW0AAAAAAADYXrRCqU8//VQNGjSQg4ODLBaLMmbMKAcHhxf+xdTDhw/VqlUrzZ49W6lTp7a2BwYG6scff9S3336rqlWrqkSJEpo7d652795tDcgAAAAAAAAQNyWOzkrDhw9X8+bNdf78edWvX19z585VqlSpYqWAbt26qW7duvLy8tLo0aOt7QcOHNCTJ0/k5eVlbcubN688PDy0Z88elS1b9oXbCw0NVWhoqPVyUFBQrNQJAAAAAACA2BOtUEp6FgjlzZtXw4YNU5MmTZQ0adLXvvElS5bo4MGD2rdvX5Rlfn5+SpIkSZTwy83NTX5+fi/d5tixYzVixIjXrg0AAAAAAABvTrQO33vesGHDlDRpUt25c0c7d+7Uzp07defOnRjf8NWrV/X5559r4cKFcnZ2jvH1X2bQoEEKDAy0/l29ejXWtg0AAAAAAIDYEeNQKjg4WB9//LEyZcqkSpUqqVKlSsqUKZM6dOig4ODgaG/nwIEDun37tooXL67EiRMrceLE2rZtm77//nslTpxYbm5uevz4sfWsfxFu3bqljBkzvnS7Tk5OcnV1jfQHAAAAAACAt0uMQ6levXpp27Zt+u233xQQEKCAgACtXr1a27ZtU58+faK9nWrVqunYsWM6fPiw9a9kyZJq1aqV9f+Ojo7avHmz9TpnzpzRlStXVK5cuZiWDQAAAAAAgLdItOeUirB8+XJ5e3urcuXK1rY6derIxcVFTZs21YwZM6K1nRQpUqhgwYKR2pIlS6a0adNa2zt06KDevXsrTZo0cnV1VY8ePVSuXLmXTnIOAAAAAACAuCHGoVRwcLDc3NyitGfIkCFGh+9Fx6RJk5QoUSI1btxYoaGhqlmzpqZPnx6rtwEAAAAAAADbi3EoVa5cOQ0bNky//PKLdYLyf/75RyNGjHjtw+r+/PPPSJednZ01bdo0TZs27bW2CwAAAAAAgLdLjEOp7777TjVr1lSWLFlUpEgRSdKRI0fk7Oys9evXx3qBAAAAAAAAiH9iHEoVLFhQ586d08KFC3X69GlJUosWLdSqVSu5uLjEeoEAAAAAAACIf2IcSklS0qRJ9cknn8R2LQAAAAAAAEggEtm7AAAAAAAAACQ8hFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGwuxqFUjhw5dPfu3SjtAQEBypEjR6wUBQAAAAAAgPgtxqHUpUuXFBYWFqU9NDRU169fj5WiAAAAAAAAEL8lju6Kv/32m/X/69evV8qUKa2Xw8LCtHnzZmXPnj1WiwMAAAAAAED8FO1QqmHDhpIki8Witm3bRlrm6Oio7Nmz65tvvonV4gAAAAAAABA/RTuUCg8PlyR5enpq3759Spcu3RsrCgAAAAAAAPFbtEOpCL6+vm+iDgAAAAAAACQgMQ6lJGnz5s3avHmzbt++bR1BFeGnn36KlcIAAAAAAAAQf8U4lBoxYoRGjhypkiVLyt3dXRaL5U3UBQAAAAAAgHgsxqHUzJkz9fPPP+ujjz56E/UAAAAAAAAgAUgU0ys8fvxY5cuXfxO1AAAAAAAAIIGIcSjVsWNHLVq06E3UAgAAAAAAgAQixofvhYSE6IcfftCmTZtUuHBhOTo6Rlr+7bffxlpxAAAAAAAAiJ9iHEodPXpURYsWlSQdP3480jImPQcAAAAAAEB0xDiU2rp165uoAwAAAAAAAAlIjOeUAgAAAAAAAF5XjEdKValS5T8P09uyZctrFQQAAAAAAID4L8ahVMR8UhGePHmiw4cP6/jx42rbtm1s1QUAAAAAAIB4LMah1KRJk17YPnz4cD18+PC1CwIAAAAAAED8F2tzSrVu3Vo//fRTbG0OAAAAAAAA8VishVJ79uyRs7NzbG0OAAAAAAAA8ViMD99r1KhRpMvGGN28eVP79+/XkCFDYq0wAAAAAAAAxF8xDqVSpkwZ6XKiRImUJ08ejRw5UjVq1Ii1wgAAAAAAABB/xTiUmjt37puoAwAAAAAAAAlIjEOpCAcOHNCpU6ckSQUKFFCxYsVirSgAAAAAAADEbzEOpW7fvq3mzZvrzz//VKpUqSRJAQEBqlKlipYsWaL06dPHdo0AAAAAAACIZ2J89r0ePXrowYMHOn
HihO7du6d79+7p+PHjCgoK0mefffYmagQAAAAAAEA8E+ORUj4+Ptq0aZPy5ctnbcufP7+mTZvGROcAAAAAAACIlhiPlAoPD5ejo2OUdkdHR4WHh8dKUQAAAAAAAIjfYhxKVa1aVZ9//rlu3Lhhbbt+/bp69eqlatWqxWpxAAAAAAAAiJ9iHEpNnTpVQUFByp49u3LmzKmcOXPK09NTQUFBmjJlypuoEQAAAAAAAPFMjOeUypo1qw4ePKhNmzbp9OnTkqR8+fLJy8sr1osDAAAAAABA/BTjUEqSLBaLqlevrurVq8d2PQAAAAAAAEgAon343pYtW5Q/f34FBQVFWRYYGKgCBQpox44dsVocAAAAAAAA4qdoh1KTJ0/WJ598IldX1yjLUqZMqc6dO+vbb7+N1eIAAAAAAAAQP0U7lDpy5Ihq1ar10uU1atTQgQMHYqUoAAAAAAAAxG/RDqVu3bolR0fHly5PnDix7ty5EytFAQAAAAAAIH6LdiiVOXNmHT9+/KXLjx49Knd391gpCgAAAAAAAPFbtEOpOnXqaMiQIQoJCYmy7J9//tGwYcP0/vvvx2pxAAAAAAAAiJ8SR3fFwYMHa8WKFXrnnXfUvXt35cmTR5J0+vRpTZs2TWFhYfryyy/fWKEAAAAAAACIP6IdSrm5uWn37t3q0qWLBg0aJGOMJMlisahmzZqaNm2a3Nzc3lihAAAAAAAAiD+iHUpJUrZs2bR27Vrdv39f58+flzFGuXPnVurUqd9UfQAAAAAAAIiHYhRKRUidOrVKlSoV27UAAAAAAAAggYj2ROcAAAAAAABAbCGUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJuzayg1duxYlSpVSilSpFCGDBnUsGFDnTlzJtI6ISEh6tatm9KmTavkyZOrcePGunXrlp0qBgAAAAAAQGywayi1bds2devWTX/99Zc2btyoJ0+eqEaNGnr06JF1nV69eun333/Xr7/+qm3btunGjRtq1KiRHasGAAAAAADA60pszxv38fGJdPnnn39WhgwZdODAAVWqVEmBgYH68ccftWjRIlWtWlWSNHfuXOXLl09//fWXypYta4+yAQAAAAAA8JreqjmlAgMDJUlp0qSRJB04cEBPnjyRl5eXdZ28efPKw8NDe/bssUuNAAAAAAAAeH12HSn1vPDwcPXs2VMVKlRQwYIFJUl+fn5KkiSJUqVKFWldNzc3+fn5vXA7oaGhCg0NtV4OCgp6YzUDAAAAAADg1bw1I6W6deum48ePa8mSJa+1nbFjxyplypTWv6xZs8ZShQAAAAAAAIgtb0Uo1b17d61Zs0Zbt25VlixZrO0ZM2bU48ePFRAQEGn9W7duKWPGjC/c1qBBgxQYGGj9u3r16pssHQAAAAAAAK/ArqGUMUbdu3fXypUrtWXLFnl6ekZaXqJECTk6Omrz5s3WtjNnzujKlSsqV67cC7fp5OQkV1fXSH8AAAAAAAB4u9h1Tqlu3bpp0aJFWr16tVKkSGGdJyplypRycXFRypQp1aFDB/Xu3Vtp0qSRq6urevTooXLlynHmPQAAAAAAgDjMrqHUjBkzJEmVK1eO1D537ly1a9dOkjRp0iQlSpRIjRs3VmhoqGrWrKnp06fbuFIAAAAAAADEJruGUsaY/7mOs7Ozpk2bpmnTptmgIgAAAAAAANjCWzHROQAAAAAAABIWQikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmCKUAAAAAAABgc4RSAAAAAAAAsDlCKQAAAAAAANgcoRQAAAAAAABsjlAKAAAAAAAANkcoBQAAAAAAAJsjlAIAAAAAAIDNEUoBAAAAAADA5gilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACbI5QCAAAAAACAzRFKAQAAAAAAwOYIpQAAAAAAAGBzhFIAAAAAAACwOUIpAAAAAAAA2ByhFAAAAAAAAGyOUAoAAAAAAAA2RygFAAAAAAAAmyOUAgAAAAAAgM0RSgEAAAAAAMDmEtu7gPjuypUr8vf3t3cZNpcuXTp5eHjYuwwAAAAAAPCWIpR6g65cuaK8+fLpn+Bge5dicy5Jk+r0qVMEUwAAAAAA4IUIpd4gf39//RMcrKajZyiDZ257l2Mzt33PadngLvL39yeUAgAAAAAAL0QoZQMZPHMrc74i9i4DAAAAAADgrcFE5wAAAAAAALA5QikAAAAAAADYHKEUAAAAAAAAbI5QCgAAAAAAADZHKAUAAAAAAACb4+x7AAAA/+HKlSvy9/e3dxl2kS5dOnl4eNi7DOCNS6j7Ofs4AHsjlAIAAHiJK1euKG++fPonONjepdiFS9KkOn3qFF9aEa8l5P2cfRyAvRFKAQAAvIS/v7/+CQ5W09EzlMEzt73Lsanbvue0bHAX+fv784UV8VpC3c/ZxwG8DQilAAAA/ocMnrmVOV8Re5cB4A1iPwcA22OicwAAAAAAANgcI6UAAACA5zDpNQAAtkEoBQAAAPx/THrNpNcAANshlAIAAAD+Pya9ZtJrAIDtEEoBAAAA/8Kk1wAAvHmEUkAsYx6KhIc+T1gSan9LCbfPAQDxT0J9P+e9HG+bOBFKTZs2TRMnTpSfn5+KFCmiKVOmqHTp0vYuC4iCeSgS3jwU9HnC6vOE3N9SwuxzAED8k5Dfz
3kvx9vmrQ+lli5dqt69e2vmzJkqU6aMJk+erJo1a+rMmTPKkCGDvcsDImEeioQ3DwV9nrD6PKH2t5Rw+xwAEP8k1Pdz3svxNnrrQ6lvv/1Wn3zyidq3by9Jmjlzpv744w/99NNPGjhwoJ2rA16MeSgSHvo8YaG/AQCI+3g/B+wvkb0L+C+PHz/WgQMH5OXlZW1LlCiRvLy8tGfPHjtWBgAAAAAAgNfxVo+U8vf3V1hYmNzc3CK1u7m56fTp0y+8TmhoqEJDQ62XAwMDJUlBQUFvrtCXePjwoSTp+qmjehz8yOa3by93Ll+Q9Oz+2+Nxtyf6nD5PKBJqnyfU/pboc/qcPo/vEmp/S/Q5fU6fx3cJtb8l+/Z5xO0ZY/5zPYv5X2vY0Y0bN5Q5c2bt3r1b5cqVs7b3799f27Zt0969e6NcZ/jw4RoxYoQtywQAAAAAAMC/XL16VVmyZHnp8rd6pFS6dOnk4OCgW7duRWq/deuWMmbM+MLrDBo0SL1797ZeDg8P171795Q2bVpZLJY3Wu/bJCgoSFmzZtXVq1fl6upq73JgA/R5wkOfJzz0ecJCfyc89HnCQ58nPPR5wpNQ+9wYowcPHihTpkz/ud5bHUolSZJEJUqU0ObNm9WwYUNJz0KmzZs3q3v37i+8jpOTk5ycnCK1pUqV6g1X+vZydXVNUE980OcJEX2e8NDnCQv9nfDQ5wkPfZ7w0OcJT0Ls85QpU/7Pdd7qUEqSevfurbZt26pkyZIqXbq0Jk+erEePHlnPxgcAAAAAAIC4560PpZo1a6Y7d+5o6NCh8vPzU9GiReXj4xNl8nMAAAAAAADEHW99KCVJ3bt3f+nhengxJycnDRs2LMqhjIi/6POEhz5PeOjzhIX+Tnjo84SHPk946POEhz7/b2/12fcAAAAAAAAQPyWydwEAAAAAAABIeAilAAAAAAAAYHOEUgAAAAAAALA5QikAAAAAAADYHKEUAABvCc49AgAAgISEUAoAADsJDw+PdNlisUginAIAAEDCkNjeBeDVhYeHK1EicsWEgL5OuIwx1qAC8cvz+/XKlSt19uxZOTo6qmLFiipdujR9n8DQ30D8xL4NxD8v26/5zvZqLIafY+MEX19fLVu2TMHBwSpYsKCaNGkiiTe6+OzmzZuSJBcXF6VKlYoXuQTg4sWLWrRokfz9/ZU5c2b169dPEvt5fNe/f38tW7ZM+fPnV8qUKbV06VKtWrVK9evXt3dpiGUR+/LevXt17Ngx3bt3T+XKldO7774baTnil4h+NcbIGKNEiRLR1/EYn9cSpoh9+v79+0qWLJmSJEli75LwhkT09fbt27V7925dunRJNWrUUOXKlZUmTRpe318Br5hxwJEjR/Tuu+/Kx8dHS5YsUZs2bfT9999LEk/4eGrx4sWqX7++KlasqJIlS+rgwYNKlChRlEN9EH8cO3ZMFSpU0N9//63jx49r7Nix6tixoyT28/js119/1YIFC7Rs2TKtXbtW9erVkyTdv3/fzpXhTbBYLFq+fLlq1aqlDRs2aMWKFerdu7d69OhhXY74JeLLyfr169W1a1d99NFH+uuvv+jreOr5QGr27Nnq2LGjWrVqpSlTpti5MrxJEfv5mjVr1KZNG+3Zs0ehoaH2LgtviMVi0YoVK1S3bl1dunRJN2/e1IQJE9S6dWsFBwfz+v4KCKXeckePHlX58uXVpk0bbdy4UStWrFC+fPk0Y8YM+fn5WUMKBrzFH7NmzdLHH3+s9u3bq2/fvkqVKpVq1aqlGzdu8MtbPHX16lU1adJEbdq00W+//aY1a9bop59+0vbt23X06FF7l4c3IOI1++LFi6pdu7ZKly6tFStWqHPnzpo1a5batm2roKAgXbx40c6V4nU9/2PCyZMn1atXL40bN07Lli3TnDlzdOLECbm6uka6Du/p8YfFYpGPj48aNWokPz8/Xbp0Se+9955mz56tx48f27s8xLKIz2kDBgzQiBEjlDJlShUuXFiff/65+vfvr6dPn9q5QrwJFotFK1euVIsWLVSqVCllzpxZTk5O9i4Lb8jFixc1aNAgff3115o5c6amTZumkydPqmDBgkqaNKm9y4uT+Ib7Frt165Zq166tihUrasyYMUqcOLEKFCig9OnT6/79+3ry5In1Aw2T48YP8+fPV5cuXeTj46OuXbuqS5cuaty4sYKCgnTq1CnreoSR8YcxRmvWrFGmTJnUv39/SZKzs7MKFy6swMBABQUF2blCxJbn99ewsLBIbd7e3mrbtq0mTpyoTz75RJL0xx9/aObMmXrw4IHti8VrW7NmjfXHhIj+vnz5sjJkyKDOnTvL19dX77//vj766CN99dVXkqTDhw9LYsRUfBIQEKBDhw7pm2++0cqVK7Vr1y4NHjxYXbp00U8//UQwFQ/t3LlT3t7eWrJkib755hsVLVpUiRMnVp48eZQ4MdP5xkcXL15Unz59NGHCBA0dOlQ5c+ZUeHi4jh49qmvXrtm7PMSyu3fvysHBQR07dpSvr68qVKig5s2ba8KECZKkXbt2MVIuhgil3mJ37txRhQoV5OfnJ29vb0nS2LFjtWnTJmXLlk2dO3fW+++/r759+2r37t26d+8eH2TjMH9/f82fP1+urq4qWbKktX3nzp16/PixNm/erAULFujUqVPWLzj0d9xnsVhUpkwZ1a1bV2nTppX0LLDIkSOHUqdOTSARj0Tsr/PmzdO4ceMkSTlz5tT27dvVtm1bjRkzRp9++qkk6cGDB5o/f77CwsKUIkUKu9WMV7N//34NGDBAAwcO1K1bt+Tg4GBd5u7ursuXL6tSpUqqWbOmpk+fLunZh9hly5bxBSYeOXHihNzd3TV//nzr67skDRkyRMOGDVP37t31888/E0zFM7du3VKmTJlUsWJFrVixQh9++KGmTJmiDh06KDAwUNu2bbN3iYglET8s/fPPP0qbNq3effdd3bt3T5MnT1bVqlVVtWpVtW/fXn/99ZedK0VsiOhvY4zc3Nx05swZVa5cWTVr1tSMGTMkSQcOHNCyZcvk6+trz1LjHEKpt9CTJ08kSQULFtTIkSNVqlQpjRgxQs2aNdPkyZO1cuVKrV+/XqNGjdInn3yijRs3qnnz5qpRowYfbOKwtGnTasiQISpWrJjKlCmjkJAQtW7dWqdPn9bkyZNljJG3t7dKlSqlxo0b69NPP2XemXiiYMGC6tOnj6Rnb3QRX2AdHR318OFD63orV65UcHCwXWpE7AgLC9O6deu0YcMGSVKTJk3k5eWlsLAwJU2aVEePHtXhw4fVpEkT+fn5afz48ZIYFRnXlCxZUh07dtTFixc1cOBA+fn5SZI8PT21adMm5ciRQ40aNdKsWbOs+/vSpUt16NAhJU+e3J6lIxYVKFBAHTt21OnTp3Xjxg1J/7cvDxkyRCNHjtSnn36qhQsX2rNMxLL06dPLYrFo+vTpateunSZOnKjOnTtLkvbu3avvv/9ely9ftnOViA3//POPJClJkiQ6c+aMBg8erEKFCmn79u2qXr265s6dq6tXr+rkyZN2rhSv6vnPXxE/
LhYoUEAXLlxQwYIFVa9ePf3www/W9/JFixbp0KFDSpcunV3qjasYQ/qWOX/+vH788Ufdvn1b7dq107vvvmudAHXJkiX67LPPrGdkKlGihEqUKKEGDRroxIkTSpMmDWd6iIMiJsW0WCyqWLGiRo0apUGDBillypTKli2bTpw4IWdnZ+v6f/zxh/bt26ft27crZcqUdqwcr+rWrVs6f/68goODVaVKFSVJkkRhYWFycHCQxWJRWFiYEiVKpCdPnljnJBg6dKhGjx6tixcvKnv27Pa9A3gl4eHhcnBw0MSJE1WsWDFNnTpV3bt314wZM/To0SN9//33OnHihEqVKqWkSZNq7969Spw4sfW5gbjh6dOnSpw4sXr16iWLxaJ169bpyy+/1JgxY5Q3b17NnTtXH3/8sVKlSqULFy7o8ePHmjt3rhYsWKDt27crVapU9r4LiEVTpkzRkydP9MUXXyhXrlyqW7euddkXX3yhJEmSqGzZsnasEK/qZWfZc3d3V1hYmHr16qXBgwdbR8CGhITo+++/V5o0aeTh4WHrchHLDh48qPfee0979uxRwYIFtX79enl7e6t8+fJq3bq1MmXKJEn69ttvGTQQR0VMYL9jxw5t2rRJ7u7uKl26tIoXL67ly5frgw8+kJ+fn/bt26d//vlHq1ev1pw5c7Rz505CqRiyGH5+fWscO3ZMdevWVYMGDeTh4aGePXvK0dFR0rMJz7///nvt2bNHI0aM0Icffijp2aiqiHUQtx07dkyFChVSeHi4du7cqXHjxuns2bPav3+/UqVKpcePH78wdOTUw3HLsWPH1Lp1az169Ej3799X4cKFtWHDhkj7ccREqAUKFNCsWbO0b98+jRgxQtu2bVOJEiXsVTpi6EX7pjFGT548Uf/+/eXn56fp06crTZo0kqQLFy7o+vXrcnd3V86cOZUoUSJrwIG4I+JD7K5du7Rq1SqtWbNGvr6++uijjzR69Gi5ublpxowZ6tevn1KnTi1XV1c5ODho3rx5KlasmL3LxyuK6PdDhw7pwoULevLkiUqVKqVcuXJJkjp27KglS5Zo6dKlkYIpxE3Pn/L9xx9/1LVr1/To0SN17NhR77zzjlavXq0uXbqoRo0aqlmzplxcXDRt2jTdunVLBw8eVOLEiTltfBx34cIFderUSUeOHNG2bdtUoEABhYSEWH9INsboyy+/1M8//6ydO3cqR44cdq4Yr2L16tVq0aKFihQponv37ilp0qQaO3asatWqpa1bt6pDhw4KCwuTs7Oz0qZNq+nTp6to0aL2LjvuMXgrXLhwwbi7u5v+/ftHan/69Kn1/4cPHzYdOnQw+fLlM97e3rYuEbEsLCzM+v/du3cbi8ViVqxYYV22Y8cOU7FiRZMvXz5z69YtY4wxjx8/tkutiB2HDx82SZMmNf379zdHjhwx3333nbFYLKZ3797GGGPCw8MjrV++fHmTJ08e4+TkZPbt22ePkvEKBg0aZC5evGi9/N1335khQ4aY27dvW/f7P/74w7i4uJj169e/dDvPv0Ygblm3bp2xWCxmwoQJZvHixebjjz82+fPnN+3bt7e+np8/f978+eefZv/+/eb27dt2rhixwdvb26RMmdKUKVPGODk5mZIlS5ohQ4ZYl3fs2NGkSpXKLF++3I5V4nU9/17dr18/4+rqamrWrGk8PT1N1qxZzTfffGOMMWbx4sWmfv36JlmyZKZSpUrmww8/tH6Oe/7zPeKG5/s94v8XLlww77//vkmZMqU5deqUMebZe/ePP/5oGjVqZDJlymQOHjxol3rx+m7dumUGDx5sfvzxR2PMs+9r7du3N1mzZjVr1641xhjz8OFDc/jwYXPhwgVz7949e5YbpxFK2Vl4eLgJDw83Q4cONR988IG5e/fuC9eJcPToUfPJJ58YNzc3s2rVKluWilj0fJ/Onj3b9OnTx1gsFpM8eXKzbNkyY8z/BVOVKlUyBQsWNDdv3rRXuYgFFy9eNEmSJDFffvmltS0wMNC4ubmZ5s2bR1r36dOnJjg42Hh6ehqLxWKOHTtm63Lxinx8fEyHDh3MkydPjDHGhIaGmuHDh5uUKVOaKlWqmO7du5uAgABjjDF9+/Y17777rrlz5449S0YsCg8PN48fPzYtWrQwHTp0iLRs4sSJ5p133jEdO3Y0fn5+dqoQsen54PjYsWMmQ4YMZtasWSY4ONjcuHHDDBw40JQqVcoMHz7cul7Lli1N5syZzcOHD+1RMmKRv7+/qVOnjtm3b5/1c13Pnj1NoUKFzKxZs4wxxjx69MhcvXrVPHz40LpOxPsD4p4dO3ZE+Tx+/vx5U69ePZMqVSpz9uxZY4wx+/btM926dTOnT5+2R5mIBYcPHzaFCxc2JUuWjPTD8NGjR027du2Mh4eHWbNmjR0rjF845sfOLBaLLBaLtm/fruTJk1sP43ie+f9HWD569EiFChVSt27d1LRpUxUsWNDW5SKWRAzXHjhwoIYOHap8+fJp3Lhxqlixojp06KDFixcrUaJEKl++vMaMGaOnT5+qb9++dq4ar8oYo02bNildunQKCAiwtk+fPl23b9/WuXPnNHToUA0ePFhXrlzRw4cP5eLioqlTp+rMmTPs63FIzZo19cMPPyhx4sTy9vbWpUuXNGzYMJ09e1bVqlXT33//rXfeeUcjR46Ug4ODUqRIoQsXLti7bMQSi8UiR0dHJUqUSP7+/pEmSO3bt69KlSqlJUuWqEePHrp165YdK8XrmDFjhm7duhXp8Nxz584pVapU+vDDD+Xi4iJ3d3f17NlTlSpVko+Pj27evClJWrhwofbv369kyZLZq3zEgsmTJ6t06dIKCgqSu7u79XPdpEmTVLZsWU2YMEFPnz5V0qRJlSVLFiVLlkwWi0XGGA7JjmMiznj94MEDDRgwQCVLlrS+fhtjlDNnTo0fP17u7u6qVq2aTp06pZIlS2rSpEnKkyePPUvHa/D391eWLFl06tSpSGfCLlSokPr06aOaNWuqWbNm2rhxox2rjEfsGonBGPPsF5PixYubzz//3Hr5Rb788ktrUhsSEmKr8vCGXL582eTLl88sXbrU2nbmzBnTpUsXkzx5cuvw/rCwMHPkyBGGesdRQUFBxhhjAgICzLRp00yRIkXMZ599ZsaNG2fSpk1rJk2aZP78808zdOhQU7VqVZMlSxaTM2dO88MPP9i5csTU84fXnj9/3hQoUMDUr1/fHD161Bjzf6/to0ePNk2aNDHp0qUzFovFDBo0yC71IvZFvE4PHjzYFCpUyJw9ezbSyNhp06aZfPnymSZNmpjr16/bq0y8hsOHD5tatWqZ8+fPR2pfv369yZ49u3V/jxhFdfHiRWOxWMzvv/9u81oRe/59eP3mzZtNvnz5jKurq/W5EBoaaowxxs/PzyRNmvQ/D8/G2+/evXvW71t//vmnuXHjhtm1a5epWrWqyZs3b5QRUy1atDAWi8VkzZrVhIaGRnnOIO7ZtWuXqVatmsmbN6/Zs2dPpGWHDh0y3bt3t46Ow+shlLKTa9eumSVLlpgFCxaYEydOmOHDhxs
3Nzdz+PBh6zrPDwu/evWq8fLyMrt27bJHuYgFO3bsMF9//bX55ptvzF9//WUePXpkkiVLZubNmxdpvePHj5u8efMaJyenKHOHEUzFLfv37zepU6e2Dt8OCgoy33//vSlYsKCxWCxmw4YNUa6zZs0aM3ToUHP8+HFbl4vXEHFInjHGLF261Dx9+tQsXLjQeHl5mUaNGkXpz2vXrpnff//dNG/enLni4rCILx0PHz40T548sfZlcHCwyZkzp6lUqZI5ffq0tb1v375m6NChxt/f32414/VF7O979+41N27cMMYYc/bsWZM+fXrTs2dPExwcbF33zp07pnjx4mbz5s12qRWxa+LEidb5ZXbs2GE8PDyMl5eXNZAyxphTp04ZDw8PPrPHYTdu3DA1a9Y0c+bMMYsWLTIWi8W6D+/Zs8dUqlTJ5MuXL9Lh95999pnx9vZmuo04KOK9fP/+/WbVqlVmypQp1vfpffv2mQYNGphixYqZvXv3Rrre8/s9Xg+hlB0cOXLE5MiRw+TPn984ODiYggULmhYtWphChQqZDz74wJw4cSLKdYYPH27Kli3LZKhx1OzZs0369OlN8eLFTfLkyU2OHDnMwoULTfv27U3Hjh2tH2ojtGjRwpQtW9ZkyZLFbNmyxU5V43UcPnzYuLq6ml69ehlj/u8NLzAw0Hz//femUKFCplOnTtb1Gf0Yd23YsMHkypXLBAYGmp49exo3NzfrnEELFiwwlStXNo0aNTInT560Xuffk5gTTMU9Efv02rVrTePGjU3RokVNnz59rF9crl+/bnLlymUKFChgqlSpYurXr2+SJElinQwXcc/zPwz5+/ubMmXKmHLlyllHvS1fvtwkSpTI9OjRw+zZs8dcu3bNDBo0yGTMmNFcvXrVXmUjljx8+NC0atXKtGrVyhjzbPTr9u3bTaZMmUylSpXM0qVLzYYNG0ydOnVMsWLF+CExDgsMDDQtW7Y0efPmNY6OjtYgMsLu3bvNe++9ZzJmzGjGjh1r2rVrZzJnzmx8fX3tUzBem7e3t0mfPr2pUaOGyZEjhylWrJiZOnWqMcaYrVu3mg8++MCULl3a7Ny5086Vxk/MKWVjR48eVbly5fThhx9q48aNWrVqlTJnzqzr16+raNGi2rZtmzp37iwfHx/5+/tr165d6tq1qyZPnqxZs2Ypffr09r4LiKE5c+aoW7dumjp1qnbu3Kk1a9Yoc+bMmjNnjtzc3LRjxw7NmzdPfn5+kp4dsx4aGqpOnTqpUKFCWrlypZ4+fRppbhK83Y4cOaJy5cqpe/fu+vbbbyU9m2vmzp07cnV1Vdu2bfXJJ59o79696tixoyTJyclJT58+tWfZeEUlSpRQihQpVKBAAf3444/atGmT3NzcJEmtWrVSx44dde/ePQ0ZMkSnTp2S9H/zykXs146OjvYpHq/MYrFo9erVaty4sfLnz6+WLVvq/Pnz6tmzp9atW6dMmTLp+PHj+uijj5QvXz5lzpxZBw8eVN68ee1dOmIgPDxckhQSEmJtO3DggNKmTas+ffooadKkatOmja5fv65GjRpp+fLlWrlypZo0aaIqVapo0aJF+uOPP5QlSxZ73QW8ouPHj8vf3996OVmyZKpTp45+//13nTx5UokTJ1b58uW1ZMkS3bhxQ82bN9cff/yhd955R7t375aDg4N1PiLEHeHh4XJ1dVX79u115coVZcmSRcYYPX782LpOuXLlNGfOHNWtW1dLly6Vr6+v1qxZo+zZs9uvcLyygwcPqlu3bpowYYLWr1+vLVu26PDhwwoODpYkVa5cWb1791ayZMn05ZdfKiQkhO9lsc2+mVjCcuXKFZMuXTrTpEmTSO3Tp083adKkMTdu3DDTpk0zJUuWNBaLxaROndrkyZPHlCtXzhw5csROVeN1bN261VgsFjNixAhjzP/9sj5u3Djj4eFhQkJCzKhRo0zBggXNu+++azp16mRKly5tSpYsaYwx5qOPPjJeXl52qx8xd/LkSePo6GjGjh0bqX306NGmXLly1jmmIg7lK1GihGnatKk9SkUs6tGjh7FYLCZ37tzWUVLPzyexYMECU61aNVO5cmV+SY0nTpw4YfLnz2+d/+3evXsmQ4YMJk+ePCZfvnzWs/JEPA8YNRF3XbhwwXh5eZkbN26YpUuXGovFYvbv32+MMebXX381lSpVMtWqVTPXrl0zxhjj6+tr/vrrL7Np0ybmDoujJk+ebDw8PMyVK1eiLHv//ffNRx99ZB3hHB4ebrZv327eeecdU6dOHet6jICO2/7++2+zZs0a0759e1O2bFkzZcqUFx6udffuXfPo0SM7VIjYsmzZMlOlShVjjDGnT582np6epmPHjtblt27dMsY8O2SXUa9vBiOlbCgsLEyenp4KDQ3Vzp07re25cuVSeHi4AgMD1bVrVy1fvlwbNmzQpEmTtGDBAv3+++8qXLiwHSvHq8qcObMqVqyogwcPavv27dbRERH/BgcHa/DgwRoxYoQqVKigK1euqHTp0tqxY4ck6cmTJ8qbNy8jaOIIY4zmz5+vp0+fqkaNGtb2cePGadKkSRo2bJhSpEghY4xSpEih9u3bq1mzZrpx44b1zEyImz7++GP9+eefSpMmjapVq6bz58/LYrFY991WrVrp448/Vr58+eTh4WHnavGqzHNnw02TJo3KlSunZs2aWV+7GzVqpB9//FHSs7Pt/fbbb9bX++fP1Ia4JW3atDp8+LCqVq2qli1b6qefflKJEiUkSR9++KF69OihJ0+eqG3btrpx44ayZ8+uMmXKqFq1asqUKZOdq0dMzZo1S/3799cPP/wgHx8f/frrrzpz5oykZ68BdevW1dGjR61n07VYLKpQoYLmzJmjQ4cOqUGDBpKejYBG3BHx+h7xvl2qVCnVrVtX3377rXLlyqWFCxdqzpw5evLkiSRp3rx5un37ttKkSaOkSZParW7EzLVr17Rw4ULNnj1bV65ckSRdv35dKVKkUFhYmKpXr67q1atr1qxZkqTffvtNs2bNUmhoqCpWrMio1zfFrpFYAnT27FlTq1YtU6NGDXPy5Enz4MEDkz59etO/f397l4Y35Pk+P3v2rNm8ebNxcnKynl3veRHzzNy9e9d88cUXJnXq1JHmocHby9fX1wQEBJhHjx6Z1q1bm2TJkpmzZ8+aKVOmmDRp0rz0LDyPHj0y9+7ds3G1eB3/ng/q+V9Ob926ZYoXL24KFCgQaUTU9OnTI51Z9d/bQNyxcuVK06lTJxMcHGyd5LZDhw6mRYsW1gmuP/zwQ+Pm5mbKly9vHjx4wFmY4rCIEW6LFy82FovF5MiRw1y4cCHKPvzrr7+aatWqmZIlS1pHSyLuiejndevWGWOMqV+/vilQoIDJmzevGTt2rLl06ZIxxpiCBQua3r17R7n+jh07TJIkSRgBHcdEvEZv2LDBdOvWzVSvXt38/PPP1jkAAwICTOvWrU358uXN559/bgYMGGAsFgtnXotjjh8/booUKWJat24d6bv3mTNnjJubm0mUKJH57LPPIl3ns88+Mw0bNjSBgYG2LjdBIZ
Syg7Nnz5ratWub9957z6ROndr07NnTuowvKvFTRJ8XL17cODo6mgULFhhjnn3Y/feXlTt37pg2bdqYPHnymEOHDtmhWsTU48ePTeXKlU2mTJnM/fv3TUhIiGnWrJmxWCzG2dnZeraO5/t6xIgRZs6cOfYqGa/o+dfomTNnmq5du5oGDRqYzZs3W0OnO3fumBIlSpjcuXObBQsWGC8vL1OoUCFe3+OwiH33zJkzxsPDw/z444/W/gwJCTGlS5c2w4YNM8Y8e13v1KmTmTJlCicniUfWrl1rfvrpJ5M7d25TsmRJc+jQoSjv37/++qupWbOmuXz5sp2qxOv44YcfjMViMWnTprVOu2CMMQcOHDAzZ840Hh4epnz58qZt27bmiy++MBUrVnxhX+/evZuwIg5auXKlSZYsmenevbtp3769KV++vGnSpIn1s3hAQIDp2bOnqVatmilevDif0eOY48ePm9SpU5vBgwdHCphWrVplli1bZiZPnmyyZ89unX7j4sWLZtCgQSZNmjQvPAkZYhehlJ2cPXvWVK1a1WTLls1s27bN2s6vqfFXRJ8XLFjQ/PXXX9b2F/X5+fPnOWY5jjl27JgpWbKkKViwoLl375559OiR6datm3F0dDT79u0zxvxfoDF8+HBjsVjMwYMH7VkyXsOAAQNMpkyZTMeOHU2nTp1MkiRJzLRp08zdu3eNMc/O0lS1alVTokQJ4+XlZT27HsFU3LV9+3Yzffp006VLl0g/KISGhpq2bduaypUrm8WLF5t+/fqZbNmy8Roex73s81hAQIDJkSOHKVmypDl69Kh1vYjRsMwtEzdNnz7dODk5mZ9++slMmjTJ5MuXz/Tt2zfSOteuXTOrVq0ylStXNilSpDAWi8UsWrTIGMPn97ju4MGDJmfOnNYfCwMDA03KlClNzpw5Tf369c3hw4eNMc9e74OCgkxAQIA9y0UM3b1711SqVMl07949Uvu4ceOMxWIxderUMZMmTTKjRo0yqVKlMu7u7qZQoUImT548fFa3EUIpOzp37pypVauWqVmzJqeXTCAi+rxWrVov7XM+2MQtEf0VFhZmTp06ZcqVK2dKlixp7t+/b4KDg03z5s1N0qRJrf09ePBg4+TkZA4cOGDPsvEa5s2bZzw8PKx9uHv3bmOxWIyTk5MZO3asNZgyxpjLly9bnyPPH76Ht9fLgsN69eoZi8ViihUrFmUY/9q1a029evVM5syZTYECBdi/47iIfXb79u1m/PjxpmvXrubw4cPWL6KBgYEmZ86cpnTp0sbb29t88cUXxmKxMEIqjtq3b5+xWCzG29vbGGOMn5+f+eqrr0z+/PlfOr3Gxo0bTZs2bUyxYsUYERlHPf9av3nzZtO5c2cTHh5ufH19TY4cOcynn35qfv75Z5MmTRrzwQcfWH9gRNxz8uRJkzNnTrNlyxZrv8+YMcM4OjqaKVOmmOrVq5vGjRubpUuXmqtXr5oFCxaYbdu2caIKGyKUsrOzZ8+a999/35QtW9bs2bPH3uXABs6ePWvq1q1rSpYsyVkV47B//vnH+v+IUTDGGNOnTx/rF9d79+6ZkJAQ06JFC5M6dWrTokULkzRpUutZmxD3BAcHm1mzZpmZM2caY4xZvXq1cXV1NYsXLzbjxo0zTk5O5rvvvrOeqSUCI6Tihoh+un79ulm+fLlZtGiRuXnzpnX5xx9/bBIlSmQWLVoU5cxa/v7+5sqVK9Z5phC3rVixwqRKlcrUqVPHVK5c2WTIkMFMnjzZOgIuMDDQlCxZ0hQrVsx4enoSRMZREfv8mTNnIl2+deuWNZgaMGCAdf3n5xDcunWryZMnj3UUDeKGgIAA65mQ161bZ+7fv2+MMebSpUsmLCzMNG7c2LRt29Y6n1zZsmWNm5ubadmyJWdUjKPmz59vHBwcIv3wf/XqVbN9+3ZjjDFHjx411apVMyVKlOAMyXZCKPUWOHXqlPnwww/5hS0BOXnypOnduzdfVOOoa9eumSZNmpgtW7ZEah8/frxJmzatmTNnjilRooT1UL7Q0FDTtGlT4+zszBeXOOZFIxdPnTplrly5Yi5fvmyKFClivv32W2t7smTJjMViMfPnz7d1qXhNEa/Hx48fN8WLFzfNmjUz/fr1i7Jew4YNTdq0ac2aNWsiBdKMco0/du/ebTJlymR++uknY8yzUY4ODg4mS5YsZuzYsdZfz0NCQszJkycZKRMPbNq0yQQHB5uwsDDra8Ht27etwdTAgQOt6z4/6tXT09P88ssvNq8Xr+bGjRumTJkyZunSpWb+/PnGYrGYlStXWpffu3fPFCxY0LrvP3jwwLRq1cqMGzeOUTNx2I4dOyKdZOr59+uI/f2HH34wpUqVivRDFGwnsb3P/gcpb968WrhwoZIkSWLvUmAj+fLl0zfffCNJCg8P51ThcUxoaKiuXbumr7/+WkmSJFGFChU0btw4TZw4UUuXLpWXl5fKly+vVq1aqWrVqtq8ebN++uknPXz4UG5ubvYuH9FkjJHFYpEkLV68WCdPntSoUaOUN29eSdKePXsUHh6uqlWrSnq2L/fs2VP58uVTs2bN7FY3Ys4Yo0SJEunEiRN699131b17d/Xs2VNp0qSRJPn4+Chx4sTy8vLSypUr1aBBA7Vt21bz58+Xl5eXHB0drc8VxD3m/58KPqIPL168qI8++kjt27eXr6+vqlatqq5du8rFxUVDhw6Vg4ODmjVrJg8PD+XLl8+epeM1GWN0+vRpVa9eXRs3blS1atWsy9KnT69PPvlEFotFixcvVmBgoKZPn67EiZ99fVq4cKHu3buncuXK2at8xJC7u7uyZ8+ufv366fr165o1a5YaNmxo/Sz+5MkTpUmTRnv37lW+fPm0du1aHT16VJMmTVL69OntXT5eUfbs2ZUyZUrNmzdPJUqUULZs2azLIr6DnTlzRtmzZ1eyZMnsVWbCZt9MDADiprNnz5patWqZBg0amE8++cSkT5/eOtFthFOnTpns2bOb8uXLMyoujnm+v/bu3Wvq1KljPD09zZQpU6zta9euNQ4ODmbRokXm4MGDpm7duqZRo0bW5cwhFbfcuXPHlCtXzvTo0SNS+/jx442Dg4OpV6+e2bBhg7W9UaNGxmKxRNnvEXf9/vvvZv/+/ebChQvm5MmTJjg42Hh5eZkOHTpY13F3dzepU6c2kydPth7eg7ivVatWpk2bNubhw4dRlt2+fdsMHDjQtGrVKtIIi40bN1oP+8PbLSgoyNy4ccMYY8yuXbuMk5OTyZgxo1myZIn1UL4IkydPNsWKFTPu7u4mW7ZsjHCPJ7y9vU2SJEnMRx99FOlseoGBgaZfv34mderU5vjx43asMGGzGPP/fx4CAMTI2bNn1b17d+3cuVOjRo1Snz59JEUe/Xb27Fk5OjrK09PTnqXiFfXr10/Hjh2TxWLR4cOH5eLioi5duqhfv36SpB49emjatGnKnj270qRJoz179sjR0dHOVeNVHDhwQC1atNC8efNUtmxZWSwWTZ48WQMHDtTw4cO1du1aZciQQZ07d1b16tUlSR999JGGDBmid
955x87V41WZ/z8i8uDBgypZsqR+/vlntWnTRtKzEVMNGzbUxIkTVbNmTV27dk0DBw5UxowZ9emnnypXrlx2rh4x9e/R6RGXZ86cqa+//lo7d+5UxowZo6wXEBCglClTymKxKCwsTA4ODvYoH6/gzJkzGjZsmAoXLqx27drp0aNHunbtmubPn6/du3drwIABatKkiZInT269zuXLl+Xn56esWbMqU6ZMdqwesSUsLExz5sxR9+7dlStXLpUvX16Ojo66fv269u/fr7Vr16pYsWL2LjPBIpQCgNdw4cIFde3aVQ4ODvriiy9UsWJFSRyWGR8sXrxYXbt21YYNG1S4cGHdv39fAwcO1IkTJ9S0aVNrMLVr1y4lSZJExYsXl4ODg54+fWo9vANxx/Tp09WvXz89evTI2nb06FHdvXtXVapU0Y4dOzRo0CAlSZJE06ZN47CteOTw4cO6cuWKTpw4oUGDBlnb9+3bp/r162vixImqXLmyfvzxR/35559au3atXFxc7FgxXtdff/2lrFmzKnPmzNa24sWLq0iRIpo7d+5Lr2eeO6wbb7+jR4+qZs2aql27tpo1a6aaNWtGWt6qVSsdOHBAgwYNUuPGjZU8eXLNmjVLzZo1U6pUqexTNN6ovXv3asKECbpw4YJSpEihihUrqkOHDvzIYGeEUgDwms6dO6fPPvtMxhgNGTJEFSpUsHdJiAWjR4/W6tWrtXfvXmvAeOXKFXXr1k0HDx5U37591atXL0n/90WFX9DjrrVr16px48Zas2aNqlWr9sIvnxMmTNDKlSu1du1apU6d2k6VIjbdvn1bFSpU0IULF9SnTx9NnDgx0n788ccfy9vbW25ubgoICND69etVvHhxO1eNmHp+f968ebM6deqksLAwDRkyRCVKlFDRokX1888/a968efrpp5/k6elJABXHXb58WZUqVVKLFi00evToSD8WPf/DYcuWLXX06FG9//77Cg0N1Xfffafjx48rf/789iodbxif1d4+/IwPAK8pd+7c+v777+Xo6Ki+ffvqr7/+sndJeA1hYWGSnk1y+/jxY12/fl3Ssw+xHh4eGjRokB4+fKilS5dqypQpkv5vgmQ+5MRd2bNnl4ODgxYtWiQ/P79IX0Yjfr/z8/NTrly5ODFJPJI8eXKNHDlSBQoU0M6dOyU9249DQ0MlST/99JMWLFigb7/9Vn///TeBVBwVsT8HBQWpWrVq8vb2VqdOnTRu3Dh17txZffv2Vfbs2XXgwAFt2rQp0nUQN/n4+ChPnjwaNmyYNZC6dOmS1q5dq9GjR+uXX36RJC1atEiVK1fWoUOHtHPnTh06dIhAKp57/kgGxue8HTi+AABiQe7cuTVx4kQNGTKE+QfimH8fahnx/4oVK6pv37767rvvNHLkSCVNmlTSsw8w1apVU4oUKbRixQo1bdqUsyrGA/nz59fYsWPVq1cvubq66rPPPrPOBRccHKyRI0fql19+0fbt2zk7TxwWMfrFGKMnT54oadKkatKkiVxcXNS5c2c1aNBAq1evlpOTk0JCQuTs7Kz69evbu2y8oudf36dMmaJ58+Zp/vz5KlasmIoVK6YGDRro7Nmz6t+/vy5evKiHDx9q8uTJqlmzpjw8POxcPV7H3bt3FRwcrH/++UcuLi5atGiRli5dqn379illypQ6c+aMDh8+rG+//VZTp05VQECAHBwclCJFCnuXjjfs+cCZ8PntQCgFALEkb968WrhwIaMo4pDnv7DMnj1bp0+f1rlz59S5c2fVrVtXS5cuVf369RUSEqJ69eopW7Zs+uqrr1S4cGG1bdtWBQoU0L59+/T+++/b+Z4gJiKCiVOnTun69etydHRUsWLF1KNHD92/f1/Dhw/X7t27VblyZYWFhcnX11c7d+7Uxo0b+QU9Dovo9/Xr12vVqlU6ePCgmjVrpgoVKqhhw4YyxqhPnz764IMPtHLlSjk7OzNHXBz2/Ov79u3bFRYWpoMHD2rw4MEaMWKEChYsqAIFCqhAgQKqU6eO1qxZI3d3d82ZM0cnTpyQh4cH80PGMXfv3lXatGklSVmyZNGxY8c0YMAA3bt3T1u2bFG7du3Ut29fVahQQTNnztTgwYPVvn17FSpUiDmkADtiTikAQILXv39/LVy4UI0bN5YkTZ06VYMHD9bIkSO1du1a9erVSw8fPpSDg4PSp0+vnTt3Kjg4WJUqVdKPP/6osmXL2vkeILoigglvb2/17NlTiRMntg7fX7FihUqUKKHVq1drypQpunDhgtKnT6/y5cura9eunGUvjnp+bqDVq1erVatW6tq1q1KlSqXVq1fryZMnWrFihTJmzKi1a9dqwIABypo1q7Zs2WLnyhEbBg4cqF9++UU9evSQr6+vli9frqJFi2rq1KkvPGFBhw4ddPz4ce3atYtAMg7x9/dX06ZNVbRoUX377beSpLFjx+qvv/7Sw4cPNWjQIJUsWdIaPi1evFijR4/Wli1bGO0M2JsBACABW79+vfHw8DAHDx40xhhz4MABY7FYzKJFi6zr3Lx505w4ccLs3bvXhIeHG2OMGTBggMmZM6e5fv26XerGq9u7d69JkSKFmT17trl06ZI5ePCg+fDDD02qVKnM/v37jTHGPHr0yISGhhpjjAkLC7NnuXhFffr0MZs2bbJevnHjhildurSZOnWqMcaYf/75x6RKlcr07dvXus7Tp0/N4sWLTZEiRczVq1dtXjNi14EDB0y6dOnMxo0brW2nT582bm5uplq1aubEiRPW9idPnhhjjPntt99M+fLlTUBAgM3rxau7ffu2+fzzz03p0qXNF198YW0PDg629u3z+vXrZ2rUqEE/A28BxqMCABKUp0+fRrr84MEDFShQQMWKFdPixYv13nvvadq0aWrRooUCAwN18uRJZcyYUfnz51fp0qV15MgRNW/eXD/99JO8vb2ZQywOOn36tIoVK6Y2bdooW7ZsKlasmBYsWKCqVauqWbNmevDggZImTWo9FJc5J+Keb775Rt99953SpUtnbbNYLAoJCdEHH3ygCxcuKFeuXGrSpIkmTpwoSdqyZYsCAgLUqFEj7dixQ1myZLFX+YglYWFhcnR0VNasWSVJT548UZ48ebRu3Trt3r1bI0aM0KlTpyTJOipqx44dOnfunMLDw+1WN2LGGKP06dPriy++kJeXl9atW6fBgwdLklxcXCJNZn379m0NGDBAs2fP1tdff62UKVPaq2wA/x+hFAAgwfD29la3bt0UEBBgbQsMDNSNGze0evVqffrpp5owYYK6dOkiSVq3bp3Gjh2ru3fvSnr2wTd16tTKnj27/vzzTxUtWtQO9wKv6969ezpy5IgcHR0lPfvi6uTkpJ49e+rp06e6cOFCpPUJpeKWx48fa8OGDerVq5eKFCmirVu36tSpUwoLC9OTJ0909OhR1axZU7Vq1dLMmTMlSadOndLcuXN18uRJJUmShMmO4yDzghlJMmfOrMDAQG3cuFGS5OjoqPDwcGXLlk05c+bUqlWr1L9/f+tZV4OCgpQkSRKtXbtWqVOntmn9iLmHDx/q8ePH
1tfoDBkyqFu3bqpdu7bWrFmjoUOHSpL1tb5fv37q1q2bVq9erT///FOFChWyW+0A/g+hFAAgwUiSJIlmz56t4cOH6/79+5KkunXrytXVVR988IGGDh1qDaT++ecfLVq0SIkSJVKaNGkkPQsnsmXLpjFjxjDhdRxWs2ZNZc2aVSNGjNCjR4/k4OAgSUqXLp0sFoseP35s5wrxOpIkSaISJUrIx8dHQ4YMUe3atXXt2jVlzpxZFStWVJ06dVSmTBnNmTPHOon1/PnzdeLECeXIkcPO1eNVhIeHW4OJkJAQGWMUFhamTJkyqV+/fho/frwWLFgg6dkZVp2dnVWpUiVt2LBBW7Zs0ZQpUyRJrq6uGjFihEqWLGm3+4LoOX36tDw9PVWrVi19/vnn2r17t27fvq1MmTJp4MCBqlevnv744w99+eWX1uvkz59fhQsX1rp161SkSBE7Vg/geczeBwCI18z/n+Q4PDxc9evX1x9//KEGDRooLCxMX331lTJkyKA2bdro4cOH2rlzpypVqqRr167phx9+0PXr17VixQrr9SO+wHI2prghou8PHTqka9euKV26dCpXrpxy5sypWrVqacuWLQoLC9OXX36pkJAQLVy4UI6OjsqWLZu9S8crOHr0qAoXLixJ+vTTT/Xnn39qzJgx6tmzp6pXry5JGjBggG7fvi0fHx8tW7ZMDx480NGjRzV37lzt2LFDmTNntuddwCt4/rX5m2++0V9//aXr16+revXqat26tfr376+7d++qR48e2r9/v7Jly6bff/9dgYGBmjJlisqWLaszZ85YtxcRUuPttm7dOt29e1e3bt3S+fPntWvXLl2+fFlNmzbVe++9p2bNmunx48favXu3Ro0apSFDhqh9+/Z68uSJdeQUgLcDZ98DAMRrISEhcnZ2jnQGrjVr1uiDDz7QJ598osmTJytRokSaN2+eFixYoL1796pQoULKkiWLlixZIkdHR4WFhfFFJY5avXq1mjVrply5cunkyZPq06ePRo0aJWOMhg8frrVr1+r8+fMqVKiQLl++rHXr1ql48eL2LhsxNGPGDPn4+GjevHlKlSqVTp06pVKlSil//vxKkiSJBg8erFq1akmSzpw5o2+++UabNm2Sq6urPDw8NHr0aGughbjh+dd0SRo0aJBmzZqlfv366dSpU7py5Ypu3bqlFStWKHfu3Jo7d64mTZqkNGnSKG3atPL29pajo6OqVq2qSpUqafjw4fa7M3glw4cP15IlS9S5c2eVKVNG586d0/r16+Xj46N8+fLp5s2bcnR01Llz5zRp0iR9/vnn9i4ZwAsQSgEA4q1ly5Zp6tSpat++vbJly6aqVavq6dOnSpw4sX7//Xc1btxY7du31/fffy8nJydJzw4JyJQpk1KkSCGLxWJdH3GLMUYhISFq0qSJGjZsqPr162vnzp1q2rSp2rZtqylTpihJkiS6deuW1q9fr/Tp06tw4cKMkoqjTpw4IScnJ+XKlUv+/v5KnTq1Tp8+reDgYE2YMEHXrl3TsGHDrMGUJF2/fl3p0qXT06dPlSxZMjtWj1cVMUrq1KlTaty4sSZPnqwaNWpIkvbu3atvvvlGFy5c0OrVq5UlSxaFhoZaX+ulZyPnFixYoG3btilXrlz2uhuIoed/KOrVq5d+//13devWTd27d5ejo6Pu3bunrVu3aseOHdq4caP1ct68ee1cOf5fe/ceVVWZ/3H8fbhJomh4QQNETU0yb4zjmOGYqKlJMyqmpiajiVdEVCS1pBQdSgUECTXxNoXlFWVEkrxkgVZWzqBljtpoalYWXhDkes7vDxd7YGz6ZRbHQ5/XWq5kn83uezies3g++3m+j8gP0W/ZIiJSLV28eJGFCxdy9OhRvv76a2rWrInZbKZbt24MGzaMnj17kpWVhZ+fH66ursycOZOGDRtW+qXVbDYrkLIx5bMnrl27hp2dHe3bt+exxx6jYcOGDBo0iIyMDPr164fJZOKvf/0rHh4ejBkzxtply8+Unp6Or68vbdq0AeDw4cPMnDmTKVOmEBgYCMCkSZNISkpi3rx5mEwm+vTpA0CjRo2wt7evFFLI3W/kyJH4+Pjw3HPPGcv2bty4wdmzZ6lVq5Zx3h/+8AcmTpzI1KlT+fTTT/H09DSWbR05coTXX3+djRs3snPnTgVSNqB81jPcXGJZHkzFxcUZ/wV46qmnaNSoEYGBgQQGBvLtt9/i5ORE3bp1rVi9iPwYNcUQEZFqqV69esybN48ePXrg5OREamoqTz31FP/+97954okn6NChA5s2bSIgIICYmBheeuklrl69Wuka6h1le0wmE9u2baNPnz74+vqSnJzMv/71L+Px3r17k5GRQUpKCmFhYXz99ddWrFbuxIcffsjMmTOZM2cO3333HQA1atSgpKSE1atXs3nzZgB69OjB5MmT8fT0ZMGCBaSnpwPqHWSLrl69SuPGjVm8eDEJCQnG8Xr16uHj48M//vGPShsV9OjRg4KCAg4fPgz85zO9VatWPPbYY2RnZ9OxY8eqfRJy2y5cuMCoUaPYv3+/caw8mAJYsmQJQ4YMIT4+njfeeMPYMRdu7sinQErk7qbftkVEpNqxWCw4OTnx+OOPM23aNEpLSwkLC2P27Nns2rWLXbt2ER0dzZEjR8jLywPg0KFDuLq6WrlyuVM5OTlMnjyZbt26MXLkSAoLC0lOTiYnJ8c4p3fv3mzZsoW33377B7eRF9vQuXNnxowZw+nTp5k1axbffPMN7dq1Y+XKlZSWlrJq1SojmHr00UcJCQmhZs2axMfHU1BQoNfeBtWpU4fw8HBmzJjB888/T3x8PADe3t488MADJCQk8M4772A2mwG4du0abm5utzSwd3FxoU+fPlquayOKioo4f/48MTExZGdnG8f/O5h68sknSUpKYuXKlUZQLSJ3P/WUEhGRaqPiLkzlCgsL2b9/P6GhoXh5ebFv3z7jsZKSEkpKSti/fz99+vTBwcHhlua5YjtOnDjBhg0bsFgszJ8/H4C3336b4OBgHn30UWbMmEHbtm2N8wsKCqhZs6a1ypU7UPG9HhcXx5YtW2jdujV//etfcXd35+jRo8yYMQOA4OBgnnzySQCysrJo2rQpnp6eVqtd7tw333zDypUrWbJkCS+++CLTp08HoFevXpw7d45u3brRqlUrdu/ezaVLl/jkk0+0FNvGnTx5ktDQUCwWC3PnzuWRRx4Bbt6EslgsxudBv379yMvLIy0tDTc3N2uWLCI/kUIpERGpFioOUrOzs7l69Squrq507twZJycndu/eTVhYGPfddx979+4FoLi4GCcnJ+Maampuu77//nsCAgI4fvw4gwYNYs2aNcZju3fvZty4cfTu3ZvJkycby3UUQNq2iu/X2NhYtm7dSuvWrVm4cCGNGjXi2LFjTJ8+HQcHB4YPH87IkSOtXLH8XOXv1Yrv2QsXLpCcnExMTAyRkZGEh4cDMHfuXHJycsjNzeX+++9n1apV2kW1mvhfwRTcvMmwYMECLl26xJw5c2jWrJkVKxWR26FQSkREqpXZs2fzxht
v0KBBA86dO4efnx8RERF06tSJt956i/DwcDw9PcnMzLR2qfILqDhIzczMJDIykvz8fOLj4/H39zfOy8zMJDAwkFGjRhEXF1cpjBTb8r/CxNjYWLZs2YKPj48RTH366aeMGTMGDw8P1q9fT+3ata1QsdyJijccLl68SGFhoRE4XL16lbi4OGJjYysFU6WlpRQXFxszIXXDofr4oWCquLiYGTNm8Morr3DkyBHat29v7TJF5DYolBIREZtWcYCalJREVFQUqampdOnShfnz5xMdHU16ejr+/v6UlJSwd+9ehg8fztNPP230IxHbU/665+fn4+TkZOyq9c477zBr1iy8vLwICQmhe/fuxvfs27cPLy8vWrZsaa2y5Q6Vv+5ZWVm8/fbblJaW8uCDDzJixAgA4uPj2bhxIz4+PsZSvuPHj+Pi4kKTJk2sXL3croqf75GRkaSmpnLp0iXc3NyYOXMmgYGB2Nvbs3jxYuLi4pg/fz5Tp079n9eQ6qFiMDVr1iwyMjJYtmyZGteL2CiFUiIiYpNycnJo164dgLEsIzg4mEaNGhEVFcWWLVsYO3Ys0dHRTJw4kRs3bhgN0A8fPkznzp21lMNGlQ8yMzIySEhI4Pr16wDExMTQuXNn9u3bx/PPP899993H1KlT6datm5Urll/Stm3bGDVqFI888giFhYVkZWUxfPhwEhMTqVOnDrGxsezYsQN3d3cSExNp2LChtUuWOxQdHU1MTAyJiYk0btyY9evX88EHHzB8+HCmTZvGjRs3WL58OZGRkWzcuNHoISbV18mTJ5k+fTrZ2dnk5+dz6NAhfH19rV2WiPwM2n1PRERszsyZMxk5cqTRG8re3p6SkhLOnz9Phw4dOHz4MKNHj+all15i4sSJlJaWsnLlSjIzM3FwcODhhx+utGuP2BaTycTOnTsZPHgwXbp0Yd68edSoUYN+/fpx7Ngx/P39mT9/Pt9++y1RUVEcPHjQ2iXLL+TMmTPMmDGDRYsWsXv3bg4cOMCBAwfYuXMnoaGhAEyfPp3HHnuMa9eu6T1u48xmM1euXCE9PZ3IyEiGDRtG9+7dWbNmDQMHDiQ5OZkPPviAevXq8Ze//IXVq1czcOBAa5ctVaBly5YsWbKEbt268cknnyiQErFhCqVERMTmhISEUKNGDaKjo9mzZw8Ajo6OdOnShTFjxuDn58fKlSuZMGECAPn5+fz973/n888/r3QdzZSyTQUFBSxbtozZs2fzwgsv4OPjw5kzZ3jyySd56KGHgJu7cEVERGAymbRsy8ZVnNRfUlKCyWTCz88PuBla+Pn5sX37djZs2MC2bdsAeO6559iwYQONGze2Ss3yyzCZTLi4uJCXl2cswSsqKgJgwYIFeHt7G8uwPT09GT16NA4ODpSWllqtZqk6DzzwAFu2bKFNmzbWLkVE7oBCKRERsSllZWV4e3uTmprKlStXiI6ONpqWjx07Fn9/fxo3bkzXrl0pLS3lq6++YtiwYVy/ft1ogiu2rbS0lPPnzzN48GByc3Pp1KkTPXv2ZMWKFQCsX7+ewsJCAgICSE1NxdPT08oVy50wmUxs2rSJ1atXU7t2bc6fP8+pU6eMx81mM507d6Zt27acOXPGOK7t4G3PRx99ZPx96dKl7NixA0dHRzw8PHjjjTcAqFGjBsXFxQC0b9+ee+6555brqKn5b0d5P0ERsV0KpURExKbY29tTWlqKp6cn27dv58qVK7z00kvs378fDw8PpkyZQsuWLfHx8aFDhw4EBARw+fJlsrKycHBw0HKeasDV1RVvb2+WL1+Or68vf/7zn1m2bBkAubm5pKSkGAPYHxqwyt2v4uyoY8eOMW7cOK5fv467uztBQUG8/PLLvPvuu9jZ2WFnZ8c999zDPffcY+zSJrbniy++oGfPnkyaNImIiAjmzJlDq1atAFi8eDFffvklgwYNAsDOzg6LxcLHH39MvXr1rFm2iIjcITU6FxERm1BxW/CKzp49y6BBg6hduzbz58/nj3/8IwUFBfz9738nLy8Pd3d3Hn/8cSPM0h1021L+uhcWFgLg7OwM3BykxsbG0qpVKw4cOGCcP2fOHHbs2MGuXbvw9va2Ss3y8/zQe/zYsWNs3ryZwsJCXn75ZQDeffddli5davSX8vT0JD09ndWrV/Phhx9y//33W6N8uUP5+flkZGQwatQoHBwcyMnJoWnTpsZSvN27dzNx4kScnZ1p1qwZV69e5dq1a+Tk5OhzXUTEhimUEhGRu17FLb23b9/O2bNn8fLy4sEHH6R169aVgqm5c+fSs2fPW65RvkOf3P0OHTpE69atuffeewFIS0tjzZo1XLp0ibFjxzJy5EgKCgqYMmUKR48epX379rRt25acnBzS0tLYv38/HTp0sO6TkNtSHkhduHCBrKwsysrKqF27Nm+++Sa7d+8mMDCQlStXGudnZWXx5ptvsnr1apo1a4aDgwPr16/XdvA2qGIYmZGRQWBgIM7OzsaOihVdunSJ+Ph4iouLqV27NrNnzzZ6SCmYEhGxTQqlRETkrlYxkAoPD2fNmjW4u7tjMpm4evUqq1evpm/fvpw9e5bAwEDuvfdeQkNDeeKJJ6xcudyu8uU4nTt3JioqioiICD744AP69evHiBEjyM/PZ8OGDYSEhLBgwQJKSkpYu3YtaWlpmM1mmjdvTkREhJre2pjyUCInJ4eBAwfi7OzMyZMnadeuHR4eHpSUlHDs2DHS0tJuCRu/+uor4OYyzfIQU2xHxUDq7NmzODo6Yjabyc7OZurUqfzpT3/i1Vdf/dFr6IaDiIhtUyglIiJ3rYqB1MGDB5k1axZLliyhffv2/Otf/yIxMZG1a9eSkZFBz549OXfuHI888ggDBgwgISHBytXL7aj4Wi9btoywsDAWL16MyWTCZDIRFhYGwKZNmwgODmbUqFG8+OKLRj8Zs9mMxWLR4NTGVAykHn74YUJCQpg6dSofffQRSUlJ5OXlMWDAANLS0nBzcyMqKop27dphsVgwm816vW1YxUDqhRde4MMPP2Tu3Ll07dqVa9eusW3bNmbNmsXAgQNZvnw5cPPGRI8ePejfv3+lzwwREbFdCqVEROSut2HDBnbs2EFhYSFbt241lml8++23hIeHc/r0aVJTU2nYsCGXLl3Czc1Ng1UbUj44/frrrzl//jze3t5kZmby9NNP4+HhQXh4OFOnTjXO37hxI8HBwYwdO5bJkyerh5CNO3fuHL6+vvTo0YNNmzYZx1esWMHs2bP55z//ySeffEJiYiK1atUiKiqKtm3bWrFiuRP/3Tts9uzZrFu3joSEBLp3707Dhg0BKCgoYPPmzUyfPp2HHnoIR0dHTp06xalTp7RUT0SkGtEnuoiI3PXef/999u7di4uLC9euXcPNzQ2LxULDhg3p168fM2fO5MaNGwA0aNAA0JIOW1E+QP3ss88YN24cNWvWpFatWmzbto2CggLGjx/P0aNHuXz5srE8a+jQodjb2zNkyBCcnZ2ZP3++Bqk2rKysjGbNmlFUVERWVhZ+fn4A3H///ZhMJvLz8xkwYABFRUWsWb
OGqVOnsmzZMi3TtEFffvklTZo0Mb7Ozs4mJSWFrVu30rVrV4qLi7lw4QLHjx+nRYsWBAUF4eXlxapVq7j33nvJyMgwdlHV57uISPWg3+BEROSu8kM7cCUkJNCwYUNWrFhh9Bpq3LgxAK1bt8bJyYnLly9X2m1NA5a7n8Viwc7Ojk8//RQ/Pz8mTZrE+PHjjdc2ODiYkpISQkJCaNGiBRMnTqROnToADB48mK1bt/Lggw8qkLJxTZs2JSUlhdDQUKKioli6dCleXl6MGDGC4OBgfHx8gJthZFFREVu3bjX+HYjtmDlzJjk5OezevdtYeldUVES9evXw8PDgo48+YtOmTaSmpnLjxg2aNWtGYmIi/v7++Pv7G9dRU3MRkepFy/dEROSuUTGQOnLkCM7OzgDGoPS5554jPT2dtm3bEhYWhtlsZu7cuVy+fJlDhw7dEmbJ3S83N5c///nP+Pr6Eh8fbxyvOPBMSEggLCyMhQsXMnnyZFxdXa1VrvyKTp48ydSpUykoKCAnJ4egoCDi4uIAKCkpwdHREYC8vDxq165tzVLlZzh9+jQeHh44Ozvz/fffU69ePT777DM6depEp06dOHLkCMOGDaNXr164u7szevRoXnnlFR5//HHjGuojJSJS/eg2g4iI3BXKZ83AzWa2GzdupLCwEBcXF4YMGcKiRYtYuHAhJpOJpKQk0tLS8Pf3x8vLix07dmBnZ/eDs6zk7vb1119z8eJFAgMDK71+Dg4OmM1mTCYToaGhmEwmpk2bRn5+PhEREQqmqqGWLVsSHx/PhAkTcHV1ZeDAgcZjDg4ORiChQMr27Nmzh9/97nc4OzuTkpJCcHAwhw8fpk2bNhw8eJB9+/bx7LPP0r17d2rVqkVJSQlubm4UFRVVuo4CKRGR6kczpURExKrKg4fywUZmZibBwcGsX78es9nM6dOnmTZtGkOGDGHNmjUAREVFsXHjRgYNGsTkyZNxd3enuLgYJycnaz4V+Rk2bNhAUFAQxcXFmEymHwwWCwoKyMvLY+fOnYSHh3Pq1Clj1z2pfk6dOsWUKVOwWCzMnTuXRx55xNolyR3Iy8ujS5cuFBQUcOTIEb755htCQkI4deoUO3fupE2bNsZMuKKiIvLz8xkxYgS5ubkcPHhQS7FFRKo53U4WERGrsrOzMwKp7du3s2HDBoYPH86jjz6Kv78/wcHBpKamkpKSwpIlSwCYO3cuAQEBpKenk5SUxMWLFxVI2aimTZvi4ODAtm3bAH5wptuqVat4+umneeaZZzh9+rQCqWquRYsWJCQk4OjoSHh4OO+//761S5I7ULt2bTZv3kz9+vXp3r077u7uLF++nAceeIC+ffty4sQJHB0dKSwsZPHixfTv358rV66QlZWFvb09ZWVl1n4KIiLyK1IoJSIiVjFy5EgWLVoE3Fy6d+rUKZYuXcqOHTu4fPmycV5ZWRm9e/dmwoQJ7N+/n+vXrwPw0ksv0b9/f1577TXWrl2L2Wy2yvOQO+Pt7Y2rqyt/+9vfOHv2rHG84kTuc+fO0aFDB8xms7EDn1RvLVu2ZPHixXh6enLfffdZuxz5mco/l1u1asWWLVtwcnKif//+1K9fn1deeYU2bdrQu3dvTpw4gbOzM/3792fAgAFkZWXh6OhIaWmpZkqJiFRzCqVERKTK5eXl0bt3b6ZNmwbc7BPSokUL5syZQ6dOndi+fTtvvfUW8J9d9Nzc3MjNzaVGjRrGQGf+/PmMGTOGp556Sr2kbJSHhwfLly9n9+7dzJ07l88++wy4+W+ioKCAOXPmsGXLFsaOHVtpVp1Uf61btyYlJYUmTZpYuxS5Td9//z1wc+ZjcXExDg4OeHt7U79+fQ4dOoS/vz/16tUzgqm+ffuSk5NDx44defbZZ40ZUtplT0Sk+lNPKRERsapXX32VgwcPsm7dOgD27t1LTEwM+fn5PPvsszz++OPk5uYSGBhI/fr12bRpEyaTibKyMt1BrybMZjOrVq0iJCSEFi1a8PDDD+Ps7MyFCxd4//33eeutt+jYsaO1yxSRn+C9994jMjKSefPm8cc//tE4/uSTT3LixAni4uKIiIigtLSUAwcOkJuby9ChQ/H09CQ1NVU77ImI/MbotrKIiFSpiv1Bzp8/z8WLF/nggw8ICwsDoGfPnoSGhuLk5MSAAQP43e9+x6RJkygsLOT111/HZDJhsVgUSFUjdnZ2jB8/nuzsbB566CGOHDnCsWPH8PHxISsrS4GUiA1p2LAhFouFl19+mY8//hiAwYMHc/z4cdLT0+nZsyevv/469vb2+Pv7U7duXbZv387WrVsB7bAnIvJbo5lSIiJSZfLz83FxcQEgJCSEe+65h4kTJ5KamsratWvp0aMHy5YtA2Dfvn1ER0eTm5vL6NGjCQkJAdAue9WcZsCJ2L6TJ08SGhqKvb09V69eJT8/n23bttG0aVPjnM8//5yePXvSq1cv1q9fD/CDu2+KiEj1pk99ERGpEuvXrycuLg64OWDZs2cPf/rTn2jevDljxowhKCiI/fv3M2XKFAD8/f2ZPn06Xl5epKWl8d577wEokKrmKg5Idd9MxDa1bNmShIQEioqKOHr0KLNnzzYCqfKegK1bt+bgwYOsWbPG+D4FUiIivz2aKSUiIr+6V199lQkTJvDuu+9y+PBhjh8/jr29Pa+88ooxCMnNzWX16tW89tpr9OjRg/j4eADefvttEhMTuXjxIjExMXTr1s2aT0VERH6i06dPM3nyZOzs7JgzZw5+fn7ArTOiNENSROS3S7cjRETkV/Xaa68REhLCzp078fPz46uvviI5OZmjR48avUPMZjNubm4888wzBAUF8cYbbxizqnr37s348eNp3rw53t7e1nwqIiJyG+6//36WLVuGxWJh4cKFZGdnA7fOiFIgJSLy26WZUiIi8qtZt24dY8aMoVevXmRmZgLw3XffkZyczJw5c1ixYgXjxo3DYrFgsViws7Pju+++IzMzk6FDh1YaqBQUFFCzZk1rPRUREfmZTp48ybRp0/jmm29YvXo17dq1s3ZJIiJyl1AoJSIiv4pVq1YxYcIExowZw65duxg0aJDRxPzKlSvExsayYMEC1q9fz9NPP10pmCpXVlaGnZ2ddmMSEbFxx48fJzk5mcWLF6t3lIiIGBysXYCIiFQ/S5cuZfr06aSnp9OvXz9WrlzJ888/j8lkIiEhgbp16zJjxgwsFgujR4/GZDIxcuTIW8InLekQEakefHx8iImJAbTLnoiI/IdCKRER+cV17NiRDRs20K9fPwCGDRuGyWTiueeeAyAhIYE6deoQHh6OnZ0do0aNon79+vTt29eaZYuISBVQICUiIuUUSomIyC+ue/fuAFgsFkwmE3Xq1GHYsGEAtwRTYWFheHl50atXL6vVKyIiIiIiVU89pUREpMpcu3aNN998k+eff57hw4ezdOnSSo+Xlpbi4KD7JSIiIiIivwX6zV9ERKqMq6ursZRv/PjxNG3alLCwMONxBVIiIiIiIr8dmiklIiJV7sqVKxw4cICAgAA1MxcRERER+Y1SKCUiIlalJ
XsiIiIiIr9NCqVERERERERERKTKaT9WERERERERERGpcgqlRERERERERESkyimUEhERERERERGRKqdQSkREREREREREqpxCKRERERERERERqXIKpUREREREREREpMoplBIRERERERERkSqnUEpERERERERERKqcQikREREREREREalyCqVEREREfqK//OUvmEymW/6cOnXqjq+9bt066tate+dFioiIiNgIB2sXICIiImJL+vbty9q1aysda9CggZWq+WElJSU4OjpauwwRERGRH6WZUiIiIiK3oUaNGjRq1KjSH3t7e3bs2IGvry/Ozs40b96cefPmUVpaanxfbGwsbdu2xcXFBS8vLyZNmsT169cBeOeddxg9ejRXr141Zl+9+OKLAJhMJrZv316phrp167Ju3ToAzpw5g8lkYuPGjXTv3h1nZ2dSUlIASE5OxsfHB2dnZ1q3bk1SUpJxjeLiYkJCQmjcuDHOzs54e3sTHR396/3gRERERP6LZkqJiIiI3KH33nuPUaNGkZCQQLdu3Th9+jTjxo0D4IUXXgDAzs6OhIQEmjVrxhdffMGkSZOIiIggKSmJrl27snTpUiIjIzlx4gQAtWrVuq0aZs2aRUxMDB07djSCqcjISBITE+nYsSNHjhwhODgYFxcXgoKCSEhIIC0tjU2bNtGkSRPOnTvHuXPnftkfjIiIiMiPUCglIiIicht27txZKTDq168fly9fZtasWQQFBQHQvHlzoqKiiIiIMEKpsLAw43uaNm3KggULmDBhAklJSTg5OVGnTh1MJhONGjX6WXWFhYUxaNAg4+sXXniBmJgY41izZs347LPPWLlyJUFBQXz55Ze0bNkSPz8/TCYT3t7eP+v/KyIiIvJzKZQSERERuQ09evRg+fLlxtcuLi60a9eO7OxsFi5caBwvKyujsLCQgoICatasyZ49e4iOjubzzz/n2rVrlJaWVnr8TnXq1Mn4e35+PqdPn+aZZ54hODjYOF5aWkqdOnWAm03be/fuzQMPPEDfvn0JCAjgscceu+M6RERERH4qhVIiIiIit8HFxYUWLVpUOnb9+nXmzZtXaaZSOWdnZ86cOUNAQAATJ05k4cKFuLm5kZWVxTPPPENxcfGPhlImkwmLxVLpWElJyQ/WVbEegFWrVvGHP/yh0nn29vYA+Pr68u9//5uMjAz27NnDkCFD6NWrF1u2bPl/fgIiIiIivwyFUiIiIiJ3yNfXlxMnTtwSVpX7+OOPMZvNxMTEYGd3c5+ZTZs2VTrHycmJsrKyW763QYMGXLx40fj65MmTFBQU/Gg97u7u3HfffXzxxReMGDHif57n6urK0KFDGTp0KIMHD6Zv377k5ubi5ub2o9cXERER+SUolBIRERG5Q5GRkQQEBNCkSRMGDx6MnZ0d//znPzl27BgLFiygRYsWlJSUsGzZMp544gmys7NZsWJFpWs0bdqU69evs3fvXtq3b0/NmjWpWbMm/v7+JCYm8vDDD1NWVsazzz6Lo6Pj/1vTvHnzCA0NpU6dOvTt25eioiI++ugjLl++zPTp04mNjaVx48Z07NgROzs7Nm/eTKNGjahbt+6v9FMSERERqczO2gWIiIiI2Lo+ffqwc+dOMjMz+f3vf0+XLl2Ii4szmoe3b9+e2NhYXn75ZR566CFSUlKIjo6udI2uXbsyYcIEhg4dSoMGDVi0aBEAMTExeHl50a1bN4YPH054ePhP6kE1duxYkpOTWbt2LW3btqV79+6sW7eOZs2aAVC7dm0WLVpEp06d+P3vf8+ZM2fYtWuXMZNLRERE5Ndmsvx3kwIREREREREREZFfmW6FiYiIiIiIiIhIlVMoJSIiIiIiIiIiVU6hlIiIiIiIiIiIVDmFUiIiIiIiIiIiUuUUSomIiIiIiIiISJVTKCUiIiIiIiIiIlVOoZSIiIiIiIiIiFQ5hVIiIiIiIiIiIlLlFEqJiIiIiIiIiEiVUyglIiIiIiIiIiJVTqGUiIiIiIiIiIhUOYVSIiIiIiIiIiJS5f4Pe4mdyUacKNYAAAAASUVORK5CYII=",
+      "text/plain": [
+       "<Figure size 1200x600 with 1 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "=== Overall Missing Data Summary ===\n",
+      "Total number of missing values: 105\n",
+      "Total number of cells in dataset: 48400\n",
+      "Percentage of missing data: 0.22%\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "# Replace 999 with NaN for further analysis\n",
+    "df.replace(999, np.nan, inplace=True)\n",
+    "\n",
+    "# Count missing (999) values in each column\n",
+    "missing_counts = (df.isna().sum())\n",
+    "\n",
+    "# Filter out columns with no missing values\n",
+    "missing_counts = missing_counts[missing_counts > 0]\n",
+    "\n",
+    "# Display feature names and missing value counts\n",
+    "print(\"\\n=== Features with Missing Values ===\")\n",
+    "print(missing_counts)\n",
+    "\n",
+    "# Visualize the distribution of missing values\n",
+    "plt.figure(figsize=(12, 6))\n",
+    "missing_counts.plot(kind='bar', color='skyblue', edgecolor='black')\n",
+    "plt.title('Number of Missing Values by Feature')\n",
+    "plt.xlabel('Features')\n",
+    "plt.ylabel('Count of Missing Values')\n",
+    "plt.xticks(rotation=45, ha='right')\n",
+    "plt.tight_layout()\n",
+    "plt.show()\n",
+    "\n",
+    "# Summary statistics\n",
+    "total_missing = missing_counts.sum()\n",
+    "total_cells = df.size\n",
+    "missing_percentage = (total_missing / total_cells) * 100\n",
+    "\n",
+    "print(\"\\n=== Overall Missing Data Summary ===\")\n",
+    "print(f\"Total number of missing values: {total_missing}\")\n",
+    "print(f\"Total number of cells in dataset: {total_cells}\")\n",
+    "print(f\"Percentage of missing data: {missing_percentage:.2f}%\")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "68cedf35-6386-426c-ac64-1127dd4c9935",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "PCR                0\n",
+      "PgR                0\n",
+      "HER2               0\n",
+      "TrippleNegative    0\n",
+      "ChemoGrade         0\n",
+      "Proliferation      0\n",
+      "HistologyType      0\n",
+      "LNStatus           0\n",
+      "Gene               0\n",
+      "dtype: int64\n"
+     ]
+    }
+   ],
+   "source": [
+    "import pandas as pd\n",
+    "\n",
+    "# List of features you want to modify\n",
+    "features_to_modify = [\n",
+    "    \"PCR\", \"PgR\", \"HER2\", \"TrippleNegative\", \"ChemoGrade\", \n",
+    "    \"Proliferation\", \"HistologyType\", \"LNStatus\", \"Gene\"\n",
+    "]\n",
+    "\n",
+    "# Loop through the columns and replace NaN values with the mode of each column\n",
+    "for feature in features_to_modify:\n",
+    "    mode_value = df[feature].mode()[0]  # Get the mode of the feature\n",
+    "    df[feature].fillna(mode_value, inplace=True)  # Replace NaN with the mode\n",
+    "\n",
+    "# Check the changes\n",
+    "print(df[features_to_modify].isna().sum())  # Check how many NaN values are left\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "id": "995b86b2-4475-4b94-8a7e-044bebaf8556",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "df = df.drop('ID', axis=1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "id": "2f8b60cc-c014-4424-ba37-d7705812d8bc",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Number of outliers: 87\n",
+      "Number of outliers: PCR                                 65.000000\n",
+      "RelapseFreeSurvival (outcome)    17185.083333\n",
+      "Age                              16080.576595\n",
+      "ER                                 166.000000\n",
+      "PgR                                126.000000\n",
+      "                                     ...     \n",
+      "original_ngtdm_Busyness          36656.435955\n",
+      "original_ngtdm_Coarseness            4.608712\n",
+      "original_ngtdm_Complexity           18.424557\n",
+      "original_ngtdm_Contrast              1.797244\n",
+      "original_ngtdm_Strength              4.089319\n",
+      "Length: 120, dtype: float64\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Calculate Z-scores for the entire dataset\n",
+    "z_scores = np.abs((df - df.mean()) / df.std())\n",
+    "\n",
+    "# Identify rows that have z-scores above the threshold (3 in this case)\n",
+    "outliers = (z_scores > 3).any(axis=1)\n",
+    "\n",
+    "# Print the number of outliers\n",
+    "print(f\"Number of outliers: {outliers.sum()}\")\n",
+    "\n",
+    "# Remove outliers from the dataset\n",
+    "df_no_outliers = df[~outliers]\n",
+    "\n",
+    "print(f\"Number of outliers: {df_no_outliers.sum()}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "37f55653-9445-404e-8730-c1c93c45e00f",
+   "metadata": {},
+   "source": [
+    "### Feature Selection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "id": "70899e82-33c6-495d-baaf-e8d6d5b4afa1",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "correlated features:  82\n"
+     ]
+    }
+   ],
+   "source": [
+    "df1 = df.copy()\n",
+    "# checking for correlated features of dataset\n",
+    "def correlation(data, threshold):\n",
+    "    col_corr = {}  # Dictionary to store correlated features\n",
+    "    corr_matrix = data.corr()\n",
+    "    for i in range(len(corr_matrix.columns)):\n",
+    "        for j in range(i):\n",
+    "            if abs(corr_matrix.iloc[i, j]) > threshold:  # We are interested in absolute coefficient value\n",
+    "                colname = corr_matrix.columns[i]\n",
+    "                if colname not in col_corr:\n",
+    "                    col_corr[colname] = set()\n",
+    "                col_corr[colname].add(corr_matrix.columns[j])\n",
+    "\n",
+    "    return col_corr\n",
+    "\n",
+    "corr_features = correlation(df1, 0.8)\n",
+    "print('correlated features: ', len(corr_features))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "id": "e8695d75-11cd-4f48-b301-d019ce147584",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(400, 38)"
+      ]
+     },
+     "execution_count": 11,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# removing the correlated features\n",
+    "df_corr= df1.drop(labels=corr_features, axis=1)\n",
+    "df_corr.shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "id": "164fcddb-b51c-41b7-b23a-a4f6c7725c84",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjsAAAHHCAYAAABZbpmkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/GU6VOAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA2/0lEQVR4nO3deVyVZf7/8fcBATcWURZJxH3BcWnUiK+5E6hoOuq3bCzJcZkMLaVpHKfGrSnLMs29vo/MyclyaiadLHHBbZrIXMZcUkdNR00BkwQxBYXr90c/znQCXPDowavX8/G4Hw/u677OdX/uA8jb616OwxhjBAAAYCkvTxcAAABwMxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYASZMnT5bD4bgl++rSpYu6dOniXN+4caMcDofef//9W7L/Rx55RPXq1bsl+yqvvLw8DR8+XOHh4XI4HBo7dqynS/pJO3r0qBwOhxYvXuzpUoByIezAOosXL5bD4XAulStXVkREhBISEjR79mydO3fOLfs5efKkJk+erJ07d7plPHeqyLVdi+eff16LFy/WqFGjtGTJEj388MNX7H/p0iXNnj1b7du3l7+/v6pXr6727dtr9uzZunTpUrnr+PTTTzV58mSdPXu23GOgdF26dHH5PQ0ODlb79u21aNEiFRUVlei/ceNG9e/fX+Hh4fL19VVoaKj69Omjv/3tb84+xaGsePHy8lJwcLB69uyp9PT0W3l4qGgMYJk333zTSDJTp041S5YsMYsWLTLPP/+8iY+PNw6Hw0RFRZkvvvjC5TWXLl0yFy5cuK79bN261Ugyb7755nW9Lj8/3+Tn5zvXN2zYYCSZ995777rGKW9tBQUF5uLFi27b180QExNjOnTocE198/LyTOfOnY0k07t3bzN37lwzf/58c9999xlJpnPnziYvL69cdbz00ktGkjly5Ei5Xm+LoqIic+HCBXP58mW3jdm5c2dTp04ds2TJErNkyRLzyiuvmDZt2hhJZvz48S59J06caCSZxo0bm4kTJ5o33njDTJ8+3XTp0sVIMm+//bYxxpgjR44YSebBBx80S5YsMYsXLza///3vTVBQkPHz8zO7du1yW/24vRB2YJ3isLN169YS29LS0kyVKlVMVFSU+e67725oP9cbds6fP19q+60OO7eD+vXrm8TExGvqO3LkSCPJzJkzp8S2uXPnGknm0UcfLVcdhJ2bp3PnzqZFixYubefPnzd16tQx1apVMwUFBcYYY9577z0jyQwcONDZ9kOpqanmww8/NMb8N+y89NJLLn1WrVplJJlRo0bdpKNBRUfYgXWuFHaMMeb55583kszrr7/ubJs0aZL58UTnmjVrTIcOHUxgYKCpVq2aadKkiZkwYYIx5r8B5cdLcbgo/od827ZtpmPHjqZKlSrmiSeecG7r3Lmzcz/FY7377rtmwoQJJiwszFStWtX06dPHHDt2zKWmqKgok5SUVOKYfjjm1WpLSkoyUVFRLq/Py8szKSkppk6dOsbX19c0adLEvPTSS6aoqMilnySTnJxsPvjgA9OiRQvj6+troqOjzapVq0p9r38sMzPT/OpXvzKhoaHGz8/PtGrVyixevLjEe/Hjpaywcfz4cePt7W26detW5j67du1qKlWqZI4fP26M+e8fxNKCoCQzadIkY8x/fyauVMuSJUtM+/btTZUqVUxQUJDp2LGjWb16tcuY8+bNM9HR0cbX19fUrl3bPPbYY+bbb7916VP88/LFF1+YTp06mSpVqpiGDRs6A/DGjRvNXXfdZSpXrmyaNGli1q5dW6L2EydOmKFDh5rQ0FDn9+WNN94o0W/27NkmOjraWXPbtm2dMyNlKe09S0pKMtWqVTMnTpwwffv2NdWqVTO1atUyTz755DXNAJUWdowxZuDAgUaS+frrr40xxjRr1swEBweb3Nzcq45ZVtjJy8szkkx8fPxVx4CduGYHPznF13+sWbOmzD579+5V7969lZ+fr6lTp2rGjBm677779M9//lOS1Lx5c02dOlWSNHLkSC1ZskRLlixRp06dnGOcOXNGPXv2VJs2bTRr1ix17dr1inU999xz+uijjzR+/Hg9/vjjWrt2reLi4nThwoXrOr5rqe2HjDG67777NHPmTPXo0UOvvPKKmjZtqqeeekopKSkl+n/yySd67LHHNGjQIE2fPl0XL17UgAEDdObMmSvWdeHCBXXp0kVLlizR4MGD9dJLLykwMFCPPPKIXn31VWftS5YsUa1atdSmTRtn7SEhIaWOuWrVKhUWFmrIkCFl7nfIkCG6fPmyUlNTr1jfj/Xv318PPvigJGnmzJklapkyZYoefvhh+fj4aOrUqZoyZYoiIyO1fv165xiTJ09WcnKyIiIiNGPGDA0YMECvvfaa4uPjS1xL9O2336p3796KiYnR9OnT5efnp0GDBmnZsmUaNGiQevXqpRdeeEHnz5/XwIEDXa49y8zM1N13361169Zp9OjRevXVV9WoUSMNGzZMs2bNcvb7v//7Pz3++OOKjo7WrFmzNGXKFLVp00Zbtmy5rvemWGFhoRISElSzZk29/PLL6ty5s2bMmKHXX3+9XONJ0ldffSVvb28FBQXp4MGD2r9/v/r16yd/f/9yj3n06FFJUo0aNco9Bm5znk5bgLtdbWbHGGMCAwPNnXfe6Vz/8czOzJkzjSRz+vTpMse40qmi4mtIFi5cWOq20mZ27rjjDpf/vf7lL38xksyrr77qbLuWmZ2r1fbjmZ3ly5cbSeaPf/yjS7+BAwcah8NhDh065GyTZHx9fV3avvjiizJPI/3QrFmzjCTz5z//2dlWUFBgYmNjTfXq1V2OPSoq6ppOY40dO9ZIMv/617/K7LNjxw4jyaSkpBhjrn1mx5iyT2MdPHjQeHl5mV/84hemsLDQZVvxbFhWVpbx9fU18fHxLn2KT60tWrTI2Vb887J06VJn2/79+40k4+XlZT777DNn++rVq0vUP2zYMFO7dm3zzTffuNQyaNAgExgY6Dxl27dv31JnU66mrJkd/f9r437ozjvvNG3btr3qmJ07dzbNmjUzp0+fNqdPnzb79u0zjz/+uJFk+vTpY4wxZsWKFUaSmTlz5nXVOWXKFHP69GmTkZFh/vGPf5j27du7/VQxbi/M7OAnqXr16le8KysoKEiStGLFilLvDLkWfn5+Gjp06DX3HzJkiMv/XgcOHKjatWvr448/Ltf+r9XHH38sb29vPf744y7tTz75pIwxWrVqlUt7XFycGjZs6Fxv1aqVAgIC9NVXX111P+Hh4c7ZEkny8fHR448/rry8PG3atOm6ay/+Hl7pf/3F23Jzc697/LIsX75cRUVFmjhxory8XP8ZLX6Ewbp161RQUKCxY8e69BkxYoQCAgL00UcfubyuevXqGjRokHO9adOmCgoKUvPmzRUTE+NsL/66+P02xuivf/2r+vTpI2OMvvnmG+eSkJCgnJwc7dixQ9L3P9cnTpzQ1q1b3f
ZePProoy7rHTt2vOrPQrH9+/crJCREISEhat68uebMmaPExEQtWrRI0n+/Z9c7qzNp0iSFhIQoPDxcHTt21L59+zRjxgwNHDjwusaBPQg7+EnKy8u74j+gDzzwgDp06KDhw4crLCxMgwYN0l/+8pfrCj533HGHfH19r7l/48aNXdYdDocaNWrknIK/Wf7zn/8oIiKixPvRvHlz5/Yfqlu3bokxatSooW+//faq+2ncuHGJcFDWfq5Fcc1XCq7XEoiu1+HDh+Xl5aXo6Ogy+xQfT9OmTV3afX191aBBgxLHW6dOnRLPegoMDFRkZGSJNknO9/v06dM6e/asXn/9dWdwKF6Kw3ZWVpYkafz48apevbruuusuNW7cWMnJyc5Ts+VRuXLlEqcYr+VnoVi9evW0du1arVu3Tp988okyMjK0cuVK1apVS5IUEBAg6crf39KMHDlSa9eu1Ycffqhx48bpwoULKiwsvK4xYJdKni4AuNVOnDihnJwcNWrUqMw+VapU0ebNm7VhwwZ99NFHSk1N1bJly9StWzetWbNG3t7eV91PlSpV3Fm2JJX54MPCwsJrqskdytqPMeaW7P+HioPSrl271KZNm1L77Nq1S5KcweRK76EnlfW+Xu39Lg7gDz30kJKSkkrt26pVK0nfv18HDhzQypUrlZqaqr/+9a+aP3++Jk6cqClTprit5mtVrVo1xcXFlbm9WbNmkqTdu3df17iNGzd2jtu7d295e3vrd7/7nbp27ap27dqVv2DctpjZwU/OkiVLJEkJCQlX7Ofl5aXu3bvrlVde0ZdffqnnnntO69ev14YNGySV/UezvA4ePOiybozRoUOHXJ52XKNGjVIfcPfjWYLrqS0qKkonT54s8b/n/fv3O7e7Q1RUlA4ePFhiduxG9tOzZ095e3s7v6eleeutt1SpUiX16NFD0n8vUv3x+1jazFJZ72PDhg1VVFSkL7/8ssz9Fh/PgQMHXNoLCgp05MgRt72vISEh8vf3V2FhoeLi4kpdQkNDnf2rVaumBx54QG+++aaOHTumxMREPffcc7p48aJb6nGnJk2aqGnTplqxYoXy8vLKPc7TTz8tf39/PfPMM26sDrcTwg5+UtavX69nn31W9evX1+DBg8vsl52dXaKteOYgPz9f0vd/NKSSfzTL66233nIJHO+//75OnTqlnj17OtsaNmyozz77TAUFBc62lStX6vjx4y5jXU9tvXr1UmFhoebOnevSPnPmTDkcDpf934hevXopIyNDy5Ytc7ZdvnxZc+bMUfXq1dW5c+frHjMyMlJDhw7VunXrtGDBghLbFy5cqPXr12vYsGGqU6eOpO9PjdSqVUubN2926Tt//vwSry/rfezXr5+8vLw0derUEuGteMYlLi5Ovr6+mj17tsus1xtvvKGcnBwlJiZe9/GWxtvbWwMGDNBf//pX7dmzp8T206dPO7/+8R1zvr6+io6OljHmhp40fTNNmTJFZ86c0fDhw3X58uUS29esWaOVK1decYygoCD9+te/1urVq2/bp4rjxnAaC9ZatWqV9u/fr8uXLyszM1Pr16/X2rVrFRUVpb///e+qXLlyma+dOnWqNm/erMTEREVFRSkrK0vz589XnTp1dM8990j6PngEBQVp4cKF8vf3V7Vq1RQTE6P69euXq97g4GDdc889Gjp0qDIzMzVr1iw1atRII0aMcPYZPny43n//ffXo0UP333+/Dh8+rD//+c8uFwxfb219+vRR165d9fTTT+vo0aNq3bq11qxZoxUrVmjs2LElxi6vkSNH6rXXXtMjjzyi7du3q169enr//ff1z3/+U7NmzSr3NTUzZ87U/v379dhjjyk1NdU5g7N69WqtWLHCeTv0Dw0fPlwvvPCChg8frnbt2mnz5s3697//XWLstm3bSvp+ZmDQoEHy8fFRnz591KhRIz399NN69tln1bFjR/Xv319+fn7aunWrIiIiNG3aNIWEhGjChAmaMmWKevToofvuu08HDhzQ/Pnz1b59ez300EPlOt7SvPDCC9qwYYNiYmI0YsQIRUdHKzs7Wzt27NC6deuc4T0+Pl7h4eHq0KGDwsLCtG/fPs2dO1eJiYluvabJnR544AHt3r1bzz33nP71r3/pwQcfVFRUlM6cOaPU1FSlpaVp6dKlVx3niSee0KxZs/TCCy/o3XffvQWVo0Lx2H1gwE1SfOt58eLr62vCw8PNvffea1599dVSH07241vP09LSTN++fU1ERITx9fU1ERER5sEHHzT//ve/XV63YsUKEx0dbSpVqlTqQwVLU9at5++8846ZMGGCCQ0NNVWqVDGJiYnmP//5T4nXz5gxw9xxxx3Gz8/PdOjQwWzbtq3EmFeqrbSHCp47d86MGzfOREREGB8fH9O4ceMrPlTwx8q6Jf7HMjMzzdChQ02tWrWMr6+vadmyZam3gF/rrefF8vPzzcyZM03btm1NtWrVTNWqVc3Pf/5zM2vWrFKfuvvdd9+ZYcOGmcDAQOPv72/uv/9+k5WVVeLWc2OMefbZZ80dd9xhvLy8StyGvmjRInPnnXcaPz8/U6NGDdO5c+cSD/ybO3euadasmfHx8TFhYWFm1KhRZT5U8Frfh9K+D5mZmSY5OdlERkYaHx8fEx4ebrp37+7y8MzXXnvNdOrUydSsWdP4+fmZhg0bmqeeesrk5OSU9dYaY678UMEfK+0BnaW50u9IaYp/J0NDQ02lSpVMSEiI6dOnj1mxYkWJOn/8UMFijzzyiPH29nZ5dAJ+GhzGeOCqQgAAgFuEa3YAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKzGQwX1/WfLnDx5Uv7+/m7/CAAAAHBzGGN07tw5RURElPiQ4R8i7Eg6efJkiU8WBgAAt4fjx487PxKmNIQdyfmY9OPHjysgIMDD1QAAgGuRm5uryMjIq37cCWFH//1k44CAAMIOAAC3matdgsIFygAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrVfJ0Abar97uPPF0CUKEdfSHR0yUAsBwzOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYzaNhZ8GCBWrVqpUCAgIUEBCg2NhYrVq1yrn94sWLSk5OVs2aNVW9enUNGDBAmZmZLmMcO3ZMiYmJqlq1qkJDQ/XUU0/p8uXLt/pQAABABeXRsFOnTh298MIL2r59u7Zt26Zu3bqpb9++2rt3ryRp3Lhx+vDDD/Xee+9p06ZNOnnypPr37+98fWFhoRITE1VQUKBPP/1Uf/rTn7R48WJNnDjRU4cEAAAqGIcxxni6iB8KDg7WSy+9pIEDByokJERLly7VwIEDJUn79+9X8+bNlZ6errvvvlurVq1S7969dfLkSYWFhUmSFi5cqPHjx+v06dPy9fW9pn3m5uYqM
DBQOTk5CggIcOvx1PvdR24dD7DN0RcSPV0CgNvUtf79rjDX7BQWFurdd9/V+fPnFRsbq+3bt+vSpUuKi4tz9mnWrJnq1q2r9PR0SVJ6erpatmzpDDqSlJCQoNzcXOfsEAAA+Gmr5OkCdu/erdjYWF28eFHVq1fXBx98oOjoaO3cuVO+vr4KCgpy6R8WFqaMjAxJUkZGhkvQKd5evK0s+fn5ys/Pd67n5ua66WgAAEBF4/GZnaZNm2rnzp3asmWLRo0apaSkJH355Zc3dZ/Tpk1TYGCgc4mMjLyp+wMAAJ7j8bDj6+urRo0aqW3btpo2bZpat26tV199VeHh4SooKNDZs2dd+mdmZio8PFySFB4eXuLurOL14j6lmTBhgnJycpzL8ePH3XtQAACgwvB42PmxoqIi5efnq23btvLx8VFaWppz24EDB3Ts2DHFxsZKkmJjY7V7925lZWU5+6xdu1YBAQGKjo4ucx9+fn7O292LFwAAYCePXrMzYcIE9ezZU3Xr1tW5c+e0dOlSbdy4UatXr1ZgYKCGDRumlJQUBQcHKyAgQGPGjFFsbKzuvvtuSVJ8fLyio6P18MMPa/r06crIyNAzzzyj5ORk+fn5efLQAABABeHRsJOVlaUhQ4bo1KlTCgwMVKtWrbR69Wrde++9kqSZM2fKy8tLAwYMUH5+vhISEjR//nzn6729vbVy5UqNGjVKsbGxqlatmpKSkjR16lRPHRIAAKhgKtxzdjyB5+wAnsNzdgCU1233nB0AAICbgbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1TwadqZNm6b27dvL399foaGh6tevnw4cOODSp0uXLnI4HC7Lo48+6tLn2LFjSkxMVNWqVRUaGqqnnnpKly9fvpWHAgAAKqhKntz5pk2blJycrPbt2+vy5cv6/e9/r/j4eH355ZeqVq2as9+IESM0depU53rVqlWdXxcWFioxMVHh4eH69NNPderUKQ0ZMkQ+Pj56/vnnb+nxAACAisejYSc1NdVlffHixQoNDdX27dvVqVMnZ3vVqlUVHh5e6hhr1qzRl19+qXXr1iksLExt2rTRs88+q/Hjx2vy5Mny9fW9qccAAAAqtgp1zU5OTo4kKTg42KX97bffVq1atfSzn/1MEyZM0Hfffefclp6erpYtWyosLMzZlpCQoNzcXO3du7fU/eTn5ys3N9dlAQAAdvLozM4PFRUVaezYserQoYN+9rOfOdt/+ctfKioqShEREdq1a5fGjx+vAwcO6G9/+5skKSMjwyXoSHKuZ2RklLqvadOmacqUKTfpSAAAQEVSYcJOcnKy9uzZo08++cSlfeTIkc6vW7Zsqdq1a6t79+46fPiwGjZsWK59TZgwQSkpKc713NxcRUZGlq9wAABQoVWI01ijR4/WypUrtWHDBtWpU+eKfWNiYiRJhw4dkiSFh4crMzPTpU/xelnX+fj5+SkgIMBlAQAAdvJo2DHGaPTo0frggw+0fv161a9f/6qv2blzpySpdu3akqTY2Fjt3r1bWVlZzj5r165VQECAoqOjb0rdAADg9uHR01jJyclaunSpVqxYIX9/f+c1NoGBgapSpYoOHz6spUuXqlevXqpZs6Z27dqlcePGqVOnTmrVqpUkKT4+XtHR0Xr44Yc1ffp0ZWRk6JlnnlFycrL8/Pw8eXgAAKAC8OjMzoIFC5STk6MuXbqodu3azmXZsmWSJF9fX61bt07x8fFq1qyZnnzySQ0YMEAffvihcwxvb2+tXLlS3t7eio2N1UMPPaQhQ4a4PJcHAAD8dHl0ZscYc8XtkZGR2rRp01XHiYqK0scff+yusgAAgEUqxAXKAAAANwthBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKt5NOxMmzZN7du3l7+/v0JDQ9WvXz8dOHDApc/FixeVnJysmjVrqnr16howYIAyMzNd+hw7dkyJiYmqWrWqQkND9dRTT+ny5cu38lAAAEAF5dGws2nTJiUnJ+uzzz7T2rVrdenSJcXHx+v8+fPOPuPGjdOHH36o9957T5s2bdLJkyfVv39/5/bCwkIlJiaqoKBAn376qf70pz9p8eLFmjhxoicOCQAAVDAOY4zxdBHFTp8+rdDQUG3atEmdOnVSTk6OQkJCtHTpUg0cOFCStH//fjVv3lzp6em6++67tWrVKvXu3VsnT55UWFiYJGnhwoUaP368Tp8+LV9f36vuNzc3V4GBgcrJyVFAQIBbj6ne7z5y63iAbY6+kOjpEgDcpq7173eFumYnJydHkhQcHCxJ2r59uy5duqS4uDhnn2bNmqlu3bpKT0+XJKWnp6tly5bOoCNJCQkJys3N1d69e0vdT35+vnJzc10WAABgpwoTdoqKijR27Fh16NBBP/vZzyRJGRkZ8vX1VVBQkEvfsLAwZWRkOPv8MOgUby/eVppp06YpMDDQuURGRrr5aAAAQEVRYcJOcnKy9uzZo3ffffem72vChAnKyclxLsePH7/p+wQAAJ5RydMFSNLo0aO1cuVKbd68WXXq1HG2h4eHq6CgQGfPnnWZ3cnMzFR4eLizz+eff+4yXvHdWsV9fszPz09+fn5uPgoAAFAReXRmxxij0aNH64MPPtD69etVv359l+1t27aVj4+P0tLSnG0HDhzQsWPHFBsbK0mKjY3V7t27lZWV5eyzdu1aBQQEKDo6+tYcCAAAqLA8OrOTnJyspUuXasWKFfL393deYxMYGKgqVaooMDBQw4YNU0pKioKDgxUQEKAxY8YoNjZWd999tyQpPj5e0dHRevjhhzV9+nRlZGTomWeeUXJyMrM3AADAs2FnwYIFkqQuXbq4tL/55pt65JFHJEkzZ86Ul5eXBgwYoPz8fCUkJGj+/PnOvt7e3lq5cqVGjRql2NhYVatWTUlJSZo6deqtOgwAAFCBVajn7HgK
z9kBPIfn7AAor9vyOTsAAADuRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGrlCjsNGjTQmTNnSrSfPXtWDRo0uOGiAAAA3KVcYefo0aMqLCws0Z6fn6+vv/76hosCAABwl0rX0/nvf/+78+vVq1crMDDQuV5YWKi0tDTVq1fPbcUBAADcqOsKO/369ZMkORwOJSUluWzz8fFRvXr1NGPGDLcVBwAAcKOuK+wUFRVJkurXr6+tW7eqVq1aN6UoAAAAd7musFPsyJEj7q4DAADgpihX2JGktLQ0paWlKSsryznjU2zRokU3XBgAAIA7lCvsTJkyRVOnTlW7du1Uu3ZtORwOd9cFAADgFuUKOwsXLtTixYv18MMPu7seAAAAtyrXc3YKCgr0P//zP+6uBQAAwO3KFXaGDx+upUuXursWAAAAtyvXaayLFy/q9ddf17p169SqVSv5+Pi4bH/llVfcUhwAAMCNKlfY2bVrl9q0aSNJ2rNnj8s2LlYGAAAVSbnCzoYNG9xdBwAAwE1Rrmt2AAAAbhflmtnp2rXrFU9XrV+/vtwFAQAAuFO5wk7x9TrFLl26pJ07d2rPnj0lPiAUAADAk8oVdmbOnFlq++TJk5WXl3dDBQEAALiTW6/Zeeihh/hcLAAAUKG4Neykp6ercuXK7hwSAADghpTrNFb//v1d1o0xOnXqlLZt26Y//OEPbikMAADAHcoVdgIDA13Wvby81LRpU02dOlXx8fFuKQwAAMAdyhV23nzzTXfXAQAAcFOUK+wU2759u/bt2ydJatGihe688063FAUAAOAu5Qo7WVlZGjRokDZu3KigoCBJ0tmzZ9W1a1e9++67CgkJcWeNAAAA5Vauu7HGjBmjc+fOae/evcrOzlZ2drb27Nmj3NxcPf744+6uEQAAoNzKNbOTmpqqdevWqXnz5s626OhozZs3jwuUAQBAhVKumZ2ioiL5+PiUaPfx8VFRUdENFwUAAOAu5Qo73bp10xNPPKGTJ086277++muNGzdO3bt3d1txAAAAN6pcYWfu3LnKzc1VvXr11LBhQzVs2FD169dXbm6u5syZ4+4aAQAAyq1c1+xERkZqx44dWrdunfbv3y9Jat68ueLi4txaHAAAwI26rpmd9evXKzo6Wrm5uXI4HLr33ns1ZswYjRkzRu3bt1eLFi30j3/845rH27x5s/r06aOIiAg5HA4tX77cZfsjjzwih8PhsvTo0cOlT3Z2tgYPHqyAgAAFBQVp2LBhfPI6AABwuq6wM2vWLI0YMUIBAQEltgUGBurXv/61XnnllWse7/z582rdurXmzZtXZp8ePXro1KlTzuWdd95x2T548GDt3btXa9eu1cqVK7V582aNHDny2g8KAABY7bpOY33xxRd68cUXy9weHx+vl19++ZrH69mzp3r27HnFPn5+fgoPDy912759+5SamqqtW7eqXbt2kqQ5c+aoV69eevnllxUREXHNtQAAADtd18xOZmZmqbecF6tUqZJOnz59w0X90MaNGxUaGqqmTZtq1KhROnPmjHNbenq6goKCnEFHkuLi4uTl5aUtW7a4tQ4AAHB7uq6ZnTvuuEN79uxRo0aNSt2+a9cu1a5d2y2FSd+fwurfv7/q16+vw4cP6/e//7169uyp9PR0eXt7KyMjQ6GhoS6vqVSpkoKDg5WRkVHmuPn5+crPz3eu5+bmuq1mAABQsVxX2OnVq5f+8Ic/qEePHqpcubLLtgsXLmjSpEnq3bu324obNGiQ8+uWLVuqVatWatiwoTZu3HhDz/OZNm2apkyZ4o4SAQBABXddp7GeeeYZZWdnq0mTJpo+fbpWrFihFStW6MUXX1TTpk2VnZ2tp59++mbVqgYNGqhWrVo6dOiQJCk8PFxZWVkufS5fvqzs7Owyr/ORpAkTJignJ8e5HD9+/KbVDAAAPOu6ZnbCwsL06aefatSoUZowYYKMMZIkh8OhhIQEzZs3T2FhYTelUEk6ceKEzpw54zxVFhsbq7Nnz2r79u1q27atpO9vjy8qKlJMTEyZ4/j5+cnPz++m1QkAACqO636oYFRUlD7++GN9++23OnTokIwxaty4sWrUqHHdO8/Ly3PO0kjSkSNHtHPnTgUHBys4OFhTpkzRgAEDFB4ersOHD+u3v/2tGjVqpISEBEnfP8iwR48eGjFihBYuXKhLly5p9OjRGjRoEHdiAQAASeV8grIk1ahRQ+3bt7+hnW/btk1du3Z1rqekpEiSkpKStGDBAu3atUt/+tOfdPbsWUVERCg+Pl7PPvusy6zM22+/rdGjR6t79+7y8vLSgAEDNHv27BuqCwAA2KPcYccdunTp4jwVVprVq1dfdYzg4GAtXbrUnWUBAACLlOuDQAEAAG4XhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACs5tGws3nzZvXp00cRERFyOBxavny5y3ZjjCZOnKjatWurSpUqiouL08GDB136ZGdna/DgwQoICFBQUJCGDRumvLy8W3gUAACgIvNo2Dl//rxat26tefPmlbp9+vTpmj17thYuXKgtW7aoWrVqSkhI0MWLF519Bg8erL1792rt2rVauXKlNm/erJEjR96qQwAAABVcJU/uvGfPnurZs2ep24wxmjVrlp555hn17dtXkvTWW28pLCxMy5cv16BBg7Rv3z6lpqZq69atateunSRpzpw56tWrl15++WVFRETcsmMBAAAVU4W9ZufIkSPKyMhQXFycsy0wMFAxMTFKT0+XJKWnpysoKMgZdCQpLi5OXl5e2rJlS5lj5+fnKzc312UBAAB2qrBhJyMjQ5IUFhbm0h4WFubclpGRodDQUJftlSpVUnBwsLNPaaZNm6bAwEDnEhkZ6ebqAQBARVFhw87NNGHCBOXk5DiX48ePe7okAABwk1TYsBMeHi5JyszMdGnPzMx0bgsPD1dWVpbL9suXLys7O9vZpzR+fn4KCAhwWQAAgJ0qbNipX7++wsPDlZaW5mzLzc3Vli1bFBsbK0mKjY3V2bNntX37dmef9evXq6ioSDExMbe8ZgAAUPF49G6svLw8HTp0yLl+5MgR7dy5U8HBwapbt67Gjh2rP/7xj2rcuLHq16+vP/zhD4qIiFC/fv0kSc2bN1ePHj00YsQILVy4UJcuXdLo0aM1aNAg7sQCAACSPBx2tm3bpq5duzrXU1J
SJElJSUlavHixfvvb3+r8+fMaOXKkzp49q3vuuUepqamqXLmy8zVvv/22Ro8ere7du8vLy0sDBgzQ7Nmzb/mxAACAislhjDGeLsLTcnNzFRgYqJycHLdfv1Pvdx+5dTzANkdfSPR0CQBuU9f697vCXrMDAADgDoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALCaRz8IFABswefgAWXz9GfgMbMDAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxWocPO5MmT5XA4XJZmzZo5t1+8eFHJycmqWbOmqlevrgEDBigzM9ODFQMAgIqmQocdSWrRooVOnTrlXD755BPntnHjxunDDz/Ue++9p02bNunkyZPq37+/B6sFAAAVTSVPF3A1lSpVUnh4eIn2nJwcvfHGG1q6dKm6desmSXrzzTfVvHlzffbZZ7r77rtvdakAAKACqvAzOwcPHlRERIQaNGigwYMH69ixY5Kk7du369KlS4qLi3P2bdasmerWrav09PQrjpmfn6/c3FyXBQAA2KlCh52YmBgtXrxYqampWrBggY4cOaKOHTvq3LlzysjIkK+vr4KCglxeExYWpoyMjCuOO23aNAUGBjqXyMjIm3gUAADAkyr0aayePXs6v27VqpViYmIUFRWlv/zlL6pSpUq5x50wYYJSUlKc67m5uQQeAAAsVaFndn4sKChITZo00aFDhxQeHq6CggKdPXvWpU9mZmap1/j8kJ+fnwICAlwWAABgp9sq7OTl5enw4cOqXbu22rZtKx8fH6WlpTm3HzhwQMeOHVNsbKwHqwQAABVJhT6N9Zvf/EZ9+vRRVFSUTp48qUmTJsnb21sPPvigAgMDNWzYMKWkpCg4OFgBAQEaM2aMYmNjuRMLAAA4Veiwc+LECT344IM6c+aMQkJCdM899+izzz5TSEiIJGnmzJny8vLSgAEDlJ+fr4SEBM2fP9/DVQMAgIqkQoedd99994rbK1eurHnz5mnevHm3qCIAAHC7ua2u2QEAALhehB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYjbADAACsRtgBAABWI+wAAACrEXYAAIDVCDsAAMBqhB0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArEbYAQAAViPsAAAAqxF2AACA1Qg7AADAaoQdAABgNcIOAACwGmEHAABYzZqwM2/ePNWrV0+VK1dWTEyMPv/8c0+XBAAAKgArws6yZcuUkpKiSZMmaceOHWrdurUSEhKUlZXl6dIAAICHWRF2XnnlFY0YMUJDhw5VdHS0Fi5cqKpVq2rRokWeLg0AAHjYbR92CgoKtH37dsXFxTnbvLy8FBcXp/T0dA9WBgAAKoJKni7gRn3zzTcqLCxUWFiYS3tYWJj2799f6mvy8/OVn5/vXM/JyZEk5ebmur2+ovzv3D4mYJOb8XvnCfyuA2W7Wb/nxeMaY67Y77YPO+Uxbdo0TZkypUR7ZGSkB6oBftoCZ3m6AgA3283+PT937pwCAwPL3H7bh51atWrJ29tbmZmZLu2ZmZkKDw8v9TUTJkxQSkqKc72oqEjZ2dmqWbOmHA7HTa0XnpObm6vIyEgdP35cAQEBni4HwE3C7/pPhzFG586dU0RExBX73fZhx9fXV23btlVaWpr69esn6fvwkpaWptGjR5f6Gj8/P/n5+bm0BQUF3eRKUVEEBATwDyDwE8Dv+k/DlWZ0it32YUeSUlJSlJSUpHbt2umuu+7SrFmzdP78eQ0dOtTTpQEAAA+zIuw88MADOn36tCZOnKiMjAy1adNGqampJS5aBgAAPz1WhB1JGj16dJmnrQDp+9OXkyZNKnEKE4Bd+F3HjznM1e7XAgAAuI3d9g8VBAAAuBLCDgAAsBphBwAAWI2wAwAArEbYwU/GvHnzVK9ePVWuXFkxMTH6/PPPPV0SADfavHmz+vTpo4iICDkcDi1fvtzTJaGCIOzgJ2HZsmVKSUnRpEmTtGPHDrVu3VoJCQnKysrydGkA3OT8+fNq3bq15s2b5+lSUMFw6zl+EmJiYtS+fXvNnTtX0vcfKRIZGakxY8bod7/7nYerA+BuDodDH3zwgfNjhPDTxswOrFdQUKDt27crLi7O2ebl5aW4uDilp6d7sDIAwK1A2IH1vvnmGxUWFpb4+JCwsDBlZGR4qCoAwK1C2AEAAFYj7MB6tWrVkre3tzIzM13aMzMzFR4e7qGqAAC3CmEH1vP19VXbtm2VlpbmbCsqKlJaWppiY2M9WBkA4Faw5lPPgStJSUlRUlKS2rVrp7vuukuzZs3S+fPnNXToUE+XBsBN8vLydOjQIef6kSNHtHPnTgUHB6tu3boerAyexq3n+MmYO3euXnrpJWVkZKhNmzaaPXu2YmJiPF0WADfZuHGjunbtWqI9KSlJixcvvvUFocIg7AAAAKtxzQ4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDoBb6vjx4/rVr36liIgI+fr6KioqSk888YTOnDlzzWMcPXpUDodDO3fuvHmFArAGYQfALfPVV1+pXbt2OnjwoN555x0dOnRICxcudH4oa3Z2tqdLBGAhwg6AWyY5OVm+vr5as2aNOnfurLp166pnz55at26dvv76az399NOSJIfDoeXLl7u8NigoyPn5RvXr15ck3XnnnXI4HOrSpYuz36JFi9SiRQv5+fmpdu3aGj16tHPbsWPH1LdvX1WvXl0BAQG6//77lZmZ6dw+efJktWnTRosWLVLdunVVvXp1PfbYYyosLNT06dMVHh6u0NBQPffccy61nT17VsOHD1dISIgCAgLUrVs3ffHFF2585wDcCMIOgFsiOztbq1ev1mOPPaYqVaq4bAsPD9fgwYO1bNkyXcvH9X3++eeSpHXr1unUqVP629/+JklasGCBkpOTNXLkSO3evVt///vf1ahRI0lSUVGR+vbtq+zsbG3atElr167VV199pQceeMBl7MOHD2vVqlVKTU3VO++8ozfeeEOJiYk6ceKENm3apBdffFHPPPOMtmzZ4nzN//7v/yorK0urVq3S9u3b9fOf/1zdu3dnpgqoICp5ugAAPw0HDx6UMUbNmzcvdXvz5s
317bff6vTp01cdKyQkRJJUs2ZNhYeHO9v/+Mc/6sknn9QTTzzhbGvfvr0kKS0tTbt379aRI0cUGRkpSXrrrbfUokULbd261dmvqKhIixYtkr+/v6Kjo9W1a1cdOHBAH3/8sby8vNS0aVO9+OKL2rBhg2JiYvTJJ5/o888/V1ZWlvz8/CRJL7/8spYvX673339fI0eOLMe7BcCdCDsAbqlrmbkpj6ysLJ08eVLdu3cvdfu+ffsUGRnpDDqSFB0draCgIO3bt88ZdurVqyd/f39nn7CwMHl7e8vLy8ulLSsrS5L0xRdfKC8vTzVr1nTZ34ULF3T48GG3HR+A8iPsALglGjVqJIfDoX379ukXv/hFie379u1TjRo1FBISIofDUSIUXbp06Yrj//jUWHn5+Pi4rDscjlLbioqKJEl5eXmqXbu2Nm7cWGKsoKAgt9QE4MZwzQ6AW6JmzZq69957NX/+fF24cMFlW0ZGht5++2098MADcjgcCgkJ0alTp5zbDx48qO+++8657uvrK0kqLCx0tvn7+6tevXpKS0srdf/NmzfX8ePHdfz4cWfbl19+qbNnzyo6Orrcx/Xzn/9cGRkZqlSpkho1auSy1KpVq9zjAnAfwg6AW2bu3LnKz89XQkKCNm/erOPHjys1NVX33nuv7rjjDuddTt26ddPcuXP1r3/9S9u2bdOjjz7qMrsSGhqqKlWqKDU1VZmZmcrJyZH0/d1UM2bM0OzZs3Xw4EHt2LFDc+bMkSTFxcWpZcuWGjx4sHbs2KHPP/9cQ4YMUefOndWuXbtyH1NcXJxiY2PVr18/rVmzRkePHtWnn36qp59+Wtu2bbuBdwuAuxB2ANwyjRs31rZt29SgQQPdf//9atiwoUaOHKmuXbsqPT1dwcHBkqQZM2YoMjJSHTt21C9/+Uv95je/UdWqVZ3jVKpUSbNnz9Zrr72miIgI9e3bV5KUlJSkWbNmaf78+WrRooV69+6tgwcPSvr+1NOKFStUo0YNderUSXFxcWrQoIGWLVt2Q8fkcDj08ccfq1OnTho6dKiaNGmiQYMG6T//+Y/CwsJuaGwA7uEwN+tqQQAAgAqAmR0AAGA1wg4AALAaYQcAAFiNsAMAAKxG2AEAAFYj7AAAAKsRdgAAgNUIOwAAwGqEHQAAYDXCDgAAsBphBwAAWI2wAwAArPb/ABnlDvMkrDnlAAAAAElFTkSuQmCC",
+      "text/plain": [
+       "<Figure size 640x480 with 1 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# checking the count of target values after data processing and feature selection\n",
+    "outcomes = df_corr['PCR'].value_counts()\n",
+    "outcome_labels =['0','1']\n",
+    "outcome_values = outcomes.values\n",
+    "plt.bar(outcome_labels, outcome_values)\n",
+    "plt.xlabel('Outcome')\n",
+    "plt.ylabel('Count')\n",
+    "plt.title('Distribution of Outcomes in PCR')\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "id": "8f4b2fc4-664b-4c47-af59-e5646acb9aed",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# dataset using correlated feature selection\n",
+    "X_corr = df_corr.drop([\"PCR\",\"RelapseFreeSurvival (outcome)\"],axis=1)\n",
+    "y_corr = df_corr[\"PCR\"]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "id": "683c242f-c3ba-4268-8318-eb01d4f1d9aa",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "<div>\n",
+       "<style scoped>\n",
+       "    .dataframe tbody tr th:only-of-type {\n",
+       "        vertical-align: middle;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe tbody tr th {\n",
+       "        vertical-align: top;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe thead th {\n",
+       "        text-align: right;\n",
+       "    }\n",
+       "</style>\n",
+       "<table border=\"1\" class=\"dataframe\">\n",
+       "  <thead>\n",
+       "    <tr style=\"text-align: right;\">\n",
+       "      <th></th>\n",
+       "      <th>Age</th>\n",
+       "      <th>ER</th>\n",
+       "      <th>PgR</th>\n",
+       "      <th>HER2</th>\n",
+       "      <th>TrippleNegative</th>\n",
+       "      <th>ChemoGrade</th>\n",
+       "      <th>HistologyType</th>\n",
+       "      <th>LNStatus</th>\n",
+       "      <th>TumourStage</th>\n",
+       "      <th>Gene</th>\n",
+       "      <th>...</th>\n",
+       "      <th>original_gldm_SmallDependenceEmphasis</th>\n",
+       "      <th>original_glrlm_LongRunLowGrayLevelEmphasis</th>\n",
+       "      <th>original_glrlm_ShortRunHighGrayLevelEmphasis</th>\n",
+       "      <th>original_glszm_GrayLevelNonUniformity</th>\n",
+       "      <th>original_glszm_GrayLevelNonUniformityNormalized</th>\n",
+       "      <th>original_glszm_LargeAreaEmphasis</th>\n",
+       "      <th>original_glszm_SizeZoneNonUniformityNormalized</th>\n",
+       "      <th>original_glszm_SmallAreaEmphasis</th>\n",
+       "      <th>original_ngtdm_Busyness</th>\n",
+       "      <th>original_ngtdm_Strength</th>\n",
+       "    </tr>\n",
+       "  </thead>\n",
+       "  <tbody>\n",
+       "    <tr>\n",
+       "      <th>0</th>\n",
+       "      <td>41.0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>3.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>2</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.005563</td>\n",
+       "      <td>10.779989</td>\n",
+       "      <td>0.789987</td>\n",
+       "      <td>27.545455</td>\n",
+       "      <td>0.834711</td>\n",
+       "      <td>4067578.818</td>\n",
+       "      <td>0.180900</td>\n",
+       "      <td>0.403535</td>\n",
+       "      <td>473.464852</td>\n",
+       "      <td>0.000758</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>1</th>\n",
+       "      <td>39.0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>3.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>2</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.006518</td>\n",
+       "      <td>27.650685</td>\n",
+       "      <td>0.442279</td>\n",
+       "      <td>78.025000</td>\n",
+       "      <td>0.975313</td>\n",
+       "      <td>2403756.075</td>\n",
+       "      <td>0.198125</td>\n",
+       "      <td>0.444391</td>\n",
+       "      <td>59.459710</td>\n",
+       "      <td>0.003685</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>2</th>\n",
+       "      <td>31.0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>2.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>2</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.007181</td>\n",
+       "      <td>25.338218</td>\n",
+       "      <td>0.503046</td>\n",
+       "      <td>72.027027</td>\n",
+       "      <td>0.973338</td>\n",
+       "      <td>1561963.432</td>\n",
+       "      <td>0.275749</td>\n",
+       "      <td>0.534549</td>\n",
+       "      <td>33.935384</td>\n",
+       "      <td>0.006447</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>3</th>\n",
+       "      <td>35.0</td>\n",
+       "      <td>0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>3.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>3</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.004902</td>\n",
+       "      <td>31.461354</td>\n",
+       "      <td>0.399896</td>\n",
+       "      <td>99.019802</td>\n",
+       "      <td>0.980394</td>\n",
+       "      <td>7007670.723</td>\n",
+       "      <td>0.253014</td>\n",
+       "      <td>0.506185</td>\n",
+       "      <td>46.859265</td>\n",
+       "      <td>0.004543</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>4</th>\n",
+       "      <td>61.0</td>\n",
+       "      <td>1</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>2.0</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>0.0</td>\n",
+       "      <td>2</td>\n",
+       "      <td>1.0</td>\n",
+       "      <td>...</td>\n",
+       "      <td>0.007222</td>\n",
+       "      <td>27.916261</td>\n",
+       "      <td>0.473278</td>\n",
+       "      <td>56.034483</td>\n",
+       "      <td>0.966112</td>\n",
+       "      <td>1288913.690</td>\n",
+       "      <td>0.216409</td>\n",
+       "      <td>0.462282</td>\n",
+       "      <td>39.621023</td>\n",
+       "      <td>0.005626</td>\n",
+       "    </tr>\n",
+       "  </tbody>\n",
+       "</table>\n",
+       "<p>5 rows × 36 columns</p>\n",
+       "</div>"
+      ],
+      "text/plain": [
+       "    Age  ER  PgR  HER2  TrippleNegative  ChemoGrade  HistologyType  LNStatus  \\\n",
+       "0  41.0   0  0.0   0.0              1.0         3.0            1.0       1.0   \n",
+       "1  39.0   1  1.0   0.0              0.0         3.0            1.0       1.0   \n",
+       "2  31.0   0  0.0   0.0              1.0         2.0            1.0       0.0   \n",
+       "3  35.0   0  0.0   0.0              1.0         3.0            1.0       1.0   \n",
+       "4  61.0   1  0.0   0.0              0.0         2.0            1.0       0.0   \n",
+       "\n",
+       "   TumourStage  Gene  ...  original_gldm_SmallDependenceEmphasis  \\\n",
+       "0            2   1.0  ...                               0.005563   \n",
+       "1            2   0.0  ...                               0.006518   \n",
+       "2            2   1.0  ...                               0.007181   \n",
+       "3            3   1.0  ...                               0.004902   \n",
+       "4            2   1.0  ...                               0.007222   \n",
+       "\n",
+       "   original_glrlm_LongRunLowGrayLevelEmphasis  \\\n",
+       "0                                   10.779989   \n",
+       "1                                   27.650685   \n",
+       "2                                   25.338218   \n",
+       "3                                   31.461354   \n",
+       "4                                   27.916261   \n",
+       "\n",
+       "   original_glrlm_ShortRunHighGrayLevelEmphasis  \\\n",
+       "0                                      0.789987   \n",
+       "1                                      0.442279   \n",
+       "2                                      0.503046   \n",
+       "3                                      0.399896   \n",
+       "4                                      0.473278   \n",
+       "\n",
+       "   original_glszm_GrayLevelNonUniformity  \\\n",
+       "0                              27.545455   \n",
+       "1                              78.025000   \n",
+       "2                              72.027027   \n",
+       "3                              99.019802   \n",
+       "4                              56.034483   \n",
+       "\n",
+       "   original_glszm_GrayLevelNonUniformityNormalized  \\\n",
+       "0                                         0.834711   \n",
+       "1                                         0.975313   \n",
+       "2                                         0.973338   \n",
+       "3                                         0.980394   \n",
+       "4                                         0.966112   \n",
+       "\n",
+       "   original_glszm_LargeAreaEmphasis  \\\n",
+       "0                       4067578.818   \n",
+       "1                       2403756.075   \n",
+       "2                       1561963.432   \n",
+       "3                       7007670.723   \n",
+       "4                       1288913.690   \n",
+       "\n",
+       "   original_glszm_SizeZoneNonUniformityNormalized  \\\n",
+       "0                                        0.180900   \n",
+       "1                                        0.198125   \n",
+       "2                                        0.275749   \n",
+       "3                                        0.253014   \n",
+       "4                                        0.216409   \n",
+       "\n",
+       "   original_glszm_SmallAreaEmphasis  original_ngtdm_Busyness  \\\n",
+       "0                          0.403535               473.464852   \n",
+       "1                          0.444391                59.459710   \n",
+       "2                          0.534549                33.935384   \n",
+       "3                          0.506185                46.859265   \n",
+       "4                          0.462282                39.621023   \n",
+       "\n",
+       "   original_ngtdm_Strength  \n",
+       "0                 0.000758  \n",
+       "1                 0.003685  \n",
+       "2                 0.006447  \n",
+       "3                 0.004543  \n",
+       "4                 0.005626  \n",
+       "\n",
+       "[5 rows x 36 columns]"
+      ]
+     },
+     "execution_count": 14,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "X_corr.head()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "id": "b361c4c4-54c2-4de3-b5d3-c9a276019f36",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# saving the feature in text file for future use in model training and prediction\n",
+    "with open('30cor.txt', 'w') as f:\n",
+    "    for feature in X_corr.columns:\n",
+    "        f.write(feature + '\\n')"
+   ]
+  },
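+  {
+   "cell_type": "markdown",
+   "id": "7f2c1b9a-3d4e-4a5b-8c6d-9e0f1a2b3c4d",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of how the saved feature list could be read back later for model training or prediction; the file name `30cor.txt` comes from the cell above, while `new_df` below is only a hypothetical dataframe with the same columns."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2e5d8c7b-6a4f-4b3e-9d1c-0a9b8c7d6e5f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: load the saved feature names and reuse them to subset a dataframe\n",
+    "with open('30cor.txt') as f:\n",
+    "    saved_features = [line.strip() for line in f if line.strip()]\n",
+    "\n",
+    "print(len(saved_features), 'feature names loaded')\n",
+    "# e.g. X_new = new_df[saved_features]  # new_df is a hypothetical dataframe with the same columns"
+   ]
+  },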
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "id": "766f1c86-ea45-4a3d-be13-c1335f27268b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from sklearn.model_selection import train_test_split\n",
+    "from sklearn.preprocessing import StandardScaler\n",
+    "# Split the correlated feature into training and testing sets ( correlated features)\n",
+    "X_train1, X_test1, y_train1, y_test1 = train_test_split(X_corr, y_corr, test_size=0.2,shuffle=True,random_state=42)\n",
+    "# Standardize the features\n",
+    "scaler = StandardScaler()\n",
+    "X_train_sc1 = scaler.fit_transform(X_train1)\n",
+    "X_test_sc1 = scaler.fit_transform(X_test1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "id": "790b0a2c-e36e-4f70-ac0f-a2bc8a2f30ce",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "((320, 36),)"
+      ]
+     },
+     "execution_count": 17,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "X_train1.shape, "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "id": "08017321-a5fe-4ae4-82c8-8f65b5b9191c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# dataset using correlated feature selection for forward feature selection\n",
+    "X_for = df_corr.drop([\"PCR\",\"RelapseFreeSurvival (outcome)\"],axis=1)\n",
+    "y_for = df_corr[\"PCR\"]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "id": "9e854be0-bf8e-4ad2-9d12-85584abe4f16",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Selected features: Index(['ER', 'PgR', 'HER2', 'TrippleNegative', 'ChemoGrade', 'HistologyType',\n",
+      "       'LNStatus', 'Gene', 'original_shape_Elongation',\n",
+      "       'original_shape_MeshVolume', 'original_firstorder_InterquartileRange',\n",
+      "       'original_firstorder_Kurtosis', 'original_glcm_Imc1',\n",
+      "       'original_gldm_SmallDependenceEmphasis',\n",
+      "       'original_glrlm_LongRunLowGrayLevelEmphasis',\n",
+      "       'original_glszm_GrayLevelNonUniformity',\n",
+      "       'original_glszm_GrayLevelNonUniformityNormalized',\n",
+      "       'original_glszm_SizeZoneNonUniformityNormalized'],\n",
+      "      dtype='object')\n"
+     ]
+    }
+   ],
+   "source": [
+    "import pandas as pd\n",
+    "from sklearn.ensemble import RandomForestClassifier\n",
+    "from sklearn.feature_selection import SequentialFeatureSelector\n",
+    "\n",
+    "# Handle missing values in y_for\n",
+    "y_for = y_for.dropna()  # Alternatively, use fillna()\n",
+    "\n",
+    "# Ensure X_for matches the cleaned y_for\n",
+    "X_for = X_for.loc[y_for.index]\n",
+    "\n",
+    "# Sequential Feature Selector\n",
+    "selector = SequentialFeatureSelector(\n",
+    "    estimator=RandomForestClassifier(n_estimators=100),\n",
+    "    n_features_to_select='auto'\n",
+    ")\n",
+    "\n",
+    "# Fit the selector to the data\n",
+    "selector.fit(X_for, y_for)\n",
+    "\n",
+    "# Get the selected features\n",
+    "selected_features1 = X_for.columns[selector.get_support()]\n",
+    "print(\"Selected features:\", selected_features1)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "id": "662c1167-d3b1-4f56-ab3d-de46dfaf915e",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "<class 'pandas.core.frame.DataFrame'>\n",
+      "RangeIndex: 400 entries, 0 to 399\n",
+      "Data columns (total 18 columns):\n",
+      " #   Column                                           Non-Null Count  Dtype  \n",
+      "---  ------                                           --------------  -----  \n",
+      " 0   ER                                               400 non-null    int64  \n",
+      " 1   PgR                                              400 non-null    float64\n",
+      " 2   HER2                                             400 non-null    float64\n",
+      " 3   TrippleNegative                                  400 non-null    float64\n",
+      " 4   ChemoGrade                                       400 non-null    float64\n",
+      " 5   HistologyType                                    400 non-null    float64\n",
+      " 6   LNStatus                                         400 non-null    float64\n",
+      " 7   Gene                                             400 non-null    float64\n",
+      " 8   original_shape_Elongation                        400 non-null    float64\n",
+      " 9   original_shape_MeshVolume                        400 non-null    float64\n",
+      " 10  original_firstorder_InterquartileRange           400 non-null    float64\n",
+      " 11  original_firstorder_Kurtosis                     400 non-null    float64\n",
+      " 12  original_glcm_Imc1                               400 non-null    float64\n",
+      " 13  original_gldm_SmallDependenceEmphasis            400 non-null    float64\n",
+      " 14  original_glrlm_LongRunLowGrayLevelEmphasis       400 non-null    float64\n",
+      " 15  original_glszm_GrayLevelNonUniformity            400 non-null    float64\n",
+      " 16  original_glszm_GrayLevelNonUniformityNormalized  400 non-null    float64\n",
+      " 17  original_glszm_SizeZoneNonUniformityNormalized   400 non-null    float64\n",
+      "dtypes: float64(17), int64(1)\n",
+      "memory usage: 56.4 KB\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Create a copy of the dataset with the selected features\n",
+    "X_selected_for1 = df_corr[selected_features1]\n",
+    "X_selected_for1.info()\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "id": "fd51f64a-7f57-493b-8485-41f3e313a61f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# X and y for forward feature selection\n",
+    "df3 = df.copy()\n",
+    "X2 = df3[selected_features1]\n",
+    "y2 = df3[\"PCR\"]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "id": "c6f0e344-3a90-4418-8f20-2646f5e66f13",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(400, 18)"
+      ]
+     },
+     "execution_count": 22,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "X2.shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "id": "d81d0dcb-5b32-4a06-93aa-9c8d97a7d575",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Splitting the data into training and testing sets ( forward feature selection)\n",
+    "X_train2, X_test2, y_train2, y_test2 = train_test_split(X2, y2, test_size=0.2,shuffle=True,random_state=42)\n",
+    "# Standardize the features\n",
+    "scaler = StandardScaler()\n",
+    "X_train_sc2 = scaler.fit_transform(X_train2)\n",
+    "X_test_sc2 = scaler.fit_transform(X_test2)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "ed7db500-58aa-4326-9dd4-d8c4589bac06",
+   "metadata": {},
+   "source": [
+    "## Models"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "97247500-54a1-4046-bc15-4d8c2d511e97",
+   "metadata": {},
+   "source": [
+    "### Linear Regression "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 42,
+   "id": "b17e1da1-7a02-493f-a2a1-4907be57276b",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Initial Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.82      0.87      0.85        94\n",
+      "         1.0       0.40      0.31      0.35        26\n",
+      "\n",
+      "    accuracy                           0.75       120\n",
+      "   macro avg       0.61      0.59      0.60       120\n",
+      "weighted avg       0.73      0.75      0.74       120\n",
+      "\n",
+      "\n",
+      "Initial Balanced Accuracy Score:\n",
+      "0.5900163666121113\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Import required libraries\n",
+    "from sklearn.linear_model import LogisticRegression\n",
+    "from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, confusion_matrix, balanced_accuracy_score\n",
+    "\n",
+    "# Splitting the data into training and testing sets\n",
+    "X_train, X_test, y_train, y_test = train_test_split(X_corr, y_corr, test_size=0.3, shuffle=True, random_state=64)\n",
+    "\n",
+    "# Standardizing the features\n",
+    "scaler = StandardScaler()\n",
+    "X_train_norm = scaler.fit_transform(X_train)\n",
+    "X_test_norm = scaler.transform(X_test)\n",
+    "\n",
+    "# Initial training of Logistic Regression model (without SMOTE)\n",
+    "log_reg = LogisticRegression(random_state=42)\n",
+    "log_reg.fit(X_train_norm, y_train)\n",
+    "\n",
+    "# Predicting class labels for testing data (initial prediction)\n",
+    "y_pred = log_reg.predict(X_test_norm)\n",
+    "\n",
+    "# Calculating classification report and balanced accuracy score (initial performance)\n",
+    "report = classification_report(y_test, y_pred)\n",
+    "balanced_accuracy = balanced_accuracy_score(y_test, y_pred)\n",
+    "\n",
+    "print('Initial Classification Report:')\n",
+    "print(report)\n",
+    "\n",
+    "print('\\nInitial Balanced Accuracy Score:')\n",
+    "print(balanced_accuracy)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "315e1a8f-b4d6-49f1-b7a3-704178f5074f",
+   "metadata": {},
+   "source": [
+    "### Random forest"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d5a51ba9-666c-4dfd-b370-a9f84555a35f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from sklearn.ensemble import RandomForestClassifier\n",
+    "from sklearn.model_selection import GridSearchCV\n",
+    "from sklearn.metrics import balanced_accuracy_score, classification_report\n",
+    "from sklearn.impute import SimpleImputer\n",
+    "import pandas as pd\n",
+    "\n",
+    "# Handling missing values in features\n",
+    "imputer = SimpleImputer(strategy='mean')\n",
+    "X_train_sc1 = imputer.fit_transform(X_train_sc1)\n",
+    "X_train_sc2 = imputer.fit_transform(X_train_sc2)\n",
+    "\n",
+    "# Handling missing values in targets\n",
+    "y_train1 = pd.Series(y_train1).fillna(pd.Series(y_train1).mode()[0])\n",
+    "y_train2 = pd.Series(y_train2).fillna(pd.Series(y_train2).mode()[0])\n",
+    "\n",
+    "# Defining hyperparameter grid for RandomForestClassifier\n",
+    "param_grid = {\n",
+    "    'n_estimators': [100, 200, 300],\n",
+    "    'max_depth': [2, 5, 10],\n",
+    "    'min_samples_split': [2, 5, 10],\n",
+    "    'min_samples_leaf': [1, 2, 4],\n",
+    "    'random_state': [42]\n",
+    "}\n",
+    "\n",
+    "# Instantiate RandomForestClassifier\n",
+    "model1 = RandomForestClassifier()\n",
+    "\n",
+    "# Creating GridSearchCV objects for dataset1 and dataset2\n",
+    "grid_search1 = GridSearchCV(estimator=model1, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "grid_search2 = GridSearchCV(estimator=model1, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "\n",
+    "# Fitting the GridSearchCV objects for dataset1 and dataset2\n",
+    "grid_search1.fit(X_train_sc1, y_train1)\n",
+    "grid_search2.fit(X_train_sc2, y_train2)\n",
+    "\n",
+    "# Getting the best parameters from GridSearchCV for dataset1 and dataset2\n",
+    "best_params1 = grid_search1.best_params_\n",
+    "best_params2 = grid_search2.best_params_\n",
+    "\n",
+    "# Printing the best parameters for each dataset\n",
+    "print(\"Best Parameters for Dataset 1:\", best_params1)\n",
+    "print(\"Best Parameters for Dataset 2:\", best_params2)\n",
+    "\n",
+    "# Getting the balanced accuracy for the best model from GridSearchCV for each dataset\n",
+    "best_model1 = RandomForestClassifier(**best_params1)\n",
+    "best_model1.fit(X_train_sc1, y_train1)\n",
+    "y_pred1 = best_model1.predict(X_test_sc1)\n",
+    "balanced_accuracy1 = balanced_accuracy_score(y_test1, y_pred1)\n",
+    "\n",
+    "best_model2 = RandomForestClassifier(**best_params2)\n",
+    "best_model2.fit(X_train_sc2, y_train2)\n",
+    "y_pred2 = best_model2.predict(X_test_sc2)\n",
+    "balanced_accuracy2 = balanced_accuracy_score(y_test2, y_pred2)\n",
+    "\n",
+    "# Printing the balanced accuracy for each dataset\n",
+    "print(\"Dataset 1 Balanced Accuracy:\", balanced_accuracy1)\n",
+    "print(\"Dataset 2 Balanced Accuracy:\", balanced_accuracy2)\n",
+    "\n",
+    "# Getting the classification report for the best model from GridSearchCV for each dataset\n",
+    "print(\"Dataset 1 Classification Report:\\n\", classification_report(y_test1, y_pred1))\n",
+    "print(\"Dataset 2 Classification Report:\\n\", classification_report(y_test2, y_pred2))\n"
+   ]
+  },
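+  {
+   "cell_type": "markdown",
+   "id": "rf-pipeline-sketch-md",
+   "metadata": {},
+   "source": [
+    "An optional sketch, not the search run above: the imputer and classifier can be wrapped in a `Pipeline` so the imputer is re-fit on the training portion of every CV split instead of once on the full training set. The `rf__` parameter names and the reduced grid below are illustrative only; it reuses the `X_train_sc1`/`y_train1` variables defined above."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "rf-pipeline-sketch-code",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch only: imputation inside a Pipeline so it is re-fit within each CV split\n",
+    "from sklearn.pipeline import Pipeline\n",
+    "from sklearn.impute import SimpleImputer\n",
+    "from sklearn.ensemble import RandomForestClassifier\n",
+    "from sklearn.model_selection import GridSearchCV\n",
+    "\n",
+    "pipe = Pipeline([\n",
+    "    ('impute', SimpleImputer(strategy='mean')),\n",
+    "    ('rf', RandomForestClassifier(random_state=42))\n",
+    "])\n",
+    "\n",
+    "# Illustrative (reduced) grid; step parameters use the 'rf__' prefix\n",
+    "pipe_grid = {\n",
+    "    'rf__n_estimators': [100, 200, 300],\n",
+    "    'rf__max_depth': [2, 5, 10]\n",
+    "}\n",
+    "\n",
+    "pipe_search = GridSearchCV(pipe, param_grid=pipe_grid, scoring='balanced_accuracy', cv=5)\n",
+    "# pipe_search.fit(X_train_sc1, y_train1)  # assumes the training variables defined above\n"
+   ]
+  },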
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "id": "da95f0ab-9167-4bdf-a459-27a1a5caa8c9",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l1)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_logistic.py:1197: UserWarning: l1_ratio parameter is only used when penalty is 'elasticnet'. Got (penalty=l2)\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n",
+      "C:\\Users\\LLR User\\miniconda3\\envs\\MLE\\Lib\\site-packages\\sklearn\\linear_model\\_sag.py:349: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge\n",
+      "  warnings.warn(\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Best Parameters: {'C': 1, 'l1_ratio': 0.9, 'penalty': 'elasticnet', 'solver': 'saga'}\n",
+      "Balanced Accuracy: 0.671875\n",
+      "Classification Report:\n",
+      "               precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.87      0.91      0.89        64\n",
+      "         1.0       0.54      0.44      0.48        16\n",
+      "\n",
+      "    accuracy                           0.81        80\n",
+      "   macro avg       0.70      0.67      0.68        80\n",
+      "weighted avg       0.80      0.81      0.80        80\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "from sklearn.linear_model import LogisticRegression\n",
+    "from sklearn.model_selection import GridSearchCV\n",
+    "from sklearn.metrics import balanced_accuracy_score, classification_report\n",
+    "\n",
+    "# Simplified hyperparameter grid\n",
+    "param_grid = {\n",
+    "    'penalty': ['l1', 'l2', 'elasticnet'],  # Common penalties\n",
+    "    'solver': ['saga'],  # 'saga' supports all penalties including 'elasticnet'\n",
+    "    'C': [0.01, 0.1, 1, 10],  # Regularization strength\n",
+    "    'l1_ratio': [0.1, 0.5, 0.9]  # Required only for 'elasticnet'\n",
+    "}\n",
+    "\n",
+    "# Instantiate Logistic Regression model\n",
+    "model = LogisticRegression(max_iter=1000, random_state=42)  # Increased max_iter for convergence\n",
+    "\n",
+    "# Create GridSearchCV\n",
+    "grid_search = GridSearchCV(estimator=model, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "\n",
+    "# Fit the GridSearchCV to the data\n",
+    "grid_search.fit(X_train_sc1, y_train1)\n",
+    "\n",
+    "# Get the best parameters and score\n",
+    "best_params = grid_search.best_params_\n",
+    "print(\"Best Parameters:\", best_params)\n",
+    "\n",
+    "# Evaluate on the test set\n",
+    "best_model = LogisticRegression(**best_params, max_iter=1000, random_state=42)\n",
+    "best_model.fit(X_train_sc1, y_train1)\n",
+    "y_pred = best_model.predict(X_test_sc1)\n",
+    "\n",
+    "# Balanced accuracy and classification report\n",
+    "balanced_accuracy = balanced_accuracy_score(y_test1, y_pred)\n",
+    "print(\"Balanced Accuracy:\", balanced_accuracy)\n",
+    "print(\"Classification Report:\\n\", classification_report(y_test1, y_pred))\n"
+   ]
+  },
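+  {
+   "cell_type": "markdown",
+   "id": "lr-penalty-grid-sketch-md",
+   "metadata": {},
+   "source": [
+    "The `UserWarning` messages above come from passing `l1_ratio` alongside every penalty. A minimal sketch, reusing the same `X_train_sc1`/`y_train1` variables (not the configuration used for the results above), of the search written as a list of penalty-specific grids so `l1_ratio` is only varied for `elasticnet`:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "lr-penalty-grid-sketch-code",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: penalty-specific grids so l1_ratio is only searched with 'elasticnet'\n",
+    "from sklearn.linear_model import LogisticRegression\n",
+    "from sklearn.model_selection import GridSearchCV\n",
+    "\n",
+    "param_grid_list = [\n",
+    "    {'penalty': ['l1', 'l2'], 'solver': ['saga'], 'C': [0.01, 0.1, 1, 10]},\n",
+    "    {'penalty': ['elasticnet'], 'solver': ['saga'], 'C': [0.01, 0.1, 1, 10], 'l1_ratio': [0.1, 0.5, 0.9]}\n",
+    "]\n",
+    "\n",
+    "grid_search_alt = GridSearchCV(LogisticRegression(max_iter=1000, random_state=42),\n",
+    "                               param_grid=param_grid_list, scoring='balanced_accuracy', cv=5)\n",
+    "# grid_search_alt.fit(X_train_sc1, y_train1)  # assumes the training variables defined above\n"
+   ]
+  },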
+  {
+   "cell_type": "markdown",
+   "id": "2f818089-499c-41bc-a8e7-1f79940f18a9",
+   "metadata": {},
+   "source": [
+    "### SVC"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "id": "7e32d5dd-48fe-415a-8ab5-69a8c5716144",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Dataset1: {'C': 100, 'gamma': 0.001, 'kernel': 'linear'}\n",
+      "Dataset2: {'C': 150, 'gamma': 0.01, 'kernel': 'rbf'}\n",
+      "correlation balanced accuracy: 0.640625\n",
+      "forward balanced accuracy: 0.546875\n",
+      "correlation classification report:\n",
+      "               precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.86      0.84      0.85        64\n",
+      "         1.0       0.41      0.44      0.42        16\n",
+      "\n",
+      "    accuracy                           0.76        80\n",
+      "   macro avg       0.63      0.64      0.64        80\n",
+      "weighted avg       0.77      0.76      0.77        80\n",
+      "\n",
+      "forward classification report:\n",
+      "               precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.82      0.84      0.83        64\n",
+      "         1.0       0.29      0.25      0.27        16\n",
+      "\n",
+      "    accuracy                           0.72        80\n",
+      "   macro avg       0.55      0.55      0.55        80\n",
+      "weighted avg       0.71      0.72      0.72        80\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "from sklearn.svm import SVC\n",
+    "from sklearn.model_selection import GridSearchCV\n",
+    "from sklearn.metrics import balanced_accuracy_score, classification_report\n",
+    "\n",
+    "# Defining hyperparameter grid for SVC\n",
+    "param_grid = {\n",
+    "    'kernel': ['linear', 'rbf', 'poly'],\n",
+    "    'C': [0.01, 0.1, 1, 10, 100, 150],\n",
+    "    'gamma': [0.001, 0.01, 0.1, 1]\n",
+    "}\n",
+    "\n",
+    "# Instantiating SVC classifier \n",
+    "model1 = SVC()\n",
+    "\n",
+    "# Creating the GridSearchCV objects for dataset 1 and 2 (correlation and forward selection datasets)\n",
+    "grid_search1 = GridSearchCV(estimator=model1, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "grid_search2 = GridSearchCV(estimator=model1, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "\n",
+    "# Fitting the GridSearchCV objects for dataset 1 and 2\n",
+    "grid_search1.fit(X_train_sc1, y_train1)\n",
+    "grid_search2.fit(X_train_sc2, y_train2)\n",
+    "\n",
+    "# Getting the best parameters from GridSearchCV for dataset 1 and 2\n",
+    "best_params1 = grid_search1.best_params_\n",
+    "best_params2 = grid_search2.best_params_\n",
+    "\n",
+    "# Printing the best parameters for each dataset\n",
+    "print(\"Dataset1:\", best_params1)\n",
+    "print(\"Dataset2:\", best_params2)\n",
+    "\n",
+    "# Getting the balanced accuracy for the best model from GridSearchCV for each dataset\n",
+    "best_model1 = SVC(**best_params1)\n",
+    "best_model1.fit(X_train_sc1, y_train1)\n",
+    "y_pred1 = best_model1.predict(X_test_sc1)\n",
+    "balanced_accuracy1 = balanced_accuracy_score(y_test1, y_pred1)\n",
+    "\n",
+    "best_model2 = SVC(**best_params2)\n",
+    "best_model2.fit(X_train_sc2, y_train2)\n",
+    "y_pred2 = best_model2.predict(X_test_sc2)\n",
+    "balanced_accuracy2 = balanced_accuracy_score(y_test2, y_pred2)\n",
+    "\n",
+    "# Printing the balanced accuracy for each dataset\n",
+    "print(\"correlation balanced accuracy:\", balanced_accuracy1)\n",
+    "print(\"forward balanced accuracy:\", balanced_accuracy2)\n",
+    "\n",
+    "# Getting the classification report for the best model from GridSearchCV for each dataset\n",
+    "print(\"correlation classification report:\\n\", classification_report(y_test1, y_pred1))\n",
+    "print(\"forward classification report:\\n\", classification_report(y_test2, y_pred2))\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "936900ac-9890-449e-a8e7-f1d7aef72777",
+   "metadata": {},
+   "source": [
+    "### Decision Tree"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "id": "360fab3f-6fd3-488b-a6af-9432216714d1",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Dataset1: {'criterion': 'gini', 'max_depth': 5, 'min_samples_leaf': 4, 'min_samples_split': 2, 'random_state': 42}\n",
+      "Dataset2: {'criterion': 'gini', 'max_depth': 10, 'min_samples_leaf': 1, 'min_samples_split': 5, 'random_state': 42}\n",
+      "correlation balanced accuracy: 0.515625\n",
+      "forward balanced accuracy: 0.53125\n",
+      "correlation classification report:\n",
+      "               precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.81      0.91      0.85        64\n",
+      "         1.0       0.25      0.12      0.17        16\n",
+      "\n",
+      "    accuracy                           0.75        80\n",
+      "   macro avg       0.53      0.52      0.51        80\n",
+      "weighted avg       0.69      0.75      0.72        80\n",
+      "\n",
+      "forward classification report:\n",
+      "               precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.81      0.91      0.85        64\n",
+      "         1.0       0.25      0.12      0.17        16\n",
+      "\n",
+      "    accuracy                           0.75        80\n",
+      "   macro avg       0.53      0.52      0.51        80\n",
+      "weighted avg       0.69      0.75      0.72        80\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "from sklearn.tree import DecisionTreeClassifier\n",
+    "\n",
+    "# Defining hyperparameter grid for DecisionTreeClassifier\n",
+    "param_grid = {\n",
+    "    'criterion': ['entropy', 'gini'],\n",
+    "    'max_depth': [2, 5, 10],\n",
+    "    'min_samples_split': [2, 5, 10],\n",
+    "    'min_samples_leaf': [1, 2, 4],\n",
+    "    'random_state': [42]\n",
+    "}\n",
+    "\n",
+    "# Instantiating DecisionTreeClassifier \n",
+    "model1 = DecisionTreeClassifier()\n",
+    "\n",
+    "# Creating GridSearchCV objects for dataset 1 and 2\n",
+    "grid_search1 = GridSearchCV(estimator=model1, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "grid_search2 = GridSearchCV(estimator=model1, param_grid=param_grid, scoring='balanced_accuracy', cv=5)\n",
+    "\n",
+    "# Fitting the GridSearchCV objects for dataset 1 and 2\n",
+    "grid_search1.fit(X_train_sc1, y_train1)\n",
+    "grid_search2.fit(X_train_sc2, y_train2)\n",
+    "\n",
+    "# Getting the best parameters from GridSearchCV for dataset 1 and 2\n",
+    "best_params1 = grid_search1.best_params_\n",
+    "best_params2 = grid_search2.best_params_\n",
+    "\n",
+    "# Printting the best parameters for each dataset\n",
+    "print(\"Dataset1:\", best_params1)\n",
+    "print(\"Dataset2:\", best_params2)\n",
+    "\n",
+    "# Getting the balanced accuracy for the best model from GridSearchCV for each dataset\n",
+    "best_model1 = DecisionTreeClassifier(**best_params1)\n",
+    "best_model1.fit(X_train_sc1, y_train1)\n",
+    "y_pred1 = best_model1.predict(X_test_sc1)\n",
+    "balanced_accuracy1 = balanced_accuracy_score(y_test1, y_pred1)\n",
+    "\n",
+    "best_model2 = DecisionTreeClassifier(**best_params2)\n",
+    "best_model2.fit(X_train_sc2, y_train2)\n",
+    "y_pred2 = best_model2.predict(X_test_sc2)\n",
+    "balanced_accuracy2 = balanced_accuracy_score(y_test2, y_pred2)\n",
+    "\n",
+    "# Printting the balanced accuracy for each dataset\n",
+    "print(\"correlation balanced accuracy:\", balanced_accuracy1)\n",
+    "print(\"forward balanced accuracy:\", balanced_accuracy2)\n",
+    "\n",
+    "# Getting the classification report for the best model from GridSearchCV for each dataset\n",
+    "print(\"correlation classification report:\\n\", classification_report(y_test1, y_pred1))\n",
+    "print(\"forward classification report:\\n\", classification_report(y_test2, y_pred1))\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "id": "a26e6775-c605-4dd2-8ef2-5d89a255a8f5",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.79      1.00      0.88        94\n",
+      "         1.0       1.00      0.04      0.07        26\n",
+      "\n",
+      "    accuracy                           0.79       120\n",
+      "   macro avg       0.89      0.52      0.48       120\n",
+      "weighted avg       0.84      0.79      0.71       120\n",
+      "\n",
+      "\n",
+      "Balanced Accuracy Score:\n",
+      "0.5192307692307693\n"
+     ]
+    }
+   ],
+   "source": [
+    "# training the random forest model for correlated features dataset\n",
+    "X_train, X_test, y_train, y_test = train_test_split(X_corr, y_corr, test_size=0.3,shuffle= True, random_state=64)\n",
+    "scaler = StandardScaler()\n",
+    "X_train_norm = scaler.fit_transform(X_train)\n",
+    "X_test_norm = scaler.transform(X_test)\n",
+    "\n",
+    "# Creating and train Random Forest classifier with specific parameters got from gridsearch\n",
+    "rf_classifier = RandomForestClassifier(max_depth=10, min_samples_leaf=3, min_samples_split=15, n_estimators=1000, random_state=42)\n",
+    "rf_classifier.fit(X_train_norm, y_train)\n",
+    "\n",
+    "# Predicting class labels for testing data\n",
+    "y_pred = rf_classifier.predict(X_test_norm)\n",
+    "\n",
+    "# Calculating classification report and balanced accuracy score\n",
+    "classification_report = classification_report(y_test, y_pred)\n",
+    "balanced_accuracy = balanced_accuracy_score(y_test, y_pred)\n",
+    "\n",
+    "print('Classification Report:')\n",
+    "print(classification_report)\n",
+    "\n",
+    "print('\\nBalanced Accuracy Score:')\n",
+    "print(balanced_accuracy)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b2491b03-0e49-4f7b-934c-300f6bfd5fa9",
+   "metadata": {},
+   "source": [
+    "### Smote for Random forest"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "id": "95e1c8ab-bec2-4eee-a4d3-ff255ff2461d",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "Classification Reports for each fold:\n",
+      "\n",
+      "Fold 1 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.84      0.97      0.90        33\n",
+      "         1.0       0.50      0.14      0.22         7\n",
+      "\n",
+      "    accuracy                           0.82        40\n",
+      "   macro avg       0.67      0.56      0.56        40\n",
+      "weighted avg       0.78      0.82      0.78        40\n",
+      "\n",
+      "\n",
+      "Fold 2 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.82      0.90      0.86        31\n",
+      "         1.0       0.50      0.33      0.40         9\n",
+      "\n",
+      "    accuracy                           0.78        40\n",
+      "   macro avg       0.66      0.62      0.63        40\n",
+      "weighted avg       0.75      0.78      0.76        40\n",
+      "\n",
+      "\n",
+      "Fold 3 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.79      0.97      0.87        31\n",
+      "         1.0       0.50      0.11      0.18         9\n",
+      "\n",
+      "    accuracy                           0.78        40\n",
+      "   macro avg       0.64      0.54      0.53        40\n",
+      "weighted avg       0.72      0.78      0.71        40\n",
+      "\n",
+      "\n",
+      "Fold 4 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.86      0.91      0.88        33\n",
+      "         1.0       0.40      0.29      0.33         7\n",
+      "\n",
+      "    accuracy                           0.80        40\n",
+      "   macro avg       0.63      0.60      0.61        40\n",
+      "weighted avg       0.78      0.80      0.79        40\n",
+      "\n",
+      "\n",
+      "Fold 5 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.79      0.79      0.79        29\n",
+      "         1.0       0.45      0.45      0.45        11\n",
+      "\n",
+      "    accuracy                           0.70        40\n",
+      "   macro avg       0.62      0.62      0.62        40\n",
+      "weighted avg       0.70      0.70      0.70        40\n",
+      "\n",
+      "\n",
+      "Fold 6 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.81      0.94      0.87        31\n",
+      "         1.0       0.50      0.22      0.31         9\n",
+      "\n",
+      "    accuracy                           0.78        40\n",
+      "   macro avg       0.65      0.58      0.59        40\n",
+      "weighted avg       0.74      0.78      0.74        40\n",
+      "\n",
+      "\n",
+      "Fold 7 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.78      1.00      0.88        29\n",
+      "         1.0       1.00      0.27      0.43        11\n",
+      "\n",
+      "    accuracy                           0.80        40\n",
+      "   macro avg       0.89      0.64      0.65        40\n",
+      "weighted avg       0.84      0.80      0.75        40\n",
+      "\n",
+      "\n",
+      "Fold 8 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.94      0.86      0.90        37\n",
+      "         1.0       0.17      0.33      0.22         3\n",
+      "\n",
+      "    accuracy                           0.82        40\n",
+      "   macro avg       0.55      0.60      0.56        40\n",
+      "weighted avg       0.88      0.82      0.85        40\n",
+      "\n",
+      "\n",
+      "Fold 9 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.83      0.94      0.88        32\n",
+      "         1.0       0.50      0.25      0.33         8\n",
+      "\n",
+      "    accuracy                           0.80        40\n",
+      "   macro avg       0.67      0.59      0.61        40\n",
+      "weighted avg       0.77      0.80      0.77        40\n",
+      "\n",
+      "\n",
+      "Fold 10 Classification Report:\n",
+      "              precision    recall  f1-score   support\n",
+      "\n",
+      "         0.0       0.84      0.90      0.87        30\n",
+      "         1.0       0.62      0.50      0.56        10\n",
+      "\n",
+      "    accuracy                           0.80        40\n",
+      "   macro avg       0.73      0.70      0.71        40\n",
+      "weighted avg       0.79      0.80      0.79        40\n",
+      "\n",
+      "\n",
+      "Balanced Accuracy Scores for each fold:\n",
+      "Fold 1 Balanced Accuracy: 0.5563\n",
+      "Fold 2 Balanced Accuracy: 0.6183\n",
+      "Fold 3 Balanced Accuracy: 0.5394\n",
+      "Fold 4 Balanced Accuracy: 0.5974\n",
+      "Fold 5 Balanced Accuracy: 0.6238\n",
+      "Fold 6 Balanced Accuracy: 0.5789\n",
+      "Fold 7 Balanced Accuracy: 0.6364\n",
+      "Fold 8 Balanced Accuracy: 0.5991\n",
+      "Fold 9 Balanced Accuracy: 0.5938\n",
+      "Fold 10 Balanced Accuracy: 0.7000\n",
+      "\n",
+      "Mean Balanced Accuracy Score across all folds: 0.6043275980337994\n"
+     ]
+    }
+   ],
+   "source": [
+    "from sklearn.model_selection import KFold\n",
+    "from imblearn.over_sampling import SMOTE\n",
+    "from sklearn.preprocessing import StandardScaler\n",
+    "from sklearn.ensemble import RandomForestClassifier\n",
+    "from sklearn.metrics import classification_report as clf_report, balanced_accuracy_score\n",
+    "import numpy as np\n",
+    "\n",
+    "# Initialize KFold with 10 splits\n",
+    "k = KFold(n_splits=10, shuffle=True, random_state=42)\n",
+    "\n",
+    "# Initialize SMOTE\n",
+    "sm = SMOTE(random_state=42)\n",
+    "\n",
+    "# Initialize Random Forest model\n",
+    "rf_classifier = RandomForestClassifier(max_depth=10, min_samples_leaf=3, min_samples_split=10, n_estimators=1000, random_state=42)\n",
+    "\n",
+    "# Lists to hold the results\n",
+    "all_classification_reports = []\n",
+    "all_balanced_accuracies = []\n",
+    "\n",
+    "# K-Fold Cross-Validation Loop\n",
+    "for train_idx, test_idx in k.split(X_corr, y_corr):\n",
+    "    # Split the data into train and test based on the indices\n",
+    "    X_train, X_test = X_corr.iloc[train_idx], X_corr.iloc[test_idx]\n",
+    "    y_train, y_test = y_corr.iloc[train_idx], y_corr.iloc[test_idx]\n",
+    "    \n",
+    "    # Resampling the data using SMOTE only for the training data\n",
+    "    X_train_resampled, y_train_resampled = sm.fit_resample(X_train, y_train)\n",
+    "    \n",
+    "    # Scaling data with StandardScaler\n",
+    "    scaler = StandardScaler()\n",
+    "    X_train_norm = scaler.fit_transform(X_train_resampled)\n",
+    "    X_test_norm = scaler.transform(X_test)  # Only transform test set\n",
+    "    \n",
+    "    # Train the Random Forest model\n",
+    "    rf_classifier.fit(X_train_norm, y_train_resampled)\n",
+    "    \n",
+    "    # Predicting\n",
+    "    y_pred = rf_classifier.predict(X_test_norm)\n",
+    "    \n",
+    "    # Generating the classification report and balanced accuracy score for each fold\n",
+    "    report = clf_report(y_test, y_pred, zero_division=0)\n",
+    "    balanced_accuracy = balanced_accuracy_score(y_test, y_pred)\n",
+    "    \n",
+    "    # Append results\n",
+    "    all_classification_reports.append(report)\n",
+    "    all_balanced_accuracies.append(balanced_accuracy)\n",
+    "\n",
+    "# Displaying the results\n",
+    "print(\"\\nClassification Reports for each fold:\")\n",
+    "for i, report in enumerate(all_classification_reports, 1):\n",
+    "    print(f\"\\nFold {i} Classification Report:\")\n",
+    "    print(report)\n",
+    "\n",
+    "print(\"\\nBalanced Accuracy Scores for each fold:\")\n",
+    "for i, score in enumerate(all_balanced_accuracies, 1):\n",
+    "    print(f\"Fold {i} Balanced Accuracy: {score:.4f}\")\n",
+    "\n",
+    "# Optionally, you can also display the mean of the balanced accuracies\n",
+    "print(\"\\nMean Balanced Accuracy Score across all folds:\", np.mean(all_balanced_accuracies))\n"
+   ]
+  },
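+  {
+   "cell_type": "markdown",
+   "id": "f3a9c1d2-7b54-4e06-9c2d-5e8a1b6f4c90",
+   "metadata": {},
+   "source": [
+    "As a cross-check (not part of the original run), the same fold-wise evaluation can be sketched with an `imblearn` `Pipeline` passed to `cross_val_score`: SMOTE, the `StandardScaler`, and the random forest are then refit on the training portion of every fold automatically, keeping resampling and scaling out of the test folds. The sketch assumes `X_corr` and `y_corr` are the feature matrix and target defined earlier; hyperparameters and seeds mirror the cell above."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a7d0e5b8-3c21-4f7a-8d69-0b4c2e9f1a35",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from sklearn.model_selection import KFold, cross_val_score\n",
+    "from sklearn.preprocessing import StandardScaler\n",
+    "from sklearn.ensemble import RandomForestClassifier\n",
+    "from imblearn.over_sampling import SMOTE\n",
+    "from imblearn.pipeline import Pipeline\n",
+    "\n",
+    "# Sketch only: bundle resampling, scaling and the classifier so each step\n",
+    "# is refit on the training split of every fold (no leakage into the test split)\n",
+    "pipeline = Pipeline([\n",
+    "    (\"smote\", SMOTE(random_state=42)),\n",
+    "    (\"scaler\", StandardScaler()),\n",
+    "    (\"rf\", RandomForestClassifier(max_depth=10, min_samples_leaf=3,\n",
+    "                                  min_samples_split=10, n_estimators=1000,\n",
+    "                                  random_state=42)),\n",
+    "])\n",
+    "\n",
+    "cv = KFold(n_splits=10, shuffle=True, random_state=42)\n",
+    "scores = cross_val_score(pipeline, X_corr, y_corr, cv=cv, scoring=\"balanced_accuracy\")\n",
+    "\n",
+    "print(\"Balanced accuracy per fold:\", scores.round(4))\n",
+    "print(\"Mean balanced accuracy:\", scores.mean())\n"
+   ]
+  },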
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2b182154-61c7-48ec-b601-7edf2537fd0c",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.13.0"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}