4204 lines (4203 with data), 205.8 kB
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from os.path import join\n",
"\n",
"import pandas as pd\n",
"import numpy as np\n",
"\n",
"label_col = 'survival_months'\n",
"n_bins = 4\n",
"eps = 1e-6"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"def add_bins(slide_data):\n",
"    # Discretize survival times into `n_bins` quantile bins (computed on\n",
"    # uncensored patients only) and collect each patient's slide ids.\n",
"    # Uses the module-level globals `label_col`, `n_bins`, `eps`.\n",
"    # Returns (bin edges, {case_id: slide_id array}, patient-level dataframe).\n",
"    assert 'case_id' in slide_data.columns and 'censorship' in slide_data.columns\n",
"    \n",
"    # One row per patient; quantile edges come from uncensored patients,\n",
"    # since censored times underestimate true survival.\n",
"    patients_df = slide_data.drop_duplicates(['case_id']).copy()\n",
"    uncensored_df = patients_df[patients_df['censorship'] < 1]\n",
"    disc_labels, q_bins = pd.qcut(uncensored_df[label_col], q=n_bins, retbins=True, labels=False)\n",
"    # Widen the outermost edges so every patient (censored included) falls in a bin.\n",
"    q_bins[-1] = slide_data[label_col].max() + eps\n",
"    q_bins[0] = slide_data[label_col].min() - eps\n",
"\n",
"    # Re-bin ALL patients with the adjusted edges (the qcut labels above are discarded).\n",
"    disc_labels, q_bins = pd.cut(patients_df[label_col], bins=q_bins, retbins=True, labels=False, right=False, include_lowest=True)\n",
"    patients_df.insert(2, 'label', disc_labels.values.astype(int))\n",
"\n",
"    # Map each case_id to the array of its slide_ids; a patient with a single\n",
"    # slide makes .loc return a scalar string, hence the isinstance branch.\n",
"    patient_dict = {}\n",
"    slide_data = slide_data.set_index('case_id')\n",
"    for patient in patients_df['case_id']:\n",
"        slide_ids = slide_data.loc[patient, 'slide_id']\n",
"        if isinstance(slide_ids, str):\n",
"            slide_ids = np.array(slide_ids).reshape(-1)\n",
"        else:\n",
"            slide_ids = slide_ids.values\n",
"        patient_dict.update({patient:slide_ids})\n",
"    \n",
"    return q_bins, patient_dict, patients_df"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"slide_data = pd.read_csv('./tcga_gbmlgg_all_clean.csv.zip', compression='zip', header=0, index_col=0, sep=',', low_memory=False)\n",
"\n",
"n_bins = 4\n",
"eps = 1e-6\n",
"\n",
"### Ensure 'case_id' is a column, not the index; TCGA case ids are the\n",
"### first 12 characters of the barcode found in the index.\n",
"if 'case_id' not in slide_data:\n",
"    slide_data.index = slide_data.index.str[:12]\n",
"    slide_data['case_id'] = slide_data.index\n",
"    slide_data = slide_data.reset_index(drop=True)\n",
"\n",
"# NOTE: add_bins returns the patient-level dataframe, which replaces the\n",
"# slide-level one here — from this point on there is one row per case_id.\n",
"q_bins, patients_dict, slide_data = add_bins(slide_data)\n",
"\n",
"slide_data.reset_index(drop=True, inplace=True)\n",
"slide_data = slide_data.assign(slide_id=slide_data['case_id'])\n",
"\n",
"# Combined label space: (survival bin, censorship) -> class index,\n",
"# i.e. 2 * n_bins classes in total.\n",
"label_dict = {}\n",
"key_count = 0\n",
"for i in range(len(q_bins)-1):\n",
"    for c in [0, 1]:\n",
"        label_dict.update({(i, c):key_count})\n",
"        key_count+=1\n",
"\n",
"for i in slide_data.index:\n",
"    key = slide_data.loc[i, 'label']\n",
"    # Preserve the raw survival bin as 'disc_label'; overwrite 'label' with\n",
"    # the combined (bin, censorship) class index.\n",
"    slide_data.at[i, 'disc_label'] = key\n",
"    censorship = slide_data.loc[i, 'censorship']\n",
"    key = (key, int(censorship))\n",
"    slide_data.at[i, 'label'] = label_dict[key]\n",
"\n",
"bins = q_bins\n",
"num_classes=len(label_dict)\n",
"patients_df = slide_data.drop_duplicates(['case_id'])\n",
"patient_data = {'case_id':patients_df['case_id'].values, 'label':patients_df['label'].values}\n",
"\n",
"# Move the last two columns (presumably slide_id and disc_label, the two\n",
"# added above — TODO confirm) to the front; the first 11 columns are then\n",
"# treated as non-genomic metadata.\n",
"new_cols = list(slide_data.columns[-2:]) + list(slide_data.columns[:-2])\n",
"slide_data = slide_data[new_cols]\n",
"metadata = slide_data.columns[:11]"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.pipeline import Pipeline\n",
"from sklearn.decomposition import PCA\n",
"from sklearn.preprocessing import StandardScaler\n",
"\n",
"\n",
"def series_intersection(s1, s2):\n",
"    # Order-insensitive intersection of two iterables, returned as a Series.\n",
"    # NOTE: set() makes the result order non-deterministic; callers sort it.\n",
"    return pd.Series(list(set(s1) & set(s2)))\n",
"\n",
"# Genomic features = every column that is not metadata; fit a scaler on them.\n",
"genomic_features = slide_data.drop(metadata, axis=1)\n",
"scaler_omic = StandardScaler().fit(genomic_features)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/mahmoodlab/anaconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py:3071: DtypeWarning: Columns (2) have mixed types.Specify dtype option on import or set low_memory=False.\n",
" has_raised = await self.run_ast_nodes(code_ast.body, cell_name,\n"
]
}
],
"source": [
"signatures = pd.read_csv('./signatures.csv')\n",
"slide_df = pd.read_csv('./tcga_gbmlgg_all_clean.csv.zip')"
]
},
{
"cell_type": "code",
"execution_count": 40,
"metadata": {},
"outputs": [],
"source": [
"# Flatten the unique gene names from every signature column into one array.\n",
"omic_from_signatures = np.concatenate(\n",
"    [signatures[col].dropna().unique() for col in signatures.columns]\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 41,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/mahmoodlab/anaconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py:3071: DtypeWarning: Columns (2) have mixed types.Specify dtype option on import or set low_memory=False.\n",
" has_raised = await self.run_ast_nodes(code_ast.body, cell_name,\n",
"/home/mahmoodlab/anaconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py:3071: DtypeWarning: Columns (4) have mixed types.Specify dtype option on import or set low_memory=False.\n",
" has_raised = await self.run_ast_nodes(code_ast.body, cell_name,\n"
]
}
],
"source": [
"# Re-export every dataset CSV keeping only the clinical columns plus the\n",
"# genomic columns that overlap the signature gene list.\n",
"for fname in os.listdir('./'):\n",
"    if fname.endswith('.csv.zip'):\n",
"        slide_df = pd.read_csv(fname)\n",
"        # Signature gene symbols are suffixed per omics modality in the CSVs.\n",
"        omic_overlap = np.concatenate([omic_from_signatures+mode for mode in ['_mut', '_cnv', '_rnaseq']])\n",
"        omic_overlap = sorted(series_intersection(omic_overlap, slide_df.columns))\n",
"        # Assumes the first 9 columns are clinical/metadata — TODO confirm per file\n",
"        # (an earlier cell used 11 metadata columns).\n",
"        slide_df[list(slide_df.columns[:9]) + omic_overlap].to_csv('../dataset_csv/%s' % fname)"
]
},
{
"cell_type": "code",
"execution_count": 36,
"metadata": {},
"outputs": [],
"source": [
"omic_from_signatures = []\n",
"for col in signatures.columns:\n",
" omic = signatures[col].dropna().unique()\n",
" omic_from_signatures.append(omic)\n",
"\n",
"omic_from_signatures = np.concatenate(omic_from_signatures)\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Tumor Suppressor Genes Embedding Size: 84\n",
"Oncogenes Embedding Size: 314\n",
"Protein Kinases Embedding Size: 498\n",
"Cell Differentiation Markers Embedding Size: 415\n",
"Transcription Factors Embedding Size: 1396\n",
"Cytokines and Growth Factors Embedding Size: 428\n"
]
}
],
"source": [
"\n",
"def series_intersection(s1, s2):\n",
"    # Duplicate of the helper defined earlier in the notebook; kept so this\n",
"    # cell runs standalone. Returns the set intersection as a Series.\n",
"    return pd.Series(list(set(s1) & set(s2)))\n",
"\n",
"# For each signature, collect the overlapping genomic feature columns\n",
"# (with per-modality suffixes) and report the resulting embedding size.\n",
"sig_names = []\n",
"for col in signatures.columns:\n",
"    sig = signatures[col].dropna().unique()\n",
"    sig = np.concatenate([sig+mode for mode in ['_mut', '_cnv', '_rnaseq']])\n",
"    sig = sorted(series_intersection(sig, genomic_features.columns))\n",
"    sig_names.append(sig)\n",
"    print('%s Embedding Size: %d' % (col, len(sig)))\n",
"sig_sizes = [len(sig) for sig in sig_names]"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['IFNA10_cnv',\n",
" 'IFNA13_cnv',\n",
" 'IFNA14_cnv',\n",
" 'IFNA16_cnv',\n",
" 'IFNA17_cnv',\n",
" 'IFNA1_cnv',\n",
" 'IFNA21_cnv',\n",
" 'IFNA2_cnv',\n",
" 'IFNA4_cnv',\n",
" 'IFNA5_cnv',\n",
" 'IFNA6_cnv',\n",
" 'IFNA7_cnv',\n",
" 'IFNA8_cnv',\n",
" 'IFNB1_cnv',\n",
" 'IFNE_cnv',\n",
" 'IFNW1_cnv',\n",
" 'PDGFRA_cnv']"
]
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"sig"
]
},
{
"cell_type": "code",
"execution_count": 434,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>NDUFS5_cnv</th>\n",
" <th>MACF1_cnv</th>\n",
" <th>RNA5SP44_cnv</th>\n",
" <th>KIAA0754_cnv</th>\n",
" <th>BMP8A_cnv</th>\n",
" <th>PABPC4_cnv</th>\n",
" <th>SNORA55_cnv</th>\n",
" <th>HEYL_cnv</th>\n",
" <th>HPCAL4_cnv</th>\n",
" <th>NT5C1A_cnv</th>\n",
" <th>...</th>\n",
" <th>ZWINT_rnaseq</th>\n",
" <th>ZXDA_rnaseq</th>\n",
" <th>ZXDB_rnaseq</th>\n",
" <th>ZXDC_rnaseq</th>\n",
" <th>ZYG11A_rnaseq</th>\n",
" <th>ZYG11B_rnaseq</th>\n",
" <th>ZYX_rnaseq</th>\n",
" <th>ZZEF1_rnaseq</th>\n",
" <th>ZZZ3_rnaseq</th>\n",
" <th>TPTEP1_rnaseq</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>...</td>\n",
" <td>-0.8388</td>\n",
" <td>4.1375</td>\n",
" <td>3.9664</td>\n",
" <td>1.8437</td>\n",
" <td>-0.3959</td>\n",
" <td>-0.2561</td>\n",
" <td>-0.2866</td>\n",
" <td>1.8770</td>\n",
" <td>-0.3179</td>\n",
" <td>-0.3633</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>...</td>\n",
" <td>-0.1083</td>\n",
" <td>0.3393</td>\n",
" <td>0.2769</td>\n",
" <td>1.7320</td>\n",
" <td>-0.0975</td>\n",
" <td>2.6955</td>\n",
" <td>-0.6741</td>\n",
" <td>1.0323</td>\n",
" <td>1.2766</td>\n",
" <td>-0.3982</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>-0.4155</td>\n",
" <td>1.6846</td>\n",
" <td>0.7711</td>\n",
" <td>-0.3061</td>\n",
" <td>-0.5016</td>\n",
" <td>2.8548</td>\n",
" <td>-0.6171</td>\n",
" <td>-0.8608</td>\n",
" <td>-0.0486</td>\n",
" <td>-0.3962</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>-0.8143</td>\n",
" <td>0.8344</td>\n",
" <td>1.5075</td>\n",
" <td>3.6068</td>\n",
" <td>-0.5004</td>\n",
" <td>-0.0747</td>\n",
" <td>-0.2185</td>\n",
" <td>-0.4379</td>\n",
" <td>1.6913</td>\n",
" <td>1.7748</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0.0983</td>\n",
" <td>-0.7908</td>\n",
" <td>-0.0053</td>\n",
" <td>-0.0643</td>\n",
" <td>-0.3706</td>\n",
" <td>0.3870</td>\n",
" <td>-0.5589</td>\n",
" <td>-0.5979</td>\n",
" <td>0.0047</td>\n",
" <td>-0.3548</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>368</th>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>...</td>\n",
" <td>-0.0291</td>\n",
" <td>-0.1058</td>\n",
" <td>-0.6721</td>\n",
" <td>0.2802</td>\n",
" <td>1.9504</td>\n",
" <td>-0.8784</td>\n",
" <td>0.9506</td>\n",
" <td>0.0607</td>\n",
" <td>1.1883</td>\n",
" <td>-0.3521</td>\n",
" </tr>\n",
" <tr>\n",
" <th>369</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0.0497</td>\n",
" <td>0.3673</td>\n",
" <td>-0.2208</td>\n",
" <td>0.3034</td>\n",
" <td>3.2580</td>\n",
" <td>-0.2089</td>\n",
" <td>1.6053</td>\n",
" <td>-0.8746</td>\n",
" <td>-0.4491</td>\n",
" <td>-0.3450</td>\n",
" </tr>\n",
" <tr>\n",
" <th>370</th>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>...</td>\n",
" <td>0.3822</td>\n",
" <td>-0.7003</td>\n",
" <td>-0.7661</td>\n",
" <td>-1.7035</td>\n",
" <td>-0.5423</td>\n",
" <td>-0.3488</td>\n",
" <td>1.3713</td>\n",
" <td>-0.4365</td>\n",
" <td>2.3456</td>\n",
" <td>-0.3866</td>\n",
" </tr>\n",
" <tr>\n",
" <th>371</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>-0.6853</td>\n",
" <td>-1.0240</td>\n",
" <td>-1.2890</td>\n",
" <td>-1.5666</td>\n",
" <td>-0.1270</td>\n",
" <td>-1.4662</td>\n",
" <td>0.3981</td>\n",
" <td>-0.5976</td>\n",
" <td>-1.3822</td>\n",
" <td>-0.4157</td>\n",
" </tr>\n",
" <tr>\n",
" <th>372</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0.0517</td>\n",
" <td>-0.3570</td>\n",
" <td>-0.4843</td>\n",
" <td>-0.3792</td>\n",
" <td>-0.1964</td>\n",
" <td>0.4200</td>\n",
" <td>3.2547</td>\n",
" <td>-0.1232</td>\n",
" <td>3.4519</td>\n",
" <td>-0.1962</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>373 rows × 20395 columns</p>\n",
"</div>"
],
"text/plain": [
" NDUFS5_cnv MACF1_cnv RNA5SP44_cnv KIAA0754_cnv BMP8A_cnv PABPC4_cnv \\\n",
"0 -1 -1 -1 -1 -1 -1 \n",
"1 2 2 2 2 2 2 \n",
"2 0 0 0 0 0 0 \n",
"3 0 0 0 0 0 0 \n",
"4 0 0 0 0 0 0 \n",
".. ... ... ... ... ... ... \n",
"368 2 2 2 2 2 2 \n",
"369 0 0 0 0 0 0 \n",
"370 1 1 1 1 1 1 \n",
"371 0 0 0 0 0 0 \n",
"372 0 0 0 0 0 0 \n",
"\n",
" SNORA55_cnv HEYL_cnv HPCAL4_cnv NT5C1A_cnv ... ZWINT_rnaseq \\\n",
"0 -1 -1 -1 -1 ... -0.8388 \n",
"1 2 2 2 2 ... -0.1083 \n",
"2 0 0 0 0 ... -0.4155 \n",
"3 0 0 0 0 ... -0.8143 \n",
"4 0 0 0 0 ... 0.0983 \n",
".. ... ... ... ... ... ... \n",
"368 2 2 2 2 ... -0.0291 \n",
"369 0 0 0 0 ... 0.0497 \n",
"370 1 1 1 1 ... 0.3822 \n",
"371 0 0 0 0 ... -0.6853 \n",
"372 0 0 0 0 ... 0.0517 \n",
"\n",
" ZXDA_rnaseq ZXDB_rnaseq ZXDC_rnaseq ZYG11A_rnaseq ZYG11B_rnaseq \\\n",
"0 4.1375 3.9664 1.8437 -0.3959 -0.2561 \n",
"1 0.3393 0.2769 1.7320 -0.0975 2.6955 \n",
"2 1.6846 0.7711 -0.3061 -0.5016 2.8548 \n",
"3 0.8344 1.5075 3.6068 -0.5004 -0.0747 \n",
"4 -0.7908 -0.0053 -0.0643 -0.3706 0.3870 \n",
".. ... ... ... ... ... \n",
"368 -0.1058 -0.6721 0.2802 1.9504 -0.8784 \n",
"369 0.3673 -0.2208 0.3034 3.2580 -0.2089 \n",
"370 -0.7003 -0.7661 -1.7035 -0.5423 -0.3488 \n",
"371 -1.0240 -1.2890 -1.5666 -0.1270 -1.4662 \n",
"372 -0.3570 -0.4843 -0.3792 -0.1964 0.4200 \n",
"\n",
" ZYX_rnaseq ZZEF1_rnaseq ZZZ3_rnaseq TPTEP1_rnaseq \n",
"0 -0.2866 1.8770 -0.3179 -0.3633 \n",
"1 -0.6741 1.0323 1.2766 -0.3982 \n",
"2 -0.6171 -0.8608 -0.0486 -0.3962 \n",
"3 -0.2185 -0.4379 1.6913 1.7748 \n",
"4 -0.5589 -0.5979 0.0047 -0.3548 \n",
".. ... ... ... ... \n",
"368 0.9506 0.0607 1.1883 -0.3521 \n",
"369 1.6053 -0.8746 -0.4491 -0.3450 \n",
"370 1.3713 -0.4365 2.3456 -0.3866 \n",
"371 0.3981 -0.5976 -1.3822 -0.4157 \n",
"372 3.2547 -0.1232 3.4519 -0.1962 \n",
"\n",
"[373 rows x 20395 columns]"
]
},
"execution_count": 434,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"genomic_features"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import torch.nn as nn\n",
"import torch.nn.functional as F\n",
"import pdb\n",
"import numpy as np\n",
"\n",
"class MIL_Sum_FC_surv(nn.Module):\n",
"    # Deep-Sets style MIL survival model: per-instance MLP (phi), sum\n",
"    # pooling, bag-level MLP (rho), then a discrete-hazard classifier.\n",
"    def __init__(self, size_arg = \"small\", dropout=0.25, n_classes=4):\n",
"        super(MIL_Sum_FC_surv, self).__init__()\n",
"\n",
"        self.size_dict = {\"small\": [1024, 512, 256], \"big\": [1024, 512, 384]}\n",
"        size = self.size_dict[size_arg]\n",
"        self.phi = nn.Sequential(*[nn.Linear(size[0], size[1]), nn.ReLU(), nn.Dropout(dropout)])\n",
"        self.rho = nn.Sequential(*[nn.Linear(size[1], size[2]), nn.ReLU(), nn.Dropout(dropout)])\n",
"        self.classifier = nn.Linear(size[2], n_classes)\n",
"\n",
"    def relocate(self):\n",
"        # Move modules to GPU; the per-instance MLP phi is wrapped in\n",
"        # DataParallel across all visible devices.\n",
"        device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"        if torch.cuda.device_count() >= 1:\n",
"            device_ids = list(range(torch.cuda.device_count()))\n",
"            self.phi = nn.DataParallel(self.phi, device_ids=device_ids).to('cuda:0')\n",
"\n",
"        self.rho = self.rho.to(device)\n",
"        self.classifier = self.classifier.to(device)\n",
"\n",
"    def forward(self, **kwargs):\n",
"        # kwargs['x_path']: bag of patch features — presumably (N, 1024); TODO confirm.\n",
"        h = kwargs['x_path']\n",
"\n",
"        h = self.phi(h).sum(axis=0)\n",
"        h = self.rho(h)\n",
"        logits = self.classifier(h).unsqueeze(0)\n",
"        Y_hat = torch.topk(logits, 1, dim = 1)[1]\n",
"        # Discrete-time survival: per-interval hazards and survival curve S.\n",
"        hazards = torch.sigmoid(logits)\n",
"        S = torch.cumprod(1 - hazards, dim=1)\n",
"        \n",
"        return hazards, S, Y_hat, None, None\n",
"\n",
"from os.path import join\n",
"from collections import OrderedDict\n",
"\n",
"import torch\n",
"import torch.nn as nn\n",
"import torch.nn.functional as F\n",
"import pdb\n",
"import numpy as np\n",
"\n",
"\"\"\"\n",
"A Modified Implementation of Deep Attention MIL\n",
"\"\"\"\n",
"\n",
"\n",
"\"\"\"\n",
"Attention Network without Gating (2 fc layers)\n",
"args:\n",
" L: input feature dimension\n",
" D: hidden layer dimension\n",
" dropout: whether to use dropout (p = 0.25)\n",
" n_classes: number of classes (experimental usage for multiclass MIL)\n",
"\"\"\"\n",
"class Attn_Net(nn.Module):\n",
"    # Attention network without gating (2 fc layers).\n",
"    # L: input feature dim, D: hidden dim, dropout: add p=0.25 dropout,\n",
"    # n_classes: number of attention scores per instance.\n",
"\n",
"    def __init__(self, L = 1024, D = 256, dropout = False, n_classes = 1):\n",
"        super(Attn_Net, self).__init__()\n",
"        self.module = [\n",
"            nn.Linear(L, D),\n",
"            nn.Tanh()]\n",
"\n",
"        if dropout:\n",
"            self.module.append(nn.Dropout(0.25))\n",
"\n",
"        self.module.append(nn.Linear(D, n_classes))\n",
"        \n",
"        self.module = nn.Sequential(*self.module)\n",
"        \n",
"    def forward(self, x):\n",
"        # Returns (attention scores, untouched input features).\n",
"        return self.module(x), x # N x n_classes\n",
"\n",
"\"\"\"\n",
"Attention Network with Sigmoid Gating (3 fc layers)\n",
"args:\n",
" L: input feature dimension\n",
" D: hidden layer dimension\n",
" dropout: whether to use dropout (p = 0.25)\n",
" n_classes: number of classes (experimental usage for multiclass MIL)\n",
"\"\"\"\n",
"class Attn_Net_Gated(nn.Module):\n",
"    # Attention network with sigmoid gating (3 fc layers):\n",
"    # A = c(tanh(W_a x) * sigmoid(W_b x)); forward returns (A, x).\n",
"\n",
"    def __init__(self, L = 1024, D = 256, dropout = False, n_classes = 1):\n",
"        super(Attn_Net_Gated, self).__init__()\n",
"        self.attention_a = [\n",
"            nn.Linear(L, D),\n",
"            nn.Tanh()]\n",
"        \n",
"        self.attention_b = [nn.Linear(L, D),\n",
"                            nn.Sigmoid()]\n",
"        if dropout:\n",
"            self.attention_a.append(nn.Dropout(0.25))\n",
"            self.attention_b.append(nn.Dropout(0.25))\n",
"\n",
"        self.attention_a = nn.Sequential(*self.attention_a)\n",
"        self.attention_b = nn.Sequential(*self.attention_b)\n",
"        \n",
"        self.attention_c = nn.Linear(D, n_classes)\n",
"\n",
"    def forward(self, x):\n",
"        a = self.attention_a(x)\n",
"        b = self.attention_b(x)\n",
"        # Element-wise sigmoid gate, then project to n_classes scores.\n",
"        A = a.mul(b)\n",
"        A = self.attention_c(A)  # N x n_classes\n",
"        return A, x\n",
" \n",
"class MIL_Cluster_FC_surv(nn.Module):\n",
"    # Cluster-based MIL survival model: patches are K-Means-clustered into\n",
"    # phenotypes, each phenotype has its own MLP + average pooling, and a\n",
"    # gated-attention MIL head aggregates the phenotype embeddings before\n",
"    # the discrete-hazard classifier.\n",
"    def __init__(self, num_clusters=10, size_arg = \"small\", dropout=0.25, n_classes=4):\n",
"        super(MIL_Cluster_FC_surv, self).__init__()\n",
"        self.size_dict = {\"small\": [1024, 512, 256], \"big\": [1024, 512, 384]}\n",
"        self.num_clusters = num_clusters\n",
"        \n",
"        ### Phenotype Learning: one 2-layer MLP per cluster\n",
"        size = self.size_dict[size_arg]\n",
"        phis = []\n",
"        for phenotype_i in range(num_clusters):\n",
"            phi = [nn.Linear(size[0], size[1]), nn.ReLU(), nn.Dropout(dropout),\n",
"                   nn.Linear(size[1], size[1]), nn.ReLU(), nn.Dropout(dropout)]\n",
"            phis.append(nn.Sequential(*phi))\n",
"        self.phis = nn.ModuleList(phis)\n",
"        self.pool1d = nn.AdaptiveAvgPool1d(1)\n",
"        \n",
"        ### WSI Attention MIL Construction\n",
"        fc = [nn.Linear(size[1], size[1]), nn.ReLU()]\n",
"        fc.append(nn.Dropout(0.25))\n",
"        attention_net = Attn_Net_Gated(L=size[1], D=size[2], dropout=dropout, n_classes=1)\n",
"        fc.append(attention_net)\n",
"        self.attention_net = nn.Sequential(*fc)\n",
"\n",
"        self.rho = nn.Sequential(*[nn.Linear(size[1], size[2]), nn.ReLU(), nn.Dropout(dropout)])\n",
"        self.classifier = nn.Linear(size[2], n_classes)\n",
"\n",
"    def relocate(self):\n",
"        device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"        if torch.cuda.device_count() >= 1:\n",
"            device_ids = list(range(torch.cuda.device_count()))\n",
"            # FIX: this class stores its per-cluster MLPs as `self.phis`;\n",
"            # the original wrapped the non-existent `self.phi`, which raised\n",
"            # AttributeError on every call to relocate().\n",
"            self.phis = nn.DataParallel(self.phis, device_ids=device_ids).to('cuda:0')\n",
"\n",
"        self.rho = self.rho.to(device)\n",
"        self.classifier = self.classifier.to(device)\n",
"\n",
"    def forward(self, **kwargs):\n",
"        # kwargs['x_path']: bag of patch features — presumably (N, 1024); TODO confirm.\n",
"        x_path = kwargs['x_path']\n",
"\n",
"        ### Phenotyping: assign every patch to a cluster.\n",
"        # FIX: dropped the stray `KMeans(...).fit(X)` call on the undefined\n",
"        # name `X` (it raised NameError), cluster on a detached CPU copy so\n",
"        # CUDA tensors work, and honour self.num_clusters instead of a\n",
"        # hard-coded 10.\n",
"        from sklearn.cluster import KMeans\n",
"        cluster_ids_x = KMeans(n_clusters=self.num_clusters, random_state=2021, max_iter=20).fit_predict(x_path.detach().cpu().numpy())\n",
"        h_phenotypes = []\n",
"        for i in range(self.num_clusters):\n",
"            h_phenotypes_i = self.phis[i](x_path[cluster_ids_x==i])\n",
"            if h_phenotypes_i.shape[0] == 0:\n",
"                # Robustness: an empty cluster contributes a zero embedding\n",
"                # instead of crashing AdaptiveAvgPool1d on a size-0 input.\n",
"                h_phenotypes.append(torch.zeros((1, h_phenotypes_i.shape[1]), device=h_phenotypes_i.device))\n",
"                continue\n",
"            # Average-pool this phenotype's patch embeddings to one vector.\n",
"            h_phenotypes.append(self.pool1d(h_phenotypes_i.T.unsqueeze(0)).squeeze(2))\n",
"        h_phenotypes = torch.stack(h_phenotypes, dim=1).squeeze(0)\n",
"\n",
"        ### Attention MIL over the num_clusters phenotype embeddings\n",
"        A, h = self.attention_net(h_phenotypes)\n",
"        A = torch.transpose(A, 1, 0)\n",
"        if 'attention_only' in kwargs.keys():\n",
"            if kwargs['attention_only']:\n",
"                return A\n",
"        A_raw = A\n",
"        A = F.softmax(A, dim=1)\n",
"        h = torch.mm(A, h_phenotypes)\n",
"\n",
"        h = self.rho(h)\n",
"        # FIX: h is already batched (1, size[2]) here, so the original's\n",
"        # extra unsqueeze(0) produced (1, 1, n_classes) logits; keep\n",
"        # (1, n_classes) to match MIL_Sum_FC_surv's output shapes.\n",
"        logits = self.classifier(h)\n",
"        Y_hat = torch.topk(logits, 1, dim = 1)[1]\n",
"        hazards = torch.sigmoid(logits)\n",
"        S = torch.cumprod(1 - hazards, dim=1)\n",
"        \n",
"        return hazards, S, Y_hat, None, None"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"x_path = torch.randint(10, size=(500, 1024)).type(torch.cuda.FloatTensor)\n"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.cluster import KMeans\n",
"kmeans = KMeans(n_clusters=10, random_state=2021, max_iter=20).fit_predict(x_path.cpu())"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([5, 5, 3, 5, 8, 4, 8, 7, 5, 4, 9, 1, 9, 1, 6, 1, 1, 0, 5, 0, 4, 3,\n",
" 0, 6, 3, 1, 0, 7, 9, 8, 0, 5, 5, 3, 0, 1, 5, 1, 0, 6, 6, 4, 1, 5,\n",
" 3, 0, 1, 0, 8, 5, 1, 8, 1, 0, 5, 0, 2, 5, 6, 5, 0, 0, 5, 1, 2, 7,\n",
" 4, 6, 5, 3, 0, 7, 9, 1, 3, 4, 4, 5, 7, 9, 9, 5, 0, 1, 9, 1, 2, 0,\n",
" 6, 3, 1, 1, 2, 4, 0, 5, 1, 1, 1, 0, 0, 9, 8, 1, 5, 5, 0, 9, 2, 3,\n",
" 7, 0, 1, 6, 7, 5, 3, 5, 0, 1, 6, 1, 6, 2, 8, 7, 6, 1, 6, 2, 5, 0,\n",
" 1, 6, 0, 9, 2, 1, 0, 1, 7, 7, 6, 1, 6, 0, 3, 4, 1, 3, 2, 4, 4, 5,\n",
" 4, 1, 1, 9, 6, 0, 3, 6, 4, 8, 7, 9, 6, 5, 5, 9, 0, 6, 0, 1, 9, 2,\n",
" 3, 5, 1, 9, 6, 1, 0, 6, 6, 0, 0, 6, 7, 1, 6, 1, 1, 1, 4, 0, 2, 1,\n",
" 9, 5, 7, 5, 9, 0, 1, 0, 6, 2, 2, 1, 1, 5, 3, 5, 3, 6, 5, 6, 9, 5,\n",
" 2, 2, 2, 6, 0, 0, 0, 5, 2, 6, 6, 0, 2, 5, 1, 9, 2, 4, 4, 0, 4, 7,\n",
" 4, 1, 1, 3, 6, 0, 1, 2, 4, 0, 8, 1, 8, 5, 5, 7, 4, 1, 6, 1, 0, 8,\n",
" 6, 1, 1, 4, 8, 7, 5, 2, 3, 0, 2, 9, 5, 6, 4, 3, 6, 5, 5, 4, 6, 6,\n",
" 0, 1, 5, 1, 1, 1, 1, 9, 5, 7, 3, 0, 2, 4, 0, 5, 4, 0, 5, 0, 6, 0,\n",
" 3, 1, 4, 6, 3, 7, 1, 6, 7, 0, 1, 4, 6, 1, 6, 0, 6, 0, 5, 9, 1, 1,\n",
" 3, 1, 5, 6, 1, 6, 6, 8, 2, 0, 7, 9, 9, 6, 0, 6, 2, 6, 8, 0, 8, 5,\n",
" 1, 3, 1, 9, 2, 3, 5, 8, 2, 5, 6, 6, 5, 2, 9, 0, 1, 8, 5, 9, 5, 1,\n",
" 0, 1, 0, 8, 6, 1, 7, 2, 8, 3, 1, 6, 2, 2, 1, 6, 0, 2, 6, 1, 1, 4,\n",
" 5, 6, 4, 0, 5, 0, 9, 0, 4, 8, 0, 7, 6, 5, 5, 0, 4, 1, 1, 2, 2, 0,\n",
" 0, 6, 4, 0, 7, 7, 2, 3, 1, 4, 7, 9, 4, 7, 2, 4, 5, 6, 4, 5, 7, 9,\n",
" 8, 0, 6, 2, 0, 6, 6, 3, 5, 4, 4, 0, 1, 0, 5, 3, 1, 6, 0, 7, 4, 1,\n",
" 6, 3, 6, 0, 4, 1, 5, 7, 3, 1, 4, 8, 0, 7, 0, 6, 1, 1, 0, 1, 5, 1,\n",
" 2, 3, 2, 3, 8, 8, 4, 6, 5, 6, 1, 0, 7, 6, 4, 4], dtype=int32)"
]
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"kmeans"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(tensor([[0.9992, 0.0000, 0.0000, 1.0000]], grad_fn=<SigmoidBackward>),\n",
" tensor([[0.0008, 0.0008, 0.0008, 0.0000]], grad_fn=<CumprodBackward>),\n",
" tensor([[3]]),\n",
" None,\n",
" None)"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
"model = MIL_Sum_FC_surv()\n",
"model.forward(x_path=x_path)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(tensor([[4.2595e-07, 1.0000e+00, 0.0000e+00, 7.2488e-12]],\n",
" grad_fn=<SigmoidBackward>),\n",
" tensor([[1.0000, 0.0000, 0.0000, 0.0000]], grad_fn=<CumprodBackward>),\n",
" tensor([[1]]),\n",
" None,\n",
" None)"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
"# FIX: instantiate and call the cluster model. The original assigned the new\n",
"# MIL_Cluster_FC_surv to `self` but then invoked the leftover `model`\n",
"# (MIL_Sum_FC_surv from the previous cell), so this output never exercised\n",
"# the cluster model at all.\n",
"model = MIL_Cluster_FC_surv()\n",
"model.forward(x_path=x_path)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"fname = os.path.join('/media/ssd1/pan-cancer/tcga_gbm_20x_features/h5_files/TCGA-02-0001-01Z-00-DX1.83fce43e-42ac-4dcd-b156-2908e75f2e47.h5')"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [],
"source": [
"import h5py\n",
"h5 = h5py.File(fname, \"r\")\n",
"coords = np.array(h5['coords'])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fm"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([43121, 29428])"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"np.array(h5['coords'])[0]"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([43121, 29940])"
]
},
"execution_count": 19,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"np.array(h5['coords'])[1]"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"512"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"np.array(h5['coords'])[1][1] - np.array(h5['coords'])[0][1]"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"512"
]
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"np.array(h5['coords'])[2][1] - np.array(h5['coords'])[1][1]"
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [],
"source": [
"import nmslib\n",
"class Hnsw:\n",
"    # Thin sklearn-style wrapper around an nmslib HNSW approximate-NN index:\n",
"    # fit(X) builds the index over the rows of X, query(v, topn) returns the\n",
"    # indices of the topn nearest rows.\n",
"\n",
"    def __init__(self, space='cosinesimil', index_params=None,\n",
"                 query_params=None, print_progress=True):\n",
"        self.space = space\n",
"        self.index_params = index_params\n",
"        self.query_params = query_params\n",
"        self.print_progress = print_progress\n",
"\n",
"    def fit(self, X):\n",
"        # Build the index; returns self (sklearn convention).\n",
"        index_params = self.index_params\n",
"        if index_params is None:\n",
"            index_params = {'M': 16, 'post': 0, 'efConstruction': 400}\n",
"\n",
"        query_params = self.query_params\n",
"        if query_params is None:\n",
"            query_params = {'ef': 90}\n",
"\n",
"        # this is the actual nmslib part, hopefully the syntax should\n",
"        # be pretty readable, the documentation also has a more verbiage\n",
"        # introduction: https://nmslib.github.io/nmslib/quickstart.html\n",
"        index = nmslib.init(space=self.space, method='hnsw')\n",
"        index.addDataPointBatch(X)\n",
"        index.createIndex(index_params, print_progress=self.print_progress)\n",
"        index.setQueryTimeParams(query_params)\n",
"\n",
"        self.index_ = index\n",
"        self.index_params_ = index_params\n",
"        self.query_params_ = query_params\n",
"        return self\n",
"\n",
"    def query(self, vector, topn):\n",
"        # the knnQuery returns indices and corresponding distance\n",
"        # we will throw the distance away for now\n",
"        indices, _ = self.index_.knnQuery(vector, k=topn)\n",
"        return indices"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x"
]
},
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([85, 87, 88, 73, 75, 76, 63, 29], dtype=int32)"
]
},
"execution_count": 54,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model = Hnsw(space='l2')\n",
"model.fit(coords)\n",
"model.query(coords, topn=8)"
]
},
{
"cell_type": "code",
"execution_count": 59,
"metadata": {},
"outputs": [],
"source": [
"import networkx as nx\n",
"G = nx.Graph()\n"
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([43121, 29428])"
]
},
"execution_count": 56,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"for"
]
},
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"130"
]
},
"execution_count": 52,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"temp[3]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"model"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 7440, 13280])"
]
},
"execution_count": 29,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"coords[100]"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [],
"source": [
"indices = model.query(coords[100], topn =10)"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 7440, 13280],\n",
" [ 7440, 13792],\n",
" [ 7952, 13280],\n",
" [ 6928, 13792],\n",
" [ 7952, 12768],\n",
" [ 7952, 13792],\n",
" [ 7440, 14304],\n",
" [ 8464, 13280],\n",
" [ 6928, 14304],\n",
" [ 8464, 13792]])"
]
},
"execution_count": 34,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"coords[indices]"
]
},
{
"cell_type": "code",
"execution_count": 84,
"metadata": {},
"outputs": [],
"source": [
"def do_KmeansPCA(X=None, y=None, scaler=None, n_clusters=4, n_components=5):\n",
"    \"\"\"Cluster X with K-Means, project with PCA, and plot variance explained\n",
"    plus PC1/PC2 scatter plots colored by true and predicted labels.\n",
"\n",
"    If X is None, a random blob dataset is generated for demonstration.\n",
"    \"\"\"\n",
"    import pandas as pd\n",
"    import seaborn as sns\n",
"    from sklearn.datasets import make_blobs\n",
"    from sklearn.decomposition import PCA\n",
"    from sklearn.preprocessing import StandardScaler\n",
"    from sklearn.cluster import KMeans\n",
"    ### Initialize Scaler\n",
"    if scaler is None: \n",
"        scaler = StandardScaler()\n",
"    ### FIX: only fall back to random data when no X was passed — the\n",
"    ### original unconditionally overwrote the caller's X and y.\n",
"    if X is None:\n",
"        X, y = make_blobs(n_features=10, n_samples=100, centers=4, random_state=4, cluster_std=7)\n",
"    ### Scale Data\n",
"    X = scaler.fit_transform(X)\n",
"    ### Perform K-Means Clustering\n",
"    ### FIX: dropped n_jobs=-1 — the parameter was removed from KMeans in\n",
"    ### scikit-learn 1.0 and raises TypeError there.\n",
"    cls = KMeans(n_clusters=n_clusters, init='k-means++', n_init=1)\n",
"    y_pred = cls.fit_predict(X)\n",
"    ### Perform PCA\n",
"    pca = PCA(n_components=n_components)\n",
"    pc = pca.fit_transform(X)\n",
"    ### Plot Results\n",
"    columns = ['PC%d'%c for c in range(1, n_components+1)]\n",
"    pc_df = pd.DataFrame(data=pc, columns=columns)\n",
"    pc_df['y_pred'] = y_pred\n",
"    pc_df['y'] = y\n",
"    df = pd.DataFrame({'Variance Explained':pca.explained_variance_ratio_, 'Principal Components': columns})\n",
"    sns.barplot(x='Principal Components',y=\"Variance Explained\", data=df, color=\"c\")\n",
"    ### Ground-truth labels vs. K-Means assignments.\n",
"    sns.lmplot( x=\"PC1\", y=\"PC2\", data=pc_df, fit_reg=False, \n",
"               hue='y', legend=True, scatter_kws={\"s\": 80})\n",
"    ### FIX: the second plot duplicated the first (hue='y' twice); it now\n",
"    ### shows the predicted clusters as clearly intended.\n",
"    sns.lmplot( x=\"PC1\", y=\"PC2\", data=pc_df, fit_reg=False, \n",
"               hue='y_pred', legend=True, scatter_kws={\"s\": 80})"
]
},
{
"cell_type": "code",
"execution_count": 85,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEGCAYAAAB/+QKOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAX2UlEQVR4nO3de7RedX3n8feHoCDiBSFtEYhBjW2htaABRMVLFUVbREZnCI4dsFbGcVi2dZxWZ2YBYtslVp06Faeg0loGjVcwVZSiXMQLl3ARCIoE5BLBioKCiGDgO3/sfczDyT4n+4Ts85ycvF9rPevs+/N9dk7O59mX32+nqpAkabKtxl2AJGluMiAkSZ0MCElSJwNCktTJgJAkddp63AVsKjvttFMtXrx43GVI0mbl0ksv/VFVLeyaN28CYvHixaxcuXLcZUjSZiXJTVPN8xSTJKmTASFJ6mRASJI6GRCSpE4GhCSpkwEhSepkQEiSOhkQkqROBoQkqdO8aUk9nZ2XLx93CYO4bdmycZcgaR7zCEKS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdRo0IJIclOTaJKuTvK1j/luSXJPkyiRfSfKkkXlHJLmufR0xZJ2SpPUNFhBJFgAnAi8D9gAOT7LHpMUuB5ZW1dOBTwPvbtd9AnAssB+wL3Bskh2GqlWStL6tB9z2vsDqqroBIMly4BDgmokFqurckeUvBF7bDr8UOLuq7mjXPRs4CPj4gPVuEXZevnzcJQzitmXLxl2CNO8MeYppF+CWkfE17bSpvB744kzWTXJUkpVJVt5+++0Ps1xJ0qghAyId06pzweS1wFLgb2eyblWdXFVLq2rpwoULN7pQSdL6hgyINcBuI+O7ArdOXijJi4H/Cbyiqu6bybqSpOEMGRCXAEuS7J7kkcAyYMXoAkn2Bk6iCYcfjsw6C3hJkh3ai9MvaadJkmbJYBepq2ptkqNp/rAvAE6pqlVJjgdWVtUKmlNK2wOfSgJwc1W9oqruSPJOmpABOH7igrUkaXYMeRcTVXUmcOakaceMDL94mnVPAU4ZrjpJ0nRsSS1J6mRASJI6GRCSpE4GhCSpkwEhSepkQEiSOhkQkqROBoQkqZMBIUnqZEBIkjoZEJKkTgaEJKmTASFJ6mRASJI6GRCSpE4GhCSpkwEhSepkQEiSOhkQkqROBoQkqZMBIUnqZEBIkjoZEJKkTgaEJKnT1lPNSPKW6Vasqvdt+nIkSXPFlAEBPKb9+ZvAPsCKdvxg4KtDFiVJGr8pA6Kq3gGQ5F+BZ1TV3e34ccCnZqU6SdLY9LkGsQi4f2T8fmDxINVIkuaM6U4xTTgVuDjJ6UABhwL/PGhVkqSx22BAVNVfJ/kicEA76XVVdfmwZUmSxq3vba7bAXdV1fuBNUl2H7AmSdIcsMGASHIs8JfA29tJjwD+35BFSZLGr88RxKHAK4B7AKrqVtbdAitJmqf6BMT9VVU0F6hJ8uhhS5IkzQV9AuKTSU4CHp/kDcCXgQ8NW5Ykadz63MX0niQHAnfRtKo+pqrOHrwySdJY9WkHQRsIhoIkbUH63MX075Jcl+SnSe5KcneSu/psPMlBSa5NsjrJ2zrmPy/JZUnWJnn1pHkPJLmifa2YvK4kaVh9jiDeDRxcVd+eyYaTLABOBA4E1gCXJFlRVdeMLHYzcCTw1o5N3FtVe83kPSVJm06fgPi3mYZDa19gdVXdAJBkOXAI8KuAqKob23kPbsT2JUkD6hMQK5N8AjgDuG9iYlV9dgPr7QLcMjK+BthvBrVtm2QlsBZ4V1WdMXmBJEcBRwEsWrRoBpuWJG1In4B4LPBz4CUj0wrYUECkY1r1rAtgUVXdmuTJwDlJrqqq6x+ysaqTgZMBli5dOpNtS5
I2oM9trq/byG2vAXYbGd8VuLXvym2LbarqhiTnAXsD10+7kiRpk5nukaN/UVXvTvL3dHzzr6o3b2DblwBL2o79vg8sA17Tp6gkOwA/r6r7kuwEPIfmYrkkaZZMdwQxcWF65cZsuKrWJjkaOAtYAJxSVauSHA+srKoVSfYBTgd2AA5O8o6q2hP4beCk9uL1VjTXIK6Z4q0kSQOY7pGj/9L+/OjGbryqzgTOnDTtmJHhS2hOPU1e7xvA727s+0qSHr4NXoNIspCmu+89gG0nplfV7w9YlyRpzPp01ncazemm3YF3ADfSXF+QJM1jfQJix6r6CPDLqjq/qv4YeNbAdUmSxqxPO4hftj9vS/IHNLeqrnfdQJI0v/QJiL9K8jjgvwF/T9Nw7s8HrUqSNHZ9Gsp9vh38KfDCYcuRJM0V0zWU62wgN6FHQzlJ0mZsuiOIjWogJ0maH6ZrKPeQBnJJHttMrrsHr0qSNHZ9nii3NMlVwJXA1Um+leSZw5cmSRqnPncxnQK8qaouAEjyXOAfgacPWZgkabz6NJS7eyIcAKrqa4CnmSRpnutzBHFxkpOAj9Pc1XQYcF6SZwBU1WUD1idJGpM+AbFX+/PYSdOfTRMYdtonSfNQn4ZyNo6TpC1Qn7uYTm272pgYf1KSrwxbliRp3PpcpP4acFGSlyd5A3A28HfDliVJGrc+p5hOSrIKOBf4EbB3Vf1g8MokSWPV5xTTH9G0hfhPwD8BZyb5vYHrkiSNWZ+7mF4FPLeqfgh8PMnpwEdZd3eTJGke6nOK6ZWTxi9Osu9wJUmS5oIpTzEl+eTI8AmTZn8eSdK8Nt01iCUjwwdOmrdwgFokSXPIdKeYpnxY0AbmSZuFnZcvH3cJg7ht2bJxl6B5YrqA2C7J3jRHGY9qh9O+HjUbxUmSxme6gLgNeF87/IOR4YlxSdI8Nt0T5eyDSZK2YH262pAkbYEMCElSJwNCktSpT19MSfLaJMe044tsSS1J81+fI4gPAvsDh7fjdwMnDlaRJGlO6NNZ335V9YwklwNU1Z1JHjlwXZKkMetzBPHLJAtoW08nWQg8OGhVkqSx6xMQ/wc4Hfi1JH9N84S5vxm0KknS2PXp7vu0JJcCL6LpZuOVVfXtwSuTNGvsl0pdNhgQSZ4FrKqqE9vxxyTZr6ouGrw6SdLY9DnF9H+Bn42M39NO26AkByW5NsnqJG/rmP+8JJclWZvk1ZPmHZHkuvZ1RJ/3kyRtOn0CIlX1q+69q+pB+h15LKC5HfZlwB7A4Un2mLTYzcCRwMcmrfsE4FhgP2Bf4NgkO/SoVZK0ifQJiBuSvDnJI9rXnwI39FhvX2B1Vd1QVfcDy4FDRheoqhur6krWvyvqpcDZVXVHVd0JnA0c1OM9JUmbSJ+AeCPwbOD7wBqab/VH9VhvF+CWkfE17bQ+eq2b5KgkK5OsvP3223tuWpLUR5+7mH4IbMytAOna3KZct6pOBk4GWLp0qU+5k6RNqM+1hIXAG4DFo8tX1R9vYNU1wG4j47sCt/asaw3wgknrntdzXUnSJtCnq43PARcAXwYemMG2LwGWJNmd5vTUMuA1Pdc9C/ibkQvTLwHePoP3liQ9TH0CYruq+suZbriq1iY5muaP/QLglKpaleR4YGVVrUiyD00r7R2Ag5O8o6r2rKo7kryTJmQAjq+qO2ZagyRp4/UJiM8neXlVnTnTjbfrnDlp2jEjw5fQnD7qWvcU4JSZvqckadPocxfTn9KExL1J7kpyd5K7hi5MkjRefe5iesxsFCJJmlv6nGKivVi8BNh2YlpVfXWooiRJ49fnNtc/oTnNtCtwBfAs4JvA7w9bmiRpnPpeg9gHuKmqXgjsDdhsWZLmuT4B8Yuq+gVAkm2q6jvAbw5bliRp3Ppcg1iT5PHAGcDZSe6kf4toSdJmqs9dTIe2g8clORd4HPClQauSJI3dlAGR5LFVdVf7bIYJV7U/twds2SxJ89h0RxAfA/4QuJSmJ9VM+vnkwa
uTJI3NlAFRVX+YJMDzq+rmWaxJkjQHTHsXU/uo0dNnqRZJ0hzS5y6mC5Ps03asJ0nz2s7Ll4+7hEHctmzmz33rExAvBP5zkpuAe2ivQVTV02f8bpKkzUafgHjZ4FVIkuacPu0gbgJI8muMdNYnSZrfNtjVRpJXJLkO+B5wPnAj8MWB65IkjVmfvpjeSdOD63eranfgRcDXB61KkjR2fQLil1X1Y2CrJFtV1bnAXgPXJUkasz4XqX+SZHvgq8BpSX4IrB22LEnSuPU5gjgEuBf4c5pO+q4HDh6yKEnS+E3XWd8HgI9V1TdGJn90+JIkSXPBdEcQ1wHvTXJjkhOSeN1BkrYgUwZEVb2/qvYHnk/Ttfc/Jvl2kmOSPG3WKpQkjcUGr0FU1U1VdUJV7Q28BjgU+PbglUmSxqpPQ7lHJDk4yWk0DeS+C7xq8MokSWM13UXqA4HDgT8ALgaWA0dV1T2zVJskaYymawfxP2ieKvfWqvLxopK0hZnuiXIvnM1CJElzS5+GcpKkLZABIUnqZEBIkjoZEJKkTgaEJKmTASFJ6mRASJI6GRCSpE6DBkSSg5Jcm2R1krd1zN8mySfa+RclWdxOX5zk3iRXtK9/GLJOSdL6+jxydKMkWQCcCBwIrAEuSbKiqq4ZWez1wJ1V9dQky4ATgMPaeddXlc+gkKQxGfIIYl9gdVXdUFX303T2d8ikZQ5h3VPqPg28KEkGrEmS1NOQAbELcMvI+Jp2WucyVbUW+CmwYztv9ySXJzk/yQED1ilJ6jDYKSag60igei5zG7Coqn6c5JnAGUn2rKq7HrJychRwFMCiRYs2QcmSpAlDHkGsAXYbGd8VuHWqZZJsDTwOuKOq7quqHwNU1aXA9cB6jzmtqpOramlVLV24cOEAH0GStlxDBsQlwJIkuyd5JLAMWDFpmRXAEe3wq4FzqqqSLGwvcpPkycAS4IYBa5UkTTLYKaaqWpvkaOAsYAFwSlWtSnI8sLKqVgAfAU5Nshq4gyZEAJ4HHJ9kLfAA8EYfWiRJs2vIaxBU1ZnAmZOmHTMy/Avg33es9xngM0PWJkmani2pJUmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUqdBAyLJQUmuTbI6yds65m+T5BPt/IuSLB6Z9/Z2+rVJXjpknZKk9Q0WEEkWACcCLwP2AA5PssekxV4P3FlVTwX+N3BCu+4ewDJgT+Ag4IPt9iRJs2TII4h9gdVVdUNV3Q8sBw6ZtMwhwEfb4U8DL0qSdvryqrqvqr4HrG63J0maJVsPuO1dgFtGxtcA+021TFWtTfJTYMd2+oWT1t1l8hskOQo4qh39WZJrN03pD8tOwI9m441y+OGz8TYPh/tiHffFOu6LdebCvnjSVDOGDIh0TKuey/RZl6o6GTh55qUNJ8nKqlo67jrmAvfFOu6LddwX68z1fTHkKaY1wG4j47sCt061TJKtgccBd/RcV5I0oCED4hJgSZLdkzyS5qLziknLrACOaIdfDZxTVdVOX9be5bQ7sAS4eMBaJUmTDHaKqb2mcDRwFrAAOKWqViU5HlhZVSuAjwCnJllNc+SwrF13VZJPAtcAa4H/WlUPDFXrJjanTnmNmftiHffFOu6Ldeb0vkjzhV2SpIeyJbUkqZMBIUnqZEDMQJIHklyR5Ookn0qyXTv9N5IsT3J9kmuSnJnkae28LyX5SZLPj7f6TWum+yLJXkm+mWRVkiuTHDbuz7CpbMS+eFKSS9t1ViV547g/w6ayMf9H2vmPTfL9JB8YX/Wb1kb+vZhY54okk2/qmX1V5a
vnC/jZyPBpwFto2mx8E3jjyLy9gAPa4RcBBwOfH3f949wXwNOAJe20JwK3AY8f9+cY0754JLBNO2174EbgieP+HOPYFyPj7wc+Bnxg3J9hnPtidJ258Bqyodx8dwHwdOCFwC+r6h8mZlTVFSPDX0nygtkvb1b12hcj025N8kNgIfCTWatydsxoXwDbMH+P5HvtiyTPBH4d+BIwZxuNPUwz/b2YE+brL+ag2kZ9LwOuAn4HuHS8FY3PxuyLJPvSfIu+ftjqZtdM9kWS3ZJcSdPVzAlVNa8agvbdF0m2At4L/PfZq252zfD/yLZJVia5MMkrZ6XAaRgQM/OoJFcAK4GbadpxbKk2al8k2Rk4FXhdVT04YH2zacb7oqpuqaqnA08Fjkjy6wPXOFtmui/eBJxZVbdsYLnN0cb8H1lUTdcbrwH+LslThixwQzzFNDP3VtVeoxOSrKJpBb6lmfG+SPJY4AvA/6qqC6dabjO00b8X7em2VTTXJj49UH2zaab7Yn/ggCRvorke88gkP6uq9Z4fsxma8e/FxJFkVd2Q5Dxgb8Z4pO0RxMN3DrBNkjdMTEiyT5Lnj7GmcZlyX7TdrZwO/HNVfWpsFc6e6fbFrkke1U7bAXgOMBd6Ih7KlPuiqv5jVS2qqsXAW2l+P+ZDOExlut+LHZJs007bieb34pox1QkYEA9bNbceHAoc2N62tgo4jrZzwSQXAJ+iedbFmszjp+NtYF/8B+B5wJEjt/HtNfXWNm8b2Be/DVyU5FvA+cB7quqqsRU7sA39H9mS9Pi9WNn+XpwLvKuqxhoQdrUhSerkEYQkqZMBIUnqZEBIkjoZEJKkTgaEJKmTAaE5ZaoeMDuWOzPJ4zdi+09MstEN0pLc2N6jPnn69klOmrh1MclXk+y3se8zF6Tpgffl465D42NAaK65t6r2qqrfAe4HHtIVdhpbVdXLq2rGHf1V1a1VNUTL9w/TPDZ3SVXtCRwJrBckm5m9AANiC2ZAaC67AHhqksVJvp3kg8BlwG4T3+RH5n2o/eb+ryOtlJ+a5MtJvpXksiRPaZe/up1/ZJLPpXlmx7VJjp144yRnpHlmw6okR01XZNtfzn40XYg8CE1XCVX1hXb+W9ojoquT/Fk7bXGS7yT5cDv9tCQvTvL1JNel6dCQJMclOTXJOe30N7TTk+Rv23WvSvt8jSQvSHJekk+32z8tSdp5z0xyfvu5zkrTLxbt8ickuTjJd5MckKbl+/HAYe0R3WFta9+JRo6XJ3nMJvp31lw17v7GffkafdH2h0/TT9jngP8CLAYeBJ41styNNN/QFwNrgb3a6Z8EXtsOXwQc2g5vC2zXLn91O+1ImudS7Ag8CrgaWNrOe0L7c2L6jqPvO6nmVwCnT/F5nknTi+ejafoaWkXTv85E3b9L80XtUuAUmucFHAKc0a5/HPCtto6daHp/fSLwKuBsYAFNV9k3AzsDLwB+CuzabvebwHOBRwDfABa22z0MOKUdPg94bzv8cuDLI/vnAyOf5V+A57TD2wNbj/v3xdewLzvr01wz0QMmNEcQH6H5g3hTTd3B3/dqXZ/6lwKL22+3u1TV6QBV9QuA9sv0qLOr6sftvM/S/DFdCbw5yaHtMrsBS4Afb8TneS5NeNwz8h4HACvauq9qp68CvlJVleQqmgCZ8Lmquhe4N8m5wL7tdj9eVQ8A/5bkfGAf4C7g4qpa0273inZbP6Hpavrsdh8soAnHCZ9tf1466b1HfR14X5LTgM9OvIfmLwNCc01XD5gA90yzzn0jww/QfNteLwmmMLmvmUrzgKcXA/tX1c/T9Kq57TTbWAX8XnttZHIX5tPVMVr3gyPjD/LQ/5vr1TiD7T7QbivAqqrafwPrTCy/nqp6V5Iv0BxlXJjkxVX1nWnq0GbOaxCal6rqLmBN2oeuJNlmijuiDkzyhPa6xStpviU/DrizDYffAp61gfe6nuao4x0j5/uXJDkE+CrwyiTbJXk0TUdtF8zw4xySZN
skO9KcQrqk3e5hSRYkWUjTEeLF02zjWmBhkv3b+h6RZM8NvO/dwK+uMyR5SlVdVVUntJ/3t2b4ObSZMSA0n/0RzamiK2nOv/9GxzJfo3mA0RXAZ6pqJc2jL7du13sn0OfZFX/Sbn91e4roQ8CtVXUZ8E80f7wvAj5cVZfP8HNcTPMcjQuBd1bzzIDTgStprk+cA/xFVf1gqg1U1f00zyE4IU1voVcAz97A+54L7DFxkRr4s/ai+LeAe4EvzvBzaDNjb67aYiU5kuai9NHjrmUqSY6juXD/nnHXoi2PRxCSpE4eQUiSOnkEIUnqZEBIkjoZEJKkTgaEJKmTASFJ6vT/AQBQk+iug2T9AAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAFgCAYAAABKY1XKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5Rc9XXo+e8+p6rf6ocksIwFFgQcMA+D0xL4ARjHsWXFOJIIHidOrgVJOrAuTrxWxgmJZybc65V1k/FMEhtnxbcTg0iMzTUxKA6QYCcyA36AJB4BFGGDFZIoRkgtqaXurlI9ztnzx6lqVbequqrrdc6p2h8WS+qqrq5ft6Szz2/v32//RFUxxhhjluKEPQBjjDHRZ8HCGGNMVRYsjDHGVGXBwhhjTFUWLIwxxlSVCHsAy7Fx40b9h3/4h7CHYYwx9ZCwB9CIWM0spqamwh6CMcZ0pVgFC2OMMeGwYGGMMaYqCxbGGGOqsmBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qK1aa8TpHOenx//xTH5nKMDSZ5x3mr6e9xwx7WvKiPzxjTfhYs2khVuX/PASaf2E/e8/F8xXWEhOswcfV53Di+FpHwNnlGfXzGmPBYsGij+/cc4M6dLzPYm6A/mZx/POf53LnzZQA+sv7ssIYX+fEZY8ITWs1CRPpEZJeI/LOI7BWR/xbWWNohnfWYfGI/g70Jku7CH3vSdRjsTTD5xH7SWc/GZ4yJnDAL3Bngvar6NuByYKOIXBXieFrq+/unyHv+aRfioqTrkPd8ntx/pM0jC0R9fMaYcIWWhtLg8O/ZwofJwv8deyD4sbkcnr/0t+f5ytG5bJtGtFDUx2eMCVeoS2dFxBWR54BDwLdU9akwx9NKY4NJXGfp4rDrCCsHe9o0ooWiPj5jTLhCDRaq6qnq5cBaYIOIXLL4c0RkQkT2iMiew4cPt3+QTfKO81aTcB1ynl/2+Zznk3AdrjpvVZtHFoj6+Iwx4YrEpjxVnQYeAzaWeW5SVcdVdfyMM85o+9iapb/HZeLq85jL5E+7IOc8n1Q2z8TV54W2nyHq4zPGhCu0moWInAHkVHVaRPqB9wF/FNZ42uHG8bUATD6xn5O53IJ9DLddd8H88zY+Y0zUSFBnDuGNRS4D7gFcghnO11T1vy/1mvHxcd2zZ087htdS6azHk/uPcHQuy8rBHq46b1Wk7tijPj5jYirWO1pDCxb16JRgYYzpSrEOFpGoWRhjjIk2CxbGGGOqsmBhjDGmKgsWxhhjqrKus8a0gZ0RYuLOgoUxLWRnhJhOYcHCRFKn3Ik364yQTvl5mPiyYGEipZPuxGs9I+T6t51V8cLfST8PE29W4DaRUrwTTzjCir4kowM9rOhLknCEO3e+zP17DoQ9xJo144yQTvp5mHizYGEio9NO62v0jJBO+3mYeLNgYSKj007ra/SMkE77eZh4s5qFWSDMQmqnndZXekZIuQt+tTNCOu3nYeLNgoUBolFI7bTT+opnhBRXQ5UGjOIZIbddd0HFYNxpPw8TbxYsDNC8JZ6NaPROPIoaOSOkE38eJr4sWJimLPFshkbvxKNIRPjI+rO5/m1nLfuMkE78eZj4smBh5guppTOKUknX4WQux5P7j3DdhWe2dCydelpff49b18+uU38eJn4sWJhIFVIbuRPvRPbzMFFhwcJEspBa7514p7KfhwmbBYuYacXSViukGmOqsWARE61c2mqFVGNMNRYsYqLVS1utkGqMWYqoLl3YjJLx8XHds2dP2MNou3TW4/ovfIeEIxXTRHlf+bvb3t3w3X8661kh1ZjWiHV7YJtZxEA7l7ZaIdUYU441EoyBKC1tNcZ0J5tZtEgzVy1FcWmrMaa7WLBoslasWrKlrcaYsFkaqslacbJZcWnrXCZPzvMXPFdc2jpx9XmRLESnsx47X3qdrz99gJ0vvW4H9RgTUzazaKJWNu
SL29LWpWZY2965jjeO9DKdyrf9zAxjTH0sWDRRK1ctxa1HUPl9IcrhmSx3fGMvI/0JBnoSbT8zwxhTH0tDNVE7Vi3197hcdd4qxgaTHJ3L8v39U5FL7VSaYR2dy3FkLoMrMJvxGO5LNpyiM8a0h80smqjVq5aicJpdLcrNsHxVpmYzuCKICJ7vM5vNM9yXbOuZGcaY+tjMoolKVy2V0+iqpVYUz1uh3AxrNpNHlflgprDgc5KuQ97zeXL/kXYO1RhTIwsWTdTKVUu1Fs+jkJIqN8PyfEU5FRwEyn6ObSw0JposWDTZjeNr+cR7LyDvKzMnc0ynssyczJH3taFVS8XUTrl9FhCtO/NyMyzXEaTQGkdVERGGehZmQW1joTHRZTWLJmvVqqU4tfxY3PLcFUFV8VXxCwHkDcN9OCUzC9tYaEy0WbBokWY35Itby48bx9eiqvy/3/oBx1I5fFVUwddgnKqnZhh2ZoYx0WfBIibi1vJDCqueehIubxh2AMF1hFzeZ2o2w6GZDOmcx0h/surGwlacDmiMWR4LFjERt9PsigX5oTIF+bHBHo6ns+Tyyu0fvJBr33Jm2XHHZamwMd3AgkWMxKnlx1K72R0RxgZ6mTmZozfhVgxwrT4d0BhTu9CChYicDfwVsAbwgUlV/VxY44mDOLX8aLQg38o+W8aY5QtzZpEHfktVnxGRFcDTIvItVf2XEMcUC3E4za7Rgnw7Twc0xlQX2j4LVX1NVZ8p/H4G2Ae8KazxmOZqdDd7nJYKG9MNIrEpT0TWAVcAT5V5bkJE9ojInsOHD7d7aKZOje5mj9tSYWM6XejBQkSGgK8Dn1TVE4ufV9VJVR1X1fEzzjij/QM0dWtkN3ur+2wZY5Yn1NVQIpIkCBT3quoDYY7FNF8jBfm4LRU2ptOFuRpKgC8B+1T1j8Mah2m9egvycVoqbEynE9Wli4gte2ORdwNPAC8QLJ0F+D1VfaTSa8bHx3XPnj3tGJ6JkHTWi/xSYWNqEOsdpKHNLFT1O8T8h2faIw5LhY3pdKEXuI0xxkSfBQtjjDFVWbAwxhhTlTUSNMaYcrIpePUJSB2FgZWw7mroGQh7VKGxYGGMMaVU4dl74XufBz8HvgeOC04S3vkbcMXHoAtb41uwMMaYUs/eC4//39AzBMn+U497ueBxgLf/UjhjC5EFi5iI6mlxUR2X6RDtTgVlU8GMomcI3EUdj91k8Pj3Pg+XbO26lJQFi4iL6mlxUR2X6RBhpYJefSJ4v9IZRSk3CfmT8Op34C3vb/77R5gFi4iL6mlxUR2X6RBhpYJSR4PAtBTfg9SR5r93xNnS2Qir9bS4dLbKX+4uGZfpELWmgrKp5r/3wMpgBrMUx4WB7ut2bMEiwoqnxS2+IBclXYe85/Pk/vbe5bR7XOmsx86XXufrTx9g50uvWxDqdMVU0OJAUeQmwc8HqaBmW3d1kOrycuWf93LgJGDdu5v/3hFnaagIi+ppce0aVyvrIlaYj7AwU0E9A0FNpJgCKw1YXg6yc3DNp7quuA0WLCItqqfFtWtcraiLWGE+BsJOBV3xseDX730+KGbPF9cTQaAoPt9lLFhEWOlpceVSPmGdFteOcdVaF7n+bWcta0ZghfkYKE0FlUtFFVNBb3o7/PDR5i+rFQmK55dsDVJdqSNBYFr37q6cURRZzSLCGj3HOs7jakVdxArzMVFMBWVnT68deDnIzMLa9XD3JviH2+Gx/xH8OvkeeObLwbLbZo3jLe+Hy38h+LWLAwXYzCLyonpaXKvH1Yq6SDEAlc4oSiVdh5O5HE/uP2LnZ4RtqVTQuVcHRXDbYd1WFiwirpFzrOM8rlbURaK6YMCUUSkV9Ka3BzMK22HddhYsYiKqp8W1alytqItEdcGAWUIxFVT0w0dth3VIrGZhIqkVdZHSAFROWAsGzDLYDuvQWLAwkXXj+Fo+8d4LyPvKzMkc06ksMydz5H2tqy4S1QUDZhnCXlbbxSwNZSKrFXWRqC
4YMDWqdVltF+6wbjXRZi0za4Px8XHds2dP2MMwHSCd9SK1YMAswzNfrr7DOpqroWK929NmFqYrRXXBgKmB7bAOhQUL03LWh8k0le2wDoUFC9My1ofJtNTiZbWmpSxYREwn3YVbHyZjOocFi4jotLvwVjUCNMaEw/ZZRETxLjzhCCv6kowO9LCiL0nCEe7c+TL37zkQ9hCXJaoHNxlj6mPBIgI6sRtqM/sw2Ul5xoTP0lAR0IndUJvRh6nTUnPGxJkFiwjoxG6ozWgEaAVyY6LD0lAR0IndUBvtw9SJqbm2yqaCDq3PfTX4NZsKe0Qm5mxmEQHl7sJ9VWYzeTxfUZS+hBu7bqiN9GHqxNRcW6jCs/cGu5v9XMnu5mRw+twVHws2tRmzTBYsIqB4Fx6kXFxmTnpMzWZQBV99QFg5mOTv/vnHscrTN9IIsBNTc23x7L2n+ibZKXKmiSxYRETxLvv/+eYPODKXwUEQAddxWD3Uw1BvIrZ5+nr6MHViaq7lsqlgRmGnyJkWsJpFRIgI17/tLIb6ErxxpJ83jPTxxtF+zj9jiJWDvfQk3K7K09tBRXV49Ykg9VSudTcEj/v5oJ+SMctkwSJCvr9/Ct9XxgZ6GBvoYbgviVNyd91NG9nsoKI62ClypoUsDRUhlqdfyA4qWiY7Rc60kAWLCLE8/UKtOCmvo9kpcqaFQg0WInIX8CHgkKpeEuZYoqAZG9k6kR1UVKOegWB5bLVT5KJc3M6mgtpL6mgwU1p3dbTH20XCnllsB74A/FXI44iEhUtoF25GK+bpb7vuArurNpXF9RQ52x8SeaEGC1V9XETWhTmGqIlbnr6Tzt/oCHE9Rc72h0SeqC5dUG35AIJg8VClNJSITAATAOecc85P/du//Vv7BheidNaLdJ7emvyZpsmmYPI9weynUq3Fz8PEY9EOeNXF+h9E2GmoqlR1EpgEGB8fDzeytVHU8/TW5M80TXF/SOmMopSbDFJqr37HjlENke2zMMtmTf5MU9n+kFiwYGGWrfQUPJ8sqcQLzCafJJV4AZ9sV20eNE1g+0NiIeyls18F3gOsFpEDwO+r6pfCHJOp7thcjrzvM5P8Hid6voXiofgIDtLrMpz9GfKpn+qazYOmQbY/JBbCXg31C2G+v6nP2GCS/MAu5noeRejF4dQmQSXP8Z5H6B3wWDl4aYijNLHRCftDuoClocyyXXHOICcHdoL2IovuN4QEaC8nB3Zy+Tn2j9vU6IqPwTW/Hax6ysxAejr41c9He39IF4n8aigTPS8cfYbhfocTKRdl4V4pVVB1GRkIPu+agWtCG6eJkbjuD+kiFizMsk1nphnoEfrcXg7PZvBLmsKKwJkrenESaaYz0+EN0sRTz4Atj40oCxZm2UZ7R3Edl5G+HkYGepjL5Ml7SsIVBnsTOAKz2SyjvaNhD9UY0yQWLMyyrV+znoSTIOfnSDpJVvQt/GuU83MknATr16wPaYTGmGazArc5TTqf5vEDj/ONH32Dxw88TjqfXvB8f6KfbRdvI51Lk/NzC57L+TnSuTTbLt5Gf6LCjlxjTOzYzMLMU1V2vLKD7Xu3k/fzeOrhikvCSbDt4m1sPn/zfL+nzedvBmD73u1k8hl89XHEIeEkmLhsYv55Y0xnCL2R4HKMj4/rnj17wh5Gx3rw5QeZfH6S/mQ/Saek31NhtjBx2QRbLtiy4DXpfJrdB3cznZlmtHeU9WvW24yiHnaOQzeIdSNBCxYGCC76H33oo7iOuyBQFOX8HJ7vcd+H7rNg0Ex2jkM3ifUfpNUsDAC7D+4m7+fLBgqApJMk7+fZfXB3m0fW4YrnODgJ6B2G/rHgVycRPP7svWGP0BjAahYtEcfUzHRmGk+X7vzpq297J5opmwpmFItbXEDwcc9Q8PwlWy0lZUJnwaKJllMgjprR3lFcWbrzpyOO7Z1opmrnODhusJN55x/AeddYHcOEyoJFE+14Zcd8gbgv0Tf/eM7PMfn8JMBpBeKoWL
x3YjHbO9EClc5xUIX0UZg9BOrB81+FHz5idQwTKqtZNEk6n2b73u2nrSSCIN/fn+xn+97tp+1ZiArbO1FBNgU/fBSe+2rwazbVvK9d6RyH9FGYfT0ICOJC34jVMUzobGbRJMUCcemMolTSSZLJZ9h9cDfXrI1mcz3bO1GiHauUyp3joH4wo5DCfZwQ1C7A6hgmVBYslqlS8boTCsQiwpYLtrDx3I2xK9A3XXGVUs/QwpqClwseh6BLaiPKneOQmYGgl28QOIbWnAocYOdRm9BYsKhRteL1yr6VHVMg7k/01zT7ieOqr5q0c5VS8ZyG730+CAKZmSBIiASBYmDl6a+x86hbwzZGLsmCRY2qFa9vuvimrikQx3nVV02qrVJq5t394nMc9j8GL34dhs5cOKMoZedRN5dtjKyJFbhrUEvx+t6X7uUXL/zFrigQFwOn67gM9Qwx0jvCUM8QruMy+fwkO17ZEfYQG1NplVKpZt/dF89xeO//ERS0K72/nUfdfLYxsiYWLGpQ6+7mNw6+kYnLJvB8j9nsLCcyJ5jNzuL5XscUiOO+6qsmlVYplWrV3X2xjpGdDQJDqeJ51O/8DUuPNEutKcdmroKLKUtD1aDm4nV2uuMLxJ2w6quqcquUSrX67n5xHWM+LZKw86ibrZ0pxyYTkc8AU6r6ucLHfwC8rqqfb8X7WbCowXJ3N9daII6jTlj1VVW5VUpFxbv7az7Vurt7O4+6fcJIOTbPl4AHgM+JiAN8FNjQqjezYFED2918Ste0BYnC3b2dR916YaYcG6Sqr4rIERG5AngD8KyqtiyqWbCoQXF38+Tzk5Ck4lkPnZJqWkrXBE67u+8OYaccG/eXwDZgDXBXK9/ICtw12nz+5o4vXtei69qCFO/uL/+F4FcLFJ0l/gsKHgQ2AuuBR1v5RjazqJHtbj7F2oKYjhKFlGOdVDUrIt8GplWrFBMbZCflmbo1uoM7nfX4/v4pjs3lGBtM8o7zVtPfUyV/bEyrZFOtTjk2fWdfobD9DHCjqr7c7K9fymYWpm71rvpSVe7fc4DJJ/aT93w8X3EdIeE6TFx9HjeOr433DnATTzFbUCAibwUeAh5sdaAACxYmBPfvOcCdO19msDdBf7JksYDnc+fO4O/8R9afXfPXsxmK6Uaq+i/Aee16PwsWpq3SWY/JJ/Yz2Jsg6S5cX5F0HQZ7E0w+sZ/r33ZW1Qu+zVCMaR9bDWXa6vv7p8h7/mmBoijpOuQ9nyf3V18uXpyhJBxhRV+S0YEeVvQlSTjCnTtf5v49B5o9fGO6VtWZhYgMA2eo6o8WPX6Zqj7fspGZjnRsLofnL72owvOVo3PZJT+n2gxl1PH57r1/y3X/dhZ9q1cyeOWVOP3dtWotMqz1d0dYMliIyEeAPwUOiUgS2KaquwtPbwfe3trhmU4zNpjEdZZODbmOsHKwZ8nPKc5QSmseAKgy/tL3uPbZR8HLc/DZBP19PUgiwaqbb2Jk61ZLTbXLclt/W1CJtGozi98DfkpVXxORDcBfi8jvqeoDtGAZmOl87zhvNQnXIVchFZXzfBKuw1XnLd1eodIMZfyl7/HTex4ik+glm+wnN9DL0EAPmssx9edfBGD0hhua882YpdV62qCdJ1HRutsfHgCuA1YBR4Bvv/qHP9twC1wR2Qh8DnCBv1TVP6z2mmrBwlXV1wBUdZeIXAc8JCJrCc5+NGZZ+ntcJq4+b341VGnAyHk+qWye2667oGpxu9wMJZnLcO2zj5JJ9OK7CUSVhBN8fUkmcQYGOHLX3Qxv2mQpqVZbzmmDLz7Q+iNsY2bd7Q8L8HHgU0CS4FqdB3Lrbn/4s8A9r/7hz9Z1DRYRF/gz4GeAA8BuEflGYXVVRdUK3DMi8hPFDwqB4z3AzwEX1zNQY24cX8sn3nsBeV+ZOZljOpVl5mSOvK/cdt0F3Di+turXKJ2hFP3Ef/4Ax/fw3QSKIsBg76mgI8kkms+T2rWrFd+WKVVs/V2u3xIEj/
t5eOWf7DyJ8j4O/J8EAeIEcLTwa77w+Mcb+NobgFdUdb+qZoH7CK7pS6o2s7iVRekmVZ0pTGE+Uu9ITXcTET6y/myuf9tZPLn/CEfnsqwc7OGq81bVvD+i3Axl4OQsjvooiu/DGcO9OIvTF55H/tixFnxXZoFaW3//+/dje55EqxRST58CZoFFDavIFR7/1LrbH/5anSmpNwH/UfLxAeDKai+qFizmCFrfvrLo8auAJ5czOtMcjbbYiJL+HpfrLjyz7tcXZyCTT+znZC7HYekjj6AaBIqVA2WK5K5LYmys7vc0Naq19TcS5/MkWuU6gtRTpeMmc0A/QZbnkTq+frkCUNWUVrVg8acERe7F0oXnrq8+LtMMqsqOV3awfe928n4eTz1ccUk4CbZdvI3N52/uulU+i2cox46ezcqXHmGgvwe35/RAobkckkgwsKFl58OYolpbf59zJfywyvUuoudJtNAqaqgnA6vr/PoHgNIWCWuBH1d7UbWaxbpyeylUdQ+wbjmjK0dENorID0TkFRG5vdGv18l2vLKDyecncR2XoZ4hRnpHGOoZwnVcJp+fZMcrO8IeYmiKM5St7zyfc279NUin0dzC2bvmcvipFKtuvsmK2+1Qa+vv8993KqiUE/3zJFrhCEFtYikeMFXn198NXCAi54pID8EJe9+o9qJq0av8QcuBhv7F1VuR70bpfJrte7fTn+yfP3DIV5+53Nz8Ead3vXgXG8/dGNuUVLOMbN0KwJG77sbPZMDzwHWRRILVt94y/3xLddJ+gUa+l1paf4uEe4RtNH2bINWU5PSaBSWPP1bPF1fVvIjcRnD+hQvcpap7q71uyRblIvJVYKeq/sWix38FeL+q/m/1DLbwNd4B3KGqHyh8/LuFb+R/VHpNN7UoL61NHJg5wN/96O8Y7h1GVTmeOc7UySlK/+wU5aM/+VF+e/1vd106qhw/nSa1axf5Y8dIjI0xsGFD62cUnbRfoJnfS7XW3wveK78wqMTt57a0mr+Jdbc/vI1g1dPiIncSGAI+8+of/uz2Zg6ummozi08CD4rIx4CnC4+NAz3Algbfu6aKvIhMABMA55xzToNvGX3lahMn8yc5njmOrz6qytTJKZxCBjFYIiqoKg/vf5i3jL2FLRc0+kcTf05/P0PXXtveN611E1ocNPN7qdb6246wLeeewq+fIsjiuASppxzwmZLn26amw48Km/EuKXy4V1V3NvzGIjcCH1DVXy18/MvABlX9RKXXdMPM4sGXH2Ty+ckFKafZ7Cyvzb4WLBxRH0Hm009FijLaO8rKvpXc96H7uj4d1XbZFEy+J7gbrlTQ9fMw8Vj0L4Cl34vjBnUHPx983DMU3PnH5XuJlmVPjwrLaN9DUMyeAh5rxg7uelTrDdUH3AKcD7wAfElVqxVealVXRb6TlatNAAwkBxBH8H1/PkjIor93gjCXm8MVl90Hd9d1KJFpQHETWifsFyh+L7k0zL0epImKRGDwDcH3E4fvJeYKgaGe5bFNVy0NdQ/BtOcJ4IPARQSpqWaYr8gD/0lQkf/FJn3tWNp9cDd5P09fYuG6AkccVvet5vXU6xVfm3ASOOJwPHOcQ6lDdY+hk/ZxtFWtm9DauV+g3uJ06iicPA6ZGRAHnJJFk6owexB6V3Tb3oeuVy1YvFVVLwUQkS8BTeuTUG9FvpNNZ6ZPSy8VjfaNciJ7gtnc7GnPJZwErgQboBStK1jUs4/DAkuJpTahqR9ceHMpOPZqcBFvZfqm0eJ0zyCcPBG8ZvHniQBO8HzPUOu+BxM51YLFfBW+cHFv6pur6iNEZIoVBaO9o/MX/XJW9KxgNjeLg4NbuDCVfr5qUOw+c6D6rujFF/rXZl+bT4GVzmxyfo7J5ycB5gvnYW0QjPTxqeU2oalC+ijMHgI0+Pj5/wV7H2ztKp9OKrSbyKgWLN4mIicKvxegv/CxAKqqwy0dXZdZv2Y9CSdBzs8tqFkUCYIjDsX/Si/IqoqPz3DP8J
LBotyF3sHhYOogY31jrJAVCz4/6SQhCdv3bp/fx1HcIFhLYGmGWo9PDXWmU9yEVrpfIH0UZl9nvq654o3QN9Lai/Zyur1Wmt1k56B3GLIzgLMwoKkGM6Xe4aDwbVrrjpHTWpRzx/GGCtwichfwIeCQql5S7fOLlgwWqhqR27bu0J/oZ9vF24ILbpIFASPn5xARVvauxHEcpjPT+Hqq46qIMNY7xmBikPVr1ld8j3IX+tnsLCgcO3mMhCQY7Rtd8JqkkySTz7D74G7Wr1lftghf/LzFgaUZF/Di8amDvYkFhx3lPJ87d76MqtIz9nT4rVBKN6HlUjBzEJCgT8LgmiBVBbVftOvRjEL7wEroGw7GNXsoCA5KIeYJDL0hmEV1VwuO9rpjpGKLcu4Y+SxwD3ccr/eYiO3AF4C/Ws6Lqh6ratpr8/mbgeCCm8ln8NXHEYeEk+DXL/t1VJW/eOEvOHvF2WS97PyFscftIZPPcNMlN1W8GFdabZXXfOGa5jB1corh3mEcWdgJxlef6cx0xSJ8UTGw7HptF0dPHm34Al7t+NTB3gSf2/UVVqx5jMGegbbMdCoq3S/w3c/Bni8FheCeoaBQXKpVq6OaUWhfdzW4PZAcgP4xyJQsne0tWTrbXS042q3YonyWhQ0Fk4XHIbjoL5uqPi4i65b7uq4KFnEoyIoIWy7YwsZzN5Ydq6oiImzfuz1IS+HMb8qbuGxiPtiUU+lCn5AEgiAi821EVvQsTEc54jDaO7pkEb7IV5+d/76TXQd3NZyqqnh8aoHr5kn1/yMr/MGaZjpt0TMAY+uCi23vEpnaVqyOqrXb61KzgsUptb6S76F7W3C0T5B6qtqinDtGvtZoSmo5uiJYxLFja3+iv+xeiWrBZCmVLvQDyQGQYCUVcNrn5PwcCSfB+jXr2X1w95JF+KLv/fh7rOhd0fAFvNLxqUUnEz9A8aiUMS1NobV170kzLtr1qLXba7VZQS19nUyrtLpFeV26Ili0uyDbDpWCyVIqrbYq7uM4nD4MLFxhlfNzpHNpJi6boD/RX7UIn/Nz5P08SSdZ9nmofgH302nmnnoKb3qaNbPQ5/cTdJg5nS9zgJJwKwf7YgqtrZp10V6ucoX20vesdVZgLV5/1oAAAB7mSURBVDjC1OoW5XXp+GBRKU8PIaYpQrLUhX60bxTP9ziaOYqqciJzYr5WUpreqlaET+fSvPOsd/LUwaeWHEu5C7iqcvyBBzhy191oPg+ex5mOw++eyPLty9/Pc29992lLTTU/gCSD2kUlxRRaWzXrol2PZs4KqvV1Mq3Q6hblden4YFFrQbYbWmRUu9An3SS3r7+dNUNrlkxvLVWEn7hsgpV9K9nz+tI9vMpdwI8/8ABTf/5FnIEBnL5Tf16jmuJn9jwMwHMXX31qzJ6Plz2PsVV9eJrDkfIznWIKre3CSuXYrCDuWtqivNBN/D3AahE5APy+qn6p2us6PljUWpBte5oiJNUu9LXUb6rVTdL5dNVU1eILuJ9Oc+Suu3EGBpBFxeyx4eACd91z3+TJN7+dk05yfp/FJ667mOTIrzH5QuWZTjGF1nZhX7RtVhBPdxxPFZbHLtmivN7itqr+Qj2v6/hgUW1XNISUpmix0ry/OzrK4JVX4vT3N1QgX6xS3aSWVNXiC/jcU0+h+fyCGUWpseEBVojPHWdnOHjRxawc7OGq81bR3+OiuhZk6QAYqk64aHfSgU7xELkW5R0fLGopyIaWpmiBcnn/4klxq26+iZGtWxGRugrkUPvy41pmMKW86elgrEsQ3+PiIZ93/dTahY83MQCaRTrpQKc4CTbcbeeOka+xqEV5O5fLlur4YFHPXW6cVcr7ay7H1J9/EYDRG25Y9tdd7vLj5V7A3dFRcKssNXVdEmNjFZ+uNwCaJVifqXAFgSES/fNqOvwoKuo9/Gjxha70Ljeq+yzq4afT/OvP34i47ml5fwgChnoe5/7N/cs+YrTcoUywMOA2svy4lWM3da
r1QKebHoH/fNpSVNXF+iLT8TML6J40RbW8vyST+JkMqV27lnXkaDuWHzv9/ay6+ab5WVFpwNBcDj+VYvWtt1igaKdqfaacRNA76n9eC4meaKSorLbSMl0RLIo6PU1RS94fzyN/7Niyvm67lh+PbN0KwJG77sbPZBbUW1bfesv881FWaWFBLFXrM5U+CiePwdCZC9uahJGistpKy3VVsOh0zcj7l9Ou5cciwugNNzC8aROpXbvIHztGYmyMgQ0bIn/BrXVhQaxUO9Bp9hDgwOKbiFZ21a2kQ2srl95z6Wktyl/4+AuNtig/m6Dj7BrAByZV9XPVXmfBooMMXnklkkiguVzFvL8kEgxs2LCsr9vu5cdOf/+y0mRR0KqFBaFaqmVJZgZQcGThiXnqB+dc+HnIZ+GVf4K3Xt/acTbjDI+IufSeSyu2KL/0nks/C9zzwsdfqLfgnAd+S1WfEZEVwNMi8i1V/ZelXuQs9aSpLp1P8/iBx/nGj77B4wceJ52v1Pur9Yp5fz+VQnMLN34W8/6rbr5p2XfppcuPy+m05cfLtdSGQkkmcQYGgtRaOry/G3UptizJzgYBo1Q+G6R6Bt9wqv166ihM/QCOHwjO8khNwSP/Ozzz5SBN1CrF2kq5IjwEj/v5YGNkfBRblOeBE8DRwq/5wuMfr/cLq+prqvpM4fczwD7gTdVeZzOLOkW1k20r8v7dtvx4uVq1sCASKrUsEYI0VfFAp9RRmD0YBA6nEDx8gnMxWp0GasYZHhFSSD1VbVF+6T2Xfq0JKal1wBXA0s3csGBRt6h2sm1V3n+5m+y6SasWFkRCpZYlb3o73L2p0D3XhbnXg0BRvEHSwtF6/aPBhbqVaaCw2sG3TltalIvIEPB14JOqeqLa51uwqEMcOtk2O+/fLcuP69GqhQWRUq5lSbGrLhoEh+KMonhO91AhReU6rTkVsCisdvCt0/IW5SKSJAgU96rqA7W8xoJFHbq5k22rlh/H4RTDSlq1sCDyiimqb38mCA4eC8/p7l956nNbmQYKsx18a7S0RbkE+fEvAftU9Y9rfZ0FizpYJ9vmiWrtZzm6dkNhMUXVNwyPfCqoTxTP6V585nir00CddbJfS1uUA+8Cfhl4QUSeKzz2e6q6ZErLgkUdurWTbStEtfazXJHfUNjKnc3nvw/6RpZuC9LqNFDY7eCb6IWPv5AqLI9dskV5vcVtVf0OdbQesWBRh27rZNsqcaj91CqyGwrbsbM5SmmgTmgHH7AW5Z3AlpI2plifePK1J5nJzrCqv3x6Io61n8htKGzXzubOSgOFrrDhbvul91x6WovyRpfL1suCRZ1sKenyLa5PzGRnOJE9wfHscVb3rWa07/S0ndV+GtDOnc0dlAaKkkJgiESLcgsWdYrqUtJ0Ps13//O7PHPoGQTh8jMv591venckZjmL6xOKMpubRRAOpw8DnBYwrPbTgGpdY91k85e0dk4ayCxiwaJBUelkq6o8+PKD3PnsnUxnpymeU/KVfV9hrG+MT1z+CTZfEN7KonL1icHkYDAeDYLC1MkphnuHcQoraaz206AO29lswmXBokPseGUHf/r0nzKTn8HFRZwgKCjKsZPH+JNn/gQkvJVF5famOOKwum81h9OHcTQIEKlciqGeIav9NEPUdjbbWROxZsGiA6Tzae568S5m87NBoCg93hTBdVxmc7Pc/eLdoa0sqrQ3ZaR3BICpk1N4vsdMdgbAaj/NEJWdzXbWRN32XXjRaS3KL3ppX6P9oPqAx4FeghjwN6r6+9VeZ8GiA+w+uJu53BxA2TSTFP6bzc2GtrKo0t4UEWG0b5Th3mGOpI/wwXM/yJVvvDL02k9HiMqS1g49a6KV9l14UcUW5fsuvOizwD0XvbSv3la+GeC9qjpbaPvxHRH5e1V9cqkXWbDoANOZafL+0t0BFCXv50NbWVRtb4qnHit6VnDbFbdZkGimsJe0duBZE21SbFE+y8KGgsnC4wDb6/nCGhQ0Z0
u+XhKoGngsWHSA0d5REs7Sf5SCkHASoa0ssr0pIQl7SWsYK7JirpB6qtqifN+FF32t3pSUiLjA08D5wJ+pqrUo7wbr16xnMDnIdCZYBbU4FaWF/4aSQ6GuLLK9KSEKa0mrrciqR8tblKuqB1wuIqPAgyJyiaq+uNRrLFh0gP5EPzdfcjN/sudPgtVQeqrIrSieH6R4brrkplDv3Nu1N8VPp5l76im86Wnc0VEGr7yy85r4xUXUVmTFQ8tblBep6rSIPAZsBCxYdIPN529GVU/ts/CDFKQgp/ZZROTOvVV7U1SV4w88wJG77kbz+QXN/FbdfBMjW7dGvoNtx4nKiqx4aXWL8jOAXCFQ9APvA/6o2ussWHQIEWHrW7bywfM+yHf/87s8e+hZAK448wre9aZ3dUUt4PgDD8y3CS894lRzOab+/IsAjN5wQ1jD605RWZEVL61uUf5G4J5C3cIBvqaqD1V7kWgrD1JvsvHxcd2zZ0/YwzAR5KfT/OvP34i4bsUDiNTzOPdv7reUVLst2GeRX7giq7v2WdT8Te678KJtVGlRftFL+7Y3c3DVhDKzEJEbgTuAi4ANqmoRwDRk7qmn0Hx+wYyilCST+JkMqV27otUVthuEvSIrnqxFecGLwFbgf4b0/qaKuBWJvenpoEax5Cd55I8da8+AzOmsyWDNChvutu+78KLTWpQ3uoO7XqEEC1XdB+V3G5twxbVI7I6Ogltl1Y3rkhgba8+AjGmCQmCwFuW1EJEJYALgnHPOCXk0nS+uReLBK69EEgk0l6tYs5BEgoENG0IYXZ2s8Z6JkJYFCxH5R2BNmac+rap/W+vXUdVJYBKCAneThmfK8NNpjtx1N87AwGkXXEkmcQYGOHLX3Qxv2hS5lJTT38+qm29i6s+/iPT1odnsqVlRTw968iSrb70lcuMuyxrvmQhqWbBQ1fe16mub1oh7kXh4yxZSTz/N8b97KLjgqgYXVRFGrv8Qw1vCac++bNZ4z0SQE/YATHTEvUh84sEHSe3aTc+b30zyrLNIrllD8qyz6Hnzm0nt2s2JBx8Me4jV1dp4LxtKjdN0sVCChYhsEZEDwDuAh0Xk0TDGYRaKc5G4NIXm9PbirliBOzqKu2IFTm/vfArNT1dqtxMRxcZ75XY7Q/C4nw+WoBrTRqEEC1V9UFXXqmqvqr5BVT8QxjjMQoNXXgmOQ/7YMbzpabyZGdT355+PcpG4mEIr1lrU9/FmZua/D1wXzedJ7doV8kirsMZ7JqIivxrKnK4VeyBUlROPPII/O4t39GiQ63ccBHBXr8YZHETT6bqKxO3Ys1FMoalqECCmpoIG/YW6hQAyOEju6NGmvm/TWeM9E1EWLGKklXsgiktmE2NjOMkk+SNHQBX1ffKHDuGOjXHmJ3+Tka1bIzHexYopNG96Gu/wYdRxFnxtVUVnZsjsewmiXOe2xnsmoixYRFClO/FW7YFYvGTWHRvDGRnBn5ubv1t3ensZ3rRpWRf3du7ZKKbQvKmp0wIFBE151HGYe+IJ/PQno7uE1hrvmYiyYBEhS92Jr/ylj3H0y19uyR6IYr6f3h5mczN4vofruAwODeJIUNbyZmeXtWS2WXs2ak1hOf39DL373Rz7yleQxKK/1qrg+yTPOAP1/cgu/Z0X9lGoxpRhwSJClroTP/yFPwMguabcPsfG9kB4x6aZO3mCQ/mDKBqcxivBWRir+lcz1ju67CWzje7ZqCeF1ffWt+IMr0BTafD9BfssEmecgTs2hnf8eGSX/s6zxnsmgixYRES1O3FJJslPTZE480zEqbCIrc49ELvS++jLzyCJRDCTKFyDVZWp1GEAht3EspbMNrpno54Uljs2ijs8grPmjfMpNFwXZ3Dw1M8sokt/y7LGeyZCbFNeRCxe+rmY9PQABBfBSuq4EKbzaSYT38N3HVx/4XMigiMOx2YPg+ssa8lsI3s2ak1hLd4zUewPhect2GdRDBRRXvprTNRZsIiIanfizuAgQN
DzqIx6L4S7D+4mnfD5/nvOoCfj43gL22+5PvRmfI5svXpZtZDSxn7LHW/VwJlMlt0zUewP5adSp72v5nL4qRSrbr4pusVtYyLMgkVEVLsTF8fBGRlB87mmXginM9N46vHc+pU8/r4zcDyl56RHXypPz0kPx1O+ed0Ir7/nrcv6uktduP1MhvyRI/RfcTlzTz2Fn07jp9PMPPYY0zt2MPfkkxWDzLwKKayRrVtZfestqOfhzc7iHT+ONzuLeh6rb71lWUt/jTGnWM0iImppse2OjLDqv/wyR//6y/iZzIKib70XwtHeUVxxQYTnNqxi7+VjrPvRLANzHqlBl1d/YohjpHhf3/Lz/MXxHLnrbvxMBs3n8U+exD9xAnd4mNSTTzG3azdaSCc5AwPgefiZDF4hELijo+WX61ZIYYkIozfcwPCmTaR27SJ/7BiJsTEGNmywGYUxDbAzuCNk+utfny/qlgaM4sxh9a23MHrDDfjpdNMuhOl8mo8+9FFcxyXpnB6kcn4Oz/e470P30Z+o7z2K4z3xzW8x+/89hjs6htPbC0D+2DHyhw4BkDzzTNyxMdT3yfzoR+D7JM4887SgYOdpm5iKdV95S0NFSK0pFKe/n6Frr2V082aGrr22oQtmf6KfbRdvI51Lk/MXpn5yfo50Ls22i7fVHSiK4x3YsIH0c8+RWLlqPlCo7+NNTQWzI9clf+QI6vuI45BcvRog2GS3qD+V1R6MaT9LQ0VIWCmUzedvBmD73u1k8hl89XHEIeEkmLhsYv75RpTbd+HPzQVbOoppJt/Hn5sLVjIVZhO5Q4fIHzmC09PTcMrNVGAn8pkaWLCIoOLMoV1EhC0XbGHjuRvZfXA305lpRntHWb9mfdUZRTqfruk1ZVd7eV6wca5IdcHnuGNjILDiZ95P74U/abWHZrMT+cwyWLAw8/oT/Vyz9pqaPjeVS3Hns3fy6KvBUSRJJ0nCSZBwEmy7eBubz9+8oDBdXO2lhdkDnoe/eBmwyOkrwhJJBt9xVbTbc8SVnchnlsGChVkWVWXHKzv4/DOf51jmGILM7/pe3bea3kQvk89PArDlglPtXQc2bMBPpci99lpQ5VMNWojn86gq4gYrsor7ScA20bVUrSfyXbLVUlIGsAK3WaYdr+zgi//8RY5nj5OQYCbhiIMgHE4fZi47R3+yn+17t5POn9phPfP3fx/0ePJ9tDCDENcFxwkCRi5HYtWqBbutrZDdQnYin1kmCxamZul8mu17t88vACxNMxVnGFMnp3DFJe/n2X1wN3CqfUdi9eqgt1XhnAz1vCBYFGYVJFzbRNcudiKfWSZLQ5ma7T64m7yfr/i8IKgqqVwKX32mM9PAqZVQ7tAQTk8PbslZGcVGf97sLCMf/jDJtWutkN0OdiKfWSYLFqZmxdYgrlS+yChKXvMkJMFo7yhw+koocRzcFSsWvE5USa5dy+jmxpfpmhrYiXxmmSwNZWpWbA0ymBxEJJhFLCYIKCScBOvXrAca60BrWqR4Il92NggMpYon8r3zN6y4bebZzMLUbP2a9SScBJ56rO5bzeH0YRw9dYSpFv4TkQW7vmvpe2WrnkIQ1xP5bBNhKCxYmJoVW4NMPj/JQHKAMziDqZNT+OoHYUKVlX0r+fXLfn3Bru9iB9pqfa+sRtFmcTuRzzYRhsqChVmW0tYgCSfB6r7V8z2lPrDuA/zXC38VfeYFju/92wVnZi/uQNuMjrmmSeJyIp9tIgyVdZ01dVnc5mP8DeNkv/H3Vc/MbmbHXNNFsimYfE+QIqtUkPfzMPFYNGdFgVhPe2xmYeqyuDVIaXv1pc7MbnffK9MhipsIkxVuLNxkUHd59TvxmCXFkK2GMg2r98xsY2pmmwhDZ8HCNKzeM7ONqZltIgydpaFMw8q2Hz/tk8qfmR03fjrN3FNP4U1PLyjgmxazTYShs2BhGtYNm+5UleMPPFC1gF9kQaXJipsIi6uhSgNGcRPhNZ+KcnE79ixYmIZ1w6
a74w88UFMBf7lBxSxDXDcRdghbOmuaonQ1VKVNd6M33BDiCOvnp9P868/fiLhuxWConse5f3M/Jx55pGN/DpGRTcVjE+HpYn2XYDML0xSdvOmu3PnhpSSZxM9kmH3iOzWtChvetMlSUo2IyybCDmPBwjSFiDB6ww0Mb9rUcZvuai3gp555uqagktq1y/aamNixYGGaqhM33dVawAe6ZlWY6T62z8KYKkoL+OUUC/j9b397x68KM93LgoUxVRS75vqp1GkBo/Ss8BVXX11TUInzqjDTvSwNZVpqccPB9WvWz59zESe1FPBFxFqxm45lS2dNS6gqO17Zwfa928n7+fnjWBNOgm0Xb2Pz+Ztjud+gWtdc22dhlhDrP/hQgoWIfBa4HsgCPwJuUtXpaq+zYBEfD778IJPPT9Kf7CfpnLrDzvk50rk0E5dNsOWCLSGOsLWsFbspw4LFst9U5P3ATlXNi8gfAajq71R7nQWLeEjn03z0oY/iOu6CQFGU83N4vsd9H7ovlikpY+oU62ARSoFbVb+pqvnCh08Ca8MYh2mN3Qd3k/fzZQMFQNJJkvfz7D64u80jM8bUKwoF7puB/1XpSRGZACYAzjnnnHaNqaO0u6nddGYaT5feb+Crz3SmaubRGBMRLQsWIvKPwJoyT31aVf+28DmfBvLAvZW+jqpOApMQpKFaMNSOFVaxdbR3FFeW3m/giMNo72jT39sY0xotCxaq+r6lnheRjwMfAn5a47QkK0Zq7ZTabOvXrCfhJMj5uYo1i4STYP2a9U1/b2NMa4RSsxCRjcDvAB9W1VQYY+h0YR512p/oZ9vF20jn0uT8hRvUiquhtl28zYrbxsRIWDu4vwCsAL4lIs+JyBdDGkfHCvuo083nb2bisgk832M2O8uJzAlms7N4vsfEZRNsPn9zS97XGNMaoRS4VfX8MN63m4R91KmIsOWCLWw8d2NH7OA2pttFYTWUaYGoHHXan+jnmrXXtPQ9jDGtZ40EO1StnVKtqZ0xphYWLDpUrZ1SrQWFMaYWlobqYJ181Kkxpr2s62wXsKZ2xkRCrHtD2cyiC3TiUafGmPaymoUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKls6aWGr36X/GdDsLFiZWwjr9z5huZ8HCxEpYp/8Z0+2sZmFiI8zT/4zpdhYsTE38dJqZxx5jescOZh57LJQLctin/xnTzSwNZZYUpRpB2Kf/GdPNLFiYJUWpRhCV0/+M6UaWhjIVRa1GYKf/GRMeCxZdbqlaRNRqBHb6nzHhsTRUl6qlFhHFGoGd/mdMOCxYdKlaahHuqlWRqxGICKM33MDwpk12+p8xbWTBogvVWot481//1XyNoFwqqt4aQTNaddjpf8a0lwWLLlSsRZTOKEpJMomfyXDyhRdYdfNN8zOQ0oAxXyP41V+p+cIfpWW4xpjlsWDRhZZTi6hcI3AZ2LCeI3/118FjNVz4o7QM1xizPBYsutBy9itUqhHkfvxjjnzprmDGUTKTqHThrzX1Nbxpk9UejIkgWzrbherZr1CsEYxu3szAhg0c/fK9y9p/EbVluMaY5bFg0YUa3a9Qz4U/istwjTG1szRUl2pkv0I9F35r1WFMvFmw6FKN7Feo58Jfmvpq5jJcY0x7WLDocvXsV6jnwl9MfS21DHf1rbdYcduYiLKahVm2emseI1u3svrWW1DPw5udxTt+HG92FvU8a9VhTMSJqoY9hpqNj4/rnj17wh6GobENdn46ba06TDeK9Y5TCxamIXbhN6ZmsQ4WVrMwDbEeTcZ0B6tZGGOMqcqChTHGmKosWBhjjKkqlGAhIp8RkedF5DkR+aaInBXGOIwxxtQmrJnFZ1X1MlW9HHgI+L9CGocxxpgahBIsVPVEyYeDQHzW7xpjTBcKbemsiPwB8F+A48B1S3zeBDABcM4557RncMYYYxZo2aY8EflHYE2Zpz6tqn9b8nm/C/Sp6u9X+5q2Kc
8YE2O2Ka8cVX1fjZ/6FeBhoGqwMMYYE46wVkNdUPLhh4GXwhiHMcaY2oRVs/hDEflJwAf+DbglpHEYY4ypQawaCYrIYYLgsthqYKrNw6lHXMYJ8RlrXMYJ8RlrXMYJ8RnrauAlVd0Y9kDqFatgUYmI7FHV8bDHUU1cxgnxGWtcxgnxGWtcxgnxGWtcxrkUa/dhjDGmKgsWxhhjquqUYDEZ9gBqFJdxQnzGGpdxQnzGGpdxQnzGGpdxVtQRNQtjjDGt1SkzC2OMMS1kwcIYY0xVHRMs4nJGhoh8VkReKoz1QREZDXtMlYjIjSKyV0R8EYncsj8R2SgiPxCRV0Tk9rDHU4mI3CUih0TkxbDHshQROVtEvi0i+wp/7r8Z9pjKEZE+EdklIv9cGOd/C3tMSxERV0SeFZGHwh5LIzomWBCfMzK+BVyiqpcBPwR+N+TxLOVFYCvweNgDWUxEXODPgA8CbwV+QUTeGu6oKtoOxGEzVh74LVW9CLgK+K8R/ZlmgPeq6tuAy4GNInJVyGNaym8C+8IeRKM6JljE5YwMVf2mquYLHz4JrA1zPEtR1X2q+oOwx1HBBuAVVd2vqlngPuDnQh5TWar6OHA07HFUo6qvqeozhd/PEFzg3hTuqE6ngdnCh8nC/5H89y4ia4GfBf4y7LE0qmOCBQRnZIjIfwAfI7ozi1I3A38f9iBi6k3Af5R8fIAIXtjiSkTWAVcAT4U7kvIKqZ3ngEPAt1Q1kuME/hT4bYI+eLEWq2AhIv8oIi+W+f/nAFT106p6NnAvcFtUx1n4nE8TTPvvDWuchXFUHWtElTsbIJJ3l3EjIkPA14FPLpqxR4aqeoWU81pgg4hcEvaYFhORDwGHVPXpsMfSDKGdlFePuJyRUW2cIvJx4EPAT2vIG12W8TONmgPA2SUfrwV+HNJYOoaIJAkCxb2q+kDY46lGVadF5DGCmlDUFhC8C/iwiGwC+oBhEfmyqv5SyOOqS6xmFkuJyxkZIrIR+B3gw6qaCns8MbYbuEBEzhWRHuCjwDdCHlOsiYgAXwL2qeofhz2eSkTkjOIqQhHpB95HBP+9q+rvqupaVV1H8PdzZ1wDBXRQsCA4I+NFEXkeeD/BCoQo+gKwAvhWYZnvF8MeUCUiskVEDgDvAB4WkUfDHlNRYZHAbcCjBIXYr6nq3nBHVZ6IfBX4PvCTInJARH4l7DFV8C7gl4H3Fv5uPle4K46aNwLfLvxb301Qs4j1stQ4sHYfxhhjquqkmYUxxpgWsWBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qyYGG6goh4haWgL4rI/SIyUHh8jYjcJyI/EpF/EZFHROQthef+QUSm494t1JhmsGBhukVaVS9X1UuALHBLYRPag8BjqvoTqvpW4PeANxRe81mCfQfGdD0LFqYbPQGcD1wH5FR1fmOkqj6nqk8Ufv9PwEw4QzQmWixYmK4iIgmCMzBeAC4BOqLJmzGtZsHCdIv+QkvrPcC/E/RAMsbUKFZdZ41pQLrQ0nqeiOwFfj6k8RgTKzazMN1sJ9ArIr9WfEBE1ovItSGOyZhIsmBhulbhLJEtwM8Uls7uBe6gcC6GiDwB3A/8dKFb7AdCG6wxIbOus8YYY6qymYUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKgoUxxpiq/n9jPPDBLiPgsQAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 402.375x360 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAFgCAYAAABKY1XKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5Rc9XXo+e8+p6rf6ocksIwFFgQcMA+D0xL4ARjHsWXFOJIIHidOrgVJOrAuTrxWxgmJZybc65V1k/FMEhtnxbcTg0iMzTUxKA6QYCcyA36AJB4BFGGDFZIoRkgtqaXurlI9ztnzx6lqVbequqrrdc6p2h8WS+qqrq5ft6Szz2/v32//RFUxxhhjluKEPQBjjDHRZ8HCGGNMVRYsjDHGVGXBwhhjTFUWLIwxxlSVCHsAy7Fx40b9h3/4h7CHYYwx9ZCwB9CIWM0spqamwh6CMcZ0pVgFC2OMMeGwYGGMMaYqCxbGGGOqsmBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qK1aa8TpHOenx//xTH5nKMDSZ5x3mr6e9xwx7WvKiPzxjTfhYs2khVuX/PASaf2E/e8/F8xXWEhOswcfV53Di+FpHwNnlGfXzGmPBYsGij+/cc4M6dLzPYm6A/mZx/POf53LnzZQA+sv7ssIYX+fEZY8ITWs1CRPpEZJeI/LOI7BWR/xbWWNohnfWYfGI/g70Jku7CH3vSdRjsTTD5xH7SWc/GZ4yJnDAL3Bngvar6NuByYKOIXBXieFrq+/unyHv+aRfioqTrkPd8ntx/pM0jC0R9fMaYcIWWhtLg8O/ZwofJwv8deyD4sbkcnr/0t+f5ytG5bJtGtFDUx2eMCVeoS2dFxBWR54BDwLdU9akwx9NKY4NJXGfp4rDrCCsHe9o0ooWiPj5jTLhCDRaq6qnq5cBaYIOIXLL4c0RkQkT2iMiew4cPt3+QTfKO81aTcB1ynl/2+Zznk3AdrjpvVZtHFoj6+Iwx4YrEpjxVnQYeAzaWeW5SVcdVdfyMM85o+9iapb/HZeLq85jL5E+7IOc8n1Q2z8TV54W2nyHq4zPGhCu0moWInAHkVHVaRPqB9wF/FNZ42uHG8bUATD6xn5O53IJ9DLddd8H88zY+Y0zUSFBnDuGNRS4D7gFcghnO11T1vy/1mvHxcd2zZ087htdS6azHk/uPcHQuy8rBHq46b1Wk7tijPj5jYirWO1pDCxb16JRgYYzpSrEOFpGoWRhjjIk2CxbGGGOqsmBhjDGmKgsWxhhjqrKus8a0gZ0RYuLOgoUxLWRnhJhOYcHCRFKn3Ik364yQTvl5mPiyYGEipZPuxGs9I+T6t51V8cLfST8PE29W4DaRUrwTTzjCir4kowM9rOhLknCEO3e+zP17DoQ9xJo144yQTvp5mHizYGEio9NO62v0jJBO+3mYeLNgYSKj007ra/SMkE77eZh4s5qFWSDMQmqnndZXekZIuQt+tTNCOu3nYeLNgoUBolFI7bTT+opnhBRXQ5UGjOIZIbddd0HFYNxpPw8TbxYsDNC8JZ6NaPROPIoaOSOkE38eJr4sWJimLPFshkbvxKNIRPjI+rO5/m1nLfuMkE78eZj4smBh5guppTOKUknX4WQux5P7j3DdhWe2dCydelpff49b18+uU38eJn4sWJhIFVIbuRPvRPbzMFFhwcJEspBa7514p7KfhwmbBYuYacXSViukGmOqsWARE61c2mqFVGNMNRYsYqLVS1utkGqMWYqoLl3YjJLx8XHds2dP2MNou3TW4/ovfIeEIxXTRHlf+bvb3t3w3X8661kh1ZjWiHV7YJtZxEA7l7ZaIdUYU441EoyBKC1tNcZ0J5tZtEgzVy1FcWmrMaa7WLBoslasWrKlrcaYsFkaqslacbJZcWnrXCZPzvMXPFdc2jpx9XmRLESnsx47X3qdrz99gJ0vvW4H9RgTUzazaKJWNu
SL29LWpWZY2965jjeO9DKdyrf9zAxjTH0sWDRRK1ctxa1HUPl9IcrhmSx3fGMvI/0JBnoSbT8zwxhTH0tDNVE7Vi3197hcdd4qxgaTHJ3L8v39U5FL7VSaYR2dy3FkLoMrMJvxGO5LNpyiM8a0h80smqjVq5aicJpdLcrNsHxVpmYzuCKICJ7vM5vNM9yXbOuZGcaY+tjMoolKVy2V0+iqpVYUz1uh3AxrNpNHlflgprDgc5KuQ97zeXL/kXYO1RhTIwsWTdTKVUu1Fs+jkJIqN8PyfEU5FRwEyn6ObSw0JposWDTZjeNr+cR7LyDvKzMnc0ynssyczJH3taFVS8XUTrl9FhCtO/NyMyzXEaTQGkdVERGGehZmQW1joTHRZTWLJmvVqqU4tfxY3PLcFUFV8VXxCwHkDcN9OCUzC9tYaEy0WbBokWY35Itby48bx9eiqvy/3/oBx1I5fFVUwddgnKqnZhh2ZoYx0WfBIibi1vJDCqueehIubxh2AMF1hFzeZ2o2w6GZDOmcx0h/surGwlacDmiMWR4LFjERt9PsigX5oTIF+bHBHo6ns+Tyyu0fvJBr33Jm2XHHZamwMd3AgkWMxKnlx1K72R0RxgZ6mTmZozfhVgxwrT4d0BhTu9CChYicDfwVsAbwgUlV/VxY44mDOLX8aLQg38o+W8aY5QtzZpEHfktVnxGRFcDTIvItVf2XEMcUC3E4za7Rgnw7Twc0xlQX2j4LVX1NVZ8p/H4G2Ae8KazxmOZqdDd7nJYKG9MNIrEpT0TWAVcAT5V5bkJE9ojInsOHD7d7aKZOje5mj9tSYWM6XejBQkSGgK8Dn1TVE4ufV9VJVR1X1fEzzjij/QM0dWtkN3ur+2wZY5Yn1NVQIpIkCBT3quoDYY7FNF8jBfm4LRU2ptOFuRpKgC8B+1T1j8Mah2m9egvycVoqbEynE9Wli4gte2ORdwNPAC8QLJ0F+D1VfaTSa8bHx3XPnj3tGJ6JkHTWi/xSYWNqEOsdpKHNLFT1O8T8h2faIw5LhY3pdKEXuI0xxkSfBQtjjDFVWbAwxhhTlTUSNMaYcrIpePUJSB2FgZWw7mroGQh7VKGxYGGMMaVU4dl74XufBz8HvgeOC04S3vkbcMXHoAtb41uwMMaYUs/eC4//39AzBMn+U497ueBxgLf/UjhjC5EFi5iI6mlxUR2X6RDtTgVlU8GMomcI3EUdj91k8Pj3Pg+XbO26lJQFi4iL6mlxUR2X6RBhpYJefSJ4v9IZRSk3CfmT8Op34C3vb/77R5gFi4iL6mlxUR2X6RBhpYJSR4PAtBTfg9SR5r93xNnS2Qir9bS4dLbKX+4uGZfpELWmgrKp5r/3wMpgBrMUx4WB7ut2bMEiwoqnxS2+IBclXYe85/Pk/vbe5bR7XOmsx86XXufrTx9g50uvWxDqdMVU0OJAUeQmwc8HqaBmW3d1kOrycuWf93LgJGDdu5v/3hFnaagIi+ppce0aVyvrIlaYj7AwU0E9A0FNpJgCKw1YXg6yc3DNp7quuA0WLCItqqfFtWtcraiLWGE+BsJOBV3xseDX730+KGbPF9cTQaAoPt9lLFhEWOlpceVSPmGdFteOcdVaF7n+bWcta0ZghfkYKE0FlUtFFVNBb3o7/PDR5i+rFQmK55dsDVJdqSNBYFr37q6cURRZzSLCGj3HOs7jakVdxArzMVFMBWVnT68deDnIzMLa9XD3JviH2+Gx/xH8OvkeeObLwbLbZo3jLe+Hy38h+LWLAwXYzCLyonpaXKvH1Yq6SDEAlc4oSiVdh5O5HE/uP2LnZ4RtqVTQuVcHRXDbYd1WFiwirpFzrOM8rlbURaK6YMCUUSkV9Ka3BzMK22HddhYsYiKqp8W1alytqItEdcGAWUIxFVT0w0dth3VIrGZhIqkVdZHSAFROWAsGzDLYDuvQWLAwkXXj+Fo+8d4LyPvKzMkc06ksMydz5H2tqy4S1QUDZhnCXlbbxSwNZSKrFXWRqC
4YMDWqdVltF+6wbjXRZi0za4Px8XHds2dP2MMwHSCd9SK1YMAswzNfrr7DOpqroWK929NmFqYrRXXBgKmB7bAOhQUL03LWh8k0le2wDoUFC9My1ofJtNTiZbWmpSxYREwn3YVbHyZjOocFi4jotLvwVjUCNMaEw/ZZRETxLjzhCCv6kowO9LCiL0nCEe7c+TL37zkQ9hCXJaoHNxlj6mPBIgI6sRtqM/sw2Ul5xoTP0lAR0IndUJvRh6nTUnPGxJkFiwjoxG6ozWgEaAVyY6LD0lAR0IndUBvtw9SJqbm2yqaCDq3PfTX4NZsKe0Qm5mxmEQHl7sJ9VWYzeTxfUZS+hBu7bqiN9GHqxNRcW6jCs/cGu5v9XMnu5mRw+twVHws2tRmzTBYsIqB4Fx6kXFxmTnpMzWZQBV99QFg5mOTv/vnHscrTN9IIsBNTc23x7L2n+ibZKXKmiSxYRETxLvv/+eYPODKXwUEQAddxWD3Uw1BvIrZ5+nr6MHViaq7lsqlgRmGnyJkWsJpFRIgI17/tLIb6ErxxpJ83jPTxxtF+zj9jiJWDvfQk3K7K09tBRXV49Ykg9VSudTcEj/v5oJ+SMctkwSJCvr9/Ct9XxgZ6GBvoYbgviVNyd91NG9nsoKI62ClypoUsDRUhlqdfyA4qWiY7Rc60kAWLCLE8/UKtOCmvo9kpcqaFQg0WInIX8CHgkKpeEuZYoqAZG9k6kR1UVKOegWB5bLVT5KJc3M6mgtpL6mgwU1p3dbTH20XCnllsB74A/FXI44iEhUtoF25GK+bpb7vuArurNpXF9RQ52x8SeaEGC1V9XETWhTmGqIlbnr6Tzt/oCHE9Rc72h0SeqC5dUG35AIJg8VClNJSITAATAOecc85P/du//Vv7BheidNaLdJ7emvyZpsmmYPI9weynUq3Fz8PEY9EOeNXF+h9E2GmoqlR1EpgEGB8fDzeytVHU8/TW5M80TXF/SOmMopSbDFJqr37HjlENke2zMMtmTf5MU9n+kFiwYGGWrfQUPJ8sqcQLzCafJJV4AZ9sV20eNE1g+0NiIeyls18F3gOsFpEDwO+r6pfCHJOp7thcjrzvM5P8Hid6voXiofgIDtLrMpz9GfKpn+qazYOmQbY/JBbCXg31C2G+v6nP2GCS/MAu5noeRejF4dQmQSXP8Z5H6B3wWDl4aYijNLHRCftDuoClocyyXXHOICcHdoL2IovuN4QEaC8nB3Zy+Tn2j9vU6IqPwTW/Hax6ysxAejr41c9He39IF4n8aigTPS8cfYbhfocTKRdl4V4pVVB1GRkIPu+agWtCG6eJkbjuD+kiFizMsk1nphnoEfrcXg7PZvBLmsKKwJkrenESaaYz0+EN0sRTz4Atj40oCxZm2UZ7R3Edl5G+HkYGepjL5Ml7SsIVBnsTOAKz2SyjvaNhD9UY0yQWLMyyrV+znoSTIOfnSDpJVvQt/GuU83MknATr16wPaYTGmGazArc5TTqf5vEDj/ONH32Dxw88TjqfXvB8f6KfbRdvI51Lk/NzC57L+TnSuTTbLt5Gf6LCjlxjTOzYzMLMU1V2vLKD7Xu3k/fzeOrhikvCSbDt4m1sPn/zfL+nzedvBmD73u1k8hl89XHEIeEkmLhsYv55Y0xnCL2R4HKMj4/rnj17wh5Gx3rw5QeZfH6S/mQ/Saek31NhtjBx2QRbLtiy4DXpfJrdB3cznZlmtHeU9WvW24yiHnaOQzeIdSNBCxYGCC76H33oo7iOuyBQFOX8HJ7vcd+H7rNg0Ex2jkM3ifUfpNUsDAC7D+4m7+fLBgqApJMk7+fZfXB3m0fW4YrnODgJ6B2G/rHgVycRPP7svWGP0BjAahYtEcfUzHRmGk+X7vzpq297J5opmwpmFItbXEDwcc9Q8PwlWy0lZUJnwaKJllMgjprR3lFcWbrzpyOO7Z1opmrnODhusJN55x/AeddYHcOEyoJFE+14Zcd8gbgv0Tf/eM7PMfn8JMBpBeKoWL
x3YjHbO9EClc5xUIX0UZg9BOrB81+FHz5idQwTKqtZNEk6n2b73u2nrSSCIN/fn+xn+97tp+1ZiArbO1FBNgU/fBSe+2rwazbVvK9d6RyH9FGYfT0ICOJC34jVMUzobGbRJMUCcemMolTSSZLJZ9h9cDfXrI1mcz3bO1GiHauUyp3joH4wo5DCfZwQ1C7A6hgmVBYslqlS8boTCsQiwpYLtrDx3I2xK9A3XXGVUs/QwpqClwseh6BLaiPKneOQmYGgl28QOIbWnAocYOdRm9BYsKhRteL1yr6VHVMg7k/01zT7ieOqr5q0c5VS8ZyG730+CAKZmSBIiASBYmDl6a+x86hbwzZGLsmCRY2qFa9vuvimrikQx3nVV02qrVJq5t394nMc9j8GL34dhs5cOKMoZedRN5dtjKyJFbhrUEvx+t6X7uUXL/zFrigQFwOn67gM9Qwx0jvCUM8QruMy+fwkO17ZEfYQG1NplVKpZt/dF89xeO//ERS0K72/nUfdfLYxsiYWLGpQ6+7mNw6+kYnLJvB8j9nsLCcyJ5jNzuL5XscUiOO+6qsmlVYplWrV3X2xjpGdDQJDqeJ51O/8DUuPNEutKcdmroKLKUtD1aDm4nV2uuMLxJ2w6quqcquUSrX67n5xHWM+LZKw86ibrZ0pxyYTkc8AU6r6ucLHfwC8rqqfb8X7WbCowXJ3N9daII6jTlj1VVW5VUpFxbv7az7Vurt7O4+6fcJIOTbPl4AHgM+JiAN8FNjQqjezYFED2918Ste0BYnC3b2dR916YaYcG6Sqr4rIERG5AngD8KyqtiyqWbCoQXF38+Tzk5Ck4lkPnZJqWkrXBE67u+8OYaccG/eXwDZgDXBXK9/ICtw12nz+5o4vXtei69qCFO/uL/+F4FcLFJ0l/gsKHgQ2AuuBR1v5RjazqJHtbj7F2oKYjhKFlGOdVDUrIt8GplWrFBMbZCflmbo1uoM7nfX4/v4pjs3lGBtM8o7zVtPfUyV/bEyrZFOtTjk2fWdfobD9DHCjqr7c7K9fymYWpm71rvpSVe7fc4DJJ/aT93w8X3EdIeE6TFx9HjeOr433DnATTzFbUCAibwUeAh5sdaAACxYmBPfvOcCdO19msDdBf7JksYDnc+fO4O/8R9afXfPXsxmK6Uaq+i/Aee16PwsWpq3SWY/JJ/Yz2Jsg6S5cX5F0HQZ7E0w+sZ/r33ZW1Qu+zVCMaR9bDWXa6vv7p8h7/mmBoijpOuQ9nyf3V18uXpyhJBxhRV+S0YEeVvQlSTjCnTtf5v49B5o9fGO6VtWZhYgMA2eo6o8WPX6Zqj7fspGZjnRsLofnL72owvOVo3PZJT+n2gxl1PH57r1/y3X/dhZ9q1cyeOWVOP3dtWotMqz1d0dYMliIyEeAPwUOiUgS2KaquwtPbwfe3trhmU4zNpjEdZZODbmOsHKwZ8nPKc5QSmseAKgy/tL3uPbZR8HLc/DZBP19PUgiwaqbb2Jk61ZLTbXLclt/W1CJtGozi98DfkpVXxORDcBfi8jvqeoDtGAZmOl87zhvNQnXIVchFZXzfBKuw1XnLd1eodIMZfyl7/HTex4ik+glm+wnN9DL0EAPmssx9edfBGD0hhua882YpdV62qCdJ1HRutsfHgCuA1YBR4Bvv/qHP9twC1wR2Qh8DnCBv1TVP6z2mmrBwlXV1wBUdZeIXAc8JCJrCc5+NGZZ+ntcJq4+b341VGnAyHk+qWye2667oGpxu9wMJZnLcO2zj5JJ9OK7CUSVhBN8fUkmcQYGOHLX3Qxv2mQpqVZbzmmDLz7Q+iNsY2bd7Q8L8HHgU0CS4FqdB3Lrbn/4s8A9r/7hz9Z1DRYRF/gz4GeAA8BuEflGYXVVRdUK3DMi8hPFDwqB4z3AzwEX1zNQY24cX8sn3nsBeV+ZOZljOpVl5mSOvK/cdt0F3Di+turXKJ2hFP3Ef/4Ax/fw3QSKIsBg76mgI8kkms+T2rWrFd+WKVVs/V2u3xIEj/
t5eOWf7DyJ8j4O/J8EAeIEcLTwa77w+Mcb+NobgFdUdb+qZoH7CK7pS6o2s7iVRekmVZ0pTGE+Uu9ITXcTET6y/myuf9tZPLn/CEfnsqwc7OGq81bVvD+i3Axl4OQsjvooiu/DGcO9OIvTF55H/tixFnxXZoFaW3//+/dje55EqxRST58CZoFFDavIFR7/1LrbH/5anSmpNwH/UfLxAeDKai+qFizmCFrfvrLo8auAJ5czOtMcjbbYiJL+HpfrLjyz7tcXZyCTT+znZC7HYekjj6AaBIqVA2WK5K5LYmys7vc0Naq19TcS5/MkWuU6gtRTpeMmc0A/QZbnkTq+frkCUNWUVrVg8acERe7F0oXnrq8+LtMMqsqOV3awfe928n4eTz1ccUk4CbZdvI3N52/uulU+i2cox46ezcqXHmGgvwe35/RAobkckkgwsKFl58OYolpbf59zJfywyvUuoudJtNAqaqgnA6vr/PoHgNIWCWuBH1d7UbWaxbpyeylUdQ+wbjmjK0dENorID0TkFRG5vdGv18l2vLKDyecncR2XoZ4hRnpHGOoZwnVcJp+fZMcrO8IeYmiKM5St7zyfc279NUin0dzC2bvmcvipFKtuvsmK2+1Qa+vv8993KqiUE/3zJFrhCEFtYikeMFXn198NXCAi54pID8EJe9+o9qJq0av8QcuBhv7F1VuR70bpfJrte7fTn+yfP3DIV5+53Nz8Ead3vXgXG8/dGNuUVLOMbN0KwJG77sbPZMDzwHWRRILVt94y/3xLddJ+gUa+l1paf4uEe4RtNH2bINWU5PSaBSWPP1bPF1fVvIjcRnD+hQvcpap7q71uyRblIvJVYKeq/sWix38FeL+q/m/1DLbwNd4B3KGqHyh8/LuFb+R/VHpNN7UoL61NHJg5wN/96O8Y7h1GVTmeOc7UySlK/+wU5aM/+VF+e/1vd106qhw/nSa1axf5Y8dIjI0xsGFD62cUnbRfoJnfS7XW3wveK78wqMTt57a0mr+Jdbc/vI1g1dPiIncSGAI+8+of/uz2Zg6ummozi08CD4rIx4CnC4+NAz3Algbfu6aKvIhMABMA55xzToNvGX3lahMn8yc5njmOrz6qytTJKZxCBjFYIiqoKg/vf5i3jL2FLRc0+kcTf05/P0PXXtveN611E1ocNPN7qdb6246wLeeewq+fIsjiuASppxzwmZLn26amw48Km/EuKXy4V1V3NvzGIjcCH1DVXy18/MvABlX9RKXXdMPM4sGXH2Ty+ckFKafZ7Cyvzb4WLBxRH0Hm009FijLaO8rKvpXc96H7uj4d1XbZFEy+J7gbrlTQ9fMw8Vj0L4Cl34vjBnUHPx983DMU3PnH5XuJlmVPjwrLaN9DUMyeAh5rxg7uelTrDdUH3AKcD7wAfElVqxVealVXRb6TlatNAAwkBxBH8H1/PkjIor93gjCXm8MVl90Hd9d1KJFpQHETWifsFyh+L7k0zL0epImKRGDwDcH3E4fvJeYKgaGe5bFNVy0NdQ/BtOcJ4IPARQSpqWaYr8gD/0lQkf/FJn3tWNp9cDd5P09fYuG6AkccVvet5vXU6xVfm3ASOOJwPHOcQ6lDdY+hk/ZxtFWtm9DauV+g3uJ06iicPA6ZGRAHnJJFk6owexB6V3Tb3oeuVy1YvFVVLwUQkS8BTeuTUG9FvpNNZ6ZPSy8VjfaNciJ7gtnc7GnPJZwErgQboBStK1jUs4/DAkuJpTahqR9ceHMpOPZqcBFvZfqm0eJ0zyCcPBG8ZvHniQBO8HzPUOu+BxM51YLFfBW+cHFv6pur6iNEZIoVBaO9o/MX/XJW9KxgNjeLg4NbuDCVfr5qUOw+c6D6rujFF/rXZl+bT4GVzmxyfo7J5ycB5gvnYW0QjPTxqeU2oalC+ijMHgI0+Pj5/wV7H2ztKp9OKrSbyKgWLN4mIicKvxegv/CxAKqqwy0dXZdZv2Y9CSdBzs8tqFkUCYIjDsX/Si/IqoqPz3DP8J
LBotyF3sHhYOogY31jrJAVCz4/6SQhCdv3bp/fx1HcIFhLYGmGWo9PDXWmU9yEVrpfIH0UZl9nvq654o3QN9Lai/Zyur1Wmt1k56B3GLIzgLMwoKkGM6Xe4aDwbVrrjpHTWpRzx/GGCtwichfwIeCQql5S7fOLlgwWqhqR27bu0J/oZ9vF24ILbpIFASPn5xARVvauxHEcpjPT+Hqq46qIMNY7xmBikPVr1ld8j3IX+tnsLCgcO3mMhCQY7Rtd8JqkkySTz7D74G7Wr1lftghf/LzFgaUZF/Di8amDvYkFhx3lPJ87d76MqtIz9nT4rVBKN6HlUjBzEJCgT8LgmiBVBbVftOvRjEL7wEroGw7GNXsoCA5KIeYJDL0hmEV1VwuO9rpjpGKLcu4Y+SxwD3ccr/eYiO3AF4C/Ws6Lqh6ratpr8/mbgeCCm8ln8NXHEYeEk+DXL/t1VJW/eOEvOHvF2WS97PyFscftIZPPcNMlN1W8GFdabZXXfOGa5jB1corh3mEcWdgJxlef6cx0xSJ8UTGw7HptF0dPHm34Al7t+NTB3gSf2/UVVqx5jMGegbbMdCoq3S/w3c/Bni8FheCeoaBQXKpVq6OaUWhfdzW4PZAcgP4xyJQsne0tWTrbXS042q3YonyWhQ0Fk4XHIbjoL5uqPi4i65b7uq4KFnEoyIoIWy7YwsZzN5Ydq6oiImzfuz1IS+HMb8qbuGxiPtiUU+lCn5AEgiAi821EVvQsTEc54jDaO7pkEb7IV5+d/76TXQd3NZyqqnh8aoHr5kn1/yMr/MGaZjpt0TMAY+uCi23vEpnaVqyOqrXb61KzgsUptb6S76F7W3C0T5B6qtqinDtGvtZoSmo5uiJYxLFja3+iv+xeiWrBZCmVLvQDyQGQYCUVcNrn5PwcCSfB+jXr2X1w95JF+KLv/fh7rOhd0fAFvNLxqUUnEz9A8aiUMS1NobV170kzLtr1qLXba7VZQS19nUyrtLpFeV26Ili0uyDbDpWCyVIqrbYq7uM4nD4MLFxhlfNzpHNpJi6boD/RX7UIn/Nz5P08SSdZ9nmofgH302nmnnoKb3qaNbPQ5/cTdJg5nS9zgJJwKwf7YgqtrZp10V6ucoX20vesdVZgLV5/1oAAAB7mSURBVDjC1OoW5XXp+GBRKU8PIaYpQrLUhX60bxTP9ziaOYqqciJzYr5WUpreqlaET+fSvPOsd/LUwaeWHEu5C7iqcvyBBzhy191oPg+ex5mOw++eyPLty9/Pc29992lLTTU/gCSD2kUlxRRaWzXrol2PZs4KqvV1Mq3Q6hblden4YFFrQbYbWmRUu9An3SS3r7+dNUNrlkxvLVWEn7hsgpV9K9nz+tI9vMpdwI8/8ABTf/5FnIEBnL5Tf16jmuJn9jwMwHMXX31qzJ6Plz2PsVV9eJrDkfIznWIKre3CSuXYrCDuWtqivNBN/D3AahE5APy+qn6p2us6PljUWpBte5oiJNUu9LXUb6rVTdL5dNVU1eILuJ9Oc+Suu3EGBpBFxeyx4eACd91z3+TJN7+dk05yfp/FJ667mOTIrzH5QuWZTjGF1nZhX7RtVhBPdxxPFZbHLtmivN7itqr+Qj2v6/hgUW1XNISUpmix0ry/OzrK4JVX4vT3N1QgX6xS3aSWVNXiC/jcU0+h+fyCGUWpseEBVojPHWdnOHjRxawc7OGq81bR3+OiuhZk6QAYqk64aHfSgU7xELkW5R0fLGopyIaWpmiBcnn/4klxq26+iZGtWxGRugrkUPvy41pmMKW86elgrEsQ3+PiIZ93/dTahY83MQCaRTrpQKc4CTbcbeeOka+xqEV5O5fLlur4YFHPXW6cVcr7ay7H1J9/EYDRG25Y9tdd7vLj5V7A3dFRcKssNXVdEmNjFZ+uNwCaJVifqXAFgSES/fNqOvwoKuo9/Gjxha70Ljeq+yzq4afT/OvP34i47ml5fwgChnoe5/7N/cs+YrTcoUywMOA2svy4lWM3da
r1QKebHoH/fNpSVNXF+iLT8TML6J40RbW8vyST+JkMqV27lnXkaDuWHzv9/ay6+ab5WVFpwNBcDj+VYvWtt1igaKdqfaacRNA76n9eC4meaKSorLbSMl0RLIo6PU1RS94fzyN/7Niyvm67lh+PbN0KwJG77sbPZBbUW1bfesv881FWaWFBLFXrM5U+CiePwdCZC9uahJGistpKy3VVsOh0zcj7l9Ou5cciwugNNzC8aROpXbvIHztGYmyMgQ0bIn/BrXVhQaxUO9Bp9hDgwOKbiFZ21a2kQ2srl95z6Wktyl/4+AuNtig/m6Dj7BrAByZV9XPVXmfBooMMXnklkkiguVzFvL8kEgxs2LCsr9vu5cdOf/+y0mRR0KqFBaFaqmVJZgZQcGThiXnqB+dc+HnIZ+GVf4K3Xt/acTbjDI+IufSeSyu2KL/0nks/C9zzwsdfqLfgnAd+S1WfEZEVwNMi8i1V/ZelXuQs9aSpLp1P8/iBx/nGj77B4wceJ52v1Pur9Yp5fz+VQnMLN34W8/6rbr5p2XfppcuPy+m05cfLtdSGQkkmcQYGgtRaOry/G3UptizJzgYBo1Q+G6R6Bt9wqv166ihM/QCOHwjO8khNwSP/Ozzz5SBN1CrF2kq5IjwEj/v5YGNkfBRblOeBE8DRwq/5wuMfr/cLq+prqvpM4fczwD7gTdVeZzOLOkW1k20r8v7dtvx4uVq1sCASKrUsEYI0VfFAp9RRmD0YBA6nEDx8gnMxWp0GasYZHhFSSD1VbVF+6T2Xfq0JKal1wBXA0s3csGBRt6h2sm1V3n+5m+y6SasWFkRCpZYlb3o73L2p0D3XhbnXg0BRvEHSwtF6/aPBhbqVaaCw2sG3TltalIvIEPB14JOqeqLa51uwqEMcOtk2O+/fLcuP69GqhQWRUq5lSbGrLhoEh+KMonhO91AhReU6rTkVsCisdvCt0/IW5SKSJAgU96rqA7W8xoJFHbq5k22rlh/H4RTDSlq1sCDyiimqb38mCA4eC8/p7l956nNbmQYKsx18a7S0RbkE+fEvAftU9Y9rfZ0FizpYJ9vmiWrtZzm6dkNhMUXVNwyPfCqoTxTP6V585nir00CddbJfS1uUA+8Cfhl4QUSeKzz2e6q6ZErLgkUdurWTbStEtfazXJHfUNjKnc3nvw/6RpZuC9LqNFDY7eCb6IWPv5AqLI9dskV5vcVtVf0OdbQesWBRh27rZNsqcaj91CqyGwrbsbM5SmmgTmgHH7AW5Z3AlpI2plifePK1J5nJzrCqv3x6Io61n8htKGzXzubOSgOFrrDhbvul91x6WovyRpfL1suCRZ1sKenyLa5PzGRnOJE9wfHscVb3rWa07/S0ndV+GtDOnc0dlAaKkkJgiESLcgsWdYrqUtJ0Ps13//O7PHPoGQTh8jMv591venckZjmL6xOKMpubRRAOpw8DnBYwrPbTgGpdY91k85e0dk4ayCxiwaJBUelkq6o8+PKD3PnsnUxnpymeU/KVfV9hrG+MT1z+CTZfEN7KonL1icHkYDAeDYLC1MkphnuHcQoraaz206AO29lswmXBokPseGUHf/r0nzKTn8HFRZwgKCjKsZPH+JNn/gQkvJVF5famOOKwum81h9OHcTQIEKlciqGeIav9NEPUdjbbWROxZsGiA6Tzae568S5m87NBoCg93hTBdVxmc7Pc/eLdoa0sqrQ3ZaR3BICpk1N4vsdMdgbAaj/NEJWdzXbWRN32XXjRaS3KL3ppX6P9oPqAx4FeghjwN6r6+9VeZ8GiA+w+uJu53BxA2TSTFP6bzc2GtrKo0t4UEWG0b5Th3mGOpI/wwXM/yJVvvDL02k9HiMqS1g49a6KV9l14UcUW5fsuvOizwD0XvbSv3la+GeC9qjpbaPvxHRH5e1V9cqkXWbDoANOZafL+0t0BFCXv50NbWVRtb4qnHit6VnDbFbdZkGimsJe0duBZE21SbFE+y8KGgsnC4wDb6/nCGhQ0Z0
u+XhKoGngsWHSA0d5REs7Sf5SCkHASoa0ssr0pIQl7SWsYK7JirpB6qtqifN+FF32t3pSUiLjA08D5wJ+pqrUo7wbr16xnMDnIdCZYBbU4FaWF/4aSQ6GuLLK9KSEKa0mrrciqR8tblKuqB1wuIqPAgyJyiaq+uNRrLFh0gP5EPzdfcjN/sudPgtVQeqrIrSieH6R4brrkplDv3Nu1N8VPp5l76im86Wnc0VEGr7yy85r4xUXUVmTFQ8tblBep6rSIPAZsBCxYdIPN529GVU/ts/CDFKQgp/ZZROTOvVV7U1SV4w88wJG77kbz+QXN/FbdfBMjW7dGvoNtx4nKiqx4aXWL8jOAXCFQ9APvA/6o2ussWHQIEWHrW7bywfM+yHf/87s8e+hZAK448wre9aZ3dUUt4PgDD8y3CS894lRzOab+/IsAjN5wQ1jD605RWZEVL61uUf5G4J5C3cIBvqaqD1V7kWgrD1JvsvHxcd2zZ0/YwzAR5KfT/OvP34i4bsUDiNTzOPdv7reUVLst2GeRX7giq7v2WdT8Te678KJtVGlRftFL+7Y3c3DVhDKzEJEbgTuAi4ANqmoRwDRk7qmn0Hx+wYyilCST+JkMqV27otUVthuEvSIrnqxFecGLwFbgf4b0/qaKuBWJvenpoEax5Cd55I8da8+AzOmsyWDNChvutu+78KLTWpQ3uoO7XqEEC1XdB+V3G5twxbVI7I6Ogltl1Y3rkhgba8+AjGmCQmCwFuW1EJEJYALgnHPOCXk0nS+uReLBK69EEgk0l6tYs5BEgoENG0IYXZ2s8Z6JkJYFCxH5R2BNmac+rap/W+vXUdVJYBKCAneThmfK8NNpjtx1N87AwGkXXEkmcQYGOHLX3Qxv2hS5lJTT38+qm29i6s+/iPT1odnsqVlRTw968iSrb70lcuMuyxrvmQhqWbBQ1fe16mub1oh7kXh4yxZSTz/N8b97KLjgqgYXVRFGrv8Qw1vCac++bNZ4z0SQE/YATHTEvUh84sEHSe3aTc+b30zyrLNIrllD8qyz6Hnzm0nt2s2JBx8Me4jV1dp4LxtKjdN0sVCChYhsEZEDwDuAh0Xk0TDGYRaKc5G4NIXm9PbirliBOzqKu2IFTm/vfArNT1dqtxMRxcZ75XY7Q/C4nw+WoBrTRqEEC1V9UFXXqmqvqr5BVT8QxjjMQoNXXgmOQ/7YMbzpabyZGdT355+PcpG4mEIr1lrU9/FmZua/D1wXzedJ7doV8kirsMZ7JqIivxrKnK4VeyBUlROPPII/O4t39GiQ63ccBHBXr8YZHETT6bqKxO3Ys1FMoalqECCmpoIG/YW6hQAyOEju6NGmvm/TWeM9E1EWLGKklXsgiktmE2NjOMkk+SNHQBX1ffKHDuGOjXHmJ3+Tka1bIzHexYopNG96Gu/wYdRxFnxtVUVnZsjsewmiXOe2xnsmoixYRFClO/FW7YFYvGTWHRvDGRnBn5ubv1t3ensZ3rRpWRf3du7ZKKbQvKmp0wIFBE151HGYe+IJ/PQno7uE1hrvmYiyYBEhS92Jr/ylj3H0y19uyR6IYr6f3h5mczN4vofruAwODeJIUNbyZmeXtWS2WXs2ak1hOf39DL373Rz7yleQxKK/1qrg+yTPOAP1/cgu/Z0X9lGoxpRhwSJClroTP/yFPwMguabcPsfG9kB4x6aZO3mCQ/mDKBqcxivBWRir+lcz1ju67CWzje7ZqCeF1ffWt+IMr0BTafD9BfssEmecgTs2hnf8eGSX/s6zxnsmgixYRES1O3FJJslPTZE480zEqbCIrc49ELvS++jLzyCJRDCTKFyDVZWp1GEAht3EspbMNrpno54Uljs2ijs8grPmjfMpNFwXZ3Dw1M8sokt/y7LGeyZCbFNeRCxe+rmY9PQABBfBSuq4EKbzaSYT38N3HVx/4XMigiMOx2YPg+ssa8lsI3s2ak1hLd4zUewPhect2GdRDBRRXvprTNRZsIiIanfizuAgQN
DzqIx6L4S7D+4mnfD5/nvOoCfj43gL22+5PvRmfI5svXpZtZDSxn7LHW/VwJlMlt0zUewP5adSp72v5nL4qRSrbr4pusVtYyLMgkVEVLsTF8fBGRlB87mmXginM9N46vHc+pU8/r4zcDyl56RHXypPz0kPx1O+ed0Ir7/nrcv6uktduP1MhvyRI/RfcTlzTz2Fn07jp9PMPPYY0zt2MPfkkxWDzLwKKayRrVtZfestqOfhzc7iHT+ONzuLeh6rb71lWUt/jTGnWM0iImppse2OjLDqv/wyR//6y/iZzIKib70XwtHeUVxxQYTnNqxi7+VjrPvRLANzHqlBl1d/YohjpHhf3/Lz/MXxHLnrbvxMBs3n8U+exD9xAnd4mNSTTzG3azdaSCc5AwPgefiZDF4hELijo+WX61ZIYYkIozfcwPCmTaR27SJ/7BiJsTEGNmywGYUxDbAzuCNk+utfny/qlgaM4sxh9a23MHrDDfjpdNMuhOl8mo8+9FFcxyXpnB6kcn4Oz/e470P30Z+o7z2K4z3xzW8x+/89hjs6htPbC0D+2DHyhw4BkDzzTNyxMdT3yfzoR+D7JM4887SgYOdpm5iKdV95S0NFSK0pFKe/n6Frr2V082aGrr22oQtmf6KfbRdvI51Lk/MXpn5yfo50Ls22i7fVHSiK4x3YsIH0c8+RWLlqPlCo7+NNTQWzI9clf+QI6vuI45BcvRog2GS3qD+V1R6MaT9LQ0VIWCmUzedvBmD73u1k8hl89XHEIeEkmLhsYv75RpTbd+HPzQVbOoppJt/Hn5sLVjIVZhO5Q4fIHzmC09PTcMrNVGAn8pkaWLCIoOLMoV1EhC0XbGHjuRvZfXA305lpRntHWb9mfdUZRTqfruk1ZVd7eV6wca5IdcHnuGNjILDiZ95P74U/abWHZrMT+cwyWLAw8/oT/Vyz9pqaPjeVS3Hns3fy6KvBUSRJJ0nCSZBwEmy7eBubz9+8oDBdXO2lhdkDnoe/eBmwyOkrwhJJBt9xVbTbc8SVnchnlsGChVkWVWXHKzv4/DOf51jmGILM7/pe3bea3kQvk89PArDlglPtXQc2bMBPpci99lpQ5VMNWojn86gq4gYrsor7ScA20bVUrSfyXbLVUlIGsAK3WaYdr+zgi//8RY5nj5OQYCbhiIMgHE4fZi47R3+yn+17t5POn9phPfP3fx/0ePJ9tDCDENcFxwkCRi5HYtWqBbutrZDdQnYin1kmCxamZul8mu17t88vACxNMxVnGFMnp3DFJe/n2X1wN3CqfUdi9eqgt1XhnAz1vCBYFGYVJFzbRNcudiKfWSZLQ5ma7T64m7yfr/i8IKgqqVwKX32mM9PAqZVQ7tAQTk8PbslZGcVGf97sLCMf/jDJtWutkN0OdiKfWSYLFqZmxdYgrlS+yChKXvMkJMFo7yhw+koocRzcFSsWvE5USa5dy+jmxpfpmhrYiXxmmSwNZWpWbA0ymBxEJJhFLCYIKCScBOvXrAca60BrWqR4Il92NggMpYon8r3zN6y4bebZzMLUbP2a9SScBJ56rO5bzeH0YRw9dYSpFv4TkQW7vmvpe2WrnkIQ1xP5bBNhKCxYmJoVW4NMPj/JQHKAMziDqZNT+OoHYUKVlX0r+fXLfn3Bru9iB9pqfa+sRtFmcTuRzzYRhsqChVmW0tYgCSfB6r7V8z2lPrDuA/zXC38VfeYFju/92wVnZi/uQNuMjrmmSeJyIp9tIgyVdZ01dVnc5mP8DeNkv/H3Vc/MbmbHXNNFsimYfE+QIqtUkPfzMPFYNGdFgVhPe2xmYeqyuDVIaXv1pc7MbnffK9MhipsIkxVuLNxkUHd59TvxmCXFkK2GMg2r98xsY2pmmwhDZ8HCNKzeM7ONqZltIgydpaFMw8q2Hz/tk8qfmR03fjrN3FNP4U1PLyjgmxazTYShs2BhGtYNm+5UleMPPFC1gF9kQaXJipsIi6uhSgNGcRPhNZ+KcnE79ixYmIZ1w6
a74w88UFMBf7lBxSxDXDcRdghbOmuaonQ1VKVNd6M33BDiCOvnp9P868/fiLhuxWConse5f3M/Jx55pGN/DpGRTcVjE+HpYn2XYDML0xSdvOmu3PnhpSSZxM9kmH3iOzWtChvetMlSUo2IyybCDmPBwjSFiDB6ww0Mb9rUcZvuai3gp555uqagktq1y/aamNixYGGaqhM33dVawAe6ZlWY6T62z8KYKkoL+OUUC/j9b397x68KM93LgoUxVRS75vqp1GkBo/Ss8BVXX11TUInzqjDTvSwNZVpqccPB9WvWz59zESe1FPBFxFqxm45lS2dNS6gqO17Zwfa928n7+fnjWBNOgm0Xb2Pz+Ztjud+gWtdc22dhlhDrP/hQgoWIfBa4HsgCPwJuUtXpaq+zYBEfD778IJPPT9Kf7CfpnLrDzvk50rk0E5dNsOWCLSGOsLWsFbspw4LFst9U5P3ATlXNi8gfAajq71R7nQWLeEjn03z0oY/iOu6CQFGU83N4vsd9H7ovlikpY+oU62ARSoFbVb+pqvnCh08Ca8MYh2mN3Qd3k/fzZQMFQNJJkvfz7D64u80jM8bUKwoF7puB/1XpSRGZACYAzjnnnHaNqaO0u6nddGYaT5feb+Crz3SmaubRGBMRLQsWIvKPwJoyT31aVf+28DmfBvLAvZW+jqpOApMQpKFaMNSOFVaxdbR3FFeW3m/giMNo72jT39sY0xotCxaq+r6lnheRjwMfAn5a47QkK0Zq7ZTabOvXrCfhJMj5uYo1i4STYP2a9U1/b2NMa4RSsxCRjcDvAB9W1VQYY+h0YR512p/oZ9vF20jn0uT8hRvUiquhtl28zYrbxsRIWDu4vwCsAL4lIs+JyBdDGkfHCvuo083nb2bisgk832M2O8uJzAlms7N4vsfEZRNsPn9zS97XGNMaoRS4VfX8MN63m4R91KmIsOWCLWw8d2NH7OA2pttFYTWUaYGoHHXan+jnmrXXtPQ9jDGtZ40EO1StnVKtqZ0xphYWLDpUrZ1SrQWFMaYWlobqYJ181Kkxpr2s62wXsKZ2xkRCrHtD2cyiC3TiUafGmPaymoUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKls6aWGr36X/GdDsLFiZWwjr9z5huZ8HCxEpYp/8Z0+2sZmFiI8zT/4zpdhYsTE38dJqZxx5jescOZh57LJQLctin/xnTzSwNZZYUpRpB2Kf/GdPNLFiYJUWpRhCV0/+M6UaWhjIVRa1GYKf/GRMeCxZdbqlaRNRqBHb6nzHhsTRUl6qlFhHFGoGd/mdMOCxYdKlaahHuqlWRqxGICKM33MDwpk12+p8xbWTBogvVWot481//1XyNoFwqqt4aQTNaddjpf8a0lwWLLlSsRZTOKEpJMomfyXDyhRdYdfNN8zOQ0oAxXyP41V+p+cIfpWW4xpjlsWDRhZZTi6hcI3AZ2LCeI3/118FjNVz4o7QM1xizPBYsutBy9itUqhHkfvxjjnzprmDGUTKTqHThrzX1Nbxpk9UejIkgWzrbherZr1CsEYxu3szAhg0c/fK9y9p/EbVluMaY5bFg0YUa3a9Qz4U/istwjTG1szRUl2pkv0I9F35r1WFMvFmw6FKN7Feo58Jfmvpq5jJcY0x7WLDocvXsV6jnwl9MfS21DHf1rbdYcduYiLKahVm2emseI1u3svrWW1DPw5udxTt+HG92FvU8a9VhTMSJqoY9hpqNj4/rnj17wh6GobENdn46ba06TDeK9Y5TCxamIXbhN6ZmsQ4WVrMwDbEeTcZ0B6tZGGOMqcqChTHGmKosWBhjjKkqlGAhIp8RkedF5DkR+aaInBXGOIwxxtQmrJnFZ1X1MlW9HHgI+L9CGocxxpgahBIsVPVEyYeDQHzW7xpjTBcKbemsiPwB8F+A48B1S3zeBDABcM4557RncMYYYxZo2aY8EflHYE2Zpz6tqn9b8nm/C/Sp6u9X+5q2Kc
8YE2O2Ka8cVX1fjZ/6FeBhoGqwMMYYE46wVkNdUPLhh4GXwhiHMcaY2oRVs/hDEflJwAf+DbglpHEYY4ypQawaCYrIYYLgsthqYKrNw6lHXMYJ8RlrXMYJ8RlrXMYJ8RnrauAlVd0Y9kDqFatgUYmI7FHV8bDHUU1cxgnxGWtcxgnxGWtcxgnxGWtcxrkUa/dhjDGmKgsWxhhjquqUYDEZ9gBqFJdxQnzGGpdxQnzGGpdxQnzGGpdxVtQRNQtjjDGt1SkzC2OMMS1kwcIYY0xVHRMs4nJGhoh8VkReKoz1QREZDXtMlYjIjSKyV0R8EYncsj8R2SgiPxCRV0Tk9rDHU4mI3CUih0TkxbDHshQROVtEvi0i+wp/7r8Z9pjKEZE+EdklIv9cGOd/C3tMSxERV0SeFZGHwh5LIzomWBCfMzK+BVyiqpcBPwR+N+TxLOVFYCvweNgDWUxEXODPgA8CbwV+QUTeGu6oKtoOxGEzVh74LVW9CLgK+K8R/ZlmgPeq6tuAy4GNInJVyGNaym8C+8IeRKM6JljE5YwMVf2mquYLHz4JrA1zPEtR1X2q+oOwx1HBBuAVVd2vqlngPuDnQh5TWar6OHA07HFUo6qvqeozhd/PEFzg3hTuqE6ngdnCh8nC/5H89y4ia4GfBf4y7LE0qmOCBQRnZIjIfwAfI7ozi1I3A38f9iBi6k3Af5R8fIAIXtjiSkTWAVcAT4U7kvIKqZ3ngEPAt1Q1kuME/hT4bYI+eLEWq2AhIv8oIi+W+f/nAFT106p6NnAvcFtUx1n4nE8TTPvvDWuchXFUHWtElTsbIJJ3l3EjIkPA14FPLpqxR4aqeoWU81pgg4hcEvaYFhORDwGHVPXpsMfSDKGdlFePuJyRUW2cIvJx4EPAT2vIG12W8TONmgPA2SUfrwV+HNJYOoaIJAkCxb2q+kDY46lGVadF5DGCmlDUFhC8C/iwiGwC+oBhEfmyqv5SyOOqS6xmFkuJyxkZIrIR+B3gw6qaCns8MbYbuEBEzhWRHuCjwDdCHlOsiYgAXwL2qeofhz2eSkTkjOIqQhHpB95HBP+9q+rvqupaVV1H8PdzZ1wDBXRQsCA4I+NFEXkeeD/BCoQo+gKwAvhWYZnvF8MeUCUiskVEDgDvAB4WkUfDHlNRYZHAbcCjBIXYr6nq3nBHVZ6IfBX4PvCTInJARH4l7DFV8C7gl4H3Fv5uPle4K46aNwLfLvxb301Qs4j1stQ4sHYfxhhjquqkmYUxxpgWsWBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qyYGG6goh4haWgL4rI/SIyUHh8jYjcJyI/EpF/EZFHROQthef+QUSm494t1JhmsGBhukVaVS9X1UuALHBLYRPag8BjqvoTqvpW4PeANxRe81mCfQfGdD0LFqYbPQGcD1wH5FR1fmOkqj6nqk8Ufv9PwEw4QzQmWixYmK4iIgmCMzBeAC4BOqLJmzGtZsHCdIv+QkvrPcC/E/RAMsbUKFZdZ41pQLrQ0nqeiOwFfj6k8RgTKzazMN1sJ9ArIr9WfEBE1ovItSGOyZhIsmBhulbhLJEtwM8Uls7uBe6gcC6GiDwB3A/8dKFb7AdCG6wxIbOus8YYY6qymYUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKgoUxxpiq/n9jPPDBLiPgsQAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 402.375x360 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"do_KmeansPCA()"
]
},
{
"cell_type": "code",
"execution_count": 76,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(tensor([[1.0000, 0.0000, 1.0000, 0.9998]], grad_fn=<SigmoidBackward>),\n",
" tensor([[0., 0., 0., 0.]], grad_fn=<CumprodBackward>),\n",
" tensor([[2]]),\n",
" None,\n",
" None)"
]
},
"execution_count": 76,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.forward(x_path=x_path)"
]
},
{
"cell_type": "code",
"execution_count": 69,
"metadata": {},
"outputs": [
{
"ename": "SyntaxError",
"evalue": "invalid syntax (<ipython-input-69-c543913fa78f>, line 1)",
"output_type": "error",
"traceback": [
"\u001b[0;36m File \u001b[0;32m\"<ipython-input-69-c543913fa78f>\"\u001b[0;36m, line \u001b[0;32m1\u001b[0m\n\u001b[0;31m import ..models\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n"
]
}
],
"source": [
"import ..models"
]
},
{
"cell_type": "code",
"execution_count": 63,
"metadata": {},
"outputs": [],
"source": [
"x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)"
]
},
{
"cell_type": "code",
"execution_count": 65,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'MultiheadAttention' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-65-f85a99af33ee>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mself\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMM_CoAttn_Transformer_Surv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0momic_sizes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msig_sizes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m<ipython-input-62-9e5f322e30a0>\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, omic_sizes, n_classes, model_size_wsi, model_size_omic, dropout)\u001b[0m\n\u001b[1;32m 28\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 29\u001b[0m \u001b[0;31m### Multihead Attention\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 30\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcoattn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMultiheadAttention\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membed_dim\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m256\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_heads\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 31\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[0;31m### Transformer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mNameError\u001b[0m: name 'MultiheadAttention' is not defined"
]
}
],
"source": [
"self = MM_CoAttn_Transformer_Surv(omic_sizes=sig_sizes)"
]
},
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'sig_size' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-52-097a03ed0c40>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mself\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMM_CoAttn_Surv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msig_sizes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msig_sizes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mx_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m500\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1024\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFloatTensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0msig_feats\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFloatTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0msize\u001b[0m \u001b[0;32min\u001b[0m \u001b[0msig_sizes\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mx_path\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_net\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_path\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munsqueeze\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m<ipython-input-43-4469ba9e1eea>\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, sig_sizes, n_classes, model_size_wsi, model_size_omic, dropout)\u001b[0m\n\u001b[1;32m 19\u001b[0m \u001b[0mhidden\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize_dict_omic\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmodel_size_omic\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0msig_networks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 21\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0minput_dim\u001b[0m \u001b[0;32min\u001b[0m \u001b[0msig_size\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 22\u001b[0m \u001b[0mfc_omic\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mSNN_Block\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdim1\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minput_dim\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdim2\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mhidden\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 23\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhidden\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mNameError\u001b[0m: name 'sig_size' is not defined"
]
}
],
"source": [
"self = MM_CoAttn_Surv(sig_sizes=sig_sizes)\n",
"x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
"sig_feats = [torch.randint(10, size=(size,)).type(torch.FloatTensor) for size in sig_sizes]\n",
"\n",
"x_path = self.attention_net(x_path).unsqueeze(1)\n",
"x_omic = torch.stack([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)]).unsqueeze(1)\n",
"\n",
"out, attention_weights = self.coattn(x_omic, x_path, x_path)\n",
"out = self.transformer(out)\n",
"out = self.conv(out.squeeze(1).T.unsqueeze(0))\n",
"#out = self.classifier(out.squeeze(0).squeeze(1))"
]
},
{
"cell_type": "code",
"execution_count": 471,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 256, 1])"
]
},
"execution_count": 471,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out.shape"
]
},
{
"cell_type": "code",
"execution_count": 472,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[[ 0.5998, 1.9873, -1.1435, ..., -0.0048, 0.2963, 1.1112]],\n",
"\n",
" [[-0.4201, -0.1456, 0.2057, ..., -0.2175, 0.4188, 0.4702]],\n",
"\n",
" [[ 1.0294, 3.1634, 0.4595, ..., 1.2059, 0.5845, 1.4114]],\n",
"\n",
" [[-1.1435, -1.1435, -1.1435, ..., 0.1951, -0.4378, 0.2051]],\n",
"\n",
" [[ 0.9948, 1.1596, 2.1419, ..., -0.1225, 1.3597, -0.3037]],\n",
"\n",
" [[ 0.4019, -1.1435, -0.1522, ..., -0.2058, 0.0351, -1.1435]]],\n",
" grad_fn=<UnsqueezeBackward0>)"
]
},
"execution_count": 472,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x_omic"
]
},
{
"cell_type": "code",
"execution_count": 474,
"metadata": {},
"outputs": [],
"source": [
"self = MM_CoAttn_Surv(sig_sizes=sig_sizes)\n",
"x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
"sig_feats = [torch.randint(10, size=(size,)).type(torch.FloatTensor) for size in sig_sizes]\n",
"\n",
"x_path = self.attention_net(x_path).unsqueeze(1)\n",
"x_omic = torch.stack([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)]).unsqueeze(1)\n",
"out, attention_weights = self.coattn(x_omic, x_path, x_path)\n",
"\n",
"out = self.transformer(out)\n"
]
},
{
"cell_type": "code",
"execution_count": 491,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1536])"
]
},
"execution_count": 491,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"torch.cat([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)]).shape"
]
},
{
"cell_type": "code",
"execution_count": 484,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([6, 1, 512])"
]
},
"execution_count": 484,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"torch.cat([out, out], axis=2).shape"
]
},
{
"cell_type": "code",
"execution_count": 455,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([6, 1, 256])"
]
},
"execution_count": 455,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out.shape"
]
},
{
"cell_type": "code",
"execution_count": 452,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([6, 1, 256])"
]
},
"execution_count": 452,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out.shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 423,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 8, 6, 500])"
]
},
"execution_count": 423,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights.shape"
]
},
{
"cell_type": "code",
"execution_count": 415,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[[0.0018, 0.0020, 0.0012, ..., 0.0016, 0.0025, 0.0031],\n",
" [0.0026, 0.0015, 0.0016, ..., 0.0021, 0.0021, 0.0016],\n",
" [0.0019, 0.0014, 0.0011, ..., 0.0020, 0.0013, 0.0025],\n",
" [0.0016, 0.0013, 0.0023, ..., 0.0009, 0.0015, 0.0027],\n",
" [0.0015, 0.0013, 0.0023, ..., 0.0026, 0.0019, 0.0026],\n",
" [0.0013, 0.0019, 0.0025, ..., 0.0022, 0.0020, 0.0021]]],\n",
" grad_fn=<DivBackward0>)"
]
},
"execution_count": 415,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights_0"
]
},
{
"cell_type": "code",
"execution_count": 416,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[[0.0018, 0.0020, 0.0012, ..., 0.0016, 0.0025, 0.0031],\n",
" [0.0026, 0.0015, 0.0016, ..., 0.0021, 0.0021, 0.0016],\n",
" [0.0019, 0.0014, 0.0011, ..., 0.0020, 0.0013, 0.0025],\n",
" [0.0016, 0.0013, 0.0023, ..., 0.0009, 0.0015, 0.0027],\n",
" [0.0015, 0.0013, 0.0023, ..., 0.0026, 0.0019, 0.0026],\n",
" [0.0013, 0.0019, 0.0025, ..., 0.0022, 0.0020, 0.0021]]],\n",
" grad_fn=<DivBackward0>)"
]
},
"execution_count": 416,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"softmax(attention_weights_1, dim=-1).sum(axis=1) / 8"
]
},
{
"cell_type": "code",
"execution_count": 411,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 1, 6, 500])"
]
},
"execution_count": 411,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"softmax(attention_weights_1, dim=-1).shape"
]
},
{
"cell_type": "code",
"execution_count": 339,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor(1.0000, grad_fn=<SumBackward0>)"
]
},
"execution_count": 339,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights_0[0][0].sum()"
]
},
{
"cell_type": "code",
"execution_count": 396,
"metadata": {},
"outputs": [],
"source": [
"test = softmax(attention_weights_2, dim=-1)"
]
},
{
"cell_type": "code",
"execution_count": 402,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([0.0024, 0.0030, 0.0019, 0.0018, 0.0038, 0.0015, 0.0020, 0.0016, 0.0015,\n",
" 0.0019, 0.0015, 0.0035, 0.0026, 0.0017, 0.0014, 0.0013, 0.0023, 0.0020,\n",
" 0.0017, 0.0010], grad_fn=<SliceBackward>)"
]
},
"execution_count": 402,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights_0[0][0][:20]"
]
},
{
"cell_type": "code",
"execution_count": 404,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([0.0028, 0.0033, 0.0019, 0.0013, 0.0042, 0.0016, 0.0024, 0.0018, 0.0019,\n",
" 0.0024, 0.0016, 0.0033, 0.0022, 0.0014, 0.0016, 0.0013, 0.0023, 0.0021,\n",
" 0.0013, 0.0013], grad_fn=<SliceBackward>)"
]
},
"execution_count": 404,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"test[0][0][:20]"
]
},
{
"cell_type": "code",
"execution_count": 366,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[[False, False, False, ..., False, False, False],\n",
" [False, False, False, ..., False, False, False],\n",
" [False, False, False, ..., False, False, False],\n",
" [False, False, False, ..., False, False, False],\n",
" [False, False, False, ..., False, False, False],\n",
" [False, False, False, ..., False, False, False]]])"
]
},
"execution_count": 366,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"torch.eq(attention_weights_0, test)"
]
},
{
"cell_type": "code",
"execution_count": 320,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 8, 6, 500])"
]
},
"execution_count": 320,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights_1.shape"
]
},
{
"cell_type": "code",
"execution_count": 318,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 6, 500])"
]
},
"execution_count": 318,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights_2.shape"
]
},
{
"cell_type": "code",
"execution_count": 282,
"metadata": {},
"outputs": [],
"source": [
"out = self.classifier(out.squeeze(0).squeeze(1))"
]
},
{
"cell_type": "code",
"execution_count": 284,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([ 0.2832, 0.1548, -0.0972, -0.2801], grad_fn=<AddBackward0>)"
]
},
"execution_count": 284,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out"
]
},
{
"cell_type": "code",
"execution_count": 269,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[0.0018, 0.0019, 0.0019, ..., 0.0019, 0.0022, 0.0018],\n",
" [0.0020, 0.0020, 0.0021, ..., 0.0021, 0.0020, 0.0020],\n",
" [0.0019, 0.0022, 0.0021, ..., 0.0019, 0.0019, 0.0020],\n",
" [0.0021, 0.0022, 0.0019, ..., 0.0018, 0.0020, 0.0021],\n",
" [0.0019, 0.0019, 0.0020, ..., 0.0020, 0.0018, 0.0019],\n",
" [0.0021, 0.0021, 0.0019, ..., 0.0019, 0.0021, 0.0021]],\n",
" grad_fn=<SelectBackward>)"
]
},
"execution_count": 269,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"attention_weights[0]"
]
},
{
"cell_type": "code",
"execution_count": 241,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(tensor([[[-0.0504, 0.0757, -0.0366, ..., -0.0275, -0.0294, 0.1300]],\n",
" \n",
" [[-0.0500, 0.0762, -0.0352, ..., -0.0253, -0.0289, 0.1311]],\n",
" \n",
" [[-0.0497, 0.0772, -0.0321, ..., -0.0246, -0.0288, 0.1301]],\n",
" \n",
" [[-0.0491, 0.0794, -0.0337, ..., -0.0260, -0.0278, 0.1281]],\n",
" \n",
" [[-0.0483, 0.0781, -0.0343, ..., -0.0246, -0.0301, 0.1321]],\n",
" \n",
" [[-0.0499, 0.0768, -0.0305, ..., -0.0257, -0.0280, 0.1321]]],\n",
" grad_fn=<AddBackward0>),\n",
" tensor([[[0.0019, 0.0019, 0.0019, ..., 0.0020, 0.0021, 0.0021],\n",
" [0.0017, 0.0020, 0.0020, ..., 0.0019, 0.0019, 0.0018],\n",
" [0.0019, 0.0018, 0.0019, ..., 0.0019, 0.0019, 0.0021],\n",
" [0.0020, 0.0020, 0.0019, ..., 0.0020, 0.0021, 0.0019],\n",
" [0.0017, 0.0023, 0.0021, ..., 0.0019, 0.0020, 0.0020],\n",
" [0.0021, 0.0021, 0.0020, ..., 0.0021, 0.0021, 0.0020]]],\n",
" grad_fn=<DivBackward0>))"
]
},
"execution_count": 241,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"self.coattn(x_omic, x_path, x_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"h"
]
},
{
"cell_type": "code",
"execution_count": 208,
"metadata": {},
"outputs": [],
"source": [
"sig_feats = [torch.randn(size) for size in sig_sizes]\n",
"x_omic = torch.stack([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)])\n"
]
},
{
"cell_type": "code",
"execution_count": 204,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 206,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([6, 256])"
]
},
"execution_count": 206,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x_omic.shape"
]
},
{
"cell_type": "code",
"execution_count": 166,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'sig1' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-166-aea4cb4c555c>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0msig1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig3\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig4\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig5\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig6\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mNameError\u001b[0m: name 'sig1' is not defined"
]
}
],
"source": [
"sig1, sig2, sig3, sig4, sig5, sig6 = torch.randn()"
]
},
{
"cell_type": "code",
"execution_count": 158,
"metadata": {},
"outputs": [],
"source": [
"src = torch.rand(6, 1, 256)\n",
"out = transformer(src)\n",
"out = out.squeeze(1).T.unsqueeze(0)"
]
},
{
"cell_type": "code",
"execution_count": 163,
"metadata": {},
"outputs": [],
"source": [
"conv = nn.Conv1d(in_channels=256, out_channels=256, kernel_size=4, stride=4)"
]
},
{
"cell_type": "code",
"execution_count": 164,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 256, 6])"
]
},
"execution_count": 164,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out.shape"
]
},
{
"cell_type": "code",
"execution_count": 165,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 256, 1])"
]
},
"execution_count": 165,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"conv(out).shape"
]
},
{
"cell_type": "code",
"execution_count": 112,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1536])"
]
},
"execution_count": 112,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x.reshape(-1).shape"
]
},
{
"cell_type": "code",
"execution_count": 106,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"3072"
]
},
"execution_count": 106,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"256 * 12"
]
},
{
"cell_type": "code",
"execution_count": 88,
"metadata": {},
"outputs": [],
"source": [
"net = Attn_Net_Gated()\n",
"wsi_feats = torch.randn(500, 1, 256)\n",
"sig_feats = torch.randn(6, 1, 256)"
]
},
{
"cell_type": "code",
"execution_count": 89,
"metadata": {},
"outputs": [],
"source": [
"multihead_attn = nn.MultiheadAttention(embed_dim=256, num_heads=8)"
]
},
{
"cell_type": "code",
"execution_count": 90,
"metadata": {},
"outputs": [],
"source": [
"out, coattn_weights = multihead_attn(sig_feats, wsi_feats, wsi_feats)"
]
},
{
"cell_type": "code",
"execution_count": 96,
"metadata": {},
"outputs": [],
"source": [
"cotton = DenseCoAttn(dim1=256, dim2=256, num_attn=8, num_none=3, dropout=0.3)"
]
},
{
"cell_type": "code",
"execution_count": 100,
"metadata": {},
"outputs": [],
"source": [
"from math import sqrt\n",
"wsi_feats = torch.randn(1, 500, 256)\n",
"sig_feats = torch.randn(1, 6, 256)\n",
"_ = cotton(wsi_feats, sig_feats)"
]
},
{
"cell_type": "code",
"execution_count": 103,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 6, 256])"
]
},
"execution_count": 103,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_[0].shape"
]
},
{
"cell_type": "code",
"execution_count": 104,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([1, 500, 256])"
]
},
"execution_count": 104,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_[1].shape"
]
},
{
"cell_type": "code",
"execution_count": 94,
"metadata": {},
"outputs": [],
"source": [
"\n",
"import torch\n",
"import torch.nn as nn\n",
"import torch.nn.functional as F\n",
"\n",
"\n",
"def qkv_attention(query, key, value, mask=None, dropout=None):\n",
"\td_k = query.size(-1)\n",
"\tscores = torch.matmul(query, key.transpose(-2,-1)) / sqrt(d_k)\n",
"\tif mask is not None:\n",
"\t\tscores.data.masked_fill_(mask.eq(0), -65504.0)\n",
"\t\n",
"\tp_attn = F.softmax(scores, dim=-1)\n",
"\tif dropout is not None:\n",
"\t\tp_attn = dropout(p_attn)\n",
"\n",
"\treturn torch.matmul(p_attn, value), p_attn\n",
"\n",
"\n",
"class DenseCoAttn(nn.Module):\n",
"\n",
"\tdef __init__(self, dim1, dim2, num_attn, num_none, dropout, is_multi_head=False):\n",
"\t\tsuper(DenseCoAttn, self).__init__()\n",
"\t\tdim = min(dim1, dim2)\n",
"\t\tself.linears = nn.ModuleList([nn.Linear(dim1, dim, bias=False),\n",
"\t\t\t\t\t\t\t\t\t nn.Linear(dim2, dim, bias=False)])\n",
"\t\tself.nones = nn.ParameterList([nn.Parameter(nn.init.xavier_uniform_(torch.empty(num_none, dim1))),\n",
"\t\t\t\t\t\t\t\t\t nn.Parameter(nn.init.xavier_uniform_(torch.empty(num_none, dim2)))])\n",
"\t\tself.d_k = dim // num_attn\n",
"\t\tself.h = num_attn\n",
"\t\tself.num_none = num_none\n",
"\t\tself.is_multi_head = is_multi_head\n",
"\t\tself.attn = None\n",
"\t\tself.dropouts = nn.ModuleList([nn.Dropout(p=dropout) for _ in range(2)])\n",
"\n",
"\tdef forward(self, value1, value2, mask1=None, mask2=None):\n",
"\t\tbatch = value1.size(0)\n",
"\t\tdim1, dim2 = value1.size(-1), value2.size(-1)\n",
"\t\tvalue1 = torch.cat([self.nones[0].unsqueeze(0).expand(batch, self.num_none, dim1), value1], dim=1)\n",
"\t\tvalue2 = torch.cat([self.nones[1].unsqueeze(0).expand(batch, self.num_none, dim2), value2], dim=1)\n",
"\t\tnone_mask = value1.new_ones((batch, self.num_none))\n",
"\n",
"\t\tif mask1 is not None:\n",
"\t\t\tmask1 = torch.cat([none_mask, mask1], dim=1)\n",
"\t\t\tmask1 = mask1.unsqueeze(1).unsqueeze(2)\n",
"\t\tif mask2 is not None:\n",
"\t\t\tmask2 = torch.cat([none_mask, mask2], dim=1)\n",
"\t\t\tmask2 = mask2.unsqueeze(1).unsqueeze(2)\n",
"\n",
"\t\tquery1, query2 = [l(x).view(batch, -1, self.h, self.d_k).transpose(1, 2) \n",
"\t\t\tfor l, x in zip(self.linears, (value1, value2))]\n",
"\n",
"\t\tif self.is_multi_head:\n",
"\t\t\tweighted1, attn1 = qkv_attention(query2, query1, query1, mask=mask1, dropout=self.dropouts[0])\n",
"\t\t\tweighted1 = weighted1.transpose(1, 2).contiguous()[:, self.num_none:, :]\n",
"\t\t\tweighted2, attn2 = qkv_attention(query1, query2, query2, mask=mask2, dropout=self.dropouts[1])\n",
"\t\t\tweighted2 = weighted2.transpose(1, 2).contiguous()[:, self.num_none:, :]\n",
"\t\telse:\n",
"\t\t\tweighted1, attn1 = qkv_attention(query2, query1, value1.unsqueeze(1), mask=mask1, \n",
"\t\t\t\tdropout=self.dropouts[0])\n",
"\t\t\tweighted1 = weighted1.mean(dim=1)[:, self.num_none:, :]\n",
"\t\t\tweighted2, attn2 = qkv_attention(query1, query2, value2.unsqueeze(1), mask=mask2, \n",
"\t\t\t\tdropout=self.dropouts[1])\n",
"\t\t\tweighted2 = weighted2.mean(dim=1)[:, self.num_none:, :]\n",
"\t\tself.attn = [attn1[:,:,self.num_none:,self.num_none:], attn2[:,:,self.num_none:,self.num_none:]]\n",
"\n",
"\t\treturn weighted1, weighted2\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 417,
"metadata": {},
"outputs": [],
"source": [
"from torch.nn.functional import *\n",
"\n",
"def multi_head_attention_forward(\n",
" query: Tensor,\n",
" key: Tensor,\n",
" value: Tensor,\n",
" embed_dim_to_check: int,\n",
" num_heads: int,\n",
" in_proj_weight: Tensor,\n",
" in_proj_bias: Tensor,\n",
" bias_k: Optional[Tensor],\n",
" bias_v: Optional[Tensor],\n",
" add_zero_attn: bool,\n",
" dropout_p: float,\n",
" out_proj_weight: Tensor,\n",
" out_proj_bias: Tensor,\n",
" training: bool = True,\n",
" key_padding_mask: Optional[Tensor] = None,\n",
" need_weights: bool = True,\n",
" need_raw: bool = True,\n",
" attn_mask: Optional[Tensor] = None,\n",
" use_separate_proj_weight: bool = False,\n",
" q_proj_weight: Optional[Tensor] = None,\n",
" k_proj_weight: Optional[Tensor] = None,\n",
" v_proj_weight: Optional[Tensor] = None,\n",
" static_k: Optional[Tensor] = None,\n",
" static_v: Optional[Tensor] = None,\n",
") -> Tuple[Tensor, Optional[Tensor]]:\n",
" r\"\"\"\n",
" Args:\n",
" query, key, value: map a query and a set of key-value pairs to an output.\n",
" See \"Attention Is All You Need\" for more details.\n",
" embed_dim_to_check: total dimension of the model.\n",
" num_heads: parallel attention heads.\n",
" in_proj_weight, in_proj_bias: input projection weight and bias.\n",
" bias_k, bias_v: bias of the key and value sequences to be added at dim=0.\n",
" add_zero_attn: add a new batch of zeros to the key and\n",
" value sequences at dim=1.\n",
" dropout_p: probability of an element to be zeroed.\n",
" out_proj_weight, out_proj_bias: the output projection weight and bias.\n",
" training: apply dropout if is ``True``.\n",
" key_padding_mask: if provided, specified padding elements in the key will\n",
"            be ignored by the attention. This is a binary mask. When the value is True,\n",
" the corresponding value on the attention layer will be filled with -inf.\n",
" need_weights: output attn_output_weights.\n",
" attn_mask: 2D or 3D mask that prevents attention to certain positions. A 2D mask will be broadcasted for all\n",
" the batches while a 3D mask allows to specify a different mask for the entries of each batch.\n",
" use_separate_proj_weight: the function accept the proj. weights for query, key,\n",
" and value in different forms. If false, in_proj_weight will be used, which is\n",
" a combination of q_proj_weight, k_proj_weight, v_proj_weight.\n",
" q_proj_weight, k_proj_weight, v_proj_weight, in_proj_bias: input projection weight and bias.\n",
" static_k, static_v: static key and value used for attention operators.\n",
" Shape:\n",
" Inputs:\n",
" - query: :math:`(L, N, E)` where L is the target sequence length, N is the batch size, E is\n",
" the embedding dimension.\n",
" - key: :math:`(S, N, E)`, where S is the source sequence length, N is the batch size, E is\n",
" the embedding dimension.\n",
" - value: :math:`(S, N, E)` where S is the source sequence length, N is the batch size, E is\n",
" the embedding dimension.\n",
" - key_padding_mask: :math:`(N, S)` where N is the batch size, S is the source sequence length.\n",
" If a ByteTensor is provided, the non-zero positions will be ignored while the zero positions\n",
" will be unchanged. If a BoolTensor is provided, the positions with the\n",
" value of ``True`` will be ignored while the position with the value of ``False`` will be unchanged.\n",
" - attn_mask: 2D mask :math:`(L, S)` where L is the target sequence length, S is the source sequence length.\n",
" 3D mask :math:`(N*num_heads, L, S)` where N is the batch size, L is the target sequence length,\n",
" S is the source sequence length. attn_mask ensures that position i is allowed to attend the unmasked\n",
" positions. If a ByteTensor is provided, the non-zero positions are not allowed to attend\n",
" while the zero positions will be unchanged. If a BoolTensor is provided, positions with ``True``\n",
" are not allowed to attend while ``False`` values will be unchanged. If a FloatTensor\n",
" is provided, it will be added to the attention weight.\n",
" - static_k: :math:`(N*num_heads, S, E/num_heads)`, where S is the source sequence length,\n",
" N is the batch size, E is the embedding dimension. E/num_heads is the head dimension.\n",
" - static_v: :math:`(N*num_heads, S, E/num_heads)`, where S is the source sequence length,\n",
" N is the batch size, E is the embedding dimension. E/num_heads is the head dimension.\n",
" Outputs:\n",
" - attn_output: :math:`(L, N, E)` where L is the target sequence length, N is the batch size,\n",
" E is the embedding dimension.\n",
" - attn_output_weights: :math:`(N, L, S)` where N is the batch size,\n",
" L is the target sequence length, S is the source sequence length.\n",
" \"\"\"\n",
" tens_ops = (query, key, value, in_proj_weight, in_proj_bias, bias_k, bias_v, out_proj_weight, out_proj_bias)\n",
" if has_torch_function(tens_ops):\n",
" return handle_torch_function(\n",
" multi_head_attention_forward,\n",
" tens_ops,\n",
" query,\n",
" key,\n",
" value,\n",
" embed_dim_to_check,\n",
" num_heads,\n",
" in_proj_weight,\n",
" in_proj_bias,\n",
" bias_k,\n",
" bias_v,\n",
" add_zero_attn,\n",
" dropout_p,\n",
" out_proj_weight,\n",
" out_proj_bias,\n",
" training=training,\n",
" key_padding_mask=key_padding_mask,\n",
" need_weights=need_weights,\n",
" need_raw=need_raw,\n",
" attn_mask=attn_mask,\n",
" use_separate_proj_weight=use_separate_proj_weight,\n",
" q_proj_weight=q_proj_weight,\n",
" k_proj_weight=k_proj_weight,\n",
" v_proj_weight=v_proj_weight,\n",
" static_k=static_k,\n",
" static_v=static_v,\n",
" )\n",
" tgt_len, bsz, embed_dim = query.size()\n",
" assert embed_dim == embed_dim_to_check\n",
" # allow MHA to have different sizes for the feature dimension\n",
" assert key.size(0) == value.size(0) and key.size(1) == value.size(1)\n",
"\n",
" head_dim = embed_dim // num_heads\n",
" assert head_dim * num_heads == embed_dim, \"embed_dim must be divisible by num_heads\"\n",
" scaling = float(head_dim) ** -0.5\n",
"\n",
" if not use_separate_proj_weight:\n",
" if (query is key or torch.equal(query, key)) and (key is value or torch.equal(key, value)):\n",
" # self-attention\n",
" q, k, v = linear(query, in_proj_weight, in_proj_bias).chunk(3, dim=-1)\n",
"\n",
" elif key is value or torch.equal(key, value):\n",
" # encoder-decoder attention\n",
" # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
" _b = in_proj_bias\n",
" _start = 0\n",
" _end = embed_dim\n",
" _w = in_proj_weight[_start:_end, :]\n",
" if _b is not None:\n",
" _b = _b[_start:_end]\n",
" q = linear(query, _w, _b)\n",
"\n",
" if key is None:\n",
" assert value is None\n",
" k = None\n",
" v = None\n",
" else:\n",
"\n",
" # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
" _b = in_proj_bias\n",
" _start = embed_dim\n",
" _end = None\n",
" _w = in_proj_weight[_start:, :]\n",
" if _b is not None:\n",
" _b = _b[_start:]\n",
" k, v = linear(key, _w, _b).chunk(2, dim=-1)\n",
"\n",
" else:\n",
" # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
" _b = in_proj_bias\n",
" _start = 0\n",
" _end = embed_dim\n",
" _w = in_proj_weight[_start:_end, :]\n",
" if _b is not None:\n",
" _b = _b[_start:_end]\n",
" q = linear(query, _w, _b)\n",
"\n",
" # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
" _b = in_proj_bias\n",
" _start = embed_dim\n",
" _end = embed_dim * 2\n",
" _w = in_proj_weight[_start:_end, :]\n",
" if _b is not None:\n",
" _b = _b[_start:_end]\n",
" k = linear(key, _w, _b)\n",
"\n",
" # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
" _b = in_proj_bias\n",
" _start = embed_dim * 2\n",
" _end = None\n",
" _w = in_proj_weight[_start:, :]\n",
" if _b is not None:\n",
" _b = _b[_start:]\n",
" v = linear(value, _w, _b)\n",
" else:\n",
" q_proj_weight_non_opt = torch.jit._unwrap_optional(q_proj_weight)\n",
" len1, len2 = q_proj_weight_non_opt.size()\n",
" assert len1 == embed_dim and len2 == query.size(-1)\n",
"\n",
" k_proj_weight_non_opt = torch.jit._unwrap_optional(k_proj_weight)\n",
" len1, len2 = k_proj_weight_non_opt.size()\n",
" assert len1 == embed_dim and len2 == key.size(-1)\n",
"\n",
" v_proj_weight_non_opt = torch.jit._unwrap_optional(v_proj_weight)\n",
" len1, len2 = v_proj_weight_non_opt.size()\n",
" assert len1 == embed_dim and len2 == value.size(-1)\n",
"\n",
" if in_proj_bias is not None:\n",
" q = linear(query, q_proj_weight_non_opt, in_proj_bias[0:embed_dim])\n",
" k = linear(key, k_proj_weight_non_opt, in_proj_bias[embed_dim : (embed_dim * 2)])\n",
" v = linear(value, v_proj_weight_non_opt, in_proj_bias[(embed_dim * 2) :])\n",
" else:\n",
" q = linear(query, q_proj_weight_non_opt, in_proj_bias)\n",
" k = linear(key, k_proj_weight_non_opt, in_proj_bias)\n",
" v = linear(value, v_proj_weight_non_opt, in_proj_bias)\n",
" q = q * scaling\n",
"\n",
" if attn_mask is not None:\n",
" assert (\n",
" attn_mask.dtype == torch.float32\n",
" or attn_mask.dtype == torch.float64\n",
" or attn_mask.dtype == torch.float16\n",
" or attn_mask.dtype == torch.uint8\n",
" or attn_mask.dtype == torch.bool\n",
" ), \"Only float, byte, and bool types are supported for attn_mask, not {}\".format(attn_mask.dtype)\n",
" if attn_mask.dtype == torch.uint8:\n",
" warnings.warn(\"Byte tensor for attn_mask in nn.MultiheadAttention is deprecated. Use bool tensor instead.\")\n",
" attn_mask = attn_mask.to(torch.bool)\n",
"\n",
" if attn_mask.dim() == 2:\n",
" attn_mask = attn_mask.unsqueeze(0)\n",
" if list(attn_mask.size()) != [1, query.size(0), key.size(0)]:\n",
" raise RuntimeError(\"The size of the 2D attn_mask is not correct.\")\n",
" elif attn_mask.dim() == 3:\n",
" if list(attn_mask.size()) != [bsz * num_heads, query.size(0), key.size(0)]:\n",
" raise RuntimeError(\"The size of the 3D attn_mask is not correct.\")\n",
" else:\n",
" raise RuntimeError(\"attn_mask's dimension {} is not supported\".format(attn_mask.dim()))\n",
" # attn_mask's dim is 3 now.\n",
"\n",
" # convert ByteTensor key_padding_mask to bool\n",
" if key_padding_mask is not None and key_padding_mask.dtype == torch.uint8:\n",
" warnings.warn(\n",
" \"Byte tensor for key_padding_mask in nn.MultiheadAttention is deprecated. Use bool tensor instead.\"\n",
" )\n",
" key_padding_mask = key_padding_mask.to(torch.bool)\n",
"\n",
" if bias_k is not None and bias_v is not None:\n",
" if static_k is None and static_v is None:\n",
" k = torch.cat([k, bias_k.repeat(1, bsz, 1)])\n",
" v = torch.cat([v, bias_v.repeat(1, bsz, 1)])\n",
" if attn_mask is not None:\n",
" attn_mask = pad(attn_mask, (0, 1))\n",
" if key_padding_mask is not None:\n",
" key_padding_mask = pad(key_padding_mask, (0, 1))\n",
" else:\n",
" assert static_k is None, \"bias cannot be added to static key.\"\n",
" assert static_v is None, \"bias cannot be added to static value.\"\n",
" else:\n",
" assert bias_k is None\n",
" assert bias_v is None\n",
"\n",
" q = q.contiguous().view(tgt_len, bsz * num_heads, head_dim).transpose(0, 1)\n",
" if k is not None:\n",
" k = k.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)\n",
" if v is not None:\n",
" v = v.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)\n",
"\n",
" if static_k is not None:\n",
" assert static_k.size(0) == bsz * num_heads\n",
" assert static_k.size(2) == head_dim\n",
" k = static_k\n",
"\n",
" if static_v is not None:\n",
" assert static_v.size(0) == bsz * num_heads\n",
" assert static_v.size(2) == head_dim\n",
" v = static_v\n",
"\n",
" src_len = k.size(1)\n",
"\n",
" if key_padding_mask is not None:\n",
" assert key_padding_mask.size(0) == bsz\n",
" assert key_padding_mask.size(1) == src_len\n",
"\n",
" if add_zero_attn:\n",
" src_len += 1\n",
" k = torch.cat([k, torch.zeros((k.size(0), 1) + k.size()[2:], dtype=k.dtype, device=k.device)], dim=1)\n",
" v = torch.cat([v, torch.zeros((v.size(0), 1) + v.size()[2:], dtype=v.dtype, device=v.device)], dim=1)\n",
" if attn_mask is not None:\n",
" attn_mask = pad(attn_mask, (0, 1))\n",
" if key_padding_mask is not None:\n",
" key_padding_mask = pad(key_padding_mask, (0, 1))\n",
"\n",
" attn_output_weights = torch.bmm(q, k.transpose(1, 2))\n",
" assert list(attn_output_weights.size()) == [bsz * num_heads, tgt_len, src_len]\n",
"\n",
" if attn_mask is not None:\n",
" if attn_mask.dtype == torch.bool:\n",
" attn_output_weights.masked_fill_(attn_mask, float(\"-inf\"))\n",
" else:\n",
" attn_output_weights += attn_mask\n",
"\n",
" if key_padding_mask is not None:\n",
" attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)\n",
" attn_output_weights = attn_output_weights.masked_fill(\n",
" key_padding_mask.unsqueeze(1).unsqueeze(2),\n",
" float(\"-inf\"),\n",
" )\n",
" attn_output_weights = attn_output_weights.view(bsz * num_heads, tgt_len, src_len)\n",
" \n",
" attn_output_weights_raw = attn_output_weights\n",
" attn_output_weights = softmax(attn_output_weights, dim=-1)\n",
" attn_output_weights = dropout(attn_output_weights, p=dropout_p, training=training)\n",
"\n",
" attn_output = torch.bmm(attn_output_weights, v)\n",
" assert list(attn_output.size()) == [bsz * num_heads, tgt_len, head_dim]\n",
" attn_output = attn_output.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)\n",
" attn_output = linear(attn_output, out_proj_weight, out_proj_bias)\n",
" \n",
" if need_weights:\n",
" if need_raw:\n",
" \n",
" attn_output_weights_raw = attn_output_weights_raw.view(bsz, num_heads, tgt_len, src_len)\n",
" return attn_output,attn_output_weights_raw\n",
" \n",
" #attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)\n",
" #return attn_output, attn_output_weights.sum(dim=1) / num_heads, attn_output_weights_raw, attn_output_weights_raw.sum(dim=1) / num_heads\n",
" else:\n",
" # average attention weights over heads\n",
" attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)\n",
" return attn_output, attn_output_weights.sum(dim=1) / num_heads\n",
" else:\n",
" return attn_output, None\n"
]
},
{
"cell_type": "code",
"execution_count": 418,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"from torch import Tensor\n",
"from torch.nn.modules.linear import _LinearWithBias\n",
"from torch.nn.init import xavier_uniform_\n",
"from torch.nn.init import constant_\n",
"from torch.nn.init import xavier_normal_\n",
"from torch.nn.parameter import Parameter\n",
"from torch.nn import Module\n",
"\n",
"class MultiheadAttention(Module):\n",
" r\"\"\"Allows the model to jointly attend to information\n",
" from different representation subspaces.\n",
" See reference: Attention Is All You Need\n",
"\n",
" .. math::\n",
" \\text{MultiHead}(Q, K, V) = \\text{Concat}(head_1,\\dots,head_h)W^O\n",
" \\text{where} head_i = \\text{Attention}(QW_i^Q, KW_i^K, VW_i^V)\n",
"\n",
" Args:\n",
" embed_dim: total dimension of the model.\n",
" num_heads: parallel attention heads.\n",
" dropout: a Dropout layer on attn_output_weights. Default: 0.0.\n",
" bias: add bias as module parameter. Default: True.\n",
" add_bias_kv: add bias to the key and value sequences at dim=0.\n",
" add_zero_attn: add a new batch of zeros to the key and\n",
" value sequences at dim=1.\n",
" kdim: total number of features in key. Default: None.\n",
" vdim: total number of features in value. Default: None.\n",
"\n",
" Note: if kdim and vdim are None, they will be set to embed_dim such that\n",
" query, key, and value have the same number of features.\n",
"\n",
" Examples::\n",
"\n",
" >>> multihead_attn = nn.MultiheadAttention(embed_dim, num_heads)\n",
" >>> attn_output, attn_output_weights = multihead_attn(query, key, value)\n",
" \"\"\"\n",
" bias_k: Optional[torch.Tensor]\n",
" bias_v: Optional[torch.Tensor]\n",
"\n",
" def __init__(self, embed_dim, num_heads, dropout=0., bias=True, add_bias_kv=False, add_zero_attn=False, kdim=None, vdim=None):\n",
" super(MultiheadAttention, self).__init__()\n",
" self.embed_dim = embed_dim\n",
" self.kdim = kdim if kdim is not None else embed_dim\n",
" self.vdim = vdim if vdim is not None else embed_dim\n",
" self._qkv_same_embed_dim = self.kdim == embed_dim and self.vdim == embed_dim\n",
"\n",
" self.num_heads = num_heads\n",
" self.dropout = dropout\n",
" self.head_dim = embed_dim // num_heads\n",
" assert self.head_dim * num_heads == self.embed_dim, \"embed_dim must be divisible by num_heads\"\n",
"\n",
" if self._qkv_same_embed_dim is False:\n",
" self.q_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim))\n",
" self.k_proj_weight = Parameter(torch.Tensor(embed_dim, self.kdim))\n",
" self.v_proj_weight = Parameter(torch.Tensor(embed_dim, self.vdim))\n",
" self.register_parameter('in_proj_weight', None)\n",
" else:\n",
" self.in_proj_weight = Parameter(torch.empty(3 * embed_dim, embed_dim))\n",
" self.register_parameter('q_proj_weight', None)\n",
" self.register_parameter('k_proj_weight', None)\n",
" self.register_parameter('v_proj_weight', None)\n",
"\n",
" if bias:\n",
" self.in_proj_bias = Parameter(torch.empty(3 * embed_dim))\n",
" else:\n",
" self.register_parameter('in_proj_bias', None)\n",
" self.out_proj = _LinearWithBias(embed_dim, embed_dim)\n",
"\n",
" if add_bias_kv:\n",
" self.bias_k = Parameter(torch.empty(1, 1, embed_dim))\n",
" self.bias_v = Parameter(torch.empty(1, 1, embed_dim))\n",
" else:\n",
" self.bias_k = self.bias_v = None\n",
"\n",
" self.add_zero_attn = add_zero_attn\n",
"\n",
" self._reset_parameters()\n",
"\n",
" def _reset_parameters(self):\n",
" if self._qkv_same_embed_dim:\n",
" xavier_uniform_(self.in_proj_weight)\n",
" else:\n",
" xavier_uniform_(self.q_proj_weight)\n",
" xavier_uniform_(self.k_proj_weight)\n",
" xavier_uniform_(self.v_proj_weight)\n",
"\n",
" if self.in_proj_bias is not None:\n",
" constant_(self.in_proj_bias, 0.)\n",
" constant_(self.out_proj.bias, 0.)\n",
" if self.bias_k is not None:\n",
" xavier_normal_(self.bias_k)\n",
" if self.bias_v is not None:\n",
" xavier_normal_(self.bias_v)\n",
"\n",
" def __setstate__(self, state):\n",
" # Support loading old MultiheadAttention checkpoints generated by v1.1.0\n",
" if '_qkv_same_embed_dim' not in state:\n",
" state['_qkv_same_embed_dim'] = True\n",
"\n",
" super(MultiheadAttention, self).__setstate__(state)\n",
"\n",
" def forward(self, query, key, value, key_padding_mask=None,\n",
" need_weights=True, need_raw=True, attn_mask=None):\n",
"        # type: (Tensor, Tensor, Tensor, Optional[Tensor], bool, bool, Optional[Tensor]) -> Tuple[Tensor, Optional[Tensor]]\n",
" r\"\"\"\n",
" Args:\n",
" query, key, value: map a query and a set of key-value pairs to an output.\n",
" See \"Attention Is All You Need\" for more details.\n",
" key_padding_mask: if provided, specified padding elements in the key will\n",
" be ignored by the attention. When given a binary mask and a value is True,\n",
" the corresponding value on the attention layer will be ignored. When given\n",
" a byte mask and a value is non-zero, the corresponding value on the attention\n",
" layer will be ignored\n",
" need_weights: output attn_output_weights.\n",
" attn_mask: 2D or 3D mask that prevents attention to certain positions. A 2D mask will be broadcasted for all\n",
" the batches while a 3D mask allows to specify a different mask for the entries of each batch.\n",
"\n",
" Shape:\n",
" - Inputs:\n",
" - query: :math:`(L, N, E)` where L is the target sequence length, N is the batch size, E is\n",
" the embedding dimension.\n",
" - key: :math:`(S, N, E)`, where S is the source sequence length, N is the batch size, E is\n",
" the embedding dimension.\n",
" - value: :math:`(S, N, E)` where S is the source sequence length, N is the batch size, E is\n",
" the embedding dimension.\n",
" - key_padding_mask: :math:`(N, S)` where N is the batch size, S is the source sequence length.\n",
"          If a ByteTensor is provided, the non-zero positions will be ignored while the zero\n",
"          positions will be unchanged. If a BoolTensor is provided, the positions with the\n",
" value of ``True`` will be ignored while the position with the value of ``False`` will be unchanged.\n",
" - attn_mask: 2D mask :math:`(L, S)` where L is the target sequence length, S is the source sequence length.\n",
" 3D mask :math:`(N*num_heads, L, S)` where N is the batch size, L is the target sequence length,\n",
"          S is the source sequence length. attn_mask ensures that position i is allowed to attend the unmasked\n",
" positions. If a ByteTensor is provided, the non-zero positions are not allowed to attend\n",
" while the zero positions will be unchanged. If a BoolTensor is provided, positions with ``True``\n",
"          are not allowed to attend while ``False`` values will be unchanged. If a FloatTensor\n",
" is provided, it will be added to the attention weight.\n",
"\n",
" - Outputs:\n",
" - attn_output: :math:`(L, N, E)` where L is the target sequence length, N is the batch size,\n",
" E is the embedding dimension.\n",
" - attn_output_weights: :math:`(N, L, S)` where N is the batch size,\n",
" L is the target sequence length, S is the source sequence length.\n",
" \"\"\"\n",
" if not self._qkv_same_embed_dim:\n",
" return multi_head_attention_forward(\n",
" query, key, value, self.embed_dim, self.num_heads,\n",
" self.in_proj_weight, self.in_proj_bias,\n",
" self.bias_k, self.bias_v, self.add_zero_attn,\n",
" self.dropout, self.out_proj.weight, self.out_proj.bias,\n",
" training=self.training,\n",
" key_padding_mask=key_padding_mask, need_weights=need_weights, need_raw=need_raw,\n",
" attn_mask=attn_mask, use_separate_proj_weight=True,\n",
" q_proj_weight=self.q_proj_weight, k_proj_weight=self.k_proj_weight,\n",
" v_proj_weight=self.v_proj_weight)\n",
" else:\n",
" return multi_head_attention_forward(\n",
" query, key, value, self.embed_dim, self.num_heads,\n",
" self.in_proj_weight, self.in_proj_bias,\n",
" self.bias_k, self.bias_v, self.add_zero_attn,\n",
" self.dropout, self.out_proj.weight, self.out_proj.bias,\n",
" training=self.training,\n",
" key_padding_mask=key_padding_mask, need_weights=need_weights, need_raw=need_raw,\n",
" attn_mask=attn_mask)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 104,
"metadata": {},
"outputs": [
{
"ename": "ModuleNotFoundError",
"evalue": "No module named 'torch'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-104-6bb47b25d46a>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmath\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mnn\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'torch'"
]
}
],
"source": [
"import math\n",
"\n",
"import torch\n",
"from torch import nn\n",
"\n",
"############\n",
"# Omic Model\n",
"############\n",
"def init_max_weights(module):\n",
" for m in module.modules():\n",
" if type(m) == nn.Linear:\n",
" stdv = 1. / math.sqrt(m.weight.size(1))\n",
" m.weight.data.normal_(0, stdv)\n",
" m.bias.data.zero_()\n",
"\n",
"def SNN_Block(dim1, dim2, dropout=0.25):\n",
" return nn.Sequential(\n",
" nn.Linear(dim1, dim2),\n",
" nn.ELU(),\n",
" nn.AlphaDropout(p=dropout, inplace=False))\n",
"\n",
"class MaxNet(nn.Module):\n",
" def __init__(self, input_dim: int, meta_dim: int=0, model_size_omic: str='small', n_classes: int=4):\n",
" super(MaxNet, self).__init__()\n",
" self.meta_dim = meta_dim\n",
" self.n_classes = n_classes\n",
" self.size_dict_omic = {'small': [256, 256, 256, 256], 'big': [1024, 1024, 1024, 256]}\n",
" \n",
" ### Constructing Genomic SNN\n",
" hidden = self.size_dict_omic[model_size_omic]\n",
" fc_omic = [SNN_Block(dim1=input_dim, dim2=hidden[0])]\n",
" for i, _ in enumerate(hidden[1:]):\n",
" fc_omic.append(SNN_Block(dim1=hidden[i], dim2=hidden[i+1], dropout=0.25))\n",
" self.fc_omic = nn.Sequential(*fc_omic)\n",
" self.classifier = nn.Linear(hidden[-1]+self.meta_dim, n_classes)\n",
" init_max_weights(self)\n",
"\n",
" def forward(self, **kwargs):\n",
" x = kwargs['x_omic']\n",
" meta = kwargs['meta']\n",
" features = self.fc_omic(x)\n",
"\n",
" if self.meta_dim: \n",
" axis_dim = 1 if len(meta.shape) > 1 else 0\n",
" features = torch.cat((features, meta), axis_dim)\n",
"\n",
" logits = self.classifier(features).unsqueeze(0)\n",
" Y_hat = torch.topk(logits, 1, dim=1)[1]\n",
" hazards = torch.sigmoid(logits)\n",
" S = torch.cumprod(1 - hazards, dim=1)\n",
" return hazards, S, Y_hat, None, None\n",
"\n",
" def relocate(self):\n",
" device=torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"\n",
" if torch.cuda.device_count() > 1:\n",
" device_ids = list(range(torch.cuda.device_count()))\n",
" self.fc_omic = nn.DataParallel(self.fc_omic, device_ids=device_ids).to('cuda:0')\n",
" else:\n",
" self.fc_omic = self.fc_omic.to(device)\n",
"\n",
"\n",
" self.classifier = self.classifier.to(device)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 88,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>CXCL14_rnaseq</th>\n",
" <th>FGF1_rnaseq</th>\n",
" <th>IFNA8_cnv</th>\n",
" <th>ADM_rnaseq</th>\n",
" <th>LTBP2_rnaseq</th>\n",
" <th>CCL28_rnaseq</th>\n",
" <th>IFNA7_rnaseq</th>\n",
" <th>GH2_rnaseq</th>\n",
" <th>AIMP1_rnaseq</th>\n",
" <th>DEFB1_rnaseq</th>\n",
" <th>...</th>\n",
" <th>NPPB_rnaseq</th>\n",
" <th>CCL27_rnaseq</th>\n",
" <th>FASLG_rnaseq</th>\n",
" <th>FGF20_cnv</th>\n",
" <th>FAM3C_rnaseq</th>\n",
" <th>IL18_rnaseq</th>\n",
" <th>GDF10_rnaseq</th>\n",
" <th>MYDGF_rnaseq</th>\n",
" <th>IL10_rnaseq</th>\n",
" <th>IFNW1_rnaseq</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>-0.1170</td>\n",
" <td>-0.2221</td>\n",
" <td>1</td>\n",
" <td>-0.5126</td>\n",
" <td>-0.3289</td>\n",
" <td>-0.7331</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>0.5942</td>\n",
" <td>-0.4707</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>1.2033</td>\n",
" <td>0.9826</td>\n",
" <td>-1</td>\n",
" <td>-0.6161</td>\n",
" <td>-0.5643</td>\n",
" <td>-0.2165</td>\n",
" <td>-0.2836</td>\n",
" <td>0.9991</td>\n",
" <td>-0.3899</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>-0.2330</td>\n",
" <td>-0.4343</td>\n",
" <td>-1</td>\n",
" <td>-0.2381</td>\n",
" <td>-0.4799</td>\n",
" <td>-0.0520</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>1.1854</td>\n",
" <td>-0.4820</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>-0.2946</td>\n",
" <td>-0.5443</td>\n",
" <td>-1</td>\n",
" <td>-0.3499</td>\n",
" <td>-0.7958</td>\n",
" <td>-0.3140</td>\n",
" <td>-0.3359</td>\n",
" <td>-0.4865</td>\n",
" <td>-0.3899</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>-0.1384</td>\n",
" <td>-0.1597</td>\n",
" <td>-1</td>\n",
" <td>-0.1521</td>\n",
" <td>-0.3348</td>\n",
" <td>-0.5310</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>0.3889</td>\n",
" <td>-0.3607</td>\n",
" <td>...</td>\n",
" <td>3.4177</td>\n",
" <td>-0.2946</td>\n",
" <td>-0.5320</td>\n",
" <td>0</td>\n",
" <td>0.4581</td>\n",
" <td>-0.6179</td>\n",
" <td>-0.2107</td>\n",
" <td>0.2751</td>\n",
" <td>-0.5108</td>\n",
" <td>1.0629</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>-0.1624</td>\n",
" <td>-0.3463</td>\n",
" <td>-1</td>\n",
" <td>0.0272</td>\n",
" <td>-0.7623</td>\n",
" <td>0.8196</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>-0.0416</td>\n",
" <td>0.1661</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>-0.1020</td>\n",
" <td>-0.4682</td>\n",
" <td>-1</td>\n",
" <td>-0.4391</td>\n",
" <td>-0.7275</td>\n",
" <td>-0.2876</td>\n",
" <td>-0.4696</td>\n",
" <td>-0.6248</td>\n",
" <td>-0.3899</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>-0.2346</td>\n",
" <td>-0.4090</td>\n",
" <td>-1</td>\n",
" <td>-0.2078</td>\n",
" <td>0.5702</td>\n",
" <td>-0.4219</td>\n",
" <td>-0.1244</td>\n",
" <td>0.5257</td>\n",
" <td>-0.9790</td>\n",
" <td>0.3938</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>-0.1035</td>\n",
" <td>-0.4688</td>\n",
" <td>-1</td>\n",
" <td>1.2596</td>\n",
" <td>-0.5807</td>\n",
" <td>0.4108</td>\n",
" <td>0.1801</td>\n",
" <td>-0.6086</td>\n",
" <td>-0.3899</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>368</th>\n",
" <td>-0.2417</td>\n",
" <td>10.1423</td>\n",
" <td>-1</td>\n",
" <td>-0.5456</td>\n",
" <td>0.8742</td>\n",
" <td>-0.1822</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>-1.2395</td>\n",
" <td>-0.5125</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>-0.2946</td>\n",
" <td>0.0777</td>\n",
" <td>0</td>\n",
" <td>-0.8242</td>\n",
" <td>-0.6727</td>\n",
" <td>0.1938</td>\n",
" <td>0.9210</td>\n",
" <td>0.4479</td>\n",
" <td>-0.3899</td>\n",
" </tr>\n",
" <tr>\n",
" <th>369</th>\n",
" <td>-0.2412</td>\n",
" <td>1.3253</td>\n",
" <td>1</td>\n",
" <td>-0.5680</td>\n",
" <td>1.0719</td>\n",
" <td>-0.1707</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>-1.6694</td>\n",
" <td>-0.4528</td>\n",
" <td>...</td>\n",
" <td>0.5679</td>\n",
" <td>-0.2661</td>\n",
" <td>1.0215</td>\n",
" <td>-2</td>\n",
" <td>-0.5327</td>\n",
" <td>0.3335</td>\n",
" <td>-0.1730</td>\n",
" <td>0.0147</td>\n",
" <td>0.6012</td>\n",
" <td>2.2526</td>\n",
" </tr>\n",
" <tr>\n",
" <th>370</th>\n",
" <td>-0.2396</td>\n",
" <td>0.0435</td>\n",
" <td>0</td>\n",
" <td>-0.3610</td>\n",
" <td>3.1965</td>\n",
" <td>1.3670</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>0.4439</td>\n",
" <td>-0.5099</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>-0.2289</td>\n",
" <td>0.0521</td>\n",
" <td>-1</td>\n",
" <td>1.0317</td>\n",
" <td>-0.1473</td>\n",
" <td>-0.1517</td>\n",
" <td>0.9384</td>\n",
" <td>-0.3165</td>\n",
" <td>0.6239</td>\n",
" </tr>\n",
" <tr>\n",
" <th>371</th>\n",
" <td>-0.2393</td>\n",
" <td>-0.4475</td>\n",
" <td>0</td>\n",
" <td>0.4772</td>\n",
" <td>2.9612</td>\n",
" <td>-0.7799</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.1693</td>\n",
" <td>0.5778</td>\n",
" <td>1.7607</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>9.4098</td>\n",
" <td>-0.5443</td>\n",
" <td>0</td>\n",
" <td>0.2992</td>\n",
" <td>-0.5451</td>\n",
" <td>-0.2456</td>\n",
" <td>0.8898</td>\n",
" <td>-0.5781</td>\n",
" <td>-0.3899</td>\n",
" </tr>\n",
" <tr>\n",
" <th>372</th>\n",
" <td>-0.1936</td>\n",
" <td>-0.2281</td>\n",
" <td>0</td>\n",
" <td>-0.4124</td>\n",
" <td>-0.1873</td>\n",
" <td>-0.1200</td>\n",
" <td>-0.1244</td>\n",
" <td>-0.0326</td>\n",
" <td>-0.8786</td>\n",
" <td>-0.3912</td>\n",
" <td>...</td>\n",
" <td>-0.2276</td>\n",
" <td>-0.2570</td>\n",
" <td>-0.3810</td>\n",
" <td>-1</td>\n",
" <td>-0.6399</td>\n",
" <td>-0.9128</td>\n",
" <td>0.3367</td>\n",
" <td>-0.4686</td>\n",
" <td>0.8995</td>\n",
" <td>1.3522</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>373 rows × 347 columns</p>\n",
"</div>"
],
"text/plain": [
" CXCL14_rnaseq FGF1_rnaseq IFNA8_cnv ADM_rnaseq LTBP2_rnaseq \\\n",
"0 -0.1170 -0.2221 1 -0.5126 -0.3289 \n",
"1 -0.2330 -0.4343 -1 -0.2381 -0.4799 \n",
"2 -0.1384 -0.1597 -1 -0.1521 -0.3348 \n",
"3 -0.1624 -0.3463 -1 0.0272 -0.7623 \n",
"4 -0.2346 -0.4090 -1 -0.2078 0.5702 \n",
".. ... ... ... ... ... \n",
"368 -0.2417 10.1423 -1 -0.5456 0.8742 \n",
"369 -0.2412 1.3253 1 -0.5680 1.0719 \n",
"370 -0.2396 0.0435 0 -0.3610 3.1965 \n",
"371 -0.2393 -0.4475 0 0.4772 2.9612 \n",
"372 -0.1936 -0.2281 0 -0.4124 -0.1873 \n",
"\n",
" CCL28_rnaseq IFNA7_rnaseq GH2_rnaseq AIMP1_rnaseq DEFB1_rnaseq ... \\\n",
"0 -0.7331 -0.1244 -0.1693 0.5942 -0.4707 ... \n",
"1 -0.0520 -0.1244 -0.1693 1.1854 -0.4820 ... \n",
"2 -0.5310 -0.1244 -0.1693 0.3889 -0.3607 ... \n",
"3 0.8196 -0.1244 -0.1693 -0.0416 0.1661 ... \n",
"4 -0.4219 -0.1244 0.5257 -0.9790 0.3938 ... \n",
".. ... ... ... ... ... ... \n",
"368 -0.1822 -0.1244 -0.1693 -1.2395 -0.5125 ... \n",
"369 -0.1707 -0.1244 -0.1693 -1.6694 -0.4528 ... \n",
"370 1.3670 -0.1244 -0.1693 0.4439 -0.5099 ... \n",
"371 -0.7799 -0.1244 -0.1693 0.5778 1.7607 ... \n",
"372 -0.1200 -0.1244 -0.0326 -0.8786 -0.3912 ... \n",
"\n",
" NPPB_rnaseq CCL27_rnaseq FASLG_rnaseq FGF20_cnv FAM3C_rnaseq \\\n",
"0 -0.2276 1.2033 0.9826 -1 -0.6161 \n",
"1 -0.2276 -0.2946 -0.5443 -1 -0.3499 \n",
"2 3.4177 -0.2946 -0.5320 0 0.4581 \n",
"3 -0.2276 -0.1020 -0.4682 -1 -0.4391 \n",
"4 -0.2276 -0.1035 -0.4688 -1 1.2596 \n",
".. ... ... ... ... ... \n",
"368 -0.2276 -0.2946 0.0777 0 -0.8242 \n",
"369 0.5679 -0.2661 1.0215 -2 -0.5327 \n",
"370 -0.2276 -0.2289 0.0521 -1 1.0317 \n",
"371 -0.2276 9.4098 -0.5443 0 0.2992 \n",
"372 -0.2276 -0.2570 -0.3810 -1 -0.6399 \n",
"\n",
" IL18_rnaseq GDF10_rnaseq MYDGF_rnaseq IL10_rnaseq IFNW1_rnaseq \n",
"0 -0.5643 -0.2165 -0.2836 0.9991 -0.3899 \n",
"1 -0.7958 -0.3140 -0.3359 -0.4865 -0.3899 \n",
"2 -0.6179 -0.2107 0.2751 -0.5108 1.0629 \n",
"3 -0.7275 -0.2876 -0.4696 -0.6248 -0.3899 \n",
"4 -0.5807 0.4108 0.1801 -0.6086 -0.3899 \n",
".. ... ... ... ... ... \n",
"368 -0.6727 0.1938 0.9210 0.4479 -0.3899 \n",
"369 0.3335 -0.1730 0.0147 0.6012 2.2526 \n",
"370 -0.1473 -0.1517 0.9384 -0.3165 0.6239 \n",
"371 -0.5451 -0.2456 0.8898 -0.5781 -0.3899 \n",
"372 -0.9128 0.3367 -0.4686 0.8995 1.3522 \n",
"\n",
"[373 rows x 347 columns]"
]
},
"execution_count": 88,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"genomic_features[series_intersecdef series_intersection(s1, s2):\n",
" return pd.Series(list(set(s1) & set(s2)))\n",
"tion(sig, genomic_features.columns)]"
]
},
{
"cell_type": "code",
"execution_count": 84,
"metadata": {},
"outputs": [],
"source": [
"def series_intersection(s1, s2):\n",
" return pd.Series(list(set(s1) & set(s2)))\n"
]
},
{
"cell_type": "code",
"execution_count": 68,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>NDUFS5_cnv</th>\n",
" <th>MACF1_cnv</th>\n",
" <th>RNA5SP44_cnv</th>\n",
" <th>KIAA0754_cnv</th>\n",
" <th>BMP8A_cnv</th>\n",
" <th>PABPC4_cnv</th>\n",
" <th>SNORA55_cnv</th>\n",
" <th>HEYL_cnv</th>\n",
" <th>HPCAL4_cnv</th>\n",
" <th>NT5C1A_cnv</th>\n",
" <th>...</th>\n",
" <th>ZWINT_rnaseq</th>\n",
" <th>ZXDA_rnaseq</th>\n",
" <th>ZXDB_rnaseq</th>\n",
" <th>ZXDC_rnaseq</th>\n",
" <th>ZYG11A_rnaseq</th>\n",
" <th>ZYG11B_rnaseq</th>\n",
" <th>ZYX_rnaseq</th>\n",
" <th>ZZEF1_rnaseq</th>\n",
" <th>ZZZ3_rnaseq</th>\n",
" <th>TPTEP1_rnaseq</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>...</td>\n",
" <td>-0.8388</td>\n",
" <td>4.1375</td>\n",
" <td>3.9664</td>\n",
" <td>1.8437</td>\n",
" <td>-0.3959</td>\n",
" <td>-0.2561</td>\n",
" <td>-0.2866</td>\n",
" <td>1.8770</td>\n",
" <td>-0.3179</td>\n",
" <td>-0.3633</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>...</td>\n",
" <td>-0.1083</td>\n",
" <td>0.3393</td>\n",
" <td>0.2769</td>\n",
" <td>1.7320</td>\n",
" <td>-0.0975</td>\n",
" <td>2.6955</td>\n",
" <td>-0.6741</td>\n",
" <td>1.0323</td>\n",
" <td>1.2766</td>\n",
" <td>-0.3982</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>-0.4155</td>\n",
" <td>1.6846</td>\n",
" <td>0.7711</td>\n",
" <td>-0.3061</td>\n",
" <td>-0.5016</td>\n",
" <td>2.8548</td>\n",
" <td>-0.6171</td>\n",
" <td>-0.8608</td>\n",
" <td>-0.0486</td>\n",
" <td>-0.3962</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>-0.8143</td>\n",
" <td>0.8344</td>\n",
" <td>1.5075</td>\n",
" <td>3.6068</td>\n",
" <td>-0.5004</td>\n",
" <td>-0.0747</td>\n",
" <td>-0.2185</td>\n",
" <td>-0.4379</td>\n",
" <td>1.6913</td>\n",
" <td>1.7748</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0.0983</td>\n",
" <td>-0.7908</td>\n",
" <td>-0.0053</td>\n",
" <td>-0.0643</td>\n",
" <td>-0.3706</td>\n",
" <td>0.3870</td>\n",
" <td>-0.5589</td>\n",
" <td>-0.5979</td>\n",
" <td>0.0047</td>\n",
" <td>-0.3548</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>368</th>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>...</td>\n",
" <td>-0.0291</td>\n",
" <td>-0.1058</td>\n",
" <td>-0.6721</td>\n",
" <td>0.2802</td>\n",
" <td>1.9504</td>\n",
" <td>-0.8784</td>\n",
" <td>0.9506</td>\n",
" <td>0.0607</td>\n",
" <td>1.1883</td>\n",
" <td>-0.3521</td>\n",
" </tr>\n",
" <tr>\n",
" <th>369</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0.0497</td>\n",
" <td>0.3673</td>\n",
" <td>-0.2208</td>\n",
" <td>0.3034</td>\n",
" <td>3.2580</td>\n",
" <td>-0.2089</td>\n",
" <td>1.6053</td>\n",
" <td>-0.8746</td>\n",
" <td>-0.4491</td>\n",
" <td>-0.3450</td>\n",
" </tr>\n",
" <tr>\n",
" <th>370</th>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>...</td>\n",
" <td>0.3822</td>\n",
" <td>-0.7003</td>\n",
" <td>-0.7661</td>\n",
" <td>-1.7035</td>\n",
" <td>-0.5423</td>\n",
" <td>-0.3488</td>\n",
" <td>1.3713</td>\n",
" <td>-0.4365</td>\n",
" <td>2.3456</td>\n",
" <td>-0.3866</td>\n",
" </tr>\n",
" <tr>\n",
" <th>371</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>-0.6853</td>\n",
" <td>-1.0240</td>\n",
" <td>-1.2890</td>\n",
" <td>-1.5666</td>\n",
" <td>-0.1270</td>\n",
" <td>-1.4662</td>\n",
" <td>0.3981</td>\n",
" <td>-0.5976</td>\n",
" <td>-1.3822</td>\n",
" <td>-0.4157</td>\n",
" </tr>\n",
" <tr>\n",
" <th>372</th>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0.0517</td>\n",
" <td>-0.3570</td>\n",
" <td>-0.4843</td>\n",
" <td>-0.3792</td>\n",
" <td>-0.1964</td>\n",
" <td>0.4200</td>\n",
" <td>3.2547</td>\n",
" <td>-0.1232</td>\n",
" <td>3.4519</td>\n",
" <td>-0.1962</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>373 rows × 20395 columns</p>\n",
"</div>"
],
"text/plain": [
" NDUFS5_cnv MACF1_cnv RNA5SP44_cnv KIAA0754_cnv BMP8A_cnv PABPC4_cnv \\\n",
"0 -1 -1 -1 -1 -1 -1 \n",
"1 2 2 2 2 2 2 \n",
"2 0 0 0 0 0 0 \n",
"3 0 0 0 0 0 0 \n",
"4 0 0 0 0 0 0 \n",
".. ... ... ... ... ... ... \n",
"368 2 2 2 2 2 2 \n",
"369 0 0 0 0 0 0 \n",
"370 1 1 1 1 1 1 \n",
"371 0 0 0 0 0 0 \n",
"372 0 0 0 0 0 0 \n",
"\n",
" SNORA55_cnv HEYL_cnv HPCAL4_cnv NT5C1A_cnv ... ZWINT_rnaseq \\\n",
"0 -1 -1 -1 -1 ... -0.8388 \n",
"1 2 2 2 2 ... -0.1083 \n",
"2 0 0 0 0 ... -0.4155 \n",
"3 0 0 0 0 ... -0.8143 \n",
"4 0 0 0 0 ... 0.0983 \n",
".. ... ... ... ... ... ... \n",
"368 2 2 2 2 ... -0.0291 \n",
"369 0 0 0 0 ... 0.0497 \n",
"370 1 1 1 1 ... 0.3822 \n",
"371 0 0 0 0 ... -0.6853 \n",
"372 0 0 0 0 ... 0.0517 \n",
"\n",
" ZXDA_rnaseq ZXDB_rnaseq ZXDC_rnaseq ZYG11A_rnaseq ZYG11B_rnaseq \\\n",
"0 4.1375 3.9664 1.8437 -0.3959 -0.2561 \n",
"1 0.3393 0.2769 1.7320 -0.0975 2.6955 \n",
"2 1.6846 0.7711 -0.3061 -0.5016 2.8548 \n",
"3 0.8344 1.5075 3.6068 -0.5004 -0.0747 \n",
"4 -0.7908 -0.0053 -0.0643 -0.3706 0.3870 \n",
".. ... ... ... ... ... \n",
"368 -0.1058 -0.6721 0.2802 1.9504 -0.8784 \n",
"369 0.3673 -0.2208 0.3034 3.2580 -0.2089 \n",
"370 -0.7003 -0.7661 -1.7035 -0.5423 -0.3488 \n",
"371 -1.0240 -1.2890 -1.5666 -0.1270 -1.4662 \n",
"372 -0.3570 -0.4843 -0.3792 -0.1964 0.4200 \n",
"\n",
" ZYX_rnaseq ZZEF1_rnaseq ZZZ3_rnaseq TPTEP1_rnaseq \n",
"0 -0.2866 1.8770 -0.3179 -0.3633 \n",
"1 -0.6741 1.0323 1.2766 -0.3982 \n",
"2 -0.6171 -0.8608 -0.0486 -0.3962 \n",
"3 -0.2185 -0.4379 1.6913 1.7748 \n",
"4 -0.5589 -0.5979 0.0047 -0.3548 \n",
".. ... ... ... ... \n",
"368 0.9506 0.0607 1.1883 -0.3521 \n",
"369 1.6053 -0.8746 -0.4491 -0.3450 \n",
"370 1.3713 -0.4365 2.3456 -0.3866 \n",
"371 0.3981 -0.5976 -1.3822 -0.4157 \n",
"372 3.2547 -0.1232 3.4519 -0.1962 \n",
"\n",
"[373 rows x 20395 columns]"
]
},
"execution_count": 68,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"genomic_features"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"if 'case_id' not in slide_data:\n",
" slide_data.index = slide_data.index.str[:12]\n",
" slide_data['case_id'] = slide_data.index\n",
" slide_data = slide_data.reset_index(drop=True)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"new_cols = list(slide_data.columns[-2:]) + list(slide_data.columns[:-2])\n",
"slide_data = slide_data[new_cols]"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>ZZZ3_rnaseq</th>\n",
" <th>TPTEP1_rnaseq</th>\n",
" <th>slide_id</th>\n",
" <th>site</th>\n",
" <th>is_female</th>\n",
" <th>oncotree_code</th>\n",
" <th>age</th>\n",
" <th>survival_months</th>\n",
" <th>censorship</th>\n",
" <th>train</th>\n",
" <th>...</th>\n",
" <th>ZW10_rnaseq</th>\n",
" <th>ZWILCH_rnaseq</th>\n",
" <th>ZWINT_rnaseq</th>\n",
" <th>ZXDA_rnaseq</th>\n",
" <th>ZXDB_rnaseq</th>\n",
" <th>ZXDC_rnaseq</th>\n",
" <th>ZYG11A_rnaseq</th>\n",
" <th>ZYG11B_rnaseq</th>\n",
" <th>ZYX_rnaseq</th>\n",
" <th>ZZEF1_rnaseq</th>\n",
" </tr>\n",
" <tr>\n",
" <th>case_id</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>TCGA-2F-A9KO</th>\n",
" <td>-0.3179</td>\n",
" <td>-0.3633</td>\n",
" <td>TCGA-2F-A9KO-01Z-00-DX1.195576CF-B739-4BD9-B15...</td>\n",
" <td>2F</td>\n",
" <td>0</td>\n",
" <td>BLCA</td>\n",
" <td>63</td>\n",
" <td>24.11</td>\n",
" <td>0</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-0.7172</td>\n",
" <td>0.7409</td>\n",
" <td>-0.8388</td>\n",
" <td>4.1375</td>\n",
" <td>3.9664</td>\n",
" <td>1.8437</td>\n",
" <td>-0.3959</td>\n",
" <td>-0.2561</td>\n",
" <td>-0.2866</td>\n",
" <td>1.8770</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-2F-A9KP</th>\n",
" <td>1.2766</td>\n",
" <td>-0.3982</td>\n",
" <td>TCGA-2F-A9KP-01Z-00-DX1.3CDF534E-958F-4467-AA7...</td>\n",
" <td>2F</td>\n",
" <td>0</td>\n",
" <td>BLCA</td>\n",
" <td>66</td>\n",
" <td>11.96</td>\n",
" <td>0</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>0.6373</td>\n",
" <td>0.8559</td>\n",
" <td>-0.1083</td>\n",
" <td>0.3393</td>\n",
" <td>0.2769</td>\n",
" <td>1.7320</td>\n",
" <td>-0.0975</td>\n",
" <td>2.6955</td>\n",
" <td>-0.6741</td>\n",
" <td>1.0323</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-2F-A9KP</th>\n",
" <td>1.2766</td>\n",
" <td>-0.3982</td>\n",
" <td>TCGA-2F-A9KP-01Z-00-DX2.718C82A3-252B-498E-BFB...</td>\n",
" <td>2F</td>\n",
" <td>0</td>\n",
" <td>BLCA</td>\n",
" <td>66</td>\n",
" <td>11.96</td>\n",
" <td>0</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>0.6373</td>\n",
" <td>0.8559</td>\n",
" <td>-0.1083</td>\n",
" <td>0.3393</td>\n",
" <td>0.2769</td>\n",
" <td>1.7320</td>\n",
" <td>-0.0975</td>\n",
" <td>2.6955</td>\n",
" <td>-0.6741</td>\n",
" <td>1.0323</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-2F-A9KQ</th>\n",
" <td>-0.0486</td>\n",
" <td>-0.3962</td>\n",
" <td>TCGA-2F-A9KQ-01Z-00-DX1.1C8CB2DD-5CC6-4E99-A0F...</td>\n",
" <td>2F</td>\n",
" <td>0</td>\n",
" <td>BLCA</td>\n",
" <td>69</td>\n",
" <td>94.81</td>\n",
" <td>1</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-0.5676</td>\n",
" <td>-0.0621</td>\n",
" <td>-0.4155</td>\n",
" <td>1.6846</td>\n",
" <td>0.7711</td>\n",
" <td>-0.3061</td>\n",
" <td>-0.5016</td>\n",
" <td>2.8548</td>\n",
" <td>-0.6171</td>\n",
" <td>-0.8608</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-2F-A9KR</th>\n",
" <td>1.6913</td>\n",
" <td>1.7748</td>\n",
" <td>TCGA-2F-A9KR-01Z-00-DX1.D6A4BD2D-18F3-4FA6-827...</td>\n",
" <td>2F</td>\n",
" <td>1</td>\n",
" <td>BLCA</td>\n",
" <td>59</td>\n",
" <td>104.57</td>\n",
" <td>0</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-1.3825</td>\n",
" <td>0.3550</td>\n",
" <td>-0.8143</td>\n",
" <td>0.8344</td>\n",
" <td>1.5075</td>\n",
" <td>3.6068</td>\n",
" <td>-0.5004</td>\n",
" <td>-0.0747</td>\n",
" <td>-0.2185</td>\n",
" <td>-0.4379</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-ZF-AA54</th>\n",
" <td>1.1883</td>\n",
" <td>-0.3521</td>\n",
" <td>TCGA-ZF-AA54-01Z-00-DX1.9118BB51-333A-4257-A79...</td>\n",
" <td>ZF</td>\n",
" <td>0</td>\n",
" <td>BLCA</td>\n",
" <td>71</td>\n",
" <td>19.38</td>\n",
" <td>0</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-0.0898</td>\n",
" <td>2.1092</td>\n",
" <td>-0.0291</td>\n",
" <td>-0.1058</td>\n",
" <td>-0.6721</td>\n",
" <td>0.2802</td>\n",
" <td>1.9504</td>\n",
" <td>-0.8784</td>\n",
" <td>0.9506</td>\n",
" <td>0.0607</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-ZF-AA58</th>\n",
" <td>-0.4491</td>\n",
" <td>-0.3450</td>\n",
" <td>TCGA-ZF-AA58-01Z-00-DX1.85C3611E-11FA-4AAE-B88...</td>\n",
" <td>ZF</td>\n",
" <td>1</td>\n",
" <td>BLCA</td>\n",
" <td>61</td>\n",
" <td>54.17</td>\n",
" <td>1</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-0.2075</td>\n",
" <td>-0.0617</td>\n",
" <td>0.0497</td>\n",
" <td>0.3673</td>\n",
" <td>-0.2208</td>\n",
" <td>0.3034</td>\n",
" <td>3.2580</td>\n",
" <td>-0.2089</td>\n",
" <td>1.6053</td>\n",
" <td>-0.8746</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-ZF-AA5H</th>\n",
" <td>2.3456</td>\n",
" <td>-0.3866</td>\n",
" <td>TCGA-ZF-AA5H-01Z-00-DX1.2B5DF00E-E0FD-4C58-A82...</td>\n",
" <td>ZF</td>\n",
" <td>1</td>\n",
" <td>BLCA</td>\n",
" <td>60</td>\n",
" <td>29.47</td>\n",
" <td>1</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-1.4118</td>\n",
" <td>-0.1236</td>\n",
" <td>0.3822</td>\n",
" <td>-0.7003</td>\n",
" <td>-0.7661</td>\n",
" <td>-1.7035</td>\n",
" <td>-0.5423</td>\n",
" <td>-0.3488</td>\n",
" <td>1.3713</td>\n",
" <td>-0.4365</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-ZF-AA5N</th>\n",
" <td>-1.3822</td>\n",
" <td>-0.4157</td>\n",
" <td>TCGA-ZF-AA5N-01Z-00-DX1.A207E3EE-CC7D-4267-A77...</td>\n",
" <td>ZF</td>\n",
" <td>1</td>\n",
" <td>BLCA</td>\n",
" <td>62</td>\n",
" <td>5.52</td>\n",
" <td>0</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-0.1733</td>\n",
" <td>-0.2397</td>\n",
" <td>-0.6853</td>\n",
" <td>-1.0240</td>\n",
" <td>-1.2890</td>\n",
" <td>-1.5666</td>\n",
" <td>-0.1270</td>\n",
" <td>-1.4662</td>\n",
" <td>0.3981</td>\n",
" <td>-0.5976</td>\n",
" </tr>\n",
" <tr>\n",
" <th>TCGA-ZF-AA5P</th>\n",
" <td>3.4519</td>\n",
" <td>-0.1962</td>\n",
" <td>TCGA-ZF-AA5P-01Z-00-DX1.B91697A2-A186-4E67-A81...</td>\n",
" <td>ZF</td>\n",
" <td>0</td>\n",
" <td>BLCA</td>\n",
" <td>65</td>\n",
" <td>12.22</td>\n",
" <td>1</td>\n",
" <td>1.0</td>\n",
" <td>...</td>\n",
" <td>-1.1056</td>\n",
" <td>-0.6634</td>\n",
" <td>0.0517</td>\n",
" <td>-0.3570</td>\n",
" <td>-0.4843</td>\n",
" <td>-0.3792</td>\n",
" <td>-0.1964</td>\n",
" <td>0.4200</td>\n",
" <td>3.2547</td>\n",
" <td>-0.1232</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>437 rows × 20403 columns</p>\n",
"</div>"
],
"text/plain": [
" ZZZ3_rnaseq TPTEP1_rnaseq \\\n",
"case_id \n",
"TCGA-2F-A9KO -0.3179 -0.3633 \n",
"TCGA-2F-A9KP 1.2766 -0.3982 \n",
"TCGA-2F-A9KP 1.2766 -0.3982 \n",
"TCGA-2F-A9KQ -0.0486 -0.3962 \n",
"TCGA-2F-A9KR 1.6913 1.7748 \n",
"... ... ... \n",
"TCGA-ZF-AA54 1.1883 -0.3521 \n",
"TCGA-ZF-AA58 -0.4491 -0.3450 \n",
"TCGA-ZF-AA5H 2.3456 -0.3866 \n",
"TCGA-ZF-AA5N -1.3822 -0.4157 \n",
"TCGA-ZF-AA5P 3.4519 -0.1962 \n",
"\n",
" slide_id site \\\n",
"case_id \n",
"TCGA-2F-A9KO TCGA-2F-A9KO-01Z-00-DX1.195576CF-B739-4BD9-B15... 2F \n",
"TCGA-2F-A9KP TCGA-2F-A9KP-01Z-00-DX1.3CDF534E-958F-4467-AA7... 2F \n",
"TCGA-2F-A9KP TCGA-2F-A9KP-01Z-00-DX2.718C82A3-252B-498E-BFB... 2F \n",
"TCGA-2F-A9KQ TCGA-2F-A9KQ-01Z-00-DX1.1C8CB2DD-5CC6-4E99-A0F... 2F \n",
"TCGA-2F-A9KR TCGA-2F-A9KR-01Z-00-DX1.D6A4BD2D-18F3-4FA6-827... 2F \n",
"... ... ... \n",
"TCGA-ZF-AA54 TCGA-ZF-AA54-01Z-00-DX1.9118BB51-333A-4257-A79... ZF \n",
"TCGA-ZF-AA58 TCGA-ZF-AA58-01Z-00-DX1.85C3611E-11FA-4AAE-B88... ZF \n",
"TCGA-ZF-AA5H TCGA-ZF-AA5H-01Z-00-DX1.2B5DF00E-E0FD-4C58-A82... ZF \n",
"TCGA-ZF-AA5N TCGA-ZF-AA5N-01Z-00-DX1.A207E3EE-CC7D-4267-A77... ZF \n",
"TCGA-ZF-AA5P TCGA-ZF-AA5P-01Z-00-DX1.B91697A2-A186-4E67-A81... ZF \n",
"\n",
" is_female oncotree_code age survival_months censorship \\\n",
"case_id \n",
"TCGA-2F-A9KO 0 BLCA 63 24.11 0 \n",
"TCGA-2F-A9KP 0 BLCA 66 11.96 0 \n",
"TCGA-2F-A9KP 0 BLCA 66 11.96 0 \n",
"TCGA-2F-A9KQ 0 BLCA 69 94.81 1 \n",
"TCGA-2F-A9KR 1 BLCA 59 104.57 0 \n",
"... ... ... ... ... ... \n",
"TCGA-ZF-AA54 0 BLCA 71 19.38 0 \n",
"TCGA-ZF-AA58 1 BLCA 61 54.17 1 \n",
"TCGA-ZF-AA5H 1 BLCA 60 29.47 1 \n",
"TCGA-ZF-AA5N 1 BLCA 62 5.52 0 \n",
"TCGA-ZF-AA5P 0 BLCA 65 12.22 1 \n",
"\n",
" train ... ZW10_rnaseq ZWILCH_rnaseq ZWINT_rnaseq \\\n",
"case_id ... \n",
"TCGA-2F-A9KO 1.0 ... -0.7172 0.7409 -0.8388 \n",
"TCGA-2F-A9KP 1.0 ... 0.6373 0.8559 -0.1083 \n",
"TCGA-2F-A9KP 1.0 ... 0.6373 0.8559 -0.1083 \n",
"TCGA-2F-A9KQ 1.0 ... -0.5676 -0.0621 -0.4155 \n",
"TCGA-2F-A9KR 1.0 ... -1.3825 0.3550 -0.8143 \n",
"... ... ... ... ... ... \n",
"TCGA-ZF-AA54 1.0 ... -0.0898 2.1092 -0.0291 \n",
"TCGA-ZF-AA58 1.0 ... -0.2075 -0.0617 0.0497 \n",
"TCGA-ZF-AA5H 1.0 ... -1.4118 -0.1236 0.3822 \n",
"TCGA-ZF-AA5N 1.0 ... -0.1733 -0.2397 -0.6853 \n",
"TCGA-ZF-AA5P 1.0 ... -1.1056 -0.6634 0.0517 \n",
"\n",
" ZXDA_rnaseq ZXDB_rnaseq ZXDC_rnaseq ZYG11A_rnaseq \\\n",
"case_id \n",
"TCGA-2F-A9KO 4.1375 3.9664 1.8437 -0.3959 \n",
"TCGA-2F-A9KP 0.3393 0.2769 1.7320 -0.0975 \n",
"TCGA-2F-A9KP 0.3393 0.2769 1.7320 -0.0975 \n",
"TCGA-2F-A9KQ 1.6846 0.7711 -0.3061 -0.5016 \n",
"TCGA-2F-A9KR 0.8344 1.5075 3.6068 -0.5004 \n",
"... ... ... ... ... \n",
"TCGA-ZF-AA54 -0.1058 -0.6721 0.2802 1.9504 \n",
"TCGA-ZF-AA58 0.3673 -0.2208 0.3034 3.2580 \n",
"TCGA-ZF-AA5H -0.7003 -0.7661 -1.7035 -0.5423 \n",
"TCGA-ZF-AA5N -1.0240 -1.2890 -1.5666 -0.1270 \n",
"TCGA-ZF-AA5P -0.3570 -0.4843 -0.3792 -0.1964 \n",
"\n",
" ZYG11B_rnaseq ZYX_rnaseq ZZEF1_rnaseq \n",
"case_id \n",
"TCGA-2F-A9KO -0.2561 -0.2866 1.8770 \n",
"TCGA-2F-A9KP 2.6955 -0.6741 1.0323 \n",
"TCGA-2F-A9KP 2.6955 -0.6741 1.0323 \n",
"TCGA-2F-A9KQ 2.8548 -0.6171 -0.8608 \n",
"TCGA-2F-A9KR -0.0747 -0.2185 -0.4379 \n",
"... ... ... ... \n",
"TCGA-ZF-AA54 -0.8784 0.9506 0.0607 \n",
"TCGA-ZF-AA58 -0.2089 1.6053 -0.8746 \n",
"TCGA-ZF-AA5H -0.3488 1.3713 -0.4365 \n",
"TCGA-ZF-AA5N -1.4662 0.3981 -0.5976 \n",
"TCGA-ZF-AA5P 0.4200 3.2547 -0.1232 \n",
"\n",
"[437 rows x 20403 columns]"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"slide_data  # sanity check: display the final slide-level table (repr is auto-truncated by pandas)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.3"
}
},
"nbformat": 4,
"nbformat_minor": 4
}