a b/datasets_csv/.ipynb_checkpoints/Preprocessing-checkpoint.ipynb
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "from os.path import join\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "\n",
    "label_col = 'survival_months'\n",
    "n_bins = 4\n",
    "eps = 1e-6"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "def add_bins(slide_data):\n",
    "    assert 'case_id' in slide_data.columns and 'censorship' in slide_data.columns\n",
    "    \n",
    "    patients_df = slide_data.drop_duplicates(['case_id']).copy()\n",
    "    uncensored_df = patients_df[patients_df['censorship'] < 1]\n",
    "    disc_labels, q_bins = pd.qcut(uncensored_df[label_col], q=n_bins, retbins=True, labels=False)\n",
    "    q_bins[-1] = slide_data[label_col].max() + eps\n",
    "    q_bins[0] = slide_data[label_col].min() - eps\n",
    "\n",
    "    disc_labels, q_bins = pd.cut(patients_df[label_col], bins=q_bins, retbins=True, labels=False, right=False, include_lowest=True)\n",
    "    patients_df.insert(2, 'label', disc_labels.values.astype(int))\n",
    "\n",
    "    patient_dict = {}\n",
    "    slide_data = slide_data.set_index('case_id')\n",
    "    for patient in patients_df['case_id']:\n",
    "        slide_ids = slide_data.loc[patient, 'slide_id']\n",
    "        if isinstance(slide_ids, str):\n",
    "            slide_ids = np.array(slide_ids).reshape(-1)\n",
    "        else:\n",
    "            slide_ids = slide_ids.values\n",
    "        patient_dict.update({patient:slide_ids})\n",
    "        \n",
    "    return q_bins, patient_dict, patients_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "slide_data = pd.read_csv('./tcga_gbmlgg_all_clean.csv.zip', compression='zip', header=0, index_col=0, sep=',',  low_memory=False)\n",
    "\n",
    "n_bins = 4\n",
    "eps = 1e-6\n",
    "\n",
    "### Ensure 'case_id' is a column, not the index.\n",
    "if 'case_id' not in slide_data:\n",
64
    "    slide_data.index = slide_data.index.str[:12]\n",
65
    "    slide_data['case_id'] = slide_data.index\n",
66
    "    slide_data = slide_data.reset_index(drop=True)\n",
67
    "\n",
68
    "q_bins, patients_dict, slide_data = add_bins(slide_data)\n",
69
    "\n",
70
    "slide_data.reset_index(drop=True, inplace=True)\n",
71
    "slide_data = slide_data.assign(slide_id=slide_data['case_id'])\n",
72
    "\n",
    "label_dict = {}\n",
    "key_count = 0\n",
    "for i in range(len(q_bins)-1):\n",
    "    for c in [0, 1]:\n",
    "        label_dict.update({(i, c):key_count})\n",
    "        key_count+=1\n",
    "\n",
    "for i in slide_data.index:\n",
    "    key = slide_data.loc[i, 'label']\n",
    "    slide_data.at[i, 'disc_label'] = key\n",
    "    censorship = slide_data.loc[i, 'censorship']\n",
    "    key = (key, int(censorship))\n",
    "    slide_data.at[i, 'label'] = label_dict[key]\n",
    "\n",
    "bins = q_bins\n",
    "num_classes=len(label_dict)\n",
    "patients_df = slide_data.drop_duplicates(['case_id'])\n",
    "patient_data = {'case_id':patients_df['case_id'].values, 'label':patients_df['label'].values}\n",
    "\n",
    "new_cols = list(slide_data.columns[-2:]) + list(slide_data.columns[:-2])\n",
    "slide_data = slide_data[new_cols]\n",
    "metadata = slide_data.columns[:11]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.pipeline import Pipeline\n",
    "from sklearn.decomposition import PCA\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "\n",
    "def series_intersection(s1, s2):\n",
    "    return pd.Series(list(set(s1) & set(s2)))\n",
    "\n",
    "genomic_features = slide_data.drop(metadata, axis=1)\n",
    "scaler_omic = StandardScaler().fit(genomic_features)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/mahmoodlab/anaconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py:3071: DtypeWarning: Columns (2) have mixed types.Specify dtype option on import or set low_memory=False.\n",
      "  has_raised = await self.run_ast_nodes(code_ast.body, cell_name,\n"
     ]
    }
   ],
   "source": [
    "signatures = pd.read_csv('./signatures.csv')\n",
    "slide_df = pd.read_csv('./tcga_gbmlgg_all_clean.csv.zip')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [],
   "source": [
    "omic_from_signatures = []\n",
    "for col in signatures.columns:\n",
    "    omic = signatures[col].dropna().unique()\n",
    "    omic_from_signatures.append(omic)\n",
    "\n",
    "omic_from_signatures = np.concatenate(omic_from_signatures)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/mahmoodlab/anaconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py:3071: DtypeWarning: Columns (2) have mixed types.Specify dtype option on import or set low_memory=False.\n",
      "  has_raised = await self.run_ast_nodes(code_ast.body, cell_name,\n",
      "/home/mahmoodlab/anaconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py:3071: DtypeWarning: Columns (4) have mixed types.Specify dtype option on import or set low_memory=False.\n",
      "  has_raised = await self.run_ast_nodes(code_ast.body, cell_name,\n"
     ]
    }
   ],
   "source": [
    "for fname in os.listdir('./'):\n",
    "    if fname.endswith('.csv.zip'):\n",
    "        slide_df = pd.read_csv(fname)\n",
    "        omic_overlap = np.concatenate([omic_from_signatures+mode for mode in ['_mut', '_cnv', '_rnaseq']])\n",
    "        omic_overlap = sorted(series_intersection(omic_overlap, slide_df.columns))\n",
    "        slide_df[list(slide_df.columns[:9]) + omic_overlap].to_csv('../dataset_csv/%s' % fname)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Tumor Suppressor Genes Embedding Size: 84\n",
      "Oncogenes Embedding Size: 314\n",
      "Protein Kinases Embedding Size: 498\n",
      "Cell Differentiation Markers Embedding Size: 415\n",
      "Transcription Factors Embedding Size: 1396\n",
      "Cytokines and Growth Factors Embedding Size: 428\n"
     ]
    }
   ],
   "source": [
    "\n",
    "def series_intersection(s1, s2):\n",
    "    return pd.Series(list(set(s1) & set(s2)))\n",
    "\n",
    "sig_names = []\n",
    "for col in signatures.columns:\n",
    "    sig = signatures[col].dropna().unique()\n",
    "    sig = np.concatenate([sig+mode for mode in ['_mut', '_cnv', '_rnaseq']])\n",
    "    sig = sorted(series_intersection(sig, genomic_features.columns))\n",
    "    sig_names.append(sig)\n",
    "    print('%s Embedding Size: %d' % (col, len(sig)))\n",
    "sig_sizes = [len(sig) for sig in sig_names]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['IFNA10_cnv',\n",
       " 'IFNA13_cnv',\n",
       " 'IFNA14_cnv',\n",
       " 'IFNA16_cnv',\n",
       " 'IFNA17_cnv',\n",
       " 'IFNA1_cnv',\n",
       " 'IFNA21_cnv',\n",
       " 'IFNA2_cnv',\n",
       " 'IFNA4_cnv',\n",
       " 'IFNA5_cnv',\n",
       " 'IFNA6_cnv',\n",
       " 'IFNA7_cnv',\n",
       " 'IFNA8_cnv',\n",
       " 'IFNB1_cnv',\n",
       " 'IFNE_cnv',\n",
       " 'IFNW1_cnv',\n",
       " 'PDGFRA_cnv']"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sig"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 434,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "     NDUFS5_cnv  MACF1_cnv  RNA5SP44_cnv  KIAA0754_cnv  BMP8A_cnv  PABPC4_cnv  \\\n",
       "0            -1         -1            -1            -1         -1          -1   \n",
       "1             2          2             2             2          2           2   \n",
       "2             0          0             0             0          0           0   \n",
       "3             0          0             0             0          0           0   \n",
       "4             0          0             0             0          0           0   \n",
       "..          ...        ...           ...           ...        ...         ...   \n",
       "368           2          2             2             2          2           2   \n",
       "369           0          0             0             0          0           0   \n",
       "370           1          1             1             1          1           1   \n",
       "371           0          0             0             0          0           0   \n",
       "372           0          0             0             0          0           0   \n",
       "\n",
       "     SNORA55_cnv  HEYL_cnv  HPCAL4_cnv  NT5C1A_cnv  ...  ZWINT_rnaseq  \\\n",
       "0             -1        -1          -1          -1  ...       -0.8388   \n",
       "1              2         2           2           2  ...       -0.1083   \n",
       "2              0         0           0           0  ...       -0.4155   \n",
       "3              0         0           0           0  ...       -0.8143   \n",
       "4              0         0           0           0  ...        0.0983   \n",
       "..           ...       ...         ...         ...  ...           ...   \n",
       "368            2         2           2           2  ...       -0.0291   \n",
       "369            0         0           0           0  ...        0.0497   \n",
       "370            1         1           1           1  ...        0.3822   \n",
       "371            0         0           0           0  ...       -0.6853   \n",
       "372            0         0           0           0  ...        0.0517   \n",
       "\n",
       "     ZXDA_rnaseq  ZXDB_rnaseq  ZXDC_rnaseq  ZYG11A_rnaseq  ZYG11B_rnaseq  \\\n",
       "0         4.1375       3.9664       1.8437        -0.3959        -0.2561   \n",
       "1         0.3393       0.2769       1.7320        -0.0975         2.6955   \n",
       "2         1.6846       0.7711      -0.3061        -0.5016         2.8548   \n",
       "3         0.8344       1.5075       3.6068        -0.5004        -0.0747   \n",
       "4        -0.7908      -0.0053      -0.0643        -0.3706         0.3870   \n",
       "..           ...          ...          ...            ...            ...   \n",
       "368      -0.1058      -0.6721       0.2802         1.9504        -0.8784   \n",
       "369       0.3673      -0.2208       0.3034         3.2580        -0.2089   \n",
       "370      -0.7003      -0.7661      -1.7035        -0.5423        -0.3488   \n",
       "371      -1.0240      -1.2890      -1.5666        -0.1270        -1.4662   \n",
       "372      -0.3570      -0.4843      -0.3792        -0.1964         0.4200   \n",
       "\n",
       "     ZYX_rnaseq  ZZEF1_rnaseq  ZZZ3_rnaseq  TPTEP1_rnaseq  \n",
       "0       -0.2866        1.8770      -0.3179        -0.3633  \n",
       "1       -0.6741        1.0323       1.2766        -0.3982  \n",
       "2       -0.6171       -0.8608      -0.0486        -0.3962  \n",
       "3       -0.2185       -0.4379       1.6913         1.7748  \n",
       "4       -0.5589       -0.5979       0.0047        -0.3548  \n",
       "..          ...           ...          ...            ...  \n",
       "368      0.9506        0.0607       1.1883        -0.3521  \n",
       "369      1.6053       -0.8746      -0.4491        -0.3450  \n",
       "370      1.3713       -0.4365       2.3456        -0.3866  \n",
       "371      0.3981       -0.5976      -1.3822        -0.4157  \n",
       "372      3.2547       -0.1232       3.4519        -0.1962  \n",
       "\n",
       "[373 rows x 20395 columns]"
      ]
     },
     "execution_count": 434,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "genomic_features"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "import pdb\n",
    "import numpy as np\n",
    "\n",
    "class MIL_Sum_FC_surv(nn.Module):\n",
    "    def __init__(self, size_arg = \"small\", dropout=0.25, n_classes=4):\n",
    "        super(MIL_Sum_FC_surv, self).__init__()\n",
    "\n",
    "        self.size_dict = {\"small\": [1024, 512, 256], \"big\": [1024, 512, 384]}\n",
    "        size = self.size_dict[size_arg]\n",
    "        self.phi = nn.Sequential(*[nn.Linear(size[0], size[1]), nn.ReLU(), nn.Dropout(dropout)])\n",
    "        self.rho = nn.Sequential(*[nn.Linear(size[1], size[2]), nn.ReLU(), nn.Dropout(dropout)])\n",
    "        self.classifier = nn.Linear(size[2], n_classes)\n",
    "\n",
    "    def relocate(self):\n",
    "        device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "        if torch.cuda.device_count() >= 1:\n",
    "            device_ids = list(range(torch.cuda.device_count()))\n",
    "            self.phi = nn.DataParallel(self.phi, device_ids=device_ids).to('cuda:0')\n",
    "\n",
    "        self.rho = self.rho.to(device)\n",
    "        self.classifier = self.classifier.to(device)\n",
    "\n",
    "    def forward(self, **kwargs):\n",
    "        h = kwargs['x_path']\n",
    "\n",
    "        h = self.phi(h).sum(axis=0)\n",
    "        h = self.rho(h)\n",
    "        logits  = self.classifier(h).unsqueeze(0)\n",
    "        Y_hat = torch.topk(logits, 1, dim = 1)[1]\n",
    "        hazards = torch.sigmoid(logits)\n",
    "        S = torch.cumprod(1 - hazards, dim=1)\n",
    "        \n",
    "        return hazards, S, Y_hat, None, None\n",
    "\n",
    "from os.path import join\n",
    "from collections import OrderedDict\n",
    "\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "import pdb\n",
    "import numpy as np\n",
    "\n",
    "\"\"\"\n",
    "A Modified Implementation of Deep Attention MIL\n",
    "\"\"\"\n",
    "\n",
    "\n",
    "\"\"\"\n",
    "Attention Network without Gating (2 fc layers)\n",
    "args:\n",
    "    L: input feature dimension\n",
    "    D: hidden layer dimension\n",
    "    dropout: whether to use dropout (p = 0.25)\n",
    "    n_classes: number of classes (experimental usage for multiclass MIL)\n",
    "\"\"\"\n",
    "class Attn_Net(nn.Module):\n",
    "\n",
    "    def __init__(self, L = 1024, D = 256, dropout = False, n_classes = 1):\n",
    "        super(Attn_Net, self).__init__()\n",
    "        self.module = [\n",
    "            nn.Linear(L, D),\n",
    "            nn.Tanh()]\n",
    "\n",
    "        if dropout:\n",
    "            self.module.append(nn.Dropout(0.25))\n",
    "\n",
    "        self.module.append(nn.Linear(D, n_classes))\n",
    "        \n",
    "        self.module = nn.Sequential(*self.module)\n",
    "    \n",
    "    def forward(self, x):\n",
    "        return self.module(x), x # N x n_classes\n",
    "\n",
    "\"\"\"\n",
    "Attention Network with Sigmoid Gating (3 fc layers)\n",
    "args:\n",
    "    L: input feature dimension\n",
    "    D: hidden layer dimension\n",
    "    dropout: whether to use dropout (p = 0.25)\n",
    "    n_classes: number of classes (experimental usage for multiclass MIL)\n",
    "\"\"\"\n",
    "class Attn_Net_Gated(nn.Module):\n",
    "\n",
    "    def __init__(self, L = 1024, D = 256, dropout = False, n_classes = 1):\n",
    "        super(Attn_Net_Gated, self).__init__()\n",
    "        self.attention_a = [\n",
    "            nn.Linear(L, D),\n",
    "            nn.Tanh()]\n",
    "        \n",
    "        self.attention_b = [nn.Linear(L, D),\n",
    "                            nn.Sigmoid()]\n",
    "        if dropout:\n",
    "            self.attention_a.append(nn.Dropout(0.25))\n",
    "            self.attention_b.append(nn.Dropout(0.25))\n",
    "\n",
    "        self.attention_a = nn.Sequential(*self.attention_a)\n",
    "        self.attention_b = nn.Sequential(*self.attention_b)\n",
    "        \n",
    "        self.attention_c = nn.Linear(D, n_classes)\n",
    "\n",
    "    def forward(self, x):\n",
    "        a = self.attention_a(x)\n",
    "        b = self.attention_b(x)\n",
    "        A = a.mul(b)\n",
    "        A = self.attention_c(A)  # N x n_classes\n",
    "        return A, x\n",
    "    \n",
    "class MIL_Cluster_FC_surv(nn.Module):\n",
    "    def __init__(self, num_clusters=10, size_arg = \"small\", dropout=0.25, n_classes=4):\n",
    "        super(MIL_Cluster_FC_surv, self).__init__()\n",
    "        self.size_dict = {\"small\": [1024, 512, 256], \"big\": [1024, 512, 384]}\n",
    "        self.num_clusters = num_clusters\n",
    "        \n",
    "        ### Phenotype Learning\n",
    "        size = self.size_dict[size_arg]\n",
    "        phis = []\n",
    "        for phenotype_i in range(num_clusters):\n",
    "            phi = [nn.Linear(size[0], size[1]), nn.ReLU(), nn.Dropout(dropout),\n",
    "                   nn.Linear(size[1], size[1]), nn.ReLU(), nn.Dropout(dropout)]\n",
    "            phis.append(nn.Sequential(*phi))\n",
    "        self.phis = nn.ModuleList(phis)\n",
    "        self.pool1d = nn.AdaptiveAvgPool1d(1)\n",
    "        \n",
    "        \n",
    "        ### WSI Attention MIL Construction\n",
    "        fc = [nn.Linear(size[1], size[1]), nn.ReLU()]\n",
    "        fc.append(nn.Dropout(0.25))\n",
    "        attention_net = Attn_Net_Gated(L=size[1], D=size[2], dropout=dropout, n_classes=1)\n",
    "        fc.append(attention_net)\n",
    "        self.attention_net = nn.Sequential(*fc)\n",
    "\n",
    "        \n",
    "        self.rho = nn.Sequential(*[nn.Linear(size[1], size[2]), nn.ReLU(), nn.Dropout(dropout)])\n",
    "        self.classifier = nn.Linear(size[2], n_classes)\n",
    "\n",
    "    def relocate(self):\n",
    "        device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "        if torch.cuda.device_count() >= 1:\n",
    "            device_ids = list(range(torch.cuda.device_count()))\n",
    "            self.phis = nn.DataParallel(self.phis, device_ids=device_ids).to('cuda:0')\n",
    "\n",
791
    "        self.rho = self.rho.to(device)\n",
792
    "        self.classifier = self.classifier.to(device)\n",
793
    "\n",
794
    "    def forward(self, **kwargs):\n",
795
    "        x_path = kwargs['x_path']\n",
796
    "        ### Phenotyping\n",
    "        h_phenotypes = []\n",
    "        from sklearn.cluster import KMeans\n",
    "        # KMeans runs on CPU numpy data, so detach the features first.\n",
    "        #cluster_ids_x, cluster_centers = kmeans(X=x_path, num_clusters=self.num_clusters, distance='euclidean', device=torch.device('cpu'))\n",
    "        cluster_ids_x = KMeans(n_clusters=self.num_clusters, random_state=2021, max_iter=20).fit_predict(x_path.detach().cpu().numpy())\n",
    "        for i in range(self.num_clusters):\n",
    "            h_phenotypes_i = self.phis[i](x_path[cluster_ids_x==i])\n",
    "            h_phenotypes.append(self.pool1d(h_phenotypes_i.T.unsqueeze(0)).squeeze(2))\n",
    "        h_phenotypes = torch.stack(h_phenotypes, dim=1).squeeze(0)\n",
    "\n",
    "\n",
    "        ### Attention MIL\n",
    "        A, h = self.attention_net(h_phenotypes)  \n",
    "        A = torch.transpose(A, 1, 0)\n",
    "        if 'attention_only' in kwargs.keys():\n",
    "            if kwargs['attention_only']:\n",
    "                return A\n",
    "        A_raw = A \n",
    "        A = F.softmax(A, dim=1) \n",
    "        h = torch.mm(A, h_phenotypes)\n",
    "\n",
    "        \n",
    "        h = self.rho(h)\n",
    "        logits  = self.classifier(h).unsqueeze(0)\n",
    "        Y_hat = torch.topk(logits, 1, dim = 1)[1]\n",
    "        hazards = torch.sigmoid(logits)\n",
    "        S = torch.cumprod(1 - hazards, dim=1)\n",
    "        \n",
    "        return hazards, S, Y_hat, None, None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "x_path = torch.randint(10, size=(500, 1024)).type(torch.cuda.FloatTensor)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.cluster import KMeans\n",
    "kmeans = KMeans(n_clusters=10, random_state=2021, max_iter=20).fit_predict(x_path.cpu())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([5, 5, 3, 5, 8, 4, 8, 7, 5, 4, 9, 1, 9, 1, 6, 1, 1, 0, 5, 0, 4, 3,\n",
       "       0, 6, 3, 1, 0, 7, 9, 8, 0, 5, 5, 3, 0, 1, 5, 1, 0, 6, 6, 4, 1, 5,\n",
       "       3, 0, 1, 0, 8, 5, 1, 8, 1, 0, 5, 0, 2, 5, 6, 5, 0, 0, 5, 1, 2, 7,\n",
       "       4, 6, 5, 3, 0, 7, 9, 1, 3, 4, 4, 5, 7, 9, 9, 5, 0, 1, 9, 1, 2, 0,\n",
       "       6, 3, 1, 1, 2, 4, 0, 5, 1, 1, 1, 0, 0, 9, 8, 1, 5, 5, 0, 9, 2, 3,\n",
       "       7, 0, 1, 6, 7, 5, 3, 5, 0, 1, 6, 1, 6, 2, 8, 7, 6, 1, 6, 2, 5, 0,\n",
       "       1, 6, 0, 9, 2, 1, 0, 1, 7, 7, 6, 1, 6, 0, 3, 4, 1, 3, 2, 4, 4, 5,\n",
       "       4, 1, 1, 9, 6, 0, 3, 6, 4, 8, 7, 9, 6, 5, 5, 9, 0, 6, 0, 1, 9, 2,\n",
       "       3, 5, 1, 9, 6, 1, 0, 6, 6, 0, 0, 6, 7, 1, 6, 1, 1, 1, 4, 0, 2, 1,\n",
       "       9, 5, 7, 5, 9, 0, 1, 0, 6, 2, 2, 1, 1, 5, 3, 5, 3, 6, 5, 6, 9, 5,\n",
       "       2, 2, 2, 6, 0, 0, 0, 5, 2, 6, 6, 0, 2, 5, 1, 9, 2, 4, 4, 0, 4, 7,\n",
       "       4, 1, 1, 3, 6, 0, 1, 2, 4, 0, 8, 1, 8, 5, 5, 7, 4, 1, 6, 1, 0, 8,\n",
       "       6, 1, 1, 4, 8, 7, 5, 2, 3, 0, 2, 9, 5, 6, 4, 3, 6, 5, 5, 4, 6, 6,\n",
       "       0, 1, 5, 1, 1, 1, 1, 9, 5, 7, 3, 0, 2, 4, 0, 5, 4, 0, 5, 0, 6, 0,\n",
       "       3, 1, 4, 6, 3, 7, 1, 6, 7, 0, 1, 4, 6, 1, 6, 0, 6, 0, 5, 9, 1, 1,\n",
       "       3, 1, 5, 6, 1, 6, 6, 8, 2, 0, 7, 9, 9, 6, 0, 6, 2, 6, 8, 0, 8, 5,\n",
       "       1, 3, 1, 9, 2, 3, 5, 8, 2, 5, 6, 6, 5, 2, 9, 0, 1, 8, 5, 9, 5, 1,\n",
       "       0, 1, 0, 8, 6, 1, 7, 2, 8, 3, 1, 6, 2, 2, 1, 6, 0, 2, 6, 1, 1, 4,\n",
       "       5, 6, 4, 0, 5, 0, 9, 0, 4, 8, 0, 7, 6, 5, 5, 0, 4, 1, 1, 2, 2, 0,\n",
       "       0, 6, 4, 0, 7, 7, 2, 3, 1, 4, 7, 9, 4, 7, 2, 4, 5, 6, 4, 5, 7, 9,\n",
       "       8, 0, 6, 2, 0, 6, 6, 3, 5, 4, 4, 0, 1, 0, 5, 3, 1, 6, 0, 7, 4, 1,\n",
       "       6, 3, 6, 0, 4, 1, 5, 7, 3, 1, 4, 8, 0, 7, 0, 6, 1, 1, 0, 1, 5, 1,\n",
       "       2, 3, 2, 3, 8, 8, 4, 6, 5, 6, 1, 0, 7, 6, 4, 4], dtype=int32)"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "kmeans"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(tensor([[0.9992, 0.0000, 0.0000, 1.0000]], grad_fn=<SigmoidBackward>),\n",
       " tensor([[0.0008, 0.0008, 0.0008, 0.0000]], grad_fn=<CumprodBackward>),\n",
       " tensor([[3]]),\n",
       " None,\n",
       " None)"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
    "model = MIL_Sum_FC_surv()\n",
    "model.forward(x_path=x_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(tensor([[4.2595e-07, 1.0000e+00, 0.0000e+00, 7.2488e-12]],\n",
       "        grad_fn=<SigmoidBackward>),\n",
       " tensor([[1.0000, 0.0000, 0.0000, 0.0000]], grad_fn=<CumprodBackward>),\n",
       " tensor([[1]]),\n",
       " None,\n",
       " None)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
    "model = MIL_Cluster_FC_surv()\n",
    "model.forward(x_path=x_path)"
    "model.forward(x_path=x_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "fname = os.path.join('/media/ssd1/pan-cancer/tcga_gbm_20x_features/h5_files/TCGA-02-0001-01Z-00-DX1.83fce43e-42ac-4dcd-b156-2908e75f2e47.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "import h5py\n",
    "h5 = h5py.File(fname, \"r\")\n",
    "coords = np.array(h5['coords'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "fm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([43121, 29428])"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.array(h5['coords'])[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([43121, 29940])"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.array(h5['coords'])[1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "512"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.array(h5['coords'])[1][1] - np.array(h5['coords'])[0][1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "512"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.array(h5['coords'])[2][1] - np.array(h5['coords'])[1][1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "import nmslib\n",
    "class Hnsw:\n",
    "\n",
    "    def __init__(self, space='cosinesimil', index_params=None,\n",
    "                 query_params=None, print_progress=True):\n",
    "        self.space = space\n",
    "        self.index_params = index_params\n",
    "        self.query_params = query_params\n",
    "        self.print_progress = print_progress\n",
    "\n",
    "    def fit(self, X):\n",
    "        index_params = self.index_params\n",
    "        if index_params is None:\n",
    "            index_params = {'M': 16, 'post': 0, 'efConstruction': 400}\n",
    "\n",
    "        query_params = self.query_params\n",
    "        if query_params is None:\n",
    "            query_params = {'ef': 90}\n",
    "\n",
    "        # this is the actual nmslib part; the syntax should be fairly readable,\n",
    "        # and the documentation has a more detailed introduction:\n",
    "        # https://nmslib.github.io/nmslib/quickstart.html\n",
    "        index = nmslib.init(space=self.space, method='hnsw')\n",
    "        index.addDataPointBatch(X)\n",
    "        index.createIndex(index_params, print_progress=self.print_progress)\n",
    "        index.setQueryTimeParams(query_params)\n",
    "\n",
    "        self.index_ = index\n",
    "        self.index_params_ = index_params\n",
    "        self.query_params_ = query_params\n",
    "        return self\n",
    "\n",
    "    def query(self, vector, topn):\n",
    "        # the knnQuery returns indices and corresponding distance\n",
    "        # we will throw the distance away for now\n",
    "        indices, _ = self.index_.knnQuery(vector, k=topn)\n",
    "        return indices"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "x"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([85, 87, 88, 73, 75, 76, 63, 29], dtype=int32)"
      ]
     },
     "execution_count": 54,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model = Hnsw(space='l2')\n",
    "model.fit(coords)\n",
    "model.query(coords, topn=8)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [],
   "source": [
    "import networkx as nx\n",
    "G = nx.Graph()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([43121, 29428])"
      ]
     },
     "execution_count": 56,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "for"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "130"
      ]
     },
     "execution_count": 52,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "temp[3]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([ 7440, 13280])"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "coords[100]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [],
   "source": [
    "indices = model.query(coords[100], topn =10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[ 7440, 13280],\n",
       "       [ 7440, 13792],\n",
       "       [ 7952, 13280],\n",
       "       [ 6928, 13792],\n",
       "       [ 7952, 12768],\n",
       "       [ 7952, 13792],\n",
       "       [ 7440, 14304],\n",
       "       [ 8464, 13280],\n",
       "       [ 6928, 14304],\n",
       "       [ 8464, 13792]])"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "coords[indices]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 84,
   "metadata": {},
   "outputs": [],
   "source": [
    "def do_KmeansPCA(X=None, y=None, scaler=None, n_clusters=4, n_components=5):\n",
    "    import pandas as pd\n",
    "    import seaborn as sns\n",
    "    from sklearn.datasets import make_blobs\n",
    "    from sklearn import decomposition\n",
    "    from sklearn.decomposition import PCA, TruncatedSVD\n",
    "    from sklearn.preprocessing import StandardScaler, Normalizer\n",
    "    from sklearn.pipeline import make_pipeline\n",
    "    from sklearn.cluster import KMeans\n",
    "    ### Initialize Scaler\n",
    "    if scaler is None: \n",
    "        scaler = StandardScaler()\n",
    "    ### Get Random Data\n",
    "    X, y = make_blobs(n_features=10, n_samples=100, centers=4, random_state=4, cluster_std=7)\n",
    "    ### Scale Data\n",
    "    X = scaler.fit_transform(X)\n",
    "    ### Perform K-Means Clustering\n",
    "    cls = KMeans(n_clusters=n_clusters, init='k-means++', n_jobs=-1, n_init=1)\n",
    "    y_pred = cls.fit_predict(X)\n",
    "    ### Perform PCA\n",
    "    pca = PCA(n_components=n_components)\n",
    "    pc = pca.fit_transform(X)\n",
    "    ### Plot Results\n",
    "    columns = ['PC%d'%c for c in range(1, n_components+1)]\n",
    "    pc_df = pd.DataFrame(data=pc, columns=columns)\n",
    "    pc_df['y_pred'] = y_pred\n",
    "    pc_df['y'] = y\n",
    "    df = pd.DataFrame({'Variance Explained':pca.explained_variance_ratio_, 'Principal Components': columns})\n",
    "    sns.barplot(x='Principal Components',y=\"Variance Explained\", data=df, color=\"c\")\n",
    "    sns.lmplot( x=\"PC1\", y=\"PC2\", data=pc_df, fit_reg=False, \n",
    "      hue='y', legend=True, scatter_kws={\"s\": 80})\n",
    "    sns.lmplot( x=\"PC1\", y=\"PC2\", data=pc_df, fit_reg=False, \n",
    "      hue='y', legend=True, scatter_kws={\"s\": 80})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 85,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "<base64-encoded PNG (matplotlib figure) elided>",
PARcAXwYemMG2LwGWJNmd5vTUMuA1Pdc9C/ibkQvTLwHePoP3liQ9TH0CYruq+suZbriq1iY5muaP/QLglKpaleR4YGVVrUiyD00r7R2Ag5O8o6r2rKo7kryTJmQAjq+qO2ZagyRp4/UJiM8neXlVnTnTjbfrnDlp2jEjw5fQnD7qWvcU4JSZvqckadPocxfTn9KExL1J7kpyd5K7hi5MkjRefe5iesxsFCJJmlv6nGKivVi8BNh2YlpVfXWooiRJ49fnNtc/oTnNtCtwBfAs4JvA7w9bmiRpnPpeg9gHuKmqXgjsDdhsWZLmuT4B8Yuq+gVAkm2q6jvAbw5bliRp3Ppcg1iT5PHAGcDZSe6kf4toSdJmqs9dTIe2g8clORd4HPClQauSJI3dlAGR5LFVdVf7bIYJV7U/twds2SxJ89h0RxAfA/4QuJSmJ9VM+vnkwauTJI3NlAFRVX+YJMDzq+rmWaxJkjQHTHsXU/uo0dNnqRZJ0hzS5y6mC5Ps03asJ0nz2s7Ll4+7hEHctmzmz33rExAvBP5zkpuAe2ivQVTV02f8bpKkzUafgHjZ4FVIkuacPu0gbgJI8muMdNYnSZrfNtjVRpJXJLkO+B5wPnAj8MWB65IkjVmfvpjeSdOD63eranfgRcDXB61KkjR2fQLil1X1Y2CrJFtV1bnAXgPXJUkasz4XqX+SZHvgq8BpSX4IrB22LEnSuPU5gjgEuBf4c5pO+q4HDh6yKEnS+E3XWd8HgI9V1TdGJn90+JIkSXPBdEcQ1wHvTXJjkhOSeN1BkrYgUwZEVb2/qvYHnk/Ttfc/Jvl2kmOSPG3WKpQkjcUGr0FU1U1VdUJV7Q28BjgU+PbglUmSxqpPQ7lHJDk4yWk0DeS+C7xq8MokSWM13UXqA4HDgT8ALgaWA0dV1T2zVJskaYymawfxP2ieKvfWqvLxopK0hZnuiXIvnM1CJElzS5+GcpKkLZABIUnqZEBIkjoZEJKkTgaEJKmTASFJ6mRASJI6GRCSpE6DBkSSg5Jcm2R1krd1zN8mySfa+RclWdxOX5zk3iRXtK9/GLJOSdL6+jxydKMkWQCcCBwIrAEuSbKiqq4ZWez1wJ1V9dQky4ATgMPaeddXlc+gkKQxGfIIYl9gdVXdUFX303T2d8ikZQ5h3VPqPg28KEkGrEmS1NOQAbELcMvI+Jp2WucyVbUW+CmwYztv9ySXJzk/yQED1ilJ6jDYKSag60igei5zG7Coqn6c5JnAGUn2rKq7HrJychRwFMCiRYs2QcmSpAlDHkGsAXYbGd8VuHWqZZJsDTwOuKOq7quqHwNU1aXA9cB6jzmtqpOramlVLV24cOEAH0GStlxDBsQlwJIkuyd5JLAMWDFpmRXAEe3wq4FzqqqSLGwvcpPkycAS4IYBa5UkTTLYKaaqWpvkaOAsYAFwSlWtSnI8sLKqVgAfAU5Nshq4gyZEAJ4HHJ9kLfAA8EYfWiRJs2vIaxBU1ZnAmZOmHTMy/Avg33es9xngM0PWJkmani2pJUmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUicDQpLUyYCQJHUyICRJnQwISVInA0KS1MmAkCR1MiAkSZ0MCElSJwNCktTJgJAkdTIgJEmdDAhJUqdBAyLJQUmuTbI6yds65m+T5BPt/IuSLB6Z9/Z2+rVJXjpknZKk9Q0WEEkWACcCLwP2AA5PssekxV4P3FlVTwX+N3BCu+4ewDJgT+Ag4IPt9iRJs2TII4h9gdVVdUNV3Q8sBw6ZtMwhwEfb4U8DL0qSdvryqrqvqr4HrG63J0maJVsPuO1dgFtGxtcA+021TFWtTfJTYMd2+oWT1t1l8hskOQo4qh39WZJrN03pD8tOwI9m441y+OGz8TYPh/tiHffFOu6LdebCvnjSVDOGDIh0TKuey/RZl6o6GTh55qUNJ8nKqlo67jrmAvfFOu6LddwX68z1fTHkKaY1wG4j47sCt061TJKtgccBd/RcV5I0oCED4hJgSZLdkzyS5qLziknLrACOaIdfDZxTVdVOX9be5bQ7sAS4eMBaJUmTDHaKqb2mcDRwFrAAOKWqViU5HlhZVSuAjwCnJllNc+SwrF13VZJPAtcAa4H/WlUPDFXrJjanTnmNmftiHffFOu6Ldeb0vkjzhV2SpIeyJbUkqZMBIUnqZEDMQJIHklyR5Ookn0qyXTv9N5IsT3J9kmuSnJnkae28LyX5SZLPj7f6TWum+yLJXkm+mWRVkiuTHDbuz7CpbMS+eFKSS9t1ViV547g/w6ayMf9H2vmPTfL9JB8YX/Wb1kb+vZhY54okk2/qmX1V5avnC/jZyPBpwFto2mx8E3jjyLy9gAPa4RcBBwOfH3f949wXwNOAJe20JwK3AY8f9+cY0754JLBNO2174EbgieP+HOPYFyPj7wc+Bnxg3J9hnPtidJ258Bqyodx8dwHwdOCFwC+r6h8mZlTVFSPDX0nygtkvb1b12hcj025N8kNgIfCTWatydsxoXwDbMH+P5HvtiyTPBH4d+BIwZxuNPUwz/b2YE+brL+ag2kZ9LwOuAn4HuHS8FY3PxuyLJPvSfIu+ftjqZtdM9kWS3ZJcSdPVzAlVNa8agvbdF0m2At4L/PfZq252zfD/yLZJVia5MMkrZ6XAaRgQM/OoJFcAK4GbadpxbKk2al8k2Rk4FXhdVT04YH2zacb7oqpuqaqnA08Fjkjy6wPXOFtmui/eBJxZVbdsYLnN0cb8H1lUTdcbrwH+LslThixwQzzFNDP3VtVeoxOSrKJpBb6lmfG+SPJY4AvA/6qqC6dabjO00b8X7em2VTTXJj49UH2zaab7Yn/ggCRvorke88gkP6uq9Z4fsxma8e/FxJFkVd2Q5Dxgb8Z4pO0RxMN3DrBNkjdMTEiyT5Lnj7GmcZlyX7TdrZwO/HNVfWpsFc6e6fbFrkke1U7bAXgOMBd6Ih7KlPuiqv5jVS2qqsXAW2l+P+ZDOExlut+LHZJs007bieb34pox1QkYEA9bNbceHAoc2N62tgo4jrZzwSQXAJ+iedbFmszjp+NtYF/8B+B5wJEjt/HtNfXWNm8b2Be/DVyU5FvA+cB7quqqsRU7sA39H9mS9Pi9WNn+XpwLvKuqxhoQdrUhSerkEYQkqZMBIUnqZEBIkjoZEJKkTgaEJKmTAaE5ZaoeMDuWOzPJ4zdi+09MstEN0pLc2N6jPnn69klOmrh1MclXk+y3se8zF6Tpgffl465D42NAaK65t6r2qqrfAe4HHtIVdhpbVdXLq2rGHf1V1a1VNUTL9w/TPDZ3SVXtCRwJrBckm5m9AANiC2ZAaC67AHhqksVJvp3kg8BlwG4T3+RH5n2o/eb+ryOtlJ+a5MtJvpXksiRPaZe/up1/ZJLPpXlmx7VJjp144yRnpHlmw6okR01XZNtfzn40XY
g8CE1XCVX1hXb+W9ojoquT/Fk7bXGS7yT5cDv9tCQvTvL1JNel6dCQJMclOTXJOe30N7TTk+Rv23WvSvt8jSQvSHJekk+32z8tSdp5z0xyfvu5zkrTLxbt8ickuTjJd5MckKbl+/HAYe0R3WFta9+JRo6XJ3nMJvp31lw17v7GffkafdH2h0/TT9jngP8CLAYeBJ41styNNN/QFwNrgb3a6Z8EXtsOXwQc2g5vC2zXLn91O+1ImudS7Ag8CrgaWNrOe0L7c2L6jqPvO6nmVwCnT/F5nknTi+ejafoaWkXTv85E3b9L80XtUuAUmucFHAKc0a5/HPCtto6daHp/fSLwKuBsYAFNV9k3AzsDLwB+CuzabvebwHOBRwDfABa22z0MOKUdPg94bzv8cuDLI/vnAyOf5V+A57TD2wNbj/v3xdewLzvr01wz0QMmNEcQH6H5g3hTTd3B3/dqXZ/6lwKL22+3u1TV6QBV9QuA9sv0qLOr6sftvM/S/DFdCbw5yaHtMrsBS4Afb8TneS5NeNwz8h4HACvauq9qp68CvlJVleQqmgCZ8Lmquhe4N8m5wL7tdj9eVQ8A/5bkfGAf4C7g4qpa0273inZbP6Hpavrsdh8soAnHCZ9tf1466b1HfR14X5LTgM9OvIfmLwNCc01XD5gA90yzzn0jww/QfNteLwmmMLmvmUrzgKcXA/tX1c/T9Kq57TTbWAX8XnttZHIX5tPVMVr3gyPjD/LQ/5vr1TiD7T7QbivAqqrafwPrTCy/nqp6V5Iv0BxlXJjkxVX1nWnq0GbOaxCal6rqLmBN2oeuJNlmijuiDkzyhPa6xStpviU/DrizDYffAp61gfe6nuao4x0j5/uXJDkE+CrwyiTbJXk0TUdtF8zw4xySZNskO9KcQrqk3e5hSRYkWUjTEeLF02zjWmBhkv3b+h6RZM8NvO/dwK+uMyR5SlVdVVUntJ/3t2b4ObSZMSA0n/0RzamiK2nOv/9GxzJfo3mA0RXAZ6pqJc2jL7du13sn0OfZFX/Sbn91e4roQ8CtVXUZ8E80f7wvAj5cVZfP8HNcTPMcjQuBd1bzzIDTgStprk+cA/xFVf1gqg1U1f00zyE4IU1voVcAz97A+54L7DFxkRr4s/ai+LeAe4EvzvBzaDNjb67aYiU5kuai9NHjrmUqSY6juXD/nnHXoi2PRxCSpE4eQUiSOnkEIUnqZEBIkjoZEJKkTgaEJKmTASFJ6vT/AQBQk+iug2T9AAAAAElFTkSuQmCC\n",
1294
      "text/plain": [
1295
       "<Figure size 432x288 with 1 Axes>"
1296
      ]
1297
     },
1298
     "metadata": {
1299
      "needs_background": "light"
1300
     },
1301
     "output_type": "display_data"
1302
    },
1303
    {
1304
     "data": {
1305
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAFgCAYAAABKY1XKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5Rc9XXo+e8+p6rf6ocksIwFFgQcMA+D0xL4ARjHsWXFOJIIHidOrgVJOrAuTrxWxgmJZybc65V1k/FMEhtnxbcTg0iMzTUxKA6QYCcyA36AJB4BFGGDFZIoRkgtqaXurlI9ztnzx6lqVbequqrrdc6p2h8WS+qqrq5ft6Szz2/v32//RFUxxhhjluKEPQBjjDHRZ8HCGGNMVRYsjDHGVGXBwhhjTFUWLIwxxlSVCHsAy7Fx40b9h3/4h7CHYYwx9ZCwB9CIWM0spqamwh6CMcZ0pVgFC2OMMeGwYGGMMaYqCxbGGGOqsmBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qK1aa8TpHOenx//xTH5nKMDSZ5x3mr6e9xwx7WvKiPzxjTfhYs2khVuX/PASaf2E/e8/F8xXWEhOswcfV53Di+FpHwNnlGfXzGmPBYsGij+/cc4M6dLzPYm6A/mZx/POf53LnzZQA+sv7ssIYX+fEZY8ITWs1CRPpEZJeI/LOI7BWR/xbWWNohnfWYfGI/g70Jku7CH3vSdRjsTTD5xH7SWc/GZ4yJnDAL3Bngvar6NuByYKOIXBXieFrq+/unyHv+aRfioqTrkPd8ntx/pM0jC0R9fMaYcIWWhtLg8O/ZwofJwv8deyD4sbkcnr/0t+f5ytG5bJtGtFDUx2eMCVeoS2dFxBWR54BDwLdU9akwx9NKY4NJXGfp4rDrCCsHe9o0ooWiPj5jTLhCDRaq6qnq5cBaYIOIXLL4c0RkQkT2iMiew4cPt3+QTfKO81aTcB1ynl/2+Zznk3AdrjpvVZtHFoj6+Iwx4YrEpjxVnQYeAzaWeW5SVcdVdfyMM85o+9iapb/HZeLq85jL5E+7IOc8n1Q2z8TV54W2nyHq4zPGhCu0moWInAHkVHVaRPqB9wF/FNZ42uHG8bUATD6xn5O53IJ9DLddd8H88zY+Y0zUSFBnDuGNRS4D7gFcghnO11T1vy/1mvHxcd2zZ087htdS6azHk/uPcHQuy8rBHq46b1Wk7tijPj5jYirWO1pDCxb16JRgYYzpSrEOFpGoWRhjjIk2CxbGGGOqsmBhjDGmKgsWxhhjqrKus8a0gZ0RYuLOgoUxLWRnhJhOYcHCRFKn3Ik364yQTvl5mPiyYGEipZPuxGs9I+T6t51V8cLfST8PE29W4DaRUrwTTzjCir4kowM9rOhLknCEO3e+zP17DoQ9xJo144yQTvp5mHizYGEio9NO62v0jJBO+3mYeLNgYSKj007ra/SMkE77eZh4s5qFWSDMQmqnndZXekZIuQt+tTNCOu3nYeLNgoUBolFI7bTT+opnhBRXQ5UGjOIZIbddd0HFYNxpPw8TbxYsDNC8JZ6NaPROPIoaOSOkE38eJr4sWJimLPFshkbvxKNIRPjI+rO5/m1nLfuMkE78eZj4smBh5guppTOKUknX4WQux5P7j3DdhWe2dCydelpff49b18+uU38eJn4sWJhIFVIbuRPvRPbzMFFhwcJEspBa7514p7KfhwmbBYuYacXSViukGmOqsWARE61c2mqFVGNMNRYsYqLVS1utkGqMWYqoLl3YjJLx8XHds2dP2MNou3TW4/ovfIeEIxXTRHlf+bvb3t3w3X8661kh1ZjWiHV7YJtZxEA7l7ZaIdUYU441EoyBKC1tNcZ0J5tZtEgzVy1FcWmrMaa7WLBoslasWrKlrcaYsFkaqslacbJZcWnrXCZPzvMXPFdc2jpx9XmRLESnsx47X3qdrz99gJ0vvW4H9RgTUzazaKJWNuSL29LWpWZY2965jjeO9DKdyrf9zAxjTH0sWDRRK1ctxa1HUPl9IcrhmSx3fGMvI/0JBnoSbT8zwxhTH0tDNVE7Vi3197hcdd4qxgaTHJ3L8v39U5FL7VSaYR2dy3FkLoMrMJvxGO5LNpyiM8a0h80smqjVq5aicJpdLcrNsHxVpmYzuCKICJ7vM5vNM9yXbOuZGcaY+tjMoolKVy2V0+iqpVYUz1uh3AxrNpNHlflgprDgc5KuQ97zeXL/kXYO1RhTIwsWTdTKVUu1Fs+jkJIqN8PyfEU5FRwEyn6ObSw0JposWDTZjeNr+cR7LyDvKzMnc0ynssyczJH3taFVS8XUTrl9FhCtO/NyMyzXEaTQGkdVERGGehZmQW1joTHRZTWLJmvVqqU4tfxY3PLcFUFV8VXxCwHkDcN9OCUzC9tYaEy0WbBokWY35Itby48bx9eiqvy/3/oBx1I5fFVUwddgnKqnZhh2ZoYx0WfBIibi1vJDCqueehIubxh2AMF1hFzeZ2o2w6GZDOmcx0h/surGwlacDmiMWR4LFjERt9PsigX5oTIF+bHBHo6ns+Tyyu0fvJBr33Jm2XHHZamwMd3AgkWMxKnlx1K72R0RxgZ6mTmZozfhVgxwrT4d0BhTu9CChYicDfwVsAbwgUlV/VxY44mDOLX8aLQg38o+W8aY5QtzZpEHfktVnxGRFcDTIvItVf2XEMcUC3E4za7Rgnw7Twc0xlQX2j4LVX1NVZ8p/H4G2Ae8KazxmOZqdDd7nJYKG9MNIrEpT0TWAVcAT5V5bkJE9ojInsOHD7d7aKZOje5mj9tSYWM6XejBQkSGgK8Dn1TVE4ufV9VJVR1X1fEzzjij/QM0dWtkN3ur+2wZY5Yn1NVQIpIkCBT3quoDYY7FNF8jBfm4LRU2ptOFuRpKgC8B+1T1j8Mah2m9egvycVoqbEynE9Wli4gte2ORdwNPAC8QLJ0F+D1VfaTSa8bHx3XPnj3tGJ6JkHTWi/xSYWNqEOsdpKHNLFT1O8T8h2faIw5LhY3pdKEXuI0xxkSfBQtjjDFVWbAwxhhTlTUSNMaYcrIpePUJSB2FgZWw7mroGQh7VKGxYGGMMaVU4dl74XufBz8HvgeOC04S3vkbcMXHoAtb41uwMMaYUs/eC4//39AzBMn+U497ueBxgLf/UjhjC5EFi5iI6mlxUR2X6RDtTgVlU8GMomcI3EUdj91k8Pj3Pg+XbO26lJQFi4iL6mlxUR2X6RBhpYJefSJ4v9IZRSk3CfmT8Op34C3vb/77R5gFi4iL6mlxUR2X6RBhpYJSR4PAtBTfg9SR5r93xNnS2Qir9bS4dLbKX+4uGZfpELWmgrKp5r/3wMpgBrMUx4WB7ut2bMEiwoqnxS2+IBclXYe85/Pk/vbe5bR7XOmsx86XXufrTx9g50uvWxDqdMVU0OJAUeQmwc8HqaBmW3d1kOrycuWf93LgJGDdu5v/3hFnaagIi+ppce0aVyvrIlaYj7AwU0E9A0FNpJgCKw1YXg6yc3DNp7quuA0WLCItqqfFtWtcrai
LWGE+BsJOBV3xseDX730+KGbPF9cTQaAoPt9lLFhEWOlpceVSPmGdFteOcdVaF7n+bWcta0ZghfkYKE0FlUtFFVNBb3o7/PDR5i+rFQmK55dsDVJdqSNBYFr37q6cURRZzSLCGj3HOs7jakVdxArzMVFMBWVnT68deDnIzMLa9XD3JviH2+Gx/xH8OvkeeObLwbLbZo3jLe+Hy38h+LWLAwXYzCLyonpaXKvH1Yq6SDEAlc4oSiVdh5O5HE/uP2LnZ4RtqVTQuVcHRXDbYd1WFiwirpFzrOM8rlbURaK6YMCUUSkV9Ka3BzMK22HddhYsYiKqp8W1alytqItEdcGAWUIxFVT0w0dth3VIrGZhIqkVdZHSAFROWAsGzDLYDuvQWLAwkXXj+Fo+8d4LyPvKzMkc06ksMydz5H2tqy4S1QUDZhnCXlbbxSwNZSKrFXWRqC4YMDWqdVltF+6wbjXRZi0za4Px8XHds2dP2MMwHSCd9SK1YMAswzNfrr7DOpqroWK929NmFqYrRXXBgKmB7bAOhQUL03LWh8k0le2wDoUFC9My1ofJtNTiZbWmpSxYREwn3YVbHyZjOocFi4jotLvwVjUCNMaEw/ZZRETxLjzhCCv6kowO9LCiL0nCEe7c+TL37zkQ9hCXJaoHNxlj6mPBIgI6sRtqM/sw2Ul5xoTP0lAR0IndUJvRh6nTUnPGxJkFiwjoxG6ozWgEaAVyY6LD0lAR0IndUBvtw9SJqbm2yqaCDq3PfTX4NZsKe0Qm5mxmEQHl7sJ9VWYzeTxfUZS+hBu7bqiN9GHqxNRcW6jCs/cGu5v9XMnu5mRw+twVHws2tRmzTBYsIqB4Fx6kXFxmTnpMzWZQBV99QFg5mOTv/vnHscrTN9IIsBNTc23x7L2n+ibZKXKmiSxYRETxLvv/+eYPODKXwUEQAddxWD3Uw1BvIrZ5+nr6MHViaq7lsqlgRmGnyJkWsJpFRIgI17/tLIb6ErxxpJ83jPTxxtF+zj9jiJWDvfQk3K7K09tBRXV49Ykg9VSudTcEj/v5oJ+SMctkwSJCvr9/Ct9XxgZ6GBvoYbgviVNyd91NG9nsoKI62ClypoUsDRUhlqdfyA4qWiY7Rc60kAWLCLE8/UKtOCmvo9kpcqaFQg0WInIX8CHgkKpeEuZYoqAZG9k6kR1UVKOegWB5bLVT5KJc3M6mgtpL6mgwU1p3dbTH20XCnllsB74A/FXI44iEhUtoF25GK+bpb7vuArurNpXF9RQ52x8SeaEGC1V9XETWhTmGqIlbnr6Tzt/oCHE9Rc72h0SeqC5dUG35AIJg8VClNJSITAATAOecc85P/du//Vv7BheidNaLdJ7emvyZpsmmYPI9weynUq3Fz8PEY9EOeNXF+h9E2GmoqlR1EpgEGB8fDzeytVHU8/TW5M80TXF/SOmMopSbDFJqr37HjlENke2zMMtmTf5MU9n+kFiwYGGWrfQUPJ8sqcQLzCafJJV4AZ9sV20eNE1g+0NiIeyls18F3gOsFpEDwO+r6pfCHJOp7thcjrzvM5P8Hid6voXiofgIDtLrMpz9GfKpn+qazYOmQbY/JBbCXg31C2G+v6nP2GCS/MAu5noeRejF4dQmQSXP8Z5H6B3wWDl4aYijNLHRCftDuoClocyyXXHOICcHdoL2IovuN4QEaC8nB3Zy+Tn2j9vU6IqPwTW/Hax6ysxAejr41c9He39IF4n8aigTPS8cfYbhfocTKRdl4V4pVVB1GRkIPu+agWtCG6eJkbjuD+kiFizMsk1nphnoEfrcXg7PZvBLmsKKwJkrenESaaYz0+EN0sRTz4Atj40oCxZm2UZ7R3Edl5G+HkYGepjL5Ml7SsIVBnsTOAKz2SyjvaNhD9UY0yQWLMyyrV+znoSTIOfnSDpJVvQt/GuU83MknATr16wPaYTGmGazArc5TTqf5vEDj/ONH32Dxw88TjqfXvB8f6KfbRdvI51Lk/NzC57L+TnSuTTbLt5Gf6LCjlxjTOzYzMLMU1V2vLKD7Xu3k/fzeOrhikvCSbDt4m1sPn/zfL+nzedvBmD73u1k8hl89XHEIeEkmLhsYv55Y0xnCL2R4HKMj4/rnj17wh5Gx3rw5QeZfH6S/mQ/Saek31NhtjBx2QRbLtiy4DXpfJrdB3cznZlmtHeU9WvW24yiHnaOQzeIdSNBCxYGCC76H33oo7iOuyBQFOX8HJ7vcd+H7rNg0Ex2jkM3ifUfpNUsDAC7D+4m7+fLBgqApJMk7+fZfXB3m0fW4YrnODgJ6B2G/rHgVycRPP7svWGP0BjAahYtEcfUzHRmGk+X7vzpq297J5opmwpmFItbXEDwcc9Q8PwlWy0lZUJnwaKJllMgjprR3lFcWbrzpyOO7Z1opmrnODhusJN55x/AeddYHcOEyoJFE+14Zcd8gbgv0Tf/eM7PMfn8JMBpBeKoWLx3YjHbO9EClc5xUIX0UZg9BOrB81+FHz5idQwTKqtZNEk6n2b73u2nrSSCIN/fn+xn+97tp+1ZiArbO1FBNgU/fBSe+2rwazbVvK9d6RyH9FGYfT0ICOJC34jVMUzobGbRJMUCcemMolTSSZLJZ9h9cDfXrI1mcz3bO1GiHauUyp3joH4wo5DCfZwQ1C7A6hgmVBYslqlS8boTCsQiwpYLtrDx3I2xK9A3XXGVUs/QwpqClwseh6BLaiPKneOQmYGgl28QOIbWnAocYOdRm9BYsKhRteL1yr6VHVMg7k/01zT7ieOqr5q0c5VS8ZyG730+CAKZmSBIiASBYmDl6a+x86hbwzZGLsmCRY2qFa9vuvimrikQx3nVV02qrVJq5t394nMc9j8GL34dhs5cOKMoZedRN5dtjKyJFbhrUEvx+t6X7uUXL/zFrigQFwOn67gM9Qwx0jvCUM8QruMy+fwkO17ZEfYQG1NplVKpZt/dF89xeO//ERS0K72/nUfdfLYxsiYWLGpQ6+7mNw6+kYnLJvB8j9nsLCcyJ5jNzuL5XscUiOO+6qsmlVYplWrV3X2xjpGdDQJDqeJ51O/8DUuPNEutKcdmroKLKUtD1aDm4nV2uuMLxJ2w6quqcquUSrX67n5xHWM+LZKw86ibrZ0pxyYTkc8AU6r6ucLHfwC8rqqfb8X7WbCowXJ3N9daII6jTlj1VVW5VUpFxbv7az7Vurt7O4+6fcJIOTbPl4AHgM+JiAN8FNjQqjezYFED2918Ste0BYnC3b2dR916YaYcG6Sqr4rIERG5AngD8KyqtiyqWbCoQXF38+Tzk5Ck4lkPnZJqWkrXBE67u+8OYaccG/eXwDZgDXBXK9/ICtw12nz+5o4vXtei69qCFO/uL/+F4FcLFJ0l/gsKHgQ2AuuBR1v5RjazqJHtbj7F2oKYjhKFlGOdVDUrIt8GplWrFBMbZCflmbo1uoM7nfX4/v4pjs3lGBtM8o7zVtPfUyV/bEyrZFOtTjk2fWdfobD9DHCjqr7c7K9fymYWpm71rvpSVe7fc4DJJ/aT93w8X3EdIeE6TFx9HjeOr433DnATTzFbUCAibwUeAh5sda
AACxYmBPfvOcCdO19msDdBf7JksYDnc+fO4O/8R9afXfPXsxmK6Uaq+i/Aee16PwsWpq3SWY/JJ/Yz2Jsg6S5cX5F0HQZ7E0w+sZ/r33ZW1Qu+zVCMaR9bDWXa6vv7p8h7/mmBoijpOuQ9nyf3V18uXpyhJBxhRV+S0YEeVvQlSTjCnTtf5v49B5o9fGO6VtWZhYgMA2eo6o8WPX6Zqj7fspGZjnRsLofnL72owvOVo3PZJT+n2gxl1PH57r1/y3X/dhZ9q1cyeOWVOP3dtWotMqz1d0dYMliIyEeAPwUOiUgS2KaquwtPbwfe3trhmU4zNpjEdZZODbmOsHKwZ8nPKc5QSmseAKgy/tL3uPbZR8HLc/DZBP19PUgiwaqbb2Jk61ZLTbXLclt/W1CJtGozi98DfkpVXxORDcBfi8jvqeoDtGAZmOl87zhvNQnXIVchFZXzfBKuw1XnLd1eodIMZfyl7/HTex4ik+glm+wnN9DL0EAPmssx9edfBGD0hhua882YpdV62qCdJ1HRutsfHgCuA1YBR4Bvv/qHP9twC1wR2Qh8DnCBv1TVP6z2mmrBwlXV1wBUdZeIXAc8JCJrCc5+NGZZ+ntcJq4+b341VGnAyHk+qWye2667oGpxu9wMJZnLcO2zj5JJ9OK7CUSVhBN8fUkmcQYGOHLX3Qxv2mQpqVZbzmmDLz7Q+iNsY2bd7Q8L8HHgU0CS4FqdB3Lrbn/4s8A9r/7hz9Z1DRYRF/gz4GeAA8BuEflGYXVVRdUK3DMi8hPFDwqB4z3AzwEX1zNQY24cX8sn3nsBeV+ZOZljOpVl5mSOvK/cdt0F3Di+turXKJ2hFP3Ef/4Ax/fw3QSKIsBg76mgI8kkms+T2rWrFd+WKVVs/V2u3xIEj/t5eOWf7DyJ8j4O/J8EAeIEcLTwa77w+Mcb+NobgFdUdb+qZoH7CK7pS6o2s7iVRekmVZ0pTGE+Uu9ITXcTET6y/myuf9tZPLn/CEfnsqwc7OGq81bVvD+i3Axl4OQsjvooiu/DGcO9OIvTF55H/tixFnxXZoFaW3//+/dje55EqxRST58CZoFFDavIFR7/1LrbH/5anSmpNwH/UfLxAeDKai+qFizmCFrfvrLo8auAJ5czOtMcjbbYiJL+HpfrLjyz7tcXZyCTT+znZC7HYekjj6AaBIqVA2WK5K5LYmys7vc0Naq19TcS5/MkWuU6gtRTpeMmc0A/QZbnkTq+frkCUNWUVrVg8acERe7F0oXnrq8+LtMMqsqOV3awfe928n4eTz1ccUk4CbZdvI3N52/uulU+i2cox46ezcqXHmGgvwe35/RAobkckkgwsKFl58OYolpbf59zJfywyvUuoudJtNAqaqgnA6vr/PoHgNIWCWuBH1d7UbWaxbpyeylUdQ+wbjmjK0dENorID0TkFRG5vdGv18l2vLKDyecncR2XoZ4hRnpHGOoZwnVcJp+fZMcrO8IeYmiKM5St7zyfc279NUin0dzC2bvmcvipFKtuvsmK2+1Qa+vv8993KqiUE/3zJFrhCEFtYikeMFXn198NXCAi54pID8EJe9+o9qJq0av8QcuBhv7F1VuR70bpfJrte7fTn+yfP3DIV5+53Nz8Ead3vXgXG8/dGNuUVLOMbN0KwJG77sbPZMDzwHWRRILVt94y/3xLddJ+gUa+l1paf4uEe4RtNH2bINWU5PSaBSWPP1bPF1fVvIjcRnD+hQvcpap7q71uyRblIvJVYKeq/sWix38FeL+q/m/1DLbwNd4B3KGqHyh8/LuFb+R/VHpNN7UoL61NHJg5wN/96O8Y7h1GVTmeOc7UySlK/+wU5aM/+VF+e/1vd106qhw/nSa1axf5Y8dIjI0xsGFD62cUnbRfoJnfS7XW3wveK78wqMTt57a0mr+Jdbc/vI1g1dPiIncSGAI+8+of/uz2Zg6ummozi08CD4rIx4CnC4+NAz3Algbfu6aKvIhMABMA55xzToNvGX3lahMn8yc5njmOrz6qytTJKZxCBjFYIiqoKg/vf5i3jL2FLRc0+kcTf05/P0PXXtveN611E1ocNPN7qdb6246wLeeewq+fIsjiuASppxzwmZLn26amw48Km/EuKXy4V1V3NvzGIjcCH1DVXy18/MvABlX9RKXXdMPM4sGXH2Ty+ckFKafZ7Cyvzb4WLBxRH0Hm009FijLaO8rKvpXc96H7uj4d1XbZFEy+J7gbrlTQ9fMw8Vj0L4Cl34vjBnUHPx983DMU3PnH5XuJlmVPjwrLaN9DUMyeAh5rxg7uelTrDdUH3AKcD7wAfElVqxVealVXRb6TlatNAAwkBxBH8H1/PkjIor93gjCXm8MVl90Hd9d1KJFpQHETWifsFyh+L7k0zL0epImKRGDwDcH3E4fvJeYKgaGe5bFNVy0NdQ/BtOcJ4IPARQSpqWaYr8gD/0lQkf/FJn3tWNp9cDd5P09fYuG6AkccVvet5vXU6xVfm3ASOOJwPHOcQ6lDdY+hk/ZxtFWtm9DauV+g3uJ06iicPA6ZGRAHnJJFk6owexB6V3Tb3oeuVy1YvFVVLwUQkS8BTeuTUG9FvpNNZ6ZPSy8VjfaNciJ7gtnc7GnPJZwErgQboBStK1jUs4/DAkuJpTahqR9ceHMpOPZqcBFvZfqm0eJ0zyCcPBG8ZvHniQBO8HzPUOu+BxM51YLFfBW+cHFv6pur6iNEZIoVBaO9o/MX/XJW9KxgNjeLg4NbuDCVfr5qUOw+c6D6rujFF/rXZl+bT4GVzmxyfo7J5ycB5gvnYW0QjPTxqeU2oalC+ijMHgI0+Pj5/wV7H2ztKp9OKrSbyKgWLN4mIicKvxegv/CxAKqqwy0dXZdZv2Y9CSdBzs8tqFkUCYIjDsX/Si/IqoqPz3DP8JLBotyF3sHhYOogY31jrJAVCz4/6SQhCdv3bp/fx1HcIFhLYGmGWo9PDXWmU9yEVrpfIH0UZl9nvq654o3QN9Lai/Zyur1Wmt1k56B3GLIzgLMwoKkGM6Xe4aDwbVrrjpHTWpRzx/GGCtwichfwIeCQql5S7fOLlgwWqhqR27bu0J/oZ9vF24ILbpIFASPn5xARVvauxHEcpjPT+Hqq46qIMNY7xmBikPVr1ld8j3IX+tnsLCgcO3mMhCQY7Rtd8JqkkySTz7D74G7Wr1lftghf/LzFgaUZF/Di8amDvYkFhx3lPJ87d76MqtIz9nT4rVBKN6HlUjBzEJCgT8LgmiBVBbVftOvRjEL7wEroGw7GNXsoCA5KIeYJDL0hmEV1VwuO9rpjpGKLcu4Y+SxwD3ccr/eYiO3AF4C/Ws6Lqh6ratpr8/mbgeCCm8ln8NXHEYeEk+DXL/t1VJW/eOEvOHvF2WS97PyFscftIZPPcNMlN1W8GFdabZXXfOGa5jB1corh3mEcWdgJxlef6cx0xSJ8UTGw7HptF0dPHm34Al7t+NTB3gSf2/UVVqx5jMGegbbMdCoq3S/w3c/Bni8FheCeoaBQXKpVq6OaUWhfdzW4PZAcgP4xyJQsne0tWTrbXS042q3YonyWhQ0Fk4XHI
bjoL5uqPi4i65b7uq4KFnEoyIoIWy7YwsZzN5Ydq6oiImzfuz1IS+HMb8qbuGxiPtiUU+lCn5AEgiAi821EVvQsTEc54jDaO7pkEb7IV5+d/76TXQd3NZyqqnh8aoHr5kn1/yMr/MGaZjpt0TMAY+uCi23vEpnaVqyOqrXb61KzgsUptb6S76F7W3C0T5B6qtqinDtGvtZoSmo5uiJYxLFja3+iv+xeiWrBZCmVLvQDyQGQYCUVcNrn5PwcCSfB+jXr2X1w95JF+KLv/fh7rOhd0fAFvNLxqUUnEz9A8aiUMS1NobV170kzLtr1qLXba7VZQS19nUyrtLpFeV26Ili0uyDbDpWCyVIqrbYq7uM4nD4MLFxhlfNzpHNpJi6boD/RX7UIn/Nz5P08SSdZ9nmofgH302nmnnoKb3qaNbPQ5/cTdJg5nS9zgJJwKwf7YgqtrZp10V6ucoX20vesdVZgLV5/1oAAAB7mSURBVDjC1OoW5XXp+GBRKU8PIaYpQrLUhX60bxTP9ziaOYqqciJzYr5WUpreqlaET+fSvPOsd/LUwaeWHEu5C7iqcvyBBzhy191oPg+ex5mOw++eyPLty9/Pc29992lLTTU/gCSD2kUlxRRaWzXrol2PZs4KqvV1Mq3Q6hblden4YFFrQbYbWmRUu9An3SS3r7+dNUNrlkxvLVWEn7hsgpV9K9nz+tI9vMpdwI8/8ABTf/5FnIEBnL5Tf16jmuJn9jwMwHMXX31qzJ6Plz2PsVV9eJrDkfIznWIKre3CSuXYrCDuWtqivNBN/D3AahE5APy+qn6p2us6PljUWpBte5oiJNUu9LXUb6rVTdL5dNVU1eILuJ9Oc+Suu3EGBpBFxeyx4eACd91z3+TJN7+dk05yfp/FJ667mOTIrzH5QuWZTjGF1nZhX7RtVhBPdxxPFZbHLtmivN7itqr+Qj2v6/hgUW1XNISUpmix0ry/OzrK4JVX4vT3N1QgX6xS3aSWVNXiC/jcU0+h+fyCGUWpseEBVojPHWdnOHjRxawc7OGq81bR3+OiuhZk6QAYqk64aHfSgU7xELkW5R0fLGopyIaWpmiBcnn/4klxq26+iZGtWxGRugrkUPvy41pmMKW86elgrEsQ3+PiIZ93/dTahY83MQCaRTrpQKc4CTbcbeeOka+xqEV5O5fLlur4YFHPXW6cVcr7ay7H1J9/EYDRG25Y9tdd7vLj5V7A3dFRcKssNXVdEmNjFZ+uNwCaJVifqXAFgSES/fNqOvwoKuo9/Gjxha70Ljeq+yzq4afT/OvP34i47ml5fwgChnoe5/7N/cs+YrTcoUywMOA2svy4lWM3dar1QKebHoH/fNpSVNXF+iLT8TML6J40RbW8vyST+JkMqV27lnXkaDuWHzv9/ay6+ab5WVFpwNBcDj+VYvWtt1igaKdqfaacRNA76n9eC4meaKSorLbSMl0RLIo6PU1RS94fzyN/7Niyvm67lh+PbN0KwJG77sbPZBbUW1bfesv881FWaWFBLFXrM5U+CiePwdCZC9uahJGistpKy3VVsOh0zcj7l9Ou5cciwugNNzC8aROpXbvIHztGYmyMgQ0bIn/BrXVhQaxUO9Bp9hDgwOKbiFZ21a2kQ2srl95z6Wktyl/4+AuNtig/m6Dj7BrAByZV9XPVXmfBooMMXnklkkiguVzFvL8kEgxs2LCsr9vu5cdOf/+y0mRR0KqFBaFaqmVJZgZQcGThiXnqB+dc+HnIZ+GVf4K3Xt/acTbjDI+IufSeSyu2KL/0nks/C9zzwsdfqLfgnAd+S1WfEZEVwNMi8i1V/ZelXuQs9aSpLp1P8/iBx/nGj77B4wceJ52v1Pur9Yp5fz+VQnMLN34W8/6rbr5p2XfppcuPy+m05cfLtdSGQkkmcQYGgtRaOry/G3UptizJzgYBo1Q+G6R6Bt9wqv166ihM/QCOHwjO8khNwSP/Ozzz5SBN1CrF2kq5IjwEj/v5YGNkfBRblOeBE8DRwq/5wuMfr/cLq+prqvpM4fczwD7gTdVeZzOLOkW1k20r8v7dtvx4uVq1sCASKrUsEYI0VfFAp9RRmD0YBA6nEDx8gnMxWp0GasYZHhFSSD1VbVF+6T2Xfq0JKal1wBXA0s3csGBRt6h2sm1V3n+5m+y6SasWFkRCpZYlb3o73L2p0D3XhbnXg0BRvEHSwtF6/aPBhbqVaaCw2sG3TltalIvIEPB14JOqeqLa51uwqEMcOtk2O+/fLcuP69GqhQWRUq5lSbGrLhoEh+KMonhO91AhReU6rTkVsCisdvCt0/IW5SKSJAgU96rqA7W8xoJFHbq5k22rlh/H4RTDSlq1sCDyiimqb38mCA4eC8/p7l956nNbmQYKsx18a7S0RbkE+fEvAftU9Y9rfZ0FizpYJ9vmiWrtZzm6dkNhMUXVNwyPfCqoTxTP6V585nir00CddbJfS1uUA+8Cfhl4QUSeKzz2e6q6ZErLgkUdurWTbStEtfazXJHfUNjKnc3nvw/6RpZuC9LqNFDY7eCb6IWPv5AqLI9dskV5vcVtVf0OdbQesWBRh27rZNsqcaj91CqyGwrbsbM5SmmgTmgHH7AW5Z3AlpI2plifePK1J5nJzrCqv3x6Io61n8htKGzXzubOSgOFrrDhbvul91x6WovyRpfL1suCRZ1sKenyLa5PzGRnOJE9wfHscVb3rWa07/S0ndV+GtDOnc0dlAaKkkJgiESLcgsWdYrqUtJ0Ps13//O7PHPoGQTh8jMv591venckZjmL6xOKMpubRRAOpw8DnBYwrPbTgGpdY91k85e0dk4ayCxiwaJBUelkq6o8+PKD3PnsnUxnpymeU/KVfV9hrG+MT1z+CTZfEN7KonL1icHkYDAeDYLC1MkphnuHcQoraaz206AO29lswmXBokPseGUHf/r0nzKTn8HFRZwgKCjKsZPH+JNn/gQkvJVF5famOOKwum81h9OHcTQIEKlciqGeIav9NEPUdjbbWROxZsGiA6Tzae568S5m87NBoCg93hTBdVxmc7Pc/eLdoa0sqrQ3ZaR3BICpk1N4vsdMdgbAaj/NEJWdzXbWRN32XXjRaS3KL3ppX6P9oPqAx4FeghjwN6r6+9VeZ8GiA+w+uJu53BxA2TSTFP6bzc2GtrKo0t4UEWG0b5Th3mGOpI/wwXM/yJVvvDL02k9HiMqS1g49a6KV9l14UcUW5fsuvOizwD0XvbSv3la+GeC9qjpbaPvxHRH5e1V9cqkXWbDoANOZafL+0t0BFCXv50NbWVRtb4qnHit6VnDbFbdZkGimsJe0duBZE21SbFE+y8KGgsnC4wDb6/nCGhQ0Z0u+XhKoGngsWHSA0d5REs7Sf5SCkHASoa0ssr0pIQl7SWsYK7JirpB6qtqifN+FF32t3pSUiLjA08D5wJ+pqrUo7wbr16xnMDnIdCZYBbU4FaWF/4aSQ6GuLLK9KSEKa0mrrciqR8tblKuqB1wuIqPAgyJyiaq+uNRrLFh0gP5EPzdfcjN/sudPgtVQeqrIrSieH6R4brrkplDv3Nu1N8VP
p5l76im86Wnc0VEGr7yy85r4xUXUVmTFQ8tblBep6rSIPAZsBCxYdIPN529GVU/ts/CDFKQgp/ZZROTOvVV7U1SV4w88wJG77kbz+QXN/FbdfBMjW7dGvoNtx4nKiqx4aXWL8jOAXCFQ9APvA/6o2ussWHQIEWHrW7bywfM+yHf/87s8e+hZAK448wre9aZ3dUUt4PgDD8y3CS894lRzOab+/IsAjN5wQ1jD605RWZEVL61uUf5G4J5C3cIBvqaqD1V7kWgrD1JvsvHxcd2zZ0/YwzAR5KfT/OvP34i4bsUDiNTzOPdv7reUVLst2GeRX7giq7v2WdT8Te678KJtVGlRftFL+7Y3c3DVhDKzEJEbgTuAi4ANqmoRwDRk7qmn0Hx+wYyilCST+JkMqV27otUVthuEvSIrnqxFecGLwFbgf4b0/qaKuBWJvenpoEax5Cd55I8da8+AzOmsyWDNChvutu+78KLTWpQ3uoO7XqEEC1XdB+V3G5twxbVI7I6Ogltl1Y3rkhgba8+AjGmCQmCwFuW1EJEJYALgnHPOCXk0nS+uReLBK69EEgk0l6tYs5BEgoENG0IYXZ2s8Z6JkJYFCxH5R2BNmac+rap/W+vXUdVJYBKCAneThmfK8NNpjtx1N87AwGkXXEkmcQYGOHLX3Qxv2hS5lJTT38+qm29i6s+/iPT1odnsqVlRTw968iSrb70lcuMuyxrvmQhqWbBQ1fe16mub1oh7kXh4yxZSTz/N8b97KLjgqgYXVRFGrv8Qw1vCac++bNZ4z0SQE/YATHTEvUh84sEHSe3aTc+b30zyrLNIrllD8qyz6Hnzm0nt2s2JBx8Me4jV1dp4LxtKjdN0sVCChYhsEZEDwDuAh0Xk0TDGYRaKc5G4NIXm9PbirliBOzqKu2IFTm/vfArNT1dqtxMRxcZ75XY7Q/C4nw+WoBrTRqEEC1V9UFXXqmqvqr5BVT8QxjjMQoNXXgmOQ/7YMbzpabyZGdT355+PcpG4mEIr1lrU9/FmZua/D1wXzedJ7doV8kirsMZ7JqIivxrKnK4VeyBUlROPPII/O4t39GiQ63ccBHBXr8YZHETT6bqKxO3Ys1FMoalqECCmpoIG/YW6hQAyOEju6NGmvm/TWeM9E1EWLGKklXsgiktmE2NjOMkk+SNHQBX1ffKHDuGOjXHmJ3+Tka1bIzHexYopNG96Gu/wYdRxFnxtVUVnZsjsewmiXOe2xnsmoixYRFClO/FW7YFYvGTWHRvDGRnBn5ubv1t3ensZ3rRpWRf3du7ZKKbQvKmp0wIFBE151HGYe+IJ/PQno7uE1hrvmYiyYBEhS92Jr/ylj3H0y19uyR6IYr6f3h5mczN4vofruAwODeJIUNbyZmeXtWS2WXs2ak1hOf39DL373Rz7yleQxKK/1qrg+yTPOAP1/cgu/Z0X9lGoxpRhwSJClroTP/yFPwMguabcPsfG9kB4x6aZO3mCQ/mDKBqcxivBWRir+lcz1ju67CWzje7ZqCeF1ffWt+IMr0BTafD9BfssEmecgTs2hnf8eGSX/s6zxnsmgixYRES1O3FJJslPTZE480zEqbCIrc49ELvS++jLzyCJRDCTKFyDVZWp1GEAht3EspbMNrpno54Uljs2ijs8grPmjfMpNFwXZ3Dw1M8sokt/y7LGeyZCbFNeRCxe+rmY9PQABBfBSuq4EKbzaSYT38N3HVx/4XMigiMOx2YPg+ssa8lsI3s2ak1hLd4zUewPhect2GdRDBRRXvprTNRZsIiIanfizuAgQNDzqIx6L4S7D+4mnfD5/nvOoCfj43gL22+5PvRmfI5svXpZtZDSxn7LHW/VwJlMlt0zUewP5adSp72v5nL4qRSrbr4pusVtYyLMgkVEVLsTF8fBGRlB87mmXginM9N46vHc+pU8/r4zcDyl56RHXypPz0kPx1O+ed0Ir7/nrcv6uktduP1MhvyRI/RfcTlzTz2Fn07jp9PMPPYY0zt2MPfkkxWDzLwKKayRrVtZfestqOfhzc7iHT+ONzuLeh6rb71lWUt/jTGnWM0iImppse2OjLDqv/wyR//6y/iZzIKib70XwtHeUVxxQYTnNqxi7+VjrPvRLANzHqlBl1d/YohjpHhf3/Lz/MXxHLnrbvxMBs3n8U+exD9xAnd4mNSTTzG3azdaSCc5AwPgefiZDF4hELijo+WX61ZIYYkIozfcwPCmTaR27SJ/7BiJsTEGNmywGYUxDbAzuCNk+utfny/qlgaM4sxh9a23MHrDDfjpdNMuhOl8mo8+9FFcxyXpnB6kcn4Oz/e470P30Z+o7z2K4z3xzW8x+/89hjs6htPbC0D+2DHyhw4BkDzzTNyxMdT3yfzoR+D7JM4887SgYOdpm5iKdV95S0NFSK0pFKe/n6Frr2V082aGrr22oQtmf6KfbRdvI51Lk/MXpn5yfo50Ls22i7fVHSiK4x3YsIH0c8+RWLlqPlCo7+NNTQWzI9clf+QI6vuI45BcvRog2GS3qD+V1R6MaT9LQ0VIWCmUzedvBmD73u1k8hl89XHEIeEkmLhsYv75RpTbd+HPzQVbOoppJt/Hn5sLVjIVZhO5Q4fIHzmC09PTcMrNVGAn8pkaWLCIoOLMoV1EhC0XbGHjuRvZfXA305lpRntHWb9mfdUZRTqfruk1ZVd7eV6wca5IdcHnuGNjILDiZ95P74U/abWHZrMT+cwyWLAw8/oT/Vyz9pqaPjeVS3Hns3fy6KvBUSRJJ0nCSZBwEmy7eBubz9+8oDBdXO2lhdkDnoe/eBmwyOkrwhJJBt9xVbTbc8SVnchnlsGChVkWVWXHKzv4/DOf51jmGILM7/pe3bea3kQvk89PArDlglPtXQc2bMBPpci99lpQ5VMNWojn86gq4gYrsor7ScA20bVUrSfyXbLVUlIGsAK3WaYdr+zgi//8RY5nj5OQYCbhiIMgHE4fZi47R3+yn+17t5POn9phPfP3fx/0ePJ9tDCDENcFxwkCRi5HYtWqBbutrZDdQnYin1kmCxamZul8mu17t88vACxNMxVnGFMnp3DFJe/n2X1wN3CqfUdi9eqgt1XhnAz1vCBYFGYVJFzbRNcudiKfWSZLQ5ma7T64m7yfr/i8IKgqqVwKX32mM9PAqZVQ7tAQTk8PbslZGcVGf97sLCMf/jDJtWutkN0OdiKfWSYLFqZmxdYgrlS+yChKXvMkJMFo7yhw+koocRzcFSsWvE5USa5dy+jmxpfpmhrYiXxmmSwNZWpWbA0ymBxEJJhFLCYIKCScBOvXrAca60BrWqR4Il92NggMpYon8r3zN6y4bebZzMLUbP2a9SScBJ56rO5bzeH0YRw9dYSpFv4TkQW7vmvpe2WrnkIQ1xP5bBNhKCxYmJoVW4NMPj/JQHKAMziDqZNT+OoHYUKVlX0r+fXLfn3Bru9iB9pqfa+sRtFmcTuRzzYRhsqChVmW0tYgCSfB6r7V8z2lPrDuA/zXC38VfeYFju/92wVnZi/uQNuMjrmmSeJyIp9tIgyVdZ01dVnc5mP8DeNkv/H3Vc/Mbmb
HXNNFsimYfE+QIqtUkPfzMPFYNGdFgVhPe2xmYeqyuDVIaXv1pc7MbnffK9MhipsIkxVuLNxkUHd59TvxmCXFkK2GMg2r98xsY2pmmwhDZ8HCNKzeM7ONqZltIgydpaFMw8q2Hz/tk8qfmR03fjrN3FNP4U1PLyjgmxazTYShs2BhGtYNm+5UleMPPFC1gF9kQaXJipsIi6uhSgNGcRPhNZ+KcnE79ixYmIZ1w6a74w88UFMBf7lBxSxDXDcRdghbOmuaonQ1VKVNd6M33BDiCOvnp9P868/fiLhuxWConse5f3M/Jx55pGN/DpGRTcVjE+HpYn2XYDML0xSdvOmu3PnhpSSZxM9kmH3iOzWtChvetMlSUo2IyybCDmPBwjSFiDB6ww0Mb9rUcZvuai3gp555uqagktq1y/aamNixYGGaqhM33dVawAe6ZlWY6T62z8KYKkoL+OUUC/j9b397x68KM93LgoUxVRS75vqp1GkBo/Ss8BVXX11TUInzqjDTvSwNZVpqccPB9WvWz59zESe1FPBFxFqxm45lS2dNS6gqO17Zwfa928n7+fnjWBNOgm0Xb2Pz+Ztjud+gWtdc22dhlhDrP/hQgoWIfBa4HsgCPwJuUtXpaq+zYBEfD778IJPPT9Kf7CfpnLrDzvk50rk0E5dNsOWCLSGOsLWsFbspw4LFst9U5P3ATlXNi8gfAajq71R7nQWLeEjn03z0oY/iOu6CQFGU83N4vsd9H7ovlikpY+oU62ARSoFbVb+pqvnCh08Ca8MYh2mN3Qd3k/fzZQMFQNJJkvfz7D64u80jM8bUKwoF7puB/1XpSRGZACYAzjnnnHaNqaO0u6nddGYaT5feb+Crz3SmaubRGBMRLQsWIvKPwJoyT31aVf+28DmfBvLAvZW+jqpOApMQpKFaMNSOFVaxdbR3FFeW3m/giMNo72jT39sY0xotCxaq+r6lnheRjwMfAn5a47QkK0Zq7ZTabOvXrCfhJMj5uYo1i4STYP2a9U1/b2NMa4RSsxCRjcDvAB9W1VQYY+h0YR512p/oZ9vF20jn0uT8hRvUiquhtl28zYrbxsRIWDu4vwCsAL4lIs+JyBdDGkfHCvuo083nb2bisgk832M2O8uJzAlms7N4vsfEZRNsPn9zS97XGNMaoRS4VfX8MN63m4R91KmIsOWCLWw8d2NH7OA2pttFYTWUaYGoHHXan+jnmrXXtPQ9jDGtZ40EO1StnVKtqZ0xphYWLDpUrZ1SrQWFMaYWlobqYJ181Kkxpr2s62wXsKZ2xkRCrHtD2cyiC3TiUafGmPaymoUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKls6aWGr36X/GdDsLFiZWwjr9z5huZ8HCxEpYp/8Z0+2sZmFiI8zT/4zpdhYsTE38dJqZxx5jescOZh57LJQLctin/xnTzSwNZZYUpRpB2Kf/GdPNLFiYJUWpRhCV0/+M6UaWhjIVRa1GYKf/GRMeCxZdbqlaRNRqBHb6nzHhsTRUl6qlFhHFGoGd/mdMOCxYdKlaahHuqlWRqxGICKM33MDwpk12+p8xbWTBogvVWot481//1XyNoFwqqt4aQTNaddjpf8a0lwWLLlSsRZTOKEpJMomfyXDyhRdYdfNN8zOQ0oAxXyP41V+p+cIfpWW4xpjlsWDRhZZTi6hcI3AZ2LCeI3/118FjNVz4o7QM1xizPBYsutBy9itUqhHkfvxjjnzprmDGUTKTqHThrzX1Nbxpk9UejIkgWzrbherZr1CsEYxu3szAhg0c/fK9y9p/EbVluMaY5bFg0YUa3a9Qz4U/istwjTG1szRUl2pkv0I9F35r1WFMvFmw6FKN7Feo58Jfmvpq5jJcY0x7WLDocvXsV6jnwl9MfS21DHf1rbdYcduYiLKahVm2emseI1u3svrWW1DPw5udxTt+HG92FvU8a9VhTMSJqoY9hpqNj4/rnj17wh6GobENdn46ba06TDeK9Y5TCxamIXbhN6ZmsQ4WVrMwDbEeTcZ0B6tZGGOMqcqChTHGmKosWBhjjKkqlGAhIp8RkedF5DkR+aaInBXGOIwxxtQmrJnFZ1X1MlW9HHgI+L9CGocxxpgahBIsVPVEyYeDQHzW7xpjTBcKbemsiPwB8F+A48B1S3zeBDABcM4557RncMYYYxZo2aY8EflHYE2Zpz6tqn9b8nm/C/Sp6u9X+5q2Kc8YE2O2Ka8cVX1fjZ/6FeBhoGqwMMYYE46wVkNdUPLhh4GXwhiHMcaY2oRVs/hDEflJwAf+DbglpHEYY4ypQawaCYrIYYLgsthqYKrNw6lHXMYJ8RlrXMYJ8RlrXMYJ8RnrauAlVd0Y9kDqFatgUYmI7FHV8bDHUU1cxgnxGWtcxgnxGWtcxgnxGWtcxrkUa/dhjDGmKgsWxhhjquqUYDEZ9gBqFJdxQnzGGpdxQnzGGpdxQnzGGpdxVtQRNQtjjDGt1SkzC2OMMS1kwcIYY0xVHRMs4nJGhoh8VkReKoz1QREZDXtMlYjIjSKyV0R8EYncsj8R2SgiPxCRV0Tk9rDHU4mI3CUih0TkxbDHshQROVtEvi0i+wp/7r8Z9pjKEZE+EdklIv9cGOd/C3tMSxERV0SeFZGHwh5LIzomWBCfMzK+BVyiqpcBPwR+N+TxLOVFYCvweNgDWUxEXODPgA8CbwV+QUTeGu6oKtoOxGEzVh74LVW9CLgK+K8R/ZlmgPeq6tuAy4GNInJVyGNaym8C+8IeRKM6JljE5YwMVf2mquYLHz4JrA1zPEtR1X2q+oOwx1HBBuAVVd2vqlngPuDnQh5TWar6OHA07HFUo6qvqeozhd/PEFzg3hTuqE6ngdnCh8nC/5H89y4ia4GfBf4y7LE0qmOCBQRnZIjIfwAfI7ozi1I3A38f9iBi6k3Af5R8fIAIXtjiSkTWAVcAT4U7kvIKqZ3ngEPAt1Q1kuME/hT4bYI+eLEWq2AhIv8oIi+W+f/nAFT106p6NnAvcFtUx1n4nE8TTPvvDWuchXFUHWtElTsbIJJ3l3EjIkPA14FPLpqxR4aqeoWU81pgg4hcEvaYFhORDwGHVPXpsMfSDKGdlFePuJyRUW2cIvJx4EPAT2vIG12W8TONmgPA2SUfrwV+HNJYOoaIJAkCxb2q+kDY46lGVadF5DGCmlDUFhC8C/iwiGwC+oBhEfmyqv5SyOOqS6xmFkuJyxkZIrIR+B3gw6qaCns8MbYbuEBEzhWRHuCjwDdCHlOsiYgAXwL2qeofhz2eSkTkjOIqQhHpB95HBP+9q+rvqupaVV1H8PdzZ1wDBXRQsCA4I+NFEXkeeD/BCoQo+gKwAvhWYZnvF8MeUCUiskVEDgDvAB4WkUfDHlNRYZHAbcCjBIXYr6nq3nBHVZ6IfBX4PvCTInJARH4l7DFV8C7gl4H3Fv5uPle4K46aNwLfLvxb301Qs4j1stQ4sHYfxhhjquqkmYUxxpgWsWBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qyYGG6goh4haWgL4rI/SIyUHh8jY
jcJyI/EpF/EZFHROQthef+QUSm494t1JhmsGBhukVaVS9X1UuALHBLYRPag8BjqvoTqvpW4PeANxRe81mCfQfGdD0LFqYbPQGcD1wH5FR1fmOkqj6nqk8Ufv9PwEw4QzQmWixYmK4iIgmCMzBeAC4BOqLJmzGtZsHCdIv+QkvrPcC/E/RAMsbUKFZdZ41pQLrQ0nqeiOwFfj6k8RgTKzazMN1sJ9ArIr9WfEBE1ovItSGOyZhIsmBhulbhLJEtwM8Uls7uBe6gcC6GiDwB3A/8dKFb7AdCG6wxIbOus8YYY6qymYUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKgoUxxpiq/n9jPPDBLiPgsQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 402.375x360 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAFgCAYAAABKY1XKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5Rc9XXo+e8+p6rf6ocksIwFFgQcMA+D0xL4ARjHsWXFOJIIHidOrgVJOrAuTrxWxgmJZybc65V1k/FMEhtnxbcTg0iMzTUxKA6QYCcyA36AJB4BFGGDFZIoRkgtqaXurlI9ztnzx6lqVbequqrrdc6p2h8WS+qqrq5ft6Szz2/v32//RFUxxhhjluKEPQBjjDHRZ8HCGGNMVRYsjDHGVGXBwhhjTFUWLIwxxlSVCHsAy7Fx40b9h3/4h7CHYYwx9ZCwB9CIWM0spqamwh6CMcZ0pVgFC2OMMeGwYGGMMaYqCxbGGGOqsmBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qK1aa8TpHOenx//xTH5nKMDSZ5x3mr6e9xwx7WvKiPzxjTfhYs2khVuX/PASaf2E/e8/F8xXWEhOswcfV53Di+FpHwNnlGfXzGmPBYsGij+/cc4M6dLzPYm6A/mZx/POf53LnzZQA+sv7ssIYX+fEZY8ITWs1CRPpEZJeI/LOI7BWR/xbWWNohnfWYfGI/g70Jku7CH3vSdRjsTTD5xH7SWc/GZ4yJnDAL3Bngvar6NuByYKOIXBXieFrq+/unyHv+aRfioqTrkPd8ntx/pM0jC0R9fMaYcIWWhtLg8O/ZwofJwv8deyD4sbkcnr/0t+f5ytG5bJtGtFDUx2eMCVeoS2dFxBWR54BDwLdU9akwx9NKY4NJXGfp4rDrCCsHe9o0ooWiPj5jTLhCDRaq6qnq5cBaYIOIXLL4c0RkQkT2iMiew4cPt3+QTfKO81aTcB1ynl/2+Zznk3AdrjpvVZtHFoj6+Iwx4YrEpjxVnQYeAzaWeW5SVcdVdfyMM85o+9iapb/HZeLq85jL5E+7IOc8n1Q2z8TV54W2nyHq4zPGhCu0moWInAHkVHVaRPqB9wF/FNZ42uHG8bUATD6xn5O53IJ9DLddd8H88zY+Y0zUSFBnDuGNRS4D7gFcghnO11T1vy/1mvHxcd2zZ087htdS6azHk/uPcHQuy8rBHq46b1Wk7tijPj5jYirWO1pDCxb16JRgYYzpSrEOFpGoWRhjjIk2CxbGGGOqsmBhjDGmKgsWxhhjqrKus8a0gZ0RYuLOgoUxLWRnhJhOYcHCRFKn3Ik364yQTvl5mPiyYGEipZPuxGs9I+T6t51V8cLfST8PE29W4DaRUrwTTzjCir4kowM9rOhLknCEO3e+zP17DoQ9xJo144yQTvp5mHizYGEio9NO62v0jJBO+3mYeLNgYSKj007ra/SMkE77eZh4s5qFWSDMQmqnndZXekZIuQt+tTNCOu3nYeLNgoUBolFI7bTT+opnhBRXQ5UGjOIZIbddd0HFYNxpPw8TbxYsDNC8JZ6NaPROPIoaOSOkE38eJr4sWJimLPFshkbvxKNIRPjI+rO5/m1nLfuMkE78eZj4smBh5guppTOKUknX4WQux5P7j3DdhWe2dCydelpff49b18+uU38eJn4sWJhIFVIbuRPvRPbzMFFhwcJEspBa7514p7KfhwmbBYuYacXSViukGmOqsWARE61c2mqFVGNMNRYsYqLVS1utkGqMWYqoLl3YjJLx8XHds2dP2MNou3TW4/ovfIeEIxXTRHlf+bvb3t3w3X8661kh1ZjWiHV7YJtZxEA7l7ZaIdUYU441EoyBKC1tNcZ0J5tZtEgzVy1FcWmrMaa7WLBoslasWrKlrcaYsFkaqslacbJZcWnrXCZPzvMXPFdc2jpx9XmRLESnsx47X3qdrz99gJ0vvW4H9RgTUzazaKJWNuSL29LWpWZY2965jjeO9DKdyrf9zAxjTH0sWDRRK1ctxa1HUPl9IcrhmSx3fGMvI/0JBnoSbT8zwxhTH0tDNVE7Vi3197hcdd4qxgaTHJ3L8v39U5FL7VSaYR2dy3FkLoMrMJvxGO5LNpyiM8a0h80smqjVq5aicJpdLcrNsHxVpmYzuCKICJ7vM5vNM9yXbOuZGcaY+tjMoolKVy2V0+iqpVYUz1uh3AxrNpNHlflgprDgc5KuQ97zeXL/kXYO1RhTIwsWTdTKVUu1Fs+jkJIqN8PyfEU5FRwEyn6ObSw0JposWDTZjeNr+cR7LyDvKzMnc0ynssyczJH3taFVS8XUTrl9FhCtO/NyMyzXEaTQGkdVERGGehZmQW1joTHRZTWLJmvVqqU4tfxY3PLcFUFV8VXxCwHkDcN9OCUzC9tYaEy0WbBokWY35Itby48bx9eiqvy/3/oBx1I5fFVUwddgnKqnZhh2ZoYx0WfBIibi1vJDCqueehIubxh2AMF1hFzeZ2o2w6GZDOmcx0h/surGwlacDmiMWR4LFjERt9PsigX5oTIF+bHBHo6ns+Tyyu0fvJBr33Jm2XHHZamwMd3AgkWMxKnlx1K72R0RxgZ6mTmZozfhVgxwrT4d0BhTu9CChYicDfwVsAbwgUlV/VxY44mDOLX8aLQg38o+W8aY5QtzZpEHfktVnxGRFcDTIvItVf2XEMcUC3E4za7Rgnw7Twc0xlQX2j4LVX1NVZ8p/H4G2Ae8KazxmOZqdDd7nJYKG9MNIrEpT0TWAVcAT5V5bkJE9ojInsOHD7d7aKZOje5mj9tSYWM6XejBQkSGgK8Dn1TVE4ufV9VJVR1X1fEzzjij/QM0dWtkN3ur+2wZY5Yn1NVQIpIkCBT3quoDYY7FNF8jBfm4LRU2ptOFuRpKgC8B+1T1j8Mah2m9egvycVoqbEynE9Wli4gte2ORdwNPAC8QLJ0F+D1VfaTSa8bHx3XPnj3tGJ6JkHTWi/xSYWNqEOsdpKHNLFT1O8T8h2faIw5LhY3pdKEXuI0xxkSfBQtjjDFVWbAwxhhTlTUSNMaYcrIpePUJSB2FgZWw7mroGQh7VKGxYGGMMaVU4dl74XufBz8HvgeOC04S3vkbcMXHoAtb41uwMMaYUs/eC4//39AzBMn+U497ueBxgLf/UjhjC5EFi5iI6mlxUR2X6RDtTgVlU8GMomcI3EUdj91k8Pj3Pg+XbO26lJQFi4iL6mlxUR2X6RBhpYJefSJ4v9IZRSk3CfmT8Op34C3vb/77R5gFi4iL6mlxUR2X6RBhpYJSR4PAtBTfg9SR5r93xNnS2Qir9bS4dLbKX+4uGZfpELWmgrKp5r/3wMpgBrMUx4WB7ut2bMEiwoqnxS2+IBclXYe85/Pk/vbe5bR7XOmsx86XXufrTx9g50uvWxDqdMVU0OJAUeQmwc8HqaBmW3d1kOrycuWf93LgJGDdu5v/3hFnaagIi+ppce0aVyvrIlaYj7AwU0E9A0FNpJgCKw1YXg6yc3DNp7quuA0WLCItqqfFtWtcrai
LWGE+BsJOBV3xseDX730+KGbPF9cTQaAoPt9lLFhEWOlpceVSPmGdFteOcdVaF7n+bWcta0ZghfkYKE0FlUtFFVNBb3o7/PDR5i+rFQmK55dsDVJdqSNBYFr37q6cURRZzSLCGj3HOs7jakVdxArzMVFMBWVnT68deDnIzMLa9XD3JviH2+Gx/xH8OvkeeObLwbLbZo3jLe+Hy38h+LWLAwXYzCLyonpaXKvH1Yq6SDEAlc4oSiVdh5O5HE/uP2LnZ4RtqVTQuVcHRXDbYd1WFiwirpFzrOM8rlbURaK6YMCUUSkV9Ka3BzMK22HddhYsYiKqp8W1alytqItEdcGAWUIxFVT0w0dth3VIrGZhIqkVdZHSAFROWAsGzDLYDuvQWLAwkXXj+Fo+8d4LyPvKzMkc06ksMydz5H2tqy4S1QUDZhnCXlbbxSwNZSKrFXWRqC4YMDWqdVltF+6wbjXRZi0za4Px8XHds2dP2MMwHSCd9SK1YMAswzNfrr7DOpqroWK929NmFqYrRXXBgKmB7bAOhQUL03LWh8k0le2wDoUFC9My1ofJtNTiZbWmpSxYREwn3YVbHyZjOocFi4jotLvwVjUCNMaEw/ZZRETxLjzhCCv6kowO9LCiL0nCEe7c+TL37zkQ9hCXJaoHNxlj6mPBIgI6sRtqM/sw2Ul5xoTP0lAR0IndUJvRh6nTUnPGxJkFiwjoxG6ozWgEaAVyY6LD0lAR0IndUBvtw9SJqbm2yqaCDq3PfTX4NZsKe0Qm5mxmEQHl7sJ9VWYzeTxfUZS+hBu7bqiN9GHqxNRcW6jCs/cGu5v9XMnu5mRw+twVHws2tRmzTBYsIqB4Fx6kXFxmTnpMzWZQBV99QFg5mOTv/vnHscrTN9IIsBNTc23x7L2n+ibZKXKmiSxYRETxLvv/+eYPODKXwUEQAddxWD3Uw1BvIrZ5+nr6MHViaq7lsqlgRmGnyJkWsJpFRIgI17/tLIb6ErxxpJ83jPTxxtF+zj9jiJWDvfQk3K7K09tBRXV49Ykg9VSudTcEj/v5oJ+SMctkwSJCvr9/Ct9XxgZ6GBvoYbgviVNyd91NG9nsoKI62ClypoUsDRUhlqdfyA4qWiY7Rc60kAWLCLE8/UKtOCmvo9kpcqaFQg0WInIX8CHgkKpeEuZYoqAZG9k6kR1UVKOegWB5bLVT5KJc3M6mgtpL6mgwU1p3dbTH20XCnllsB74A/FXI44iEhUtoF25GK+bpb7vuArurNpXF9RQ52x8SeaEGC1V9XETWhTmGqIlbnr6Tzt/oCHE9Rc72h0SeqC5dUG35AIJg8VClNJSITAATAOecc85P/du//Vv7BheidNaLdJ7emvyZpsmmYPI9weynUq3Fz8PEY9EOeNXF+h9E2GmoqlR1EpgEGB8fDzeytVHU8/TW5M80TXF/SOmMopSbDFJqr37HjlENke2zMMtmTf5MU9n+kFiwYGGWrfQUPJ8sqcQLzCafJJV4AZ9sV20eNE1g+0NiIeyls18F3gOsFpEDwO+r6pfCHJOp7thcjrzvM5P8Hid6voXiofgIDtLrMpz9GfKpn+qazYOmQbY/JBbCXg31C2G+v6nP2GCS/MAu5noeRejF4dQmQSXP8Z5H6B3wWDl4aYijNLHRCftDuoClocyyXXHOICcHdoL2IovuN4QEaC8nB3Zy+Tn2j9vU6IqPwTW/Hax6ysxAejr41c9He39IF4n8aigTPS8cfYbhfocTKRdl4V4pVVB1GRkIPu+agWtCG6eJkbjuD+kiFizMsk1nphnoEfrcXg7PZvBLmsKKwJkrenESaaYz0+EN0sRTz4Atj40oCxZm2UZ7R3Edl5G+HkYGepjL5Ml7SsIVBnsTOAKz2SyjvaNhD9UY0yQWLMyyrV+znoSTIOfnSDpJVvQt/GuU83MknATr16wPaYTGmGazArc5TTqf5vEDj/ONH32Dxw88TjqfXvB8f6KfbRdvI51Lk/NzC57L+TnSuTTbLt5Gf6LCjlxjTOzYzMLMU1V2vLKD7Xu3k/fzeOrhikvCSbDt4m1sPn/zfL+nzedvBmD73u1k8hl89XHEIeEkmLhsYv55Y0xnCL2R4HKMj4/rnj17wh5Gx3rw5QeZfH6S/mQ/Saek31NhtjBx2QRbLtiy4DXpfJrdB3cznZlmtHeU9WvW24yiHnaOQzeIdSNBCxYGCC76H33oo7iOuyBQFOX8HJ7vcd+H7rNg0Ex2jkM3ifUfpNUsDAC7D+4m7+fLBgqApJMk7+fZfXB3m0fW4YrnODgJ6B2G/rHgVycRPP7svWGP0BjAahYtEcfUzHRmGk+X7vzpq297J5opmwpmFItbXEDwcc9Q8PwlWy0lZUJnwaKJllMgjprR3lFcWbrzpyOO7Z1opmrnODhusJN55x/AeddYHcOEyoJFE+14Zcd8gbgv0Tf/eM7PMfn8JMBpBeKoWLx3YjHbO9EClc5xUIX0UZg9BOrB81+FHz5idQwTKqtZNEk6n2b73u2nrSSCIN/fn+xn+97tp+1ZiArbO1FBNgU/fBSe+2rwazbVvK9d6RyH9FGYfT0ICOJC34jVMUzobGbRJMUCcemMolTSSZLJZ9h9cDfXrI1mcz3bO1GiHauUyp3joH4wo5DCfZwQ1C7A6hgmVBYslqlS8boTCsQiwpYLtrDx3I2xK9A3XXGVUs/QwpqClwseh6BLaiPKneOQmYGgl28QOIbWnAocYOdRm9BYsKhRteL1yr6VHVMg7k/01zT7ieOqr5q0c5VS8ZyG730+CAKZmSBIiASBYmDl6a+x86hbwzZGLsmCRY2qFa9vuvimrikQx3nVV02qrVJq5t394nMc9j8GL34dhs5cOKMoZedRN5dtjKyJFbhrUEvx+t6X7uUXL/zFrigQFwOn67gM9Qwx0jvCUM8QruMy+fwkO17ZEfYQG1NplVKpZt/dF89xeO//ERS0K72/nUfdfLYxsiYWLGpQ6+7mNw6+kYnLJvB8j9nsLCcyJ5jNzuL5XscUiOO+6qsmlVYplWrV3X2xjpGdDQJDqeJ51O/8DUuPNEutKcdmroKLKUtD1aDm4nV2uuMLxJ2w6quqcquUSrX67n5xHWM+LZKw86ibrZ0pxyYTkc8AU6r6ucLHfwC8rqqfb8X7WbCowXJ3N9daII6jTlj1VVW5VUpFxbv7az7Vurt7O4+6fcJIOTbPl4AHgM+JiAN8FNjQqjezYFED2918Ste0BYnC3b2dR916YaYcG6Sqr4rIERG5AngD8KyqtiyqWbCoQXF38+Tzk5Ck4lkPnZJqWkrXBE67u+8OYaccG/eXwDZgDXBXK9/ICtw12nz+5o4vXtei69qCFO/uL/+F4FcLFJ0l/gsKHgQ2AuuBR1v5RjazqJHtbj7F2oKYjhKFlGOdVDUrIt8GplWrFBMbZCflmbo1uoM7nfX4/v4pjs3lGBtM8o7zVtPfUyV/bEyrZFOtTjk2fWdfobD9DHCjqr7c7K9fymYWpm71rvpSVe7fc4DJJ/aT93w8X3EdIeE6TFx9HjeOr433DnATTzFbUCAibwUeAh5sda
AACxYmBPfvOcCdO19msDdBf7JksYDnc+fO4O/8R9afXfPXsxmK6Uaq+i/Aee16PwsWpq3SWY/JJ/Yz2Jsg6S5cX5F0HQZ7E0w+sZ/r33ZW1Qu+zVCMaR9bDWXa6vv7p8h7/mmBoijpOuQ9nyf3V18uXpyhJBxhRV+S0YEeVvQlSTjCnTtf5v49B5o9fGO6VtWZhYgMA2eo6o8WPX6Zqj7fspGZjnRsLofnL72owvOVo3PZJT+n2gxl1PH57r1/y3X/dhZ9q1cyeOWVOP3dtWotMqz1d0dYMliIyEeAPwUOiUgS2KaquwtPbwfe3trhmU4zNpjEdZZODbmOsHKwZ8nPKc5QSmseAKgy/tL3uPbZR8HLc/DZBP19PUgiwaqbb2Jk61ZLTbXLclt/W1CJtGozi98DfkpVXxORDcBfi8jvqeoDtGAZmOl87zhvNQnXIVchFZXzfBKuw1XnLd1eodIMZfyl7/HTex4ik+glm+wnN9DL0EAPmssx9edfBGD0hhua882YpdV62qCdJ1HRutsfHgCuA1YBR4Bvv/qHP9twC1wR2Qh8DnCBv1TVP6z2mmrBwlXV1wBUdZeIXAc8JCJrCc5+NGZZ+ntcJq4+b341VGnAyHk+qWye2667oGpxu9wMJZnLcO2zj5JJ9OK7CUSVhBN8fUkmcQYGOHLX3Qxv2mQpqVZbzmmDLz7Q+iNsY2bd7Q8L8HHgU0CS4FqdB3Lrbn/4s8A9r/7hz9Z1DRYRF/gz4GeAA8BuEflGYXVVRdUK3DMi8hPFDwqB4z3AzwEX1zNQY24cX8sn3nsBeV+ZOZljOpVl5mSOvK/cdt0F3Di+turXKJ2hFP3Ef/4Ax/fw3QSKIsBg76mgI8kkms+T2rWrFd+WKVVs/V2u3xIEj/t5eOWf7DyJ8j4O/J8EAeIEcLTwa77w+Mcb+NobgFdUdb+qZoH7CK7pS6o2s7iVRekmVZ0pTGE+Uu9ITXcTET6y/myuf9tZPLn/CEfnsqwc7OGq81bVvD+i3Axl4OQsjvooiu/DGcO9OIvTF55H/tixFnxXZoFaW3//+/dje55EqxRST58CZoFFDavIFR7/1LrbH/5anSmpNwH/UfLxAeDKai+qFizmCFrfvrLo8auAJ5czOtMcjbbYiJL+HpfrLjyz7tcXZyCTT+znZC7HYekjj6AaBIqVA2WK5K5LYmys7vc0Naq19TcS5/MkWuU6gtRTpeMmc0A/QZbnkTq+frkCUNWUVrVg8acERe7F0oXnrq8+LtMMqsqOV3awfe928n4eTz1ccUk4CbZdvI3N52/uulU+i2cox46ezcqXHmGgvwe35/RAobkckkgwsKFl58OYolpbf59zJfywyvUuoudJtNAqaqgnA6vr/PoHgNIWCWuBH1d7UbWaxbpyeylUdQ+wbjmjK0dENorID0TkFRG5vdGv18l2vLKDyecncR2XoZ4hRnpHGOoZwnVcJp+fZMcrO8IeYmiKM5St7zyfc279NUin0dzC2bvmcvipFKtuvsmK2+1Qa+vv8993KqiUE/3zJFrhCEFtYikeMFXn198NXCAi54pID8EJe9+o9qJq0av8QcuBhv7F1VuR70bpfJrte7fTn+yfP3DIV5+53Nz8Ead3vXgXG8/dGNuUVLOMbN0KwJG77sbPZMDzwHWRRILVt94y/3xLddJ+gUa+l1paf4uEe4RtNH2bINWU5PSaBSWPP1bPF1fVvIjcRnD+hQvcpap7q71uyRblIvJVYKeq/sWix38FeL+q/m/1DLbwNd4B3KGqHyh8/LuFb+R/VHpNN7UoL61NHJg5wN/96O8Y7h1GVTmeOc7UySlK/+wU5aM/+VF+e/1vd106qhw/nSa1axf5Y8dIjI0xsGFD62cUnbRfoJnfS7XW3wveK78wqMTt57a0mr+Jdbc/vI1g1dPiIncSGAI+8+of/uz2Zg6ummozi08CD4rIx4CnC4+NAz3Algbfu6aKvIhMABMA55xzToNvGX3lahMn8yc5njmOrz6qytTJKZxCBjFYIiqoKg/vf5i3jL2FLRc0+kcTf05/P0PXXtveN611E1ocNPN7qdb6246wLeeewq+fIsjiuASppxzwmZLn26amw48Km/EuKXy4V1V3NvzGIjcCH1DVXy18/MvABlX9RKXXdMPM4sGXH2Ty+ckFKafZ7Cyvzb4WLBxRH0Hm009FijLaO8rKvpXc96H7uj4d1XbZFEy+J7gbrlTQ9fMw8Vj0L4Cl34vjBnUHPx983DMU3PnH5XuJlmVPjwrLaN9DUMyeAh5rxg7uelTrDdUH3AKcD7wAfElVqxVealVXRb6TlatNAAwkBxBH8H1/PkjIor93gjCXm8MVl90Hd9d1KJFpQHETWifsFyh+L7k0zL0epImKRGDwDcH3E4fvJeYKgaGe5bFNVy0NdQ/BtOcJ4IPARQSpqWaYr8gD/0lQkf/FJn3tWNp9cDd5P09fYuG6AkccVvet5vXU6xVfm3ASOOJwPHOcQ6lDdY+hk/ZxtFWtm9DauV+g3uJ06iicPA6ZGRAHnJJFk6owexB6V3Tb3oeuVy1YvFVVLwUQkS8BTeuTUG9FvpNNZ6ZPSy8VjfaNciJ7gtnc7GnPJZwErgQboBStK1jUs4/DAkuJpTahqR9ceHMpOPZqcBFvZfqm0eJ0zyCcPBG8ZvHniQBO8HzPUOu+BxM51YLFfBW+cHFv6pur6iNEZIoVBaO9o/MX/XJW9KxgNjeLg4NbuDCVfr5qUOw+c6D6rujFF/rXZl+bT4GVzmxyfo7J5ycB5gvnYW0QjPTxqeU2oalC+ijMHgI0+Pj5/wV7H2ztKp9OKrSbyKgWLN4mIicKvxegv/CxAKqqwy0dXZdZv2Y9CSdBzs8tqFkUCYIjDsX/Si/IqoqPz3DP8JLBotyF3sHhYOogY31jrJAVCz4/6SQhCdv3bp/fx1HcIFhLYGmGWo9PDXWmU9yEVrpfIH0UZl9nvq654o3QN9Lai/Zyur1Wmt1k56B3GLIzgLMwoKkGM6Xe4aDwbVrrjpHTWpRzx/GGCtwichfwIeCQql5S7fOLlgwWqhqR27bu0J/oZ9vF24ILbpIFASPn5xARVvauxHEcpjPT+Hqq46qIMNY7xmBikPVr1ld8j3IX+tnsLCgcO3mMhCQY7Rtd8JqkkySTz7D74G7Wr1lftghf/LzFgaUZF/Di8amDvYkFhx3lPJ87d76MqtIz9nT4rVBKN6HlUjBzEJCgT8LgmiBVBbVftOvRjEL7wEroGw7GNXsoCA5KIeYJDL0hmEV1VwuO9rpjpGKLcu4Y+SxwD3ccr/eYiO3AF4C/Ws6Lqh6ratpr8/mbgeCCm8ln8NXHEYeEk+DXL/t1VJW/eOEvOHvF2WS97PyFscftIZPPcNMlN1W8GFdabZXXfOGa5jB1corh3mEcWdgJxlef6cx0xSJ8UTGw7HptF0dPHm34Al7t+NTB3gSf2/UVVqx5jMGegbbMdCoq3S/w3c/Bni8FheCeoaBQXKpVq6OaUWhfdzW4PZAcgP4xyJQsne0tWTrbXS042q3YonyWhQ0Fk4XHI
bjoL5uqPi4i65b7uq4KFnEoyIoIWy7YwsZzN5Ydq6oiImzfuz1IS+HMb8qbuGxiPtiUU+lCn5AEgiAi821EVvQsTEc54jDaO7pkEb7IV5+d/76TXQd3NZyqqnh8aoHr5kn1/yMr/MGaZjpt0TMAY+uCi23vEpnaVqyOqrXb61KzgsUptb6S76F7W3C0T5B6qtqinDtGvtZoSmo5uiJYxLFja3+iv+xeiWrBZCmVLvQDyQGQYCUVcNrn5PwcCSfB+jXr2X1w95JF+KLv/fh7rOhd0fAFvNLxqUUnEz9A8aiUMS1NobV170kzLtr1qLXba7VZQS19nUyrtLpFeV26Ili0uyDbDpWCyVIqrbYq7uM4nD4MLFxhlfNzpHNpJi6boD/RX7UIn/Nz5P08SSdZ9nmofgH302nmnnoKb3qaNbPQ5/cTdJg5nS9zgJJwKwf7YgqtrZp10V6ucoX20vesdVZgLV5/1oAAAB7mSURBVDjC1OoW5XXp+GBRKU8PIaYpQrLUhX60bxTP9ziaOYqqciJzYr5WUpreqlaET+fSvPOsd/LUwaeWHEu5C7iqcvyBBzhy191oPg+ex5mOw++eyPLty9/Pc29992lLTTU/gCSD2kUlxRRaWzXrol2PZs4KqvV1Mq3Q6hblden4YFFrQbYbWmRUu9An3SS3r7+dNUNrlkxvLVWEn7hsgpV9K9nz+tI9vMpdwI8/8ABTf/5FnIEBnL5Tf16jmuJn9jwMwHMXX31qzJ6Plz2PsVV9eJrDkfIznWIKre3CSuXYrCDuWtqivNBN/D3AahE5APy+qn6p2us6PljUWpBte5oiJNUu9LXUb6rVTdL5dNVU1eILuJ9Oc+Suu3EGBpBFxeyx4eACd91z3+TJN7+dk05yfp/FJ667mOTIrzH5QuWZTjGF1nZhX7RtVhBPdxxPFZbHLtmivN7itqr+Qj2v6/hgUW1XNISUpmix0ry/OzrK4JVX4vT3N1QgX6xS3aSWVNXiC/jcU0+h+fyCGUWpseEBVojPHWdnOHjRxawc7OGq81bR3+OiuhZk6QAYqk64aHfSgU7xELkW5R0fLGopyIaWpmiBcnn/4klxq26+iZGtWxGRugrkUPvy41pmMKW86elgrEsQ3+PiIZ93/dTahY83MQCaRTrpQKc4CTbcbeeOka+xqEV5O5fLlur4YFHPXW6cVcr7ay7H1J9/EYDRG25Y9tdd7vLj5V7A3dFRcKssNXVdEmNjFZ+uNwCaJVifqXAFgSES/fNqOvwoKuo9/Gjxha70Ljeq+yzq4afT/OvP34i47ml5fwgChnoe5/7N/cs+YrTcoUywMOA2svy4lWM3dar1QKebHoH/fNpSVNXF+iLT8TML6J40RbW8vyST+JkMqV27lnXkaDuWHzv9/ay6+ab5WVFpwNBcDj+VYvWtt1igaKdqfaacRNA76n9eC4meaKSorLbSMl0RLIo6PU1RS94fzyN/7Niyvm67lh+PbN0KwJG77sbPZBbUW1bfesv881FWaWFBLFXrM5U+CiePwdCZC9uahJGistpKy3VVsOh0zcj7l9Ou5cciwugNNzC8aROpXbvIHztGYmyMgQ0bIn/BrXVhQaxUO9Bp9hDgwOKbiFZ21a2kQ2srl95z6Wktyl/4+AuNtig/m6Dj7BrAByZV9XPVXmfBooMMXnklkkiguVzFvL8kEgxs2LCsr9vu5cdOf/+y0mRR0KqFBaFaqmVJZgZQcGThiXnqB+dc+HnIZ+GVf4K3Xt/acTbjDI+IufSeSyu2KL/0nks/C9zzwsdfqLfgnAd+S1WfEZEVwNMi8i1V/ZelXuQs9aSpLp1P8/iBx/nGj77B4wceJ52v1Pur9Yp5fz+VQnMLN34W8/6rbr5p2XfppcuPy+m05cfLtdSGQkkmcQYGgtRaOry/G3UptizJzgYBo1Q+G6R6Bt9wqv166ihM/QCOHwjO8khNwSP/Ozzz5SBN1CrF2kq5IjwEj/v5YGNkfBRblOeBE8DRwq/5wuMfr/cLq+prqvpM4fczwD7gTdVeZzOLOkW1k20r8v7dtvx4uVq1sCASKrUsEYI0VfFAp9RRmD0YBA6nEDx8gnMxWp0GasYZHhFSSD1VbVF+6T2Xfq0JKal1wBXA0s3csGBRt6h2sm1V3n+5m+y6SasWFkRCpZYlb3o73L2p0D3XhbnXg0BRvEHSwtF6/aPBhbqVaaCw2sG3TltalIvIEPB14JOqeqLa51uwqEMcOtk2O+/fLcuP69GqhQWRUq5lSbGrLhoEh+KMonhO91AhReU6rTkVsCisdvCt0/IW5SKSJAgU96rqA7W8xoJFHbq5k22rlh/H4RTDSlq1sCDyiimqb38mCA4eC8/p7l956nNbmQYKsx18a7S0RbkE+fEvAftU9Y9rfZ0FizpYJ9vmiWrtZzm6dkNhMUXVNwyPfCqoTxTP6V585nir00CddbJfS1uUA+8Cfhl4QUSeKzz2e6q6ZErLgkUdurWTbStEtfazXJHfUNjKnc3nvw/6RpZuC9LqNFDY7eCb6IWPv5AqLI9dskV5vcVtVf0OdbQesWBRh27rZNsqcaj91CqyGwrbsbM5SmmgTmgHH7AW5Z3AlpI2plifePK1J5nJzrCqv3x6Io61n8htKGzXzubOSgOFrrDhbvul91x6WovyRpfL1suCRZ1sKenyLa5PzGRnOJE9wfHscVb3rWa07/S0ndV+GtDOnc0dlAaKkkJgiESLcgsWdYrqUtJ0Ps13//O7PHPoGQTh8jMv591venckZjmL6xOKMpubRRAOpw8DnBYwrPbTgGpdY91k85e0dk4ayCxiwaJBUelkq6o8+PKD3PnsnUxnpymeU/KVfV9hrG+MT1z+CTZfEN7KonL1icHkYDAeDYLC1MkphnuHcQoraaz206AO29lswmXBokPseGUHf/r0nzKTn8HFRZwgKCjKsZPH+JNn/gQkvJVF5famOOKwum81h9OHcTQIEKlciqGeIav9NEPUdjbbWROxZsGiA6Tzae568S5m87NBoCg93hTBdVxmc7Pc/eLdoa0sqrQ3ZaR3BICpk1N4vsdMdgbAaj/NEJWdzXbWRN32XXjRaS3KL3ppX6P9oPqAx4FeghjwN6r6+9VeZ8GiA+w+uJu53BxA2TSTFP6bzc2GtrKo0t4UEWG0b5Th3mGOpI/wwXM/yJVvvDL02k9HiMqS1g49a6KV9l14UcUW5fsuvOizwD0XvbSv3la+GeC9qjpbaPvxHRH5e1V9cqkXWbDoANOZafL+0t0BFCXv50NbWVRtb4qnHit6VnDbFbdZkGimsJe0duBZE21SbFE+y8KGgsnC4wDb6/nCGhQ0Z0u+XhKoGngsWHSA0d5REs7Sf5SCkHASoa0ssr0pIQl7SWsYK7JirpB6qtqifN+FF32t3pSUiLjA08D5wJ+pqrUo7wbr16xnMDnIdCZYBbU4FaWF/4aSQ6GuLLK9KSEKa0mrrciqR8tblKuqB1wuIqPAgyJyiaq+uNRrLFh0gP5EPzdfcjN/sudPgtVQeqrIrSieH6R4brrkplDv3Nu1N8VP
p5l76im86Wnc0VEGr7yy85r4xUXUVmTFQ8tblBep6rSIPAZsBCxYdIPN529GVU/ts/CDFKQgp/ZZROTOvVV7U1SV4w88wJG77kbz+QXN/FbdfBMjW7dGvoNtx4nKiqx4aXWL8jOAXCFQ9APvA/6o2ussWHQIEWHrW7bywfM+yHf/87s8e+hZAK448wre9aZ3dUUt4PgDD8y3CS894lRzOab+/IsAjN5wQ1jD605RWZEVL61uUf5G4J5C3cIBvqaqD1V7kWgrD1JvsvHxcd2zZ0/YwzAR5KfT/OvP34i4bsUDiNTzOPdv7reUVLst2GeRX7giq7v2WdT8Te678KJtVGlRftFL+7Y3c3DVhDKzEJEbgTuAi4ANqmoRwDRk7qmn0Hx+wYyilCST+JkMqV27otUVthuEvSIrnqxFecGLwFbgf4b0/qaKuBWJvenpoEax5Cd55I8da8+AzOmsyWDNChvutu+78KLTWpQ3uoO7XqEEC1XdB+V3G5twxbVI7I6Ogltl1Y3rkhgba8+AjGmCQmCwFuW1EJEJYALgnHPOCXk0nS+uReLBK69EEgk0l6tYs5BEgoENG0IYXZ2s8Z6JkJYFCxH5R2BNmac+rap/W+vXUdVJYBKCAneThmfK8NNpjtx1N87AwGkXXEkmcQYGOHLX3Qxv2hS5lJTT38+qm29i6s+/iPT1odnsqVlRTw968iSrb70lcuMuyxrvmQhqWbBQ1fe16mub1oh7kXh4yxZSTz/N8b97KLjgqgYXVRFGrv8Qw1vCac++bNZ4z0SQE/YATHTEvUh84sEHSe3aTc+b30zyrLNIrllD8qyz6Hnzm0nt2s2JBx8Me4jV1dp4LxtKjdN0sVCChYhsEZEDwDuAh0Xk0TDGYRaKc5G4NIXm9PbirliBOzqKu2IFTm/vfArNT1dqtxMRxcZ75XY7Q/C4nw+WoBrTRqEEC1V9UFXXqmqvqr5BVT8QxjjMQoNXXgmOQ/7YMbzpabyZGdT355+PcpG4mEIr1lrU9/FmZua/D1wXzedJ7doV8kirsMZ7JqIivxrKnK4VeyBUlROPPII/O4t39GiQ63ccBHBXr8YZHETT6bqKxO3Ys1FMoalqECCmpoIG/YW6hQAyOEju6NGmvm/TWeM9E1EWLGKklXsgiktmE2NjOMkk+SNHQBX1ffKHDuGOjXHmJ3+Tka1bIzHexYopNG96Gu/wYdRxFnxtVUVnZsjsewmiXOe2xnsmoixYRFClO/FW7YFYvGTWHRvDGRnBn5ubv1t3ensZ3rRpWRf3du7ZKKbQvKmp0wIFBE151HGYe+IJ/PQno7uE1hrvmYiyYBEhS92Jr/ylj3H0y19uyR6IYr6f3h5mczN4vofruAwODeJIUNbyZmeXtWS2WXs2ak1hOf39DL373Rz7yleQxKK/1qrg+yTPOAP1/cgu/Z0X9lGoxpRhwSJClroTP/yFPwMguabcPsfG9kB4x6aZO3mCQ/mDKBqcxivBWRir+lcz1ju67CWzje7ZqCeF1ffWt+IMr0BTafD9BfssEmecgTs2hnf8eGSX/s6zxnsmgixYRES1O3FJJslPTZE480zEqbCIrc49ELvS++jLzyCJRDCTKFyDVZWp1GEAht3EspbMNrpno54Uljs2ijs8grPmjfMpNFwXZ3Dw1M8sokt/y7LGeyZCbFNeRCxe+rmY9PQABBfBSuq4EKbzaSYT38N3HVx/4XMigiMOx2YPg+ssa8lsI3s2ak1hLd4zUewPhect2GdRDBRRXvprTNRZsIiIanfizuAgQNDzqIx6L4S7D+4mnfD5/nvOoCfj43gL22+5PvRmfI5svXpZtZDSxn7LHW/VwJlMlt0zUewP5adSp72v5nL4qRSrbr4pusVtYyLMgkVEVLsTF8fBGRlB87mmXginM9N46vHc+pU8/r4zcDyl56RHXypPz0kPx1O+ed0Ir7/nrcv6uktduP1MhvyRI/RfcTlzTz2Fn07jp9PMPPYY0zt2MPfkkxWDzLwKKayRrVtZfestqOfhzc7iHT+ONzuLeh6rb71lWUt/jTGnWM0iImppse2OjLDqv/wyR//6y/iZzIKib70XwtHeUVxxQYTnNqxi7+VjrPvRLANzHqlBl1d/YohjpHhf3/Lz/MXxHLnrbvxMBs3n8U+exD9xAnd4mNSTTzG3azdaSCc5AwPgefiZDF4hELijo+WX61ZIYYkIozfcwPCmTaR27SJ/7BiJsTEGNmywGYUxDbAzuCNk+utfny/qlgaM4sxh9a23MHrDDfjpdNMuhOl8mo8+9FFcxyXpnB6kcn4Oz/e470P30Z+o7z2K4z3xzW8x+/89hjs6htPbC0D+2DHyhw4BkDzzTNyxMdT3yfzoR+D7JM4887SgYOdpm5iKdV95S0NFSK0pFKe/n6Frr2V082aGrr22oQtmf6KfbRdvI51Lk/MXpn5yfo50Ls22i7fVHSiK4x3YsIH0c8+RWLlqPlCo7+NNTQWzI9clf+QI6vuI45BcvRog2GS3qD+V1R6MaT9LQ0VIWCmUzedvBmD73u1k8hl89XHEIeEkmLhsYv75RpTbd+HPzQVbOoppJt/Hn5sLVjIVZhO5Q4fIHzmC09PTcMrNVGAn8pkaWLCIoOLMoV1EhC0XbGHjuRvZfXA305lpRntHWb9mfdUZRTqfruk1ZVd7eV6wca5IdcHnuGNjILDiZ95P74U/abWHZrMT+cwyWLAw8/oT/Vyz9pqaPjeVS3Hns3fy6KvBUSRJJ0nCSZBwEmy7eBubz9+8oDBdXO2lhdkDnoe/eBmwyOkrwhJJBt9xVbTbc8SVnchnlsGChVkWVWXHKzv4/DOf51jmGILM7/pe3bea3kQvk89PArDlglPtXQc2bMBPpci99lpQ5VMNWojn86gq4gYrsor7ScA20bVUrSfyXbLVUlIGsAK3WaYdr+zgi//8RY5nj5OQYCbhiIMgHE4fZi47R3+yn+17t5POn9phPfP3fx/0ePJ9tDCDENcFxwkCRi5HYtWqBbutrZDdQnYin1kmCxamZul8mu17t88vACxNMxVnGFMnp3DFJe/n2X1wN3CqfUdi9eqgt1XhnAz1vCBYFGYVJFzbRNcudiKfWSZLQ5ma7T64m7yfr/i8IKgqqVwKX32mM9PAqZVQ7tAQTk8PbslZGcVGf97sLCMf/jDJtWutkN0OdiKfWSYLFqZmxdYgrlS+yChKXvMkJMFo7yhw+koocRzcFSsWvE5USa5dy+jmxpfpmhrYiXxmmSwNZWpWbA0ymBxEJJhFLCYIKCScBOvXrAca60BrWqR4Il92NggMpYon8r3zN6y4bebZzMLUbP2a9SScBJ56rO5bzeH0YRw9dYSpFv4TkQW7vmvpe2WrnkIQ1xP5bBNhKCxYmJoVW4NMPj/JQHKAMziDqZNT+OoHYUKVlX0r+fXLfn3Bru9iB9pqfa+sRtFmcTuRzzYRhsqChVmW0tYgCSfB6r7V8z2lPrDuA/zXC38VfeYFju/92wVnZi/uQNuMjrmmSeJyIp9tIgyVdZ01dVnc5mP8DeNkv/H3Vc/Mbmb
HXNNFsimYfE+QIqtUkPfzMPFYNGdFgVhPe2xmYeqyuDVIaXv1pc7MbnffK9MhipsIkxVuLNxkUHd59TvxmCXFkK2GMg2r98xsY2pmmwhDZ8HCNKzeM7ONqZltIgydpaFMw8q2Hz/tk8qfmR03fjrN3FNP4U1PLyjgmxazTYShs2BhGtYNm+5UleMPPFC1gF9kQaXJipsIi6uhSgNGcRPhNZ+KcnE79ixYmIZ1w6a74w88UFMBf7lBxSxDXDcRdghbOmuaonQ1VKVNd6M33BDiCOvnp9P868/fiLhuxWConse5f3M/Jx55pGN/DpGRTcVjE+HpYn2XYDML0xSdvOmu3PnhpSSZxM9kmH3iOzWtChvetMlSUo2IyybCDmPBwjSFiDB6ww0Mb9rUcZvuai3gp555uqagktq1y/aamNixYGGaqhM33dVawAe6ZlWY6T62z8KYKkoL+OUUC/j9b397x68KM93LgoUxVRS75vqp1GkBo/Ss8BVXX11TUInzqjDTvSwNZVpqccPB9WvWz59zESe1FPBFxFqxm45lS2dNS6gqO17Zwfa928n7+fnjWBNOgm0Xb2Pz+Ztjud+gWtdc22dhlhDrP/hQgoWIfBa4HsgCPwJuUtXpaq+zYBEfD778IJPPT9Kf7CfpnLrDzvk50rk0E5dNsOWCLSGOsLWsFbspw4LFst9U5P3ATlXNi8gfAajq71R7nQWLeEjn03z0oY/iOu6CQFGU83N4vsd9H7ovlikpY+oU62ARSoFbVb+pqvnCh08Ca8MYh2mN3Qd3k/fzZQMFQNJJkvfz7D64u80jM8bUKwoF7puB/1XpSRGZACYAzjnnnHaNqaO0u6nddGYaT5feb+Crz3SmaubRGBMRLQsWIvKPwJoyT31aVf+28DmfBvLAvZW+jqpOApMQpKFaMNSOFVaxdbR3FFeW3m/giMNo72jT39sY0xotCxaq+r6lnheRjwMfAn5a47QkK0Zq7ZTabOvXrCfhJMj5uYo1i4STYP2a9U1/b2NMa4RSsxCRjcDvAB9W1VQYY+h0YR512p/oZ9vF20jn0uT8hRvUiquhtl28zYrbxsRIWDu4vwCsAL4lIs+JyBdDGkfHCvuo083nb2bisgk832M2O8uJzAlms7N4vsfEZRNsPn9zS97XGNMaoRS4VfX8MN63m4R91KmIsOWCLWw8d2NH7OA2pttFYTWUaYGoHHXan+jnmrXXtPQ9jDGtZ40EO1StnVKtqZ0xphYWLDpUrZ1SrQWFMaYWlobqYJ181Kkxpr2s62wXsKZ2xkRCrHtD2cyiC3TiUafGmPaymoUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKls6aWGr36X/GdDsLFiZWwjr9z5huZ8HCxEpYp/8Z0+2sZmFiI8zT/4zpdhYsTE38dJqZxx5jescOZh57LJQLctin/xnTzSwNZZYUpRpB2Kf/GdPNLFiYJUWpRhCV0/+M6UaWhjIVRa1GYKf/GRMeCxZdbqlaRNRqBHb6nzHhsTRUl6qlFhHFGoGd/mdMOCxYdKlaahHuqlWRqxGICKM33MDwpk12+p8xbWTBogvVWot481//1XyNoFwqqt4aQTNaddjpf8a0lwWLLlSsRZTOKEpJMomfyXDyhRdYdfNN8zOQ0oAxXyP41V+p+cIfpWW4xpjlsWDRhZZTi6hcI3AZ2LCeI3/118FjNVz4o7QM1xizPBYsutBy9itUqhHkfvxjjnzprmDGUTKTqHThrzX1Nbxpk9UejIkgWzrbherZr1CsEYxu3szAhg0c/fK9y9p/EbVluMaY5bFg0YUa3a9Qz4U/istwjTG1szRUl2pkv0I9F35r1WFMvFmw6FKN7Feo58Jfmvpq5jJcY0x7WLDocvXsV6jnwl9MfS21DHf1rbdYcduYiLKahVm2emseI1u3svrWW1DPw5udxTt+HG92FvU8a9VhTMSJqoY9hpqNj4/rnj17wh6GobENdn46ba06TDeK9Y5TCxamIXbhN6ZmsQ4WVrMwDbEeTcZ0B6tZGGOMqcqChTHGmKosWBhjjKkqlGAhIp8RkedF5DkR+aaInBXGOIwxxtQmrJnFZ1X1MlW9HHgI+L9CGocxxpgahBIsVPVEyYeDQHzW7xpjTBcKbemsiPwB8F+A48B1S3zeBDABcM4557RncMYYYxZo2aY8EflHYE2Zpz6tqn9b8nm/C/Sp6u9X+5q2Kc8YE2O2Ka8cVX1fjZ/6FeBhoGqwMMYYE46wVkNdUPLhh4GXwhiHMcaY2oRVs/hDEflJwAf+DbglpHEYY4ypQawaCYrIYYLgsthqYKrNw6lHXMYJ8RlrXMYJ8RlrXMYJ8RnrauAlVd0Y9kDqFatgUYmI7FHV8bDHUU1cxgnxGWtcxgnxGWtcxgnxGWtcxrkUa/dhjDGmKgsWxhhjquqUYDEZ9gBqFJdxQnzGGpdxQnzGGpdxQnzGGpdxVtQRNQtjjDGt1SkzC2OMMS1kwcIYY0xVHRMs4nJGhoh8VkReKoz1QREZDXtMlYjIjSKyV0R8EYncsj8R2SgiPxCRV0Tk9rDHU4mI3CUih0TkxbDHshQROVtEvi0i+wp/7r8Z9pjKEZE+EdklIv9cGOd/C3tMSxERV0SeFZGHwh5LIzomWBCfMzK+BVyiqpcBPwR+N+TxLOVFYCvweNgDWUxEXODPgA8CbwV+QUTeGu6oKtoOxGEzVh74LVW9CLgK+K8R/ZlmgPeq6tuAy4GNInJVyGNaym8C+8IeRKM6JljE5YwMVf2mquYLHz4JrA1zPEtR1X2q+oOwx1HBBuAVVd2vqlngPuDnQh5TWar6OHA07HFUo6qvqeozhd/PEFzg3hTuqE6ngdnCh8nC/5H89y4ia4GfBf4y7LE0qmOCBQRnZIjIfwAfI7ozi1I3A38f9iBi6k3Af5R8fIAIXtjiSkTWAVcAT4U7kvIKqZ3ngEPAt1Q1kuME/hT4bYI+eLEWq2AhIv8oIi+W+f/nAFT106p6NnAvcFtUx1n4nE8TTPvvDWuchXFUHWtElTsbIJJ3l3EjIkPA14FPLpqxR4aqeoWU81pgg4hcEvaYFhORDwGHVPXpsMfSDKGdlFePuJyRUW2cIvJx4EPAT2vIG12W8TONmgPA2SUfrwV+HNJYOoaIJAkCxb2q+kDY46lGVadF5DGCmlDUFhC8C/iwiGwC+oBhEfmyqv5SyOOqS6xmFkuJyxkZIrIR+B3gw6qaCns8MbYbuEBEzhWRHuCjwDdCHlOsiYgAXwL2qeofhz2eSkTkjOIqQhHpB95HBP+9q+rvqupaVV1H8PdzZ1wDBXRQsCA4I+NFEXkeeD/BCoQo+gKwAvhWYZnvF8MeUCUiskVEDgDvAB4WkUfDHlNRYZHAbcCjBIXYr6nq3nBHVZ6IfBX4PvCTInJARH4l7DFV8C7gl4H3Fv5uPle4K46aNwLfLvxb301Qs4j1stQ4sHYfxhhjquqkmYUxxpgWsWBhjDGmKgsWxhhjqrJgYYwxpioLFsYYY6qyYGG6goh4haWgL4rI/SIyUHh8jY
jcJyI/EpF/EZFHROQthef+QUSm494t1JhmsGBhukVaVS9X1UuALHBLYRPag8BjqvoTqvpW4PeANxRe81mCfQfGdD0LFqYbPQGcD1wH5FR1fmOkqj6nqk8Ufv9PwEw4QzQmWixYmK4iIgmCMzBeAC4BOqLJmzGtZsHCdIv+QkvrPcC/E/RAMsbUKFZdZ41pQLrQ0nqeiOwFfj6k8RgTKzazMN1sJ9ArIr9WfEBE1ovItSGOyZhIsmBhulbhLJEtwM8Uls7uBe6gcC6GiDwB3A/8dKFb7AdCG6wxIbOus8YYY6qymYUxxpiqLFgYY4ypyoKFMcaYqixYGGOMqcqChTHGmKosWBhjjKnKgoUxxpiq/n9jPPDBLiPgsQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 402.375x360 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "do_KmeansPCA()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 76,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(tensor([[1.0000, 0.0000, 1.0000, 0.9998]], grad_fn=<SigmoidBackward>),\n",
       " tensor([[0., 0., 0., 0.]], grad_fn=<CumprodBackward>),\n",
       " tensor([[2]]),\n",
       " None,\n",
       " None)"
      ]
     },
     "execution_count": 76,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.forward(x_path=x_path)"
   ]
  },
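  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The five-tuple returned by `forward` reads like a discrete-time survival head: the first tensor (`grad_fn=<SigmoidBackward>`) holds per-bin hazards, the second (`grad_fn=<CumprodBackward>`) the survival curve `S = cumprod(1 - hazards)`, and the third the predicted bin. This reading is inferred from the `grad_fn`s rather than checked against the model source; the sketch below spells out the implied relation."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch under the assumed output order (hazards, S, Y_hat, _, _):\n",
    "# hazards = sigmoid(logits) per time bin, S = cumprod(1 - hazards).\n",
    "hazards, S, Y_hat, _, _ = model.forward(x_path=x_path)\n",
    "assert torch.allclose(S, torch.cumprod(1 - hazards, dim=1))"
   ]
  },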
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 'import ..models' is invalid syntax: relative imports only work inside a package.\n",
    "# Workaround sketch (assumes the repo's models package sits one directory up):\n",
    "import sys\n",
    "sys.path.append('..')\n",
    "import models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Dummy input for shape testing: a bag of 500 patch-level feature vectors, 1024-d each.\n",
    "x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'MultiheadAttention' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-65-f85a99af33ee>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mself\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMM_CoAttn_Transformer_Surv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0momic_sizes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msig_sizes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m<ipython-input-62-9e5f322e30a0>\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, omic_sizes, n_classes, model_size_wsi, model_size_omic, dropout)\u001b[0m\n\u001b[1;32m     28\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     29\u001b[0m         \u001b[0;31m### Multihead Attention\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 30\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcoattn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMultiheadAttention\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membed_dim\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m256\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_heads\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     31\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     32\u001b[0m         \u001b[0;31m### Transformer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNameError\u001b[0m: name 'MultiheadAttention' is not defined"
     ]
    }
   ],
   "source": [
    "self = MM_CoAttn_Transformer_Surv(omic_sizes=sig_sizes)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'sig_size' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-52-097a03ed0c40>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mself\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMM_CoAttn_Surv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msig_sizes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msig_sizes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      2\u001b[0m \u001b[0mx_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m500\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1024\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFloatTensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      3\u001b[0m \u001b[0msig_feats\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFloatTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0msize\u001b[0m \u001b[0;32min\u001b[0m \u001b[0msig_sizes\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m \u001b[0mx_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_net\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_path\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0munsqueeze\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-43-4469ba9e1eea>\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, sig_sizes, n_classes, model_size_wsi, model_size_omic, dropout)\u001b[0m\n\u001b[1;32m     19\u001b[0m         \u001b[0mhidden\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize_dict_omic\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmodel_size_omic\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     20\u001b[0m         \u001b[0msig_networks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 21\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0minput_dim\u001b[0m \u001b[0;32min\u001b[0m \u001b[0msig_size\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     22\u001b[0m             \u001b[0mfc_omic\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mSNN_Block\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdim1\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minput_dim\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdim2\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mhidden\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     23\u001b[0m             \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhidden\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNameError\u001b[0m: name 'sig_size' is not defined"
     ]
    }
   ],
   "source": [
    "self = MM_CoAttn_Surv(sig_sizes=sig_sizes)\n",
    "x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
    "sig_feats = [torch.randint(10, size=(size,)).type(torch.FloatTensor) for size in sig_sizes]\n",
    "\n",
    "x_path = self.attention_net(x_path).unsqueeze(1)\n",
    "x_omic = torch.stack([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)]).unsqueeze(1)\n",
    "\n",
    "out, attention_weights = self.coattn(x_omic, x_path, x_path)\n",
    "out = self.transformer(out)\n",
    "out = self.conv(out.squeeze(1).T.unsqueeze(0))\n",
    "#out = self.classifier(out.squeeze(0).squeeze(1))"
   ]
  },
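  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `NameError` above comes from a typo inside the `MM_CoAttn_Surv.__init__` defined earlier (ipython-input-43): the loop reads `for input_dim in sig_size:` while the parameter is named `sig_sizes`. With that fixed, the same walkthrough runs end to end (see the re-run further down, execution count 474)."
   ]
  },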
  {
   "cell_type": "code",
   "execution_count": 471,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([1, 256, 1])"
      ]
     },
     "execution_count": 471,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "out.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 472,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[[ 0.5998,  1.9873, -1.1435,  ..., -0.0048,  0.2963,  1.1112]],\n",
       "\n",
       "        [[-0.4201, -0.1456,  0.2057,  ..., -0.2175,  0.4188,  0.4702]],\n",
       "\n",
       "        [[ 1.0294,  3.1634,  0.4595,  ...,  1.2059,  0.5845,  1.4114]],\n",
       "\n",
       "        [[-1.1435, -1.1435, -1.1435,  ...,  0.1951, -0.4378,  0.2051]],\n",
       "\n",
       "        [[ 0.9948,  1.1596,  2.1419,  ..., -0.1225,  1.3597, -0.3037]],\n",
       "\n",
       "        [[ 0.4019, -1.1435, -0.1522,  ..., -0.2058,  0.0351, -1.1435]]],\n",
       "       grad_fn=<UnsqueezeBackward0>)"
      ]
     },
     "execution_count": 472,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_omic"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 474,
   "metadata": {},
   "outputs": [],
   "source": [
    "self = MM_CoAttn_Surv(sig_sizes=sig_sizes)\n",
    "x_path = torch.randint(10, size=(500, 1024)).type(torch.FloatTensor)\n",
    "sig_feats = [torch.randint(10, size=(size,)).type(torch.FloatTensor) for size in sig_sizes]\n",
    "\n",
    "x_path = self.attention_net(x_path).unsqueeze(1)\n",
    "x_omic = torch.stack([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)]).unsqueeze(1)\n",
    "out, attention_weights = self.coattn(x_omic, x_path, x_path)\n",
    "\n",
    "out = self.transformer(out)\n"
   ]
  },
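  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Shape walkthrough of the pipeline above, inferred from the recorded outputs rather than re-verified: `attention_net` projects the 500 patch features to 256-d, so after `unsqueeze(1)` `x_path` is `(500, 1, 256)`; stacking the six signature embeddings gives `x_omic` of `(6, 1, 256)`. `coattn` uses the omic tokens as queries over the patch tokens as keys/values, returning one attended 256-d vector per signature, and `self.transformer` preserves that `(6, 1, 256)` shape (see the `out.shape` checks below). A self-contained sketch of the co-attention call with a plain `nn.MultiheadAttention` (a stand-in, not the model's own module):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "\n",
    "coattn_demo = nn.MultiheadAttention(embed_dim=256, num_heads=1)\n",
    "q = torch.randn(6, 1, 256)     # omic queries: (target_len, batch, embed_dim)\n",
    "kv = torch.randn(500, 1, 256)  # patch keys/values: (source_len, batch, embed_dim)\n",
    "attn_out, attn_w = coattn_demo(q, kv, kv)\n",
    "print(attn_out.shape, attn_w.shape)  # torch.Size([6, 1, 256]) torch.Size([1, 6, 500])"
   ]
  },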
  {
   "cell_type": "code",
   "execution_count": 491,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([1536])"
      ]
     },
     "execution_count": 491,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "torch.cat([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)]).shape"
   ]
  },
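  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A note on the two aggregation choices: `torch.stack(...).unsqueeze(1)` above keeps the six signature embeddings as separate tokens of shape `(6, 1, 256)` so they can serve as attention queries, while `torch.cat` flattens them into a single 1536-d vector (6 * 256 = 1536), e.g. for feeding a plain MLP head instead."
   ]
  },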
1525
  {
1526
   "cell_type": "code",
1527
   "execution_count": 484,
1528
   "metadata": {},
1529
   "outputs": [
1530
    {
1531
     "data": {
1532
      "text/plain": [
1533
       "torch.Size([6, 1, 512])"
1534
      ]
1535
     },
1536
     "execution_count": 484,
1537
     "metadata": {},
1538
     "output_type": "execute_result"
1539
    }
1540
   ],
1541
   "source": [
1542
    "torch.cat([out, out], axis=2).shape"
1543
   ]
1544
  },
1545
  {
1546
   "cell_type": "code",
1547
   "execution_count": 455,
1548
   "metadata": {},
1549
   "outputs": [
1550
    {
1551
     "data": {
1552
      "text/plain": [
1553
       "torch.Size([6, 1, 256])"
1554
      ]
1555
     },
1556
     "execution_count": 455,
1557
     "metadata": {},
1558
     "output_type": "execute_result"
1559
    }
1560
   ],
1561
   "source": [
1562
    "out.shape"
1563
   ]
1564
  },
1565
1585
1592
  {
1593
   "cell_type": "code",
1594
   "execution_count": 423,
1595
   "metadata": {},
1596
   "outputs": [
1597
    {
1598
     "data": {
1599
      "text/plain": [
1600
       "torch.Size([1, 8, 6, 500])"
1601
      ]
1602
     },
1603
     "execution_count": 423,
1604
     "metadata": {},
1605
     "output_type": "execute_result"
1606
    }
1607
   ],
1608
   "source": [
1609
    "attention_weights.shape"
1610
   ]
1611
  },
1612
  {
1613
   "cell_type": "code",
1614
   "execution_count": 415,
1615
   "metadata": {},
1616
   "outputs": [
1617
    {
1618
     "data": {
1619
      "text/plain": [
1620
       "tensor([[[0.0018, 0.0020, 0.0012,  ..., 0.0016, 0.0025, 0.0031],\n",
1621
       "         [0.0026, 0.0015, 0.0016,  ..., 0.0021, 0.0021, 0.0016],\n",
1622
       "         [0.0019, 0.0014, 0.0011,  ..., 0.0020, 0.0013, 0.0025],\n",
1623
       "         [0.0016, 0.0013, 0.0023,  ..., 0.0009, 0.0015, 0.0027],\n",
1624
       "         [0.0015, 0.0013, 0.0023,  ..., 0.0026, 0.0019, 0.0026],\n",
1625
       "         [0.0013, 0.0019, 0.0025,  ..., 0.0022, 0.0020, 0.0021]]],\n",
1626
       "       grad_fn=<DivBackward0>)"
1627
      ]
1628
     },
1629
     "execution_count": 415,
1630
     "metadata": {},
1631
     "output_type": "execute_result"
1632
    }
1633
   ],
1634
   "source": [
1635
    "attention_weights_0"
1636
   ]
1637
  },
1638
  {
1639
   "cell_type": "code",
1640
   "execution_count": 416,
1641
   "metadata": {},
1642
   "outputs": [
1643
    {
1644
     "data": {
1645
      "text/plain": [
1646
       "tensor([[[0.0018, 0.0020, 0.0012,  ..., 0.0016, 0.0025, 0.0031],\n",
1647
       "         [0.0026, 0.0015, 0.0016,  ..., 0.0021, 0.0021, 0.0016],\n",
1648
       "         [0.0019, 0.0014, 0.0011,  ..., 0.0020, 0.0013, 0.0025],\n",
1649
       "         [0.0016, 0.0013, 0.0023,  ..., 0.0009, 0.0015, 0.0027],\n",
1650
       "         [0.0015, 0.0013, 0.0023,  ..., 0.0026, 0.0019, 0.0026],\n",
1651
       "         [0.0013, 0.0019, 0.0025,  ..., 0.0022, 0.0020, 0.0021]]],\n",
1652
       "       grad_fn=<DivBackward0>)"
1653
      ]
1654
     },
1655
     "execution_count": 416,
1656
     "metadata": {},
1657
     "output_type": "execute_result"
1658
    }
1659
   ],
1660
   "source": [
1661
    "softmax(attention_weights_1, dim=-1).sum(axis=1) / 8"
1662
   ]
1663
  },
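  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The identity checked above, restated standalone: softmaxing each head's scores and then averaging over the 8 heads (`sum(dim=1) / 8`) is exactly `mean(dim=1)`, and the averaged rows are still probability distributions. Shapes follow the `(batch, heads, queries, keys)` raw-score layout used here."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "from torch.nn.functional import softmax\n",
    "\n",
    "raw = torch.randn(1, 8, 6, 500)            # (batch, heads, queries, keys) raw scores\n",
    "avg = softmax(raw, dim=-1).sum(dim=1) / 8  # the expression used above\n",
    "assert torch.allclose(avg, softmax(raw, dim=-1).mean(dim=1))  # no hardcoded head count\n",
    "assert torch.allclose(avg.sum(dim=-1), torch.ones(1, 6))      # rows remain distributions"
   ]
  },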
1664
  {
1665
   "cell_type": "code",
1666
   "execution_count": 411,
1667
   "metadata": {},
1668
   "outputs": [
1669
    {
1670
     "data": {
1671
      "text/plain": [
1672
       "torch.Size([1, 1, 6, 500])"
1673
      ]
1674
     },
1675
     "execution_count": 411,
1676
     "metadata": {},
1677
     "output_type": "execute_result"
1678
    }
1679
   ],
1680
   "source": [
1681
    "softmax(attention_weights_1, dim=-1).shape"
1682
   ]
1683
  },
1684
  {
1685
   "cell_type": "code",
1686
   "execution_count": 339,
1687
   "metadata": {},
1688
   "outputs": [
1689
    {
1690
     "data": {
1691
      "text/plain": [
1692
       "tensor(1.0000, grad_fn=<SumBackward0>)"
1693
      ]
1694
     },
1695
     "execution_count": 339,
1696
     "metadata": {},
1697
     "output_type": "execute_result"
1698
    }
1699
   ],
1700
   "source": [
1701
    "attention_weights_0[0][0].sum()"
1702
   ]
1703
  },
1704
  {
1705
   "cell_type": "code",
1706
   "execution_count": 396,
1707
   "metadata": {},
1708
   "outputs": [],
1709
   "source": [
1710
    "test = softmax(attention_weights_2, dim=-1)"
1711
   ]
1712
  },
1713
  {
1714
   "cell_type": "code",
1715
   "execution_count": 402,
1716
   "metadata": {},
1717
   "outputs": [
1718
    {
1719
     "data": {
1720
      "text/plain": [
1721
       "tensor([0.0024, 0.0030, 0.0019, 0.0018, 0.0038, 0.0015, 0.0020, 0.0016, 0.0015,\n",
1722
       "        0.0019, 0.0015, 0.0035, 0.0026, 0.0017, 0.0014, 0.0013, 0.0023, 0.0020,\n",
1723
       "        0.0017, 0.0010], grad_fn=<SliceBackward>)"
1724
      ]
1725
     },
1726
     "execution_count": 402,
1727
     "metadata": {},
1728
     "output_type": "execute_result"
1729
    }
1730
   ],
1731
   "source": [
1732
    "attention_weights_0[0][0][:20]"
1733
   ]
1734
  },
1735
  {
1736
   "cell_type": "code",
1737
   "execution_count": 404,
1738
   "metadata": {},
1739
   "outputs": [
1740
    {
1741
     "data": {
1742
      "text/plain": [
1743
       "tensor([0.0028, 0.0033, 0.0019, 0.0013, 0.0042, 0.0016, 0.0024, 0.0018, 0.0019,\n",
1744
       "        0.0024, 0.0016, 0.0033, 0.0022, 0.0014, 0.0016, 0.0013, 0.0023, 0.0021,\n",
1745
       "        0.0013, 0.0013], grad_fn=<SliceBackward>)"
1746
      ]
1747
     },
1748
     "execution_count": 404,
1749
     "metadata": {},
1750
     "output_type": "execute_result"
1751
    }
1752
   ],
1753
   "source": [
1754
    "test[0][0][:20]"
1755
   ]
1756
  },
1757
  {
1758
   "cell_type": "code",
1759
   "execution_count": 366,
1760
   "metadata": {},
1761
   "outputs": [
1762
    {
1763
     "data": {
1764
      "text/plain": [
1765
       "tensor([[[False, False, False,  ..., False, False, False],\n",
1766
       "         [False, False, False,  ..., False, False, False],\n",
1767
       "         [False, False, False,  ..., False, False, False],\n",
1768
       "         [False, False, False,  ..., False, False, False],\n",
1769
       "         [False, False, False,  ..., False, False, False],\n",
1770
       "         [False, False, False,  ..., False, False, False]]])"
1771
      ]
1772
     },
1773
     "execution_count": 366,
1774
     "metadata": {},
1775
     "output_type": "execute_result"
1776
    }
1777
   ],
1778
   "source": [
1779
    "torch.eq(attention_weights_0, test)"
1780
   ]
1781
  },
1782
  {
1783
   "cell_type": "code",
1784
   "execution_count": 320,
1785
   "metadata": {},
1786
   "outputs": [
1787
    {
1788
     "data": {
1789
      "text/plain": [
1790
       "torch.Size([1, 8, 6, 500])"
1791
      ]
1792
     },
1793
     "execution_count": 320,
1794
     "metadata": {},
1795
     "output_type": "execute_result"
1796
    }
1797
   ],
1798
   "source": [
1799
    "attention_weights_1.shape"
1800
   ]
1801
  },
1802
  {
1803
   "cell_type": "code",
1804
   "execution_count": 318,
1805
   "metadata": {},
1806
   "outputs": [
1807
    {
1808
     "data": {
1809
      "text/plain": [
1810
       "torch.Size([1, 6, 500])"
1811
      ]
1812
     },
1813
     "execution_count": 318,
1814
     "metadata": {},
1815
     "output_type": "execute_result"
1816
    }
1817
   ],
1818
   "source": [
1819
    "attention_weights_2.shape"
1820
   ]
1821
  },
1822
  {
1823
   "cell_type": "code",
1824
   "execution_count": 282,
1825
   "metadata": {},
1826
   "outputs": [],
1827
   "source": [
1828
    "out = self.classifier(out.squeeze(0).squeeze(1))"
1829
   ]
1830
  },
1831
  {
1832
   "cell_type": "code",
1833
   "execution_count": 284,
1834
   "metadata": {},
1835
   "outputs": [
1836
    {
1837
     "data": {
1838
      "text/plain": [
1839
       "tensor([ 0.2832,  0.1548, -0.0972, -0.2801], grad_fn=<AddBackward0>)"
1840
      ]
1841
     },
1842
     "execution_count": 284,
1843
     "metadata": {},
1844
     "output_type": "execute_result"
1845
    }
1846
   ],
1847
   "source": [
1848
    "out"
1849
   ]
1850
  },
1851
  {
1852
   "cell_type": "code",
1853
   "execution_count": 269,
1854
   "metadata": {},
1855
   "outputs": [
1856
    {
1857
     "data": {
1858
      "text/plain": [
1859
       "tensor([[0.0018, 0.0019, 0.0019,  ..., 0.0019, 0.0022, 0.0018],\n",
1860
       "        [0.0020, 0.0020, 0.0021,  ..., 0.0021, 0.0020, 0.0020],\n",
1861
       "        [0.0019, 0.0022, 0.0021,  ..., 0.0019, 0.0019, 0.0020],\n",
1862
       "        [0.0021, 0.0022, 0.0019,  ..., 0.0018, 0.0020, 0.0021],\n",
1863
       "        [0.0019, 0.0019, 0.0020,  ..., 0.0020, 0.0018, 0.0019],\n",
1864
       "        [0.0021, 0.0021, 0.0019,  ..., 0.0019, 0.0021, 0.0021]],\n",
1865
       "       grad_fn=<SelectBackward>)"
1866
      ]
1867
     },
1868
     "execution_count": 269,
1869
     "metadata": {},
1870
     "output_type": "execute_result"
1871
    }
1872
   ],
1873
   "source": [
1874
    "attention_weights[0]"
1875
   ]
1876
  },
1877
  {
1878
   "cell_type": "code",
1879
   "execution_count": 241,
1880
   "metadata": {},
1881
   "outputs": [
1882
    {
1883
     "data": {
1884
      "text/plain": [
1885
       "(tensor([[[-0.0504,  0.0757, -0.0366,  ..., -0.0275, -0.0294,  0.1300]],\n",
1886
       " \n",
1887
       "         [[-0.0500,  0.0762, -0.0352,  ..., -0.0253, -0.0289,  0.1311]],\n",
1888
       " \n",
1889
       "         [[-0.0497,  0.0772, -0.0321,  ..., -0.0246, -0.0288,  0.1301]],\n",
1890
       " \n",
1891
       "         [[-0.0491,  0.0794, -0.0337,  ..., -0.0260, -0.0278,  0.1281]],\n",
1892
       " \n",
1893
       "         [[-0.0483,  0.0781, -0.0343,  ..., -0.0246, -0.0301,  0.1321]],\n",
1894
       " \n",
1895
       "         [[-0.0499,  0.0768, -0.0305,  ..., -0.0257, -0.0280,  0.1321]]],\n",
1896
       "        grad_fn=<AddBackward0>),\n",
1897
       " tensor([[[0.0019, 0.0019, 0.0019,  ..., 0.0020, 0.0021, 0.0021],\n",
1898
       "          [0.0017, 0.0020, 0.0020,  ..., 0.0019, 0.0019, 0.0018],\n",
1899
       "          [0.0019, 0.0018, 0.0019,  ..., 0.0019, 0.0019, 0.0021],\n",
1900
       "          [0.0020, 0.0020, 0.0019,  ..., 0.0020, 0.0021, 0.0019],\n",
1901
       "          [0.0017, 0.0023, 0.0021,  ..., 0.0019, 0.0020, 0.0020],\n",
1902
       "          [0.0021, 0.0021, 0.0020,  ..., 0.0021, 0.0021, 0.0020]]],\n",
1903
       "        grad_fn=<DivBackward0>))"
1904
      ]
1905
     },
1906
     "execution_count": 241,
1907
     "metadata": {},
1908
     "output_type": "execute_result"
1909
    }
1910
   ],
1911
   "source": [
1912
    "self.coattn(x_omic, x_path, x_path)"
1913
   ]
1914
  },
1915
1924
  {
1925
   "cell_type": "code",
1926
   "execution_count": 208,
1927
   "metadata": {},
1928
   "outputs": [],
1929
   "source": [
1930
    "sig_feats = [torch.randn(size) for size in sig_sizes]\n",
1931
    "x_omic = torch.stack([self.sig_networks[idx].forward(sig_feat) for idx, sig_feat in enumerate(sig_feats)])\n"
1932
   ]
1933
  },
1934
1941
  {
1942
   "cell_type": "code",
1943
   "execution_count": 206,
1944
   "metadata": {},
1945
   "outputs": [
1946
    {
1947
     "data": {
1948
      "text/plain": [
1949
       "torch.Size([6, 256])"
1950
      ]
1951
     },
1952
     "execution_count": 206,
1953
     "metadata": {},
1954
     "output_type": "execute_result"
1955
    }
1956
   ],
1957
   "source": [
1958
    "x_omic.shape"
1959
   ]
1960
  },
1961
  {
1962
   "cell_type": "code",
1963
   "execution_count": 166,
1964
   "metadata": {},
1965
   "outputs": [
1966
    {
1967
     "ename": "NameError",
1968
     "evalue": "name 'sig1' is not defined",
1969
     "output_type": "error",
1970
     "traceback": [
1971
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
1972
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
1973
      "\u001b[0;32m<ipython-input-166-aea4cb4c555c>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0msig1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig3\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig4\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig5\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msig6\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
1974
      "\u001b[0;31mNameError\u001b[0m: name 'sig1' is not defined"
1975
     ]
1976
    }
1977
   ],
1978
   "source": [
1979
    "sig1, sig2, sig3, sig4, sig5, sig6 = torch.randn()"
1980
   ]
1981
  },
1982
  {
1983
   "cell_type": "code",
1984
   "execution_count": 158,
1985
   "metadata": {},
1986
   "outputs": [],
1987
   "source": [
1988
    "src = torch.rand(6, 1, 256)\n",
1989
    "out = transformer(src)\n",
1990
    "out = out.squeeze(1).T.unsqueeze(0)"
1991
   ]
1992
  },
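  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `squeeze(1).T.unsqueeze(0)` chain above converts the encoder's `(seq, batch, embed)` output into the `(batch, channels, length)` layout that `nn.Conv1d` expects; `permute(1, 2, 0)` expresses the same move in one step. A quick check, assuming batch size 1 as above:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "\n",
    "enc_out = torch.rand(6, 1, 256)        # (seq, batch, embed)\n",
    "x = enc_out.squeeze(1).T.unsqueeze(0)  # -> (1, 256, 6): (batch, channels, length)\n",
    "assert x.shape == (1, 256, 6)\n",
    "assert torch.equal(x, enc_out.permute(1, 2, 0))  # equivalent single-step version"
   ]
  },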
1993
  {
1994
   "cell_type": "code",
1995
   "execution_count": 163,
1996
   "metadata": {},
1997
   "outputs": [],
1998
   "source": [
1999
    "conv = nn.Conv1d(in_channels=256, out_channels=256, kernel_size=4, stride=4)"
2000
   ]
2001
  },
2002
  {
2003
   "cell_type": "code",
2004
   "execution_count": 164,
2005
   "metadata": {},
2006
   "outputs": [
2007
    {
2008
     "data": {
2009
      "text/plain": [
2010
       "torch.Size([1, 256, 6])"
2011
      ]
2012
     },
2013
     "execution_count": 164,
2014
     "metadata": {},
2015
     "output_type": "execute_result"
2016
    }
2017
   ],
2018
   "source": [
2019
    "out.shape"
2020
   ]
2021
  },
2022
  {
2023
   "cell_type": "code",
2024
   "execution_count": 165,
2025
   "metadata": {},
2026
   "outputs": [
2027
    {
2028
     "data": {
2029
      "text/plain": [
2030
       "torch.Size([1, 256, 1])"
2031
      ]
2032
     },
2033
     "execution_count": 165,
2034
     "metadata": {},
2035
     "output_type": "execute_result"
2036
    }
2037
   ],
2038
   "source": [
2039
    "conv(out).shape"
2040
   ]
2041
  },
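  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `(1, 256, 6) -> (1, 256, 1)` result above follows from the unpadded `Conv1d` length formula `L_out = (L_in - kernel_size) // stride + 1`: with `kernel_size=4` and `stride=4` on length 6, `(6 - 4) // 4 + 1 = 1`, so the last two positions are simply dropped."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "\n",
    "conv_demo = nn.Conv1d(in_channels=256, out_channels=256, kernel_size=4, stride=4)\n",
    "x_demo = torch.randn(1, 256, 6)   # (batch, channels, length)\n",
    "l_out = (6 - 4) // 4 + 1          # unpadded Conv1d output length\n",
    "assert conv_demo(x_demo).shape == (1, 256, l_out)  # torch.Size([1, 256, 1])"
   ]
  },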
2042
  {
2043
   "cell_type": "code",
2044
   "execution_count": 112,
2045
   "metadata": {},
2046
   "outputs": [
2047
    {
2048
     "data": {
2049
      "text/plain": [
2050
       "torch.Size([1536])"
2051
      ]
2052
     },
2053
     "execution_count": 112,
2054
     "metadata": {},
2055
     "output_type": "execute_result"
2056
    }
2057
   ],
2058
   "source": [
2059
    "x.reshape(-1).shape"
2060
   ]
2061
  },
2062
  {
2063
   "cell_type": "code",
2064
   "execution_count": 106,
2065
   "metadata": {},
2066
   "outputs": [
2067
    {
2068
     "data": {
2069
      "text/plain": [
2070
       "3072"
2071
      ]
2072
     },
2073
     "execution_count": 106,
2074
     "metadata": {},
2075
     "output_type": "execute_result"
2076
    }
2077
   ],
2078
   "source": [
2079
    "256 * 12"
2080
   ]
2081
  },
2082
  {
2083
   "cell_type": "code",
2084
   "execution_count": 88,
2085
   "metadata": {},
2086
   "outputs": [],
2087
   "source": [
2088
    "net = Attn_Net_Gated()\n",
2089
    "wsi_feats = torch.randn(500, 1, 256)\n",
2090
    "sig_feats = torch.randn(6, 1, 256)"
2091
   ]
2092
  },
2093
  {
2094
   "cell_type": "code",
2095
   "execution_count": 89,
2096
   "metadata": {},
2097
   "outputs": [],
2098
   "source": [
2099
    "multihead_attn = nn.MultiheadAttention(embed_dim=256, num_heads=8)"
2100
   ]
2101
  },
2102
  {
2103
   "cell_type": "code",
2104
   "execution_count": 90,
2105
   "metadata": {},
2106
   "outputs": [],
2107
   "source": [
2108
    "out, coattn_weights = multihead_attn(sig_feats, wsi_feats, wsi_feats)"
2109
   ]
2110
  },
2111
  {
2112
   "cell_type": "code",
2113
   "execution_count": 96,
2114
   "metadata": {},
2115
   "outputs": [],
2116
   "source": [
2117
    "cotton = DenseCoAttn(dim1=256, dim2=256, num_attn=8, num_none=3, dropout=0.3b)"
2118
   ]
2119
  },
2120
  {
2121
   "cell_type": "code",
2122
   "execution_count": 100,
2123
   "metadata": {},
2124
   "outputs": [],
2125
   "source": [
2126
    "from math import sqrt\n",
2127
    "wsi_feats = torch.randn(1, 500, 256)\n",
2128
    "sig_feats = torch.randn(1, 6, 256)\n",
2129
    "_ = cotton(wsi_feats, sig_feats)"
2130
   ]
2131
  },
2132
  {
2133
   "cell_type": "code",
2134
   "execution_count": 103,
2135
   "metadata": {},
2136
   "outputs": [
2137
    {
2138
     "data": {
2139
      "text/plain": [
2140
       "torch.Size([1, 6, 256])"
2141
      ]
2142
     },
2143
     "execution_count": 103,
2144
     "metadata": {},
2145
     "output_type": "execute_result"
2146
    }
2147
   ],
2148
   "source": [
2149
    "_[0].shape"
2150
   ]
2151
  },
2152
  {
2153
   "cell_type": "code",
2154
   "execution_count": 104,
2155
   "metadata": {},
2156
   "outputs": [
2157
    {
2158
     "data": {
2159
      "text/plain": [
2160
       "torch.Size([1, 500, 256])"
2161
      ]
2162
     },
2163
     "execution_count": 104,
2164
     "metadata": {},
2165
     "output_type": "execute_result"
2166
    }
2167
   ],
2168
   "source": [
2169
    "_[1].shape"
2170
   ]
2171
  },
2172
  {
2173
   "cell_type": "code",
2174
   "execution_count": 94,
2175
   "metadata": {},
2176
   "outputs": [],
2177
   "source": [
2178
    "\n",
2179
    "import torch\n",
2180
    "import torch.nn as nn\n",
2181
    "import torch.nn.functional as F\n",
2182
    "\n",
2183
    "\n",
2184
    "def qkv_attention(query, key, value, mask=None, dropout=None):\n",
2185
    "\td_k = query.size(-1)\n",
2186
    "\tscores = torch.matmul(query, key.transpose(-2,-1)) / sqrt(d_k)\n",
2187
    "\tif mask is not None:\n",
2188
    "\t\tscores.data.masked_fill_(mask.eq(0), -65504.0)\n",
2189
    "\t\n",
2190
    "\tp_attn = F.softmax(scores, dim=-1)\n",
2191
    "\tif dropout is not None:\n",
2192
    "\t\tp_attn = dropout(p_attn)\n",
2193
    "\n",
2194
    "\treturn torch.matmul(p_attn, value), p_attn\n",
2195
    "\n",
2196
    "\n",
2197
    "class DenseCoAttn(nn.Module):\n",
2198
    "\n",
2199
    "\tdef __init__(self, dim1, dim2, num_attn, num_none, dropout, is_multi_head=False):\n",
2200
    "\t\tsuper(DenseCoAttn, self).__init__()\n",
2201
    "\t\tdim = min(dim1, dim2)\n",
2202
    "\t\tself.linears = nn.ModuleList([nn.Linear(dim1, dim, bias=False),\n",
2203
    "\t\t\t\t\t\t\t\t\t  nn.Linear(dim2, dim, bias=False)])\n",
2204
    "\t\tself.nones = nn.ParameterList([nn.Parameter(nn.init.xavier_uniform_(torch.empty(num_none, dim1))),\n",
2205
    "\t\t\t\t\t\t\t\t\t   nn.Parameter(nn.init.xavier_uniform_(torch.empty(num_none, dim2)))])\n",
2206
    "\t\tself.d_k = dim // num_attn\n",
2207
    "\t\tself.h = num_attn\n",
2208
    "\t\tself.num_none = num_none\n",
2209
    "\t\tself.is_multi_head = is_multi_head\n",
2210
    "\t\tself.attn = None\n",
2211
    "\t\tself.dropouts = nn.ModuleList([nn.Dropout(p=dropout) for _ in range(2)])\n",
2212
    "\n",
2213
    "\tdef forward(self, value1, value2, mask1=None, mask2=None):\n",
2214
    "\t\tbatch = value1.size(0)\n",
2215
    "\t\tdim1, dim2 = value1.size(-1), value2.size(-1)\n",
2216
    "\t\tvalue1 = torch.cat([self.nones[0].unsqueeze(0).expand(batch, self.num_none, dim1), value1], dim=1)\n",
2217
    "\t\tvalue2 = torch.cat([self.nones[1].unsqueeze(0).expand(batch, self.num_none, dim2), value2], dim=1)\n",
2218
    "\t\tnone_mask = value1.new_ones((batch, self.num_none))\n",
2219
    "\n",
2220
    "\t\tif mask1 is not None:\n",
2221
    "\t\t\tmask1 = torch.cat([none_mask, mask1], dim=1)\n",
2222
    "\t\t\tmask1 = mask1.unsqueeze(1).unsqueeze(2)\n",
2223
    "\t\tif mask2 is not None:\n",
2224
    "\t\t\tmask2 = torch.cat([none_mask, mask2], dim=1)\n",
2225
    "\t\t\tmask2 = mask2.unsqueeze(1).unsqueeze(2)\n",
2226
    "\n",
2227
    "\t\tquery1, query2 = [l(x).view(batch, -1, self.h, self.d_k).transpose(1, 2) \n",
2228
    "\t\t\tfor l, x in zip(self.linears, (value1, value2))]\n",
2229
    "\n",
2230
    "\t\tif self.is_multi_head:\n",
2231
    "\t\t\tweighted1, attn1 = qkv_attention(query2, query1, query1, mask=mask1, dropout=self.dropouts[0])\n",
2232
    "\t\t\tweighted1 = weighted1.transpose(1, 2).contiguous()[:, self.num_none:, :]\n",
2233
    "\t\t\tweighted2, attn2 = qkv_attention(query1, query2, query2, mask=mask2, dropout=self.dropouts[1])\n",
2234
    "\t\t\tweighted2 = weighted2.transpose(1, 2).contiguous()[:, self.num_none:, :]\n",
2235
    "\t\telse:\n",
2236
    "\t\t\tweighted1, attn1 = qkv_attention(query2, query1, value1.unsqueeze(1), mask=mask1, \n",
2237
    "\t\t\t\tdropout=self.dropouts[0])\n",
2238
    "\t\t\tweighted1 = weighted1.mean(dim=1)[:, self.num_none:, :]\n",
2239
    "\t\t\tweighted2, attn2 = qkv_attention(query1, query2, value2.unsqueeze(1), mask=mask2, \n",
2240
    "\t\t\t\tdropout=self.dropouts[1])\n",
2241
    "\t\t\tweighted2 = weighted2.mean(dim=1)[:, self.num_none:, :]\n",
2242
    "\t\tself.attn = [attn1[:,:,self.num_none:,self.num_none:], attn2[:,:,self.num_none:,self.num_none:]]\n",
2243
    "\n",
2244
    "\t\treturn weighted1, weighted2\n"
2245
   ]
2246
  },
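  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A usage sketch for the `DenseCoAttn` defined above, with hypothetical inputs matching the earlier cells. The `num_none` rows are learned 'null' slots prepended to both streams so attention can park mass on nothing; they are stripped from the returned features, which therefore keep the original sequence lengths."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "from math import sqrt  # qkv_attention above resolves sqrt globally; re-import is harmless\n",
    "\n",
    "dca = DenseCoAttn(dim1=256, dim2=256, num_attn=8, num_none=3, dropout=0.3)\n",
    "v_wsi = torch.randn(1, 500, 256)  # batch-first: (batch, seq, dim)\n",
    "v_sig = torch.randn(1, 6, 256)\n",
    "w1, w2 = dca(v_wsi, v_sig)\n",
    "print(w1.shape)  # torch.Size([1, 6, 256]): sig tokens attended over WSI patches\n",
    "print(w2.shape)  # torch.Size([1, 500, 256]): WSI patches attended over sig tokens"
   ]
  },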
2247
2268
  {
2269
   "cell_type": "code",
2270
   "execution_count": 417,
2271
   "metadata": {},
2272
   "outputs": [],
2273
   "source": [
2274
    "from torch.nn.functional import *\n",
2275
    "\n",
2276
    "def multi_head_attention_forward(\n",
2277
    "    query: Tensor,\n",
2278
    "    key: Tensor,\n",
2279
    "    value: Tensor,\n",
2280
    "    embed_dim_to_check: int,\n",
2281
    "    num_heads: int,\n",
2282
    "    in_proj_weight: Tensor,\n",
2283
    "    in_proj_bias: Tensor,\n",
2284
    "    bias_k: Optional[Tensor],\n",
2285
    "    bias_v: Optional[Tensor],\n",
2286
    "    add_zero_attn: bool,\n",
2287
    "    dropout_p: float,\n",
2288
    "    out_proj_weight: Tensor,\n",
2289
    "    out_proj_bias: Tensor,\n",
2290
    "    training: bool = True,\n",
2291
    "    key_padding_mask: Optional[Tensor] = None,\n",
2292
    "    need_weights: bool = True,\n",
2293
    "    need_raw: bool = True,\n",
2294
    "    attn_mask: Optional[Tensor] = None,\n",
2295
    "    use_separate_proj_weight: bool = False,\n",
2296
    "    q_proj_weight: Optional[Tensor] = None,\n",
2297
    "    k_proj_weight: Optional[Tensor] = None,\n",
2298
    "    v_proj_weight: Optional[Tensor] = None,\n",
2299
    "    static_k: Optional[Tensor] = None,\n",
2300
    "    static_v: Optional[Tensor] = None,\n",
2301
    ") -> Tuple[Tensor, Optional[Tensor]]:\n",
2302
    "    r\"\"\"\n",
2303
    "    Args:\n",
2304
    "        query, key, value: map a query and a set of key-value pairs to an output.\n",
2305
    "            See \"Attention Is All You Need\" for more details.\n",
2306
    "        embed_dim_to_check: total dimension of the model.\n",
2307
    "        num_heads: parallel attention heads.\n",
2308
    "        in_proj_weight, in_proj_bias: input projection weight and bias.\n",
2309
    "        bias_k, bias_v: bias of the key and value sequences to be added at dim=0.\n",
2310
    "        add_zero_attn: add a new batch of zeros to the key and\n",
2311
    "                       value sequences at dim=1.\n",
2312
    "        dropout_p: probability of an element to be zeroed.\n",
2313
    "        out_proj_weight, out_proj_bias: the output projection weight and bias.\n",
2314
    "        training: apply dropout if is ``True``.\n",
2315
    "        key_padding_mask: if provided, specified padding elements in the key will\n",
2316
    "            be ignored by the attention. This is an binary mask. When the value is True,\n",
2317
    "            the corresponding value on the attention layer will be filled with -inf.\n",
2318
    "        need_weights: output attn_output_weights.\n",
2319
    "        attn_mask: 2D or 3D mask that prevents attention to certain positions. A 2D mask will be broadcasted for all\n",
2320
    "            the batches while a 3D mask allows to specify a different mask for the entries of each batch.\n",
2321
    "        use_separate_proj_weight: the function accept the proj. weights for query, key,\n",
2322
    "            and value in different forms. If false, in_proj_weight will be used, which is\n",
2323
    "            a combination of q_proj_weight, k_proj_weight, v_proj_weight.\n",
2324
    "        q_proj_weight, k_proj_weight, v_proj_weight, in_proj_bias: input projection weight and bias.\n",
2325
    "        static_k, static_v: static key and value used for attention operators.\n",
2326
    "    Shape:\n",
2327
    "        Inputs:\n",
2328
    "        - query: :math:`(L, N, E)` where L is the target sequence length, N is the batch size, E is\n",
2329
    "          the embedding dimension.\n",
2330
    "        - key: :math:`(S, N, E)`, where S is the source sequence length, N is the batch size, E is\n",
2331
    "          the embedding dimension.\n",
2332
    "        - value: :math:`(S, N, E)` where S is the source sequence length, N is the batch size, E is\n",
2333
    "          the embedding dimension.\n",
2334
    "        - key_padding_mask: :math:`(N, S)` where N is the batch size, S is the source sequence length.\n",
2335
    "          If a ByteTensor is provided, the non-zero positions will be ignored while the zero positions\n",
2336
    "          will be unchanged. If a BoolTensor is provided, the positions with the\n",
2337
    "          value of ``True`` will be ignored while the position with the value of ``False`` will be unchanged.\n",
2338
    "        - attn_mask: 2D mask :math:`(L, S)` where L is the target sequence length, S is the source sequence length.\n",
2339
    "          3D mask :math:`(N*num_heads, L, S)` where N is the batch size, L is the target sequence length,\n",
2340
    "          S is the source sequence length. attn_mask ensures that position i is allowed to attend the unmasked\n",
2341
    "          positions. If a ByteTensor is provided, the non-zero positions are not allowed to attend\n",
2342
    "          while the zero positions will be unchanged. If a BoolTensor is provided, positions with ``True``\n",
2343
    "          are not allowed to attend while ``False`` values will be unchanged. If a FloatTensor\n",
2344
    "          is provided, it will be added to the attention weight.\n",
2345
    "        - static_k: :math:`(N*num_heads, S, E/num_heads)`, where S is the source sequence length,\n",
2346
    "          N is the batch size, E is the embedding dimension. E/num_heads is the head dimension.\n",
2347
    "        - static_v: :math:`(N*num_heads, S, E/num_heads)`, where S is the source sequence length,\n",
2348
    "          N is the batch size, E is the embedding dimension. E/num_heads is the head dimension.\n",
2349
    "        Outputs:\n",
2350
    "        - attn_output: :math:`(L, N, E)` where L is the target sequence length, N is the batch size,\n",
2351
    "          E is the embedding dimension.\n",
2352
    "        - attn_output_weights: :math:`(N, L, S)` where N is the batch size,\n",
2353
    "          L is the target sequence length, S is the source sequence length.\n",
2354
    "    \"\"\"\n",
2355
    "    tens_ops = (query, key, value, in_proj_weight, in_proj_bias, bias_k, bias_v, out_proj_weight, out_proj_bias)\n",
2356
    "    if has_torch_function(tens_ops):\n",
2357
    "        return handle_torch_function(\n",
2358
    "            multi_head_attention_forward,\n",
2359
    "            tens_ops,\n",
2360
    "            query,\n",
2361
    "            key,\n",
2362
    "            value,\n",
2363
    "            embed_dim_to_check,\n",
2364
    "            num_heads,\n",
2365
    "            in_proj_weight,\n",
2366
    "            in_proj_bias,\n",
2367
    "            bias_k,\n",
2368
    "            bias_v,\n",
2369
    "            add_zero_attn,\n",
2370
    "            dropout_p,\n",
2371
    "            out_proj_weight,\n",
2372
    "            out_proj_bias,\n",
2373
    "            training=training,\n",
2374
    "            key_padding_mask=key_padding_mask,\n",
2375
    "            need_weights=need_weights,\n",
2376
    "            need_raw=need_raw,\n",
2377
    "            attn_mask=attn_mask,\n",
2378
    "            use_separate_proj_weight=use_separate_proj_weight,\n",
2379
    "            q_proj_weight=q_proj_weight,\n",
2380
    "            k_proj_weight=k_proj_weight,\n",
2381
    "            v_proj_weight=v_proj_weight,\n",
2382
    "            static_k=static_k,\n",
2383
    "            static_v=static_v,\n",
2384
    "        )\n",
2385
    "    tgt_len, bsz, embed_dim = query.size()\n",
2386
    "    assert embed_dim == embed_dim_to_check\n",
2387
    "    # allow MHA to have different sizes for the feature dimension\n",
2388
    "    assert key.size(0) == value.size(0) and key.size(1) == value.size(1)\n",
2389
    "\n",
2390
    "    head_dim = embed_dim // num_heads\n",
2391
    "    assert head_dim * num_heads == embed_dim, \"embed_dim must be divisible by num_heads\"\n",
2392
    "    scaling = float(head_dim) ** -0.5\n",
2393
    "\n",
2394
    "    if not use_separate_proj_weight:\n",
2395
    "        if (query is key or torch.equal(query, key)) and (key is value or torch.equal(key, value)):\n",
2396
    "            # self-attention\n",
2397
    "            q, k, v = linear(query, in_proj_weight, in_proj_bias).chunk(3, dim=-1)\n",
2398
    "\n",
2399
    "        elif key is value or torch.equal(key, value):\n",
2400
    "            # encoder-decoder attention\n",
2401
    "            # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
2402
    "            _b = in_proj_bias\n",
2403
    "            _start = 0\n",
2404
    "            _end = embed_dim\n",
2405
    "            _w = in_proj_weight[_start:_end, :]\n",
2406
    "            if _b is not None:\n",
2407
    "                _b = _b[_start:_end]\n",
2408
    "            q = linear(query, _w, _b)\n",
2409
    "\n",
2410
    "            if key is None:\n",
2411
    "                assert value is None\n",
2412
    "                k = None\n",
2413
    "                v = None\n",
2414
    "            else:\n",
2415
    "\n",
2416
    "                # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
2417
    "                _b = in_proj_bias\n",
2418
    "                _start = embed_dim\n",
2419
    "                _end = None\n",
2420
    "                _w = in_proj_weight[_start:, :]\n",
2421
    "                if _b is not None:\n",
2422
    "                    _b = _b[_start:]\n",
2423
    "                k, v = linear(key, _w, _b).chunk(2, dim=-1)\n",
2424
    "\n",
2425
    "        else:\n",
2426
    "            # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
2427
    "            _b = in_proj_bias\n",
2428
    "            _start = 0\n",
2429
    "            _end = embed_dim\n",
2430
    "            _w = in_proj_weight[_start:_end, :]\n",
2431
    "            if _b is not None:\n",
2432
    "                _b = _b[_start:_end]\n",
2433
    "            q = linear(query, _w, _b)\n",
2434
    "\n",
2435
    "            # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
2436
    "            _b = in_proj_bias\n",
2437
    "            _start = embed_dim\n",
2438
    "            _end = embed_dim * 2\n",
2439
    "            _w = in_proj_weight[_start:_end, :]\n",
2440
    "            if _b is not None:\n",
2441
    "                _b = _b[_start:_end]\n",
2442
    "            k = linear(key, _w, _b)\n",
2443
    "\n",
2444
    "            # This is inline in_proj function with in_proj_weight and in_proj_bias\n",
2445
    "            _b = in_proj_bias\n",
2446
    "            _start = embed_dim * 2\n",
2447
    "            _end = None\n",
2448
    "            _w = in_proj_weight[_start:, :]\n",
2449
    "            if _b is not None:\n",
2450
    "                _b = _b[_start:]\n",
2451
    "            v = linear(value, _w, _b)\n",
2452
    "    else:\n",
2453
    "        q_proj_weight_non_opt = torch.jit._unwrap_optional(q_proj_weight)\n",
2454
    "        len1, len2 = q_proj_weight_non_opt.size()\n",
2455
    "        assert len1 == embed_dim and len2 == query.size(-1)\n",
2456
    "\n",
2457
    "        k_proj_weight_non_opt = torch.jit._unwrap_optional(k_proj_weight)\n",
2458
    "        len1, len2 = k_proj_weight_non_opt.size()\n",
2459
    "        assert len1 == embed_dim and len2 == key.size(-1)\n",
2460
    "\n",
2461
    "        v_proj_weight_non_opt = torch.jit._unwrap_optional(v_proj_weight)\n",
2462
    "        len1, len2 = v_proj_weight_non_opt.size()\n",
2463
    "        assert len1 == embed_dim and len2 == value.size(-1)\n",
2464
    "\n",
2465
    "        if in_proj_bias is not None:\n",
2466
    "            q = linear(query, q_proj_weight_non_opt, in_proj_bias[0:embed_dim])\n",
2467
    "            k = linear(key, k_proj_weight_non_opt, in_proj_bias[embed_dim : (embed_dim * 2)])\n",
2468
    "            v = linear(value, v_proj_weight_non_opt, in_proj_bias[(embed_dim * 2) :])\n",
2469
    "        else:\n",
2470
    "            q = linear(query, q_proj_weight_non_opt, in_proj_bias)\n",
2471
    "            k = linear(key, k_proj_weight_non_opt, in_proj_bias)\n",
2472
    "            v = linear(value, v_proj_weight_non_opt, in_proj_bias)\n",
2473
    "    q = q * scaling\n",
2474
    "\n",
2475
    "    if attn_mask is not None:\n",
2476
    "        assert (\n",
2477
    "            attn_mask.dtype == torch.float32\n",
2478
    "            or attn_mask.dtype == torch.float64\n",
2479
    "            or attn_mask.dtype == torch.float16\n",
2480
    "            or attn_mask.dtype == torch.uint8\n",
2481
    "            or attn_mask.dtype == torch.bool\n",
2482
    "        ), \"Only float, byte, and bool types are supported for attn_mask, not {}\".format(attn_mask.dtype)\n",
2483
    "        if attn_mask.dtype == torch.uint8:\n",
2484
    "            warnings.warn(\"Byte tensor for attn_mask in nn.MultiheadAttention is deprecated. Use bool tensor instead.\")\n",
2485
    "            attn_mask = attn_mask.to(torch.bool)\n",
2486
    "\n",
2487
    "        if attn_mask.dim() == 2:\n",
2488
    "            attn_mask = attn_mask.unsqueeze(0)\n",
2489
    "            if list(attn_mask.size()) != [1, query.size(0), key.size(0)]:\n",
2490
    "                raise RuntimeError(\"The size of the 2D attn_mask is not correct.\")\n",
2491
    "        elif attn_mask.dim() == 3:\n",
2492
    "            if list(attn_mask.size()) != [bsz * num_heads, query.size(0), key.size(0)]:\n",
2493
    "                raise RuntimeError(\"The size of the 3D attn_mask is not correct.\")\n",
2494
    "        else:\n",
2495
    "            raise RuntimeError(\"attn_mask's dimension {} is not supported\".format(attn_mask.dim()))\n",
2496
    "        # attn_mask's dim is 3 now.\n",
2497
    "\n",
2498
    "    # convert ByteTensor key_padding_mask to bool\n",
2499
    "    if key_padding_mask is not None and key_padding_mask.dtype == torch.uint8:\n",
2500
    "        warnings.warn(\n",
2501
    "            \"Byte tensor for key_padding_mask in nn.MultiheadAttention is deprecated. Use bool tensor instead.\"\n",
2502
    "        )\n",
2503
    "        key_padding_mask = key_padding_mask.to(torch.bool)\n",
2504
    "\n",
2505
    "    if bias_k is not None and bias_v is not None:\n",
2506
    "        if static_k is None and static_v is None:\n",
2507
    "            k = torch.cat([k, bias_k.repeat(1, bsz, 1)])\n",
2508
    "            v = torch.cat([v, bias_v.repeat(1, bsz, 1)])\n",
2509
    "            if attn_mask is not None:\n",
2510
    "                attn_mask = pad(attn_mask, (0, 1))\n",
2511
    "            if key_padding_mask is not None:\n",
2512
    "                key_padding_mask = pad(key_padding_mask, (0, 1))\n",
2513
    "        else:\n",
2514
    "            assert static_k is None, \"bias cannot be added to static key.\"\n",
2515
    "            assert static_v is None, \"bias cannot be added to static value.\"\n",
2516
    "    else:\n",
2517
    "        assert bias_k is None\n",
2518
    "        assert bias_v is None\n",
2519
    "\n",
2520
    "    q = q.contiguous().view(tgt_len, bsz * num_heads, head_dim).transpose(0, 1)\n",
2521
    "    if k is not None:\n",
2522
    "        k = k.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)\n",
2523
    "    if v is not None:\n",
2524
    "        v = v.contiguous().view(-1, bsz * num_heads, head_dim).transpose(0, 1)\n",
2525
    "\n",
2526
    "    if static_k is not None:\n",
2527
    "        assert static_k.size(0) == bsz * num_heads\n",
2528
    "        assert static_k.size(2) == head_dim\n",
2529
    "        k = static_k\n",
2530
    "\n",
2531
    "    if static_v is not None:\n",
2532
    "        assert static_v.size(0) == bsz * num_heads\n",
2533
    "        assert static_v.size(2) == head_dim\n",
2534
    "        v = static_v\n",
2535
    "\n",
2536
    "    src_len = k.size(1)\n",
2537
    "\n",
2538
    "    if key_padding_mask is not None:\n",
2539
    "        assert key_padding_mask.size(0) == bsz\n",
2540
    "        assert key_padding_mask.size(1) == src_len\n",
2541
    "\n",
2542
    "    if add_zero_attn:\n",
2543
    "        src_len += 1\n",
2544
    "        k = torch.cat([k, torch.zeros((k.size(0), 1) + k.size()[2:], dtype=k.dtype, device=k.device)], dim=1)\n",
2545
    "        v = torch.cat([v, torch.zeros((v.size(0), 1) + v.size()[2:], dtype=v.dtype, device=v.device)], dim=1)\n",
2546
    "        if attn_mask is not None:\n",
2547
    "            attn_mask = pad(attn_mask, (0, 1))\n",
2548
    "        if key_padding_mask is not None:\n",
2549
    "            key_padding_mask = pad(key_padding_mask, (0, 1))\n",
2550
    "\n",
2551
    "    attn_output_weights = torch.bmm(q, k.transpose(1, 2))\n",
2552
    "    assert list(attn_output_weights.size()) == [bsz * num_heads, tgt_len, src_len]\n",
2553
    "\n",
2554
    "    if attn_mask is not None:\n",
2555
    "        if attn_mask.dtype == torch.bool:\n",
2556
    "            attn_output_weights.masked_fill_(attn_mask, float(\"-inf\"))\n",
2557
    "        else:\n",
2558
    "            attn_output_weights += attn_mask\n",
2559
    "\n",
2560
    "    if key_padding_mask is not None:\n",
2561
    "        attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)\n",
2562
    "        attn_output_weights = attn_output_weights.masked_fill(\n",
2563
    "            key_padding_mask.unsqueeze(1).unsqueeze(2),\n",
2564
    "            float(\"-inf\"),\n",
2565
    "        )\n",
2566
    "        attn_output_weights = attn_output_weights.view(bsz * num_heads, tgt_len, src_len)\n",
2567
    "    \n",
2568
    "    attn_output_weights_raw = attn_output_weights\n",
2569
    "    attn_output_weights = softmax(attn_output_weights, dim=-1)\n",
2570
    "    attn_output_weights = dropout(attn_output_weights, p=dropout_p, training=training)\n",
2571
    "\n",
2572
    "    attn_output = torch.bmm(attn_output_weights, v)\n",
2573
    "    assert list(attn_output.size()) == [bsz * num_heads, tgt_len, head_dim]\n",
2574
    "    attn_output = attn_output.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)\n",
2575
    "    attn_output = linear(attn_output, out_proj_weight, out_proj_bias)\n",
2576
    "    \n",
2577
    "    if need_weights:\n",
2578
    "        if need_raw:\n",
2579
    "            \n",
2580
    "            attn_output_weights_raw = attn_output_weights_raw.view(bsz, num_heads, tgt_len, src_len)\n",
2581
    "            return attn_output,attn_output_weights_raw\n",
2582
    "            \n",
2583
    "            #attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)\n",
2584
    "            #return attn_output, attn_output_weights.sum(dim=1) / num_heads, attn_output_weights_raw, attn_output_weights_raw.sum(dim=1) / num_heads\n",
2585
    "        else:\n",
2586
    "            # average attention weights over heads\n",
2587
    "            attn_output_weights = attn_output_weights.view(bsz, num_heads, tgt_len, src_len)\n",
2588
    "            return attn_output, attn_output_weights.sum(dim=1) / num_heads\n",
2589
    "    else:\n",
2590
    "        return attn_output, None\n"
2591
   ]
2592
  },
2593
  {
2594
   "cell_type": "code",
2595
   "execution_count": 418,
2596
   "metadata": {},
2597
   "outputs": [],
2598
   "source": [
2599
    "import torch\n",
2600
    "from torch import Tensor\n",
2601
    "from torch.nn.modules.linear import _LinearWithBias\n",
2602
    "from torch.nn.init import xavier_uniform_\n",
2603
    "from torch.nn.init import constant_\n",
2604
    "from torch.nn.init import xavier_normal_\n",
2605
    "from torch.nn.parameter import Parameter\n",
2606
    "from torch.nn import Module\n",
2607
    "\n",
2608
    "class MultiheadAttention(Module):\n",
2609
    "    r\"\"\"Allows the model to jointly attend to information\n",
2610
    "    from different representation subspaces.\n",
2611
    "    See reference: Attention Is All You Need\n",
2612
    "\n",
2613
    "    .. math::\n",
2614
    "        \\text{MultiHead}(Q, K, V) = \\text{Concat}(head_1,\\dots,head_h)W^O\n",
2615
    "        \\text{where} head_i = \\text{Attention}(QW_i^Q, KW_i^K, VW_i^V)\n",
2616
    "\n",
2617
    "    Args:\n",
2618
    "        embed_dim: total dimension of the model.\n",
2619
    "        num_heads: parallel attention heads.\n",
2620
    "        dropout: a Dropout layer on attn_output_weights. Default: 0.0.\n",
2621
    "        bias: add bias as module parameter. Default: True.\n",
2622
    "        add_bias_kv: add bias to the key and value sequences at dim=0.\n",
2623
    "        add_zero_attn: add a new batch of zeros to the key and\n",
2624
    "                       value sequences at dim=1.\n",
2625
    "        kdim: total number of features in key. Default: None.\n",
2626
    "        vdim: total number of features in value. Default: None.\n",
2627
    "\n",
2628
    "        Note: if kdim and vdim are None, they will be set to embed_dim such that\n",
2629
    "        query, key, and value have the same number of features.\n",
2630
    "\n",
2631
    "    Examples::\n",
2632
    "\n",
2633
    "        >>> multihead_attn = nn.MultiheadAttention(embed_dim, num_heads)\n",
2634
    "        >>> attn_output, attn_output_weights = multihead_attn(query, key, value)\n",
2635
    "    \"\"\"\n",
2636
    "    bias_k: Optional[torch.Tensor]\n",
2637
    "    bias_v: Optional[torch.Tensor]\n",
2638
    "\n",
2639
    "    def __init__(self, embed_dim, num_heads, dropout=0., bias=True, add_bias_kv=False, add_zero_attn=False, kdim=None, vdim=None):\n",
2640
    "        super(MultiheadAttention, self).__init__()\n",
2641
    "        self.embed_dim = embed_dim\n",
2642
    "        self.kdim = kdim if kdim is not None else embed_dim\n",
2643
    "        self.vdim = vdim if vdim is not None else embed_dim\n",
2644
    "        self._qkv_same_embed_dim = self.kdim == embed_dim and self.vdim == embed_dim\n",
2645
    "\n",
2646
    "        self.num_heads = num_heads\n",
2647
    "        self.dropout = dropout\n",
2648
    "        self.head_dim = embed_dim // num_heads\n",
2649
    "        assert self.head_dim * num_heads == self.embed_dim, \"embed_dim must be divisible by num_heads\"\n",
2650
    "\n",
2651
    "        if self._qkv_same_embed_dim is False:\n",
2652
    "            self.q_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim))\n",
2653
    "            self.k_proj_weight = Parameter(torch.Tensor(embed_dim, self.kdim))\n",
2654
    "            self.v_proj_weight = Parameter(torch.Tensor(embed_dim, self.vdim))\n",
2655
    "            self.register_parameter('in_proj_weight', None)\n",
2656
    "        else:\n",
2657
    "            self.in_proj_weight = Parameter(torch.empty(3 * embed_dim, embed_dim))\n",
2658
    "            self.register_parameter('q_proj_weight', None)\n",
2659
    "            self.register_parameter('k_proj_weight', None)\n",
2660
    "            self.register_parameter('v_proj_weight', None)\n",
2661
    "\n",
2662
    "        if bias:\n",
2663
    "            self.in_proj_bias = Parameter(torch.empty(3 * embed_dim))\n",
2664
    "        else:\n",
2665
    "            self.register_parameter('in_proj_bias', None)\n",
2666
    "        self.out_proj = _LinearWithBias(embed_dim, embed_dim)\n",
2667
    "\n",
2668
    "        if add_bias_kv:\n",
2669
    "            self.bias_k = Parameter(torch.empty(1, 1, embed_dim))\n",
2670
    "            self.bias_v = Parameter(torch.empty(1, 1, embed_dim))\n",
2671
    "        else:\n",
2672
    "            self.bias_k = self.bias_v = None\n",
2673
    "\n",
2674
    "        self.add_zero_attn = add_zero_attn\n",
2675
    "\n",
2676
    "        self._reset_parameters()\n",
2677
    "\n",
2678
    "    def _reset_parameters(self):\n",
2679
    "        if self._qkv_same_embed_dim:\n",
2680
    "            xavier_uniform_(self.in_proj_weight)\n",
2681
    "        else:\n",
2682
    "            xavier_uniform_(self.q_proj_weight)\n",
2683
    "            xavier_uniform_(self.k_proj_weight)\n",
2684
    "            xavier_uniform_(self.v_proj_weight)\n",
2685
    "\n",
2686
    "        if self.in_proj_bias is not None:\n",
2687
    "            constant_(self.in_proj_bias, 0.)\n",
2688
    "            constant_(self.out_proj.bias, 0.)\n",
2689
    "        if self.bias_k is not None:\n",
2690
    "            xavier_normal_(self.bias_k)\n",
2691
    "        if self.bias_v is not None:\n",
2692
    "            xavier_normal_(self.bias_v)\n",
2693
    "\n",
2694
    "    def __setstate__(self, state):\n",
2695
    "        # Support loading old MultiheadAttention checkpoints generated by v1.1.0\n",
2696
    "        if '_qkv_same_embed_dim' not in state:\n",
2697
    "            state['_qkv_same_embed_dim'] = True\n",
2698
    "\n",
2699
    "        super(MultiheadAttention, self).__setstate__(state)\n",
2700
    "\n",
2701
    "    def forward(self, query, key, value, key_padding_mask=None,\n",
2702
    "                need_weights=True, need_raw=True, attn_mask=None):\n",
2703
    "        # type: (Tensor, Tensor, Tensor, Optional[Tensor], bool, Optional[Tensor]) -> Tuple[Tensor, Optional[Tensor]]\n",
2704
    "        r\"\"\"\n",
2705
    "    Args:\n",
2706
    "        query, key, value: map a query and a set of key-value pairs to an output.\n",
2707
    "            See \"Attention Is All You Need\" for more details.\n",
2708
    "        key_padding_mask: if provided, specified padding elements in the key will\n",
2709
    "            be ignored by the attention. When given a binary mask and a value is True,\n",
2710
    "            the corresponding value on the attention layer will be ignored. When given\n",
2711
    "            a byte mask and a value is non-zero, the corresponding value on the attention\n",
2712
    "            layer will be ignored\n",
2713
    "        need_weights: output attn_output_weights.\n",
2714
    "        attn_mask: 2D or 3D mask that prevents attention to certain positions. A 2D mask will be broadcasted for all\n",
2715
    "            the batches while a 3D mask allows to specify a different mask for the entries of each batch.\n",
2716
    "\n",
2717
    "    Shape:\n",
2718
    "        - Inputs:\n",
2719
    "        - query: :math:`(L, N, E)` where L is the target sequence length, N is the batch size, E is\n",
2720
    "          the embedding dimension.\n",
2721
    "        - key: :math:`(S, N, E)`, where S is the source sequence length, N is the batch size, E is\n",
2722
    "          the embedding dimension.\n",
2723
    "        - value: :math:`(S, N, E)` where S is the source sequence length, N is the batch size, E is\n",
2724
    "          the embedding dimension.\n",
2725
    "        - key_padding_mask: :math:`(N, S)` where N is the batch size, S is the source sequence length.\n",
2726
    "          If a ByteTensor is provided, the non-zero positions will be ignored while the position\n",
2727
    "          with the zero positions will be unchanged. If a BoolTensor is provided, the positions with the\n",
2728
    "          value of ``True`` will be ignored while the position with the value of ``False`` will be unchanged.\n",
2729
    "        - attn_mask: 2D mask :math:`(L, S)` where L is the target sequence length, S is the source sequence length.\n",
2730
    "          3D mask :math:`(N*num_heads, L, S)` where N is the batch size, L is the target sequence length,\n",
2731
    "          S is the source sequence length. attn_mask ensure that position i is allowed to attend the unmasked\n",
2732
    "          positions. If a ByteTensor is provided, the non-zero positions are not allowed to attend\n",
2733
    "          while the zero positions will be unchanged. If a BoolTensor is provided, positions with ``True``\n",
2734
    "          is not allowed to attend while ``False`` values will be unchanged. If a FloatTensor\n",
2735
    "          is provided, it will be added to the attention weight.\n",
2736
    "\n",
2737
    "        - Outputs:\n",
2738
    "        - attn_output: :math:`(L, N, E)` where L is the target sequence length, N is the batch size,\n",
2739
    "          E is the embedding dimension.\n",
2740
    "        - attn_output_weights: :math:`(N, L, S)` where N is the batch size,\n",
2741
    "          L is the target sequence length, S is the source sequence length.\n",
2742
    "        \"\"\"\n",
2743
    "        if not self._qkv_same_embed_dim:\n",
2744
    "            return multi_head_attention_forward(\n",
2745
    "                query, key, value, self.embed_dim, self.num_heads,\n",
2746
    "                self.in_proj_weight, self.in_proj_bias,\n",
2747
    "                self.bias_k, self.bias_v, self.add_zero_attn,\n",
2748
    "                self.dropout, self.out_proj.weight, self.out_proj.bias,\n",
2749
    "                training=self.training,\n",
2750
    "                key_padding_mask=key_padding_mask, need_weights=need_weights, need_raw=need_raw,\n",
2751
    "                attn_mask=attn_mask, use_separate_proj_weight=True,\n",
2752
    "                q_proj_weight=self.q_proj_weight, k_proj_weight=self.k_proj_weight,\n",
2753
    "                v_proj_weight=self.v_proj_weight)\n",
2754
    "        else:\n",
2755
    "            return multi_head_attention_forward(\n",
2756
    "                query, key, value, self.embed_dim, self.num_heads,\n",
2757
    "                self.in_proj_weight, self.in_proj_bias,\n",
2758
    "                self.bias_k, self.bias_v, self.add_zero_attn,\n",
2759
    "                self.dropout, self.out_proj.weight, self.out_proj.bias,\n",
2760
    "                training=self.training,\n",
2761
    "                key_padding_mask=key_padding_mask, need_weights=need_weights, need_raw=need_raw,\n",
2762
    "                attn_mask=attn_mask)"
2763
   ]
2764
  },
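  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The only functional change in the two cells above relative to stock PyTorch is the `need_raw` path: the returned weights are the per-head scores *before* softmax and dropout, shaped `(batch, heads, tgt_len, src_len)`. A quick self-check of that path:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "\n",
    "mha_raw = MultiheadAttention(embed_dim=256, num_heads=8)  # the subclass defined above\n",
    "q = torch.randn(6, 1, 256)\n",
    "kv = torch.randn(500, 1, 256)\n",
    "out_chk, raw_chk = mha_raw(q, kv, kv, need_weights=True, need_raw=True)\n",
    "assert out_chk.shape == (6, 1, 256)\n",
    "assert raw_chk.shape == (1, 8, 6, 500)  # per-head scores, pre-softmax\n",
    "probs = torch.softmax(raw_chk, dim=-1)\n",
    "assert torch.allclose(probs.sum(dim=-1), torch.ones(1, 8, 6))"
   ]
  },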
2765
2849
  {
2850
   "cell_type": "code",
2851
   "execution_count": 104,
2852
   "metadata": {},
2853
   "outputs": [
2854
    {
2855
     "ename": "ModuleNotFoundError",
2856
     "evalue": "No module named 'torch'",
2857
     "output_type": "error",
2858
     "traceback": [
2859
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
2860
      "\u001b[0;31mModuleNotFoundError\u001b[0m                       Traceback (most recent call last)",
2861
      "\u001b[0;32m<ipython-input-104-6bb47b25d46a>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmath\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      4\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mnn\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
2862
      "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'torch'"
2863
     ]
2864
    }
2865
   ],
2866
   "source": [
2867
    "import math\n",
2868
    "\n",
2869
    "import torch\n",
2870
    "from torch import nn\n",
2871
    "\n",
2872
    "############\n",
2873
    "# Omic Model\n",
2874
    "############\n",
2875
    "def init_max_weights(module):\n",
2876
    "    for m in module.modules():\n",
2877
    "        if type(m) == nn.Linear:\n",
2878
    "            stdv = 1. / math.sqrt(m.weight.size(1))\n",
2879
    "            m.weight.data.normal_(0, stdv)\n",
2880
    "            m.bias.data.zero_()\n",
2881
    "\n",
2882
    "def SNN_Block(dim1, dim2, dropout=0.25):\n",
2883
    "    return nn.Sequential(\n",
2884
    "            nn.Linear(dim1, dim2),\n",
2885
    "            nn.ELU(),\n",
2886
    "            nn.AlphaDropout(p=dropout, inplace=False))\n",
2887
    "\n",
2888
    "class MaxNet(nn.Module):\n",
2889
    "    def __init__(self, input_dim: int, meta_dim: int=0, model_size_omic: str='small', n_classes: int=4):\n",
2890
    "        super(MaxNet, self).__init__()\n",
2891
    "        self.meta_dim = meta_dim\n",
2892
    "        self.n_classes = n_classes\n",
2893
    "        self.size_dict_omic = {'small': [256, 256, 256, 256], 'big': [1024, 1024, 1024, 256]}\n",
2894
    "        \n",
2895
    "        ### Constructing Genomic SNN\n",
2896
    "        hidden = self.size_dict_omic[model_size_omic]\n",
2897
    "        fc_omic = [SNN_Block(dim1=input_dim, dim2=hidden[0])]\n",
2898
    "        for i, _ in enumerate(hidden[1:]):\n",
2899
    "            fc_omic.append(SNN_Block(dim1=hidden[i], dim2=hidden[i+1], dropout=0.25))\n",
2900
    "        self.fc_omic = nn.Sequential(*fc_omic)\n",
2901
    "        self.classifier = nn.Linear(hidden[-1]+self.meta_dim, n_classes)\n",
2902
    "        init_max_weights(self)\n",
2903
    "\n",
2904
    "    def forward(self, **kwargs):\n",
2905
    "        x = kwargs['x_omic']\n",
2906
    "        meta = kwargs['meta']\n",
2907
    "        features = self.fc_omic(x)\n",
2908
    "\n",
2909
    "        if self.meta_dim: \n",
2910
    "            axis_dim = 1 if len(meta.shape) > 1 else 0\n",
2911
    "            features = torch.cat((features, meta), axis_dim)\n",
2912
    "\n",
2913
    "        logits = self.classifier(features).unsqueeze(0)\n",
2914
    "        Y_hat = torch.topk(logits, 1, dim=1)[1]\n",
2915
    "        hazards = torch.sigmoid(logits)\n",
2916
    "        S = torch.cumprod(1 - hazards, dim=1)\n",
2917
    "        return hazards, S, Y_hat, None, None\n",
2918
    "\n",
2919
    "    def relocate(self):\n",
2920
    "            device=torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
2921
    "\n",
2922
    "            if torch.cuda.device_count() > 1:\n",
2923
    "                device_ids = list(range(torch.cuda.device_count()))\n",
2924
    "                self.fc_omic = nn.DataParallel(self.fc_omic, device_ids=device_ids).to('cuda:0')\n",
2925
    "            else:\n",
2926
    "                self.fc_omic = self.fc_omic.to(device)\n",
2927
    "\n",
2928
    "\n",
2929
    "            self.classifier = self.classifier.to(device)"
2930
   ]
2931
  },
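  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "### Sketch: smoke-test MaxNet on one random sample. Assumes torch is installed\n",
    "### (the traceback above shows it was missing in this kernel) and borrows the\n",
    "### 347 signature-filtered features shown below; both are illustrative\n",
    "### assumptions, not part of the original pipeline.\n",
    "model = MaxNet(input_dim=347, model_size_omic='small', n_classes=4)\n",
    "x_omic = torch.randn(347)\n",
    "hazards, S, Y_hat, _, _ = model(x_omic=x_omic, meta=None)\n",
    "# hazards: per-bin hazard (sigmoid of logits); S: survival curve via\n",
    "# cumprod(1 - hazards); Y_hat: top-1 bin. Shapes: (1, 4), (1, 4), (1, 1).\n",
    "print(hazards.shape, S.shape, Y_hat.shape)"
   ]
  },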
  {
   "cell_type": "code",
   "execution_count": 88,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "     CXCL14_rnaseq  FGF1_rnaseq  IFNA8_cnv  ADM_rnaseq  LTBP2_rnaseq  \\\n",
       "0          -0.1170      -0.2221          1     -0.5126       -0.3289   \n",
       "1          -0.2330      -0.4343         -1     -0.2381       -0.4799   \n",
       "2          -0.1384      -0.1597         -1     -0.1521       -0.3348   \n",
       "3          -0.1624      -0.3463         -1      0.0272       -0.7623   \n",
       "4          -0.2346      -0.4090         -1     -0.2078        0.5702   \n",
       "..             ...          ...        ...         ...           ...   \n",
       "368        -0.2417      10.1423         -1     -0.5456        0.8742   \n",
       "369        -0.2412       1.3253          1     -0.5680        1.0719   \n",
       "370        -0.2396       0.0435          0     -0.3610        3.1965   \n",
       "371        -0.2393      -0.4475          0      0.4772        2.9612   \n",
       "372        -0.1936      -0.2281          0     -0.4124       -0.1873   \n",
       "\n",
       "     CCL28_rnaseq  IFNA7_rnaseq  GH2_rnaseq  AIMP1_rnaseq  DEFB1_rnaseq  ...  \\\n",
       "0         -0.7331       -0.1244     -0.1693        0.5942       -0.4707  ...   \n",
       "1         -0.0520       -0.1244     -0.1693        1.1854       -0.4820  ...   \n",
       "2         -0.5310       -0.1244     -0.1693        0.3889       -0.3607  ...   \n",
       "3          0.8196       -0.1244     -0.1693       -0.0416        0.1661  ...   \n",
       "4         -0.4219       -0.1244      0.5257       -0.9790        0.3938  ...   \n",
       "..            ...           ...         ...           ...           ...  ...   \n",
       "368       -0.1822       -0.1244     -0.1693       -1.2395       -0.5125  ...   \n",
       "369       -0.1707       -0.1244     -0.1693       -1.6694       -0.4528  ...   \n",
       "370        1.3670       -0.1244     -0.1693        0.4439       -0.5099  ...   \n",
       "371       -0.7799       -0.1244     -0.1693        0.5778        1.7607  ...   \n",
       "372       -0.1200       -0.1244     -0.0326       -0.8786       -0.3912  ...   \n",
       "\n",
       "     NPPB_rnaseq  CCL27_rnaseq  FASLG_rnaseq  FGF20_cnv  FAM3C_rnaseq  \\\n",
       "0        -0.2276        1.2033        0.9826         -1       -0.6161   \n",
       "1        -0.2276       -0.2946       -0.5443         -1       -0.3499   \n",
       "2         3.4177       -0.2946       -0.5320          0        0.4581   \n",
       "3        -0.2276       -0.1020       -0.4682         -1       -0.4391   \n",
       "4        -0.2276       -0.1035       -0.4688         -1        1.2596   \n",
       "..           ...           ...           ...        ...           ...   \n",
       "368      -0.2276       -0.2946        0.0777          0       -0.8242   \n",
       "369       0.5679       -0.2661        1.0215         -2       -0.5327   \n",
       "370      -0.2276       -0.2289        0.0521         -1        1.0317   \n",
       "371      -0.2276        9.4098       -0.5443          0        0.2992   \n",
       "372      -0.2276       -0.2570       -0.3810         -1       -0.6399   \n",
       "\n",
       "     IL18_rnaseq  GDF10_rnaseq  MYDGF_rnaseq  IL10_rnaseq  IFNW1_rnaseq  \n",
       "0        -0.5643       -0.2165       -0.2836       0.9991       -0.3899  \n",
       "1        -0.7958       -0.3140       -0.3359      -0.4865       -0.3899  \n",
       "2        -0.6179       -0.2107        0.2751      -0.5108        1.0629  \n",
       "3        -0.7275       -0.2876       -0.4696      -0.6248       -0.3899  \n",
       "4        -0.5807        0.4108        0.1801      -0.6086       -0.3899  \n",
       "..           ...           ...           ...          ...           ...  \n",
       "368      -0.6727        0.1938        0.9210       0.4479       -0.3899  \n",
       "369       0.3335       -0.1730        0.0147       0.6012        2.2526  \n",
       "370      -0.1473       -0.1517        0.9384      -0.3165        0.6239  \n",
       "371      -0.5451       -0.2456        0.8898      -0.5781       -0.3899  \n",
       "372      -0.9128        0.3367       -0.4686       0.8995        1.3522  \n",
       "\n",
       "[373 rows x 347 columns]"
      ]
     },
     "execution_count": 88,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "genomic_features[series_intersecdef series_intersection(s1, s2):\n",
3321
    "    return pd.Series(list(set(s1) & set(s2)))\n",
3322
    "tion(sig, genomic_features.columns)]"
3323
   ]
3324
  },
  {
   "cell_type": "code",
   "execution_count": 84,
   "metadata": {},
   "outputs": [],
   "source": [
    "def series_intersection(s1, s2):\n",
    "    return pd.Series(list(set(s1) & set(s2)))\n"
   ]
  },
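  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "### Sketch: series_intersection on toy inputs (hypothetical gene names, just to\n",
    "### illustrate the set semantics; the set() round-trip means the order of the\n",
    "### result is not guaranteed).\n",
    "series_intersection(['EGFR', 'IDH1', 'TP53'], pd.Index(['IDH1_rnaseq', 'EGFR', 'IDH1']))\n",
    "# -> Series containing 'EGFR' and 'IDH1' (in arbitrary order)"
   ]
  },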
  {
   "cell_type": "code",
   "execution_count": 68,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "     NDUFS5_cnv  MACF1_cnv  RNA5SP44_cnv  KIAA0754_cnv  BMP8A_cnv  PABPC4_cnv  \\\n",
       "0            -1         -1            -1            -1         -1          -1   \n",
       "1             2          2             2             2          2           2   \n",
       "2             0          0             0             0          0           0   \n",
       "3             0          0             0             0          0           0   \n",
       "4             0          0             0             0          0           0   \n",
       "..          ...        ...           ...           ...        ...         ...   \n",
       "368           2          2             2             2          2           2   \n",
       "369           0          0             0             0          0           0   \n",
       "370           1          1             1             1          1           1   \n",
       "371           0          0             0             0          0           0   \n",
       "372           0          0             0             0          0           0   \n",
       "\n",
       "     SNORA55_cnv  HEYL_cnv  HPCAL4_cnv  NT5C1A_cnv  ...  ZWINT_rnaseq  \\\n",
       "0             -1        -1          -1          -1  ...       -0.8388   \n",
       "1              2         2           2           2  ...       -0.1083   \n",
       "2              0         0           0           0  ...       -0.4155   \n",
       "3              0         0           0           0  ...       -0.8143   \n",
       "4              0         0           0           0  ...        0.0983   \n",
       "..           ...       ...         ...         ...  ...           ...   \n",
       "368            2         2           2           2  ...       -0.0291   \n",
       "369            0         0           0           0  ...        0.0497   \n",
       "370            1         1           1           1  ...        0.3822   \n",
       "371            0         0           0           0  ...       -0.6853   \n",
       "372            0         0           0           0  ...        0.0517   \n",
       "\n",
       "     ZXDA_rnaseq  ZXDB_rnaseq  ZXDC_rnaseq  ZYG11A_rnaseq  ZYG11B_rnaseq  \\\n",
       "0         4.1375       3.9664       1.8437        -0.3959        -0.2561   \n",
       "1         0.3393       0.2769       1.7320        -0.0975         2.6955   \n",
       "2         1.6846       0.7711      -0.3061        -0.5016         2.8548   \n",
       "3         0.8344       1.5075       3.6068        -0.5004        -0.0747   \n",
       "4        -0.7908      -0.0053      -0.0643        -0.3706         0.3870   \n",
       "..           ...          ...          ...            ...            ...   \n",
       "368      -0.1058      -0.6721       0.2802         1.9504        -0.8784   \n",
       "369       0.3673      -0.2208       0.3034         3.2580        -0.2089   \n",
       "370      -0.7003      -0.7661      -1.7035        -0.5423        -0.3488   \n",
       "371      -1.0240      -1.2890      -1.5666        -0.1270        -1.4662   \n",
       "372      -0.3570      -0.4843      -0.3792        -0.1964        0.4200   \n",
       "\n",
       "     ZYX_rnaseq  ZZEF1_rnaseq  ZZZ3_rnaseq  TPTEP1_rnaseq  \n",
       "0       -0.2866        1.8770      -0.3179        -0.3633  \n",
       "1       -0.6741        1.0323       1.2766        -0.3982  \n",
       "2       -0.6171       -0.8608      -0.0486        -0.3962  \n",
       "3       -0.2185       -0.4379       1.6913         1.7748  \n",
       "4       -0.5589       -0.5979       0.0047        -0.3548  \n",
       "..          ...           ...          ...            ...  \n",
       "368      0.9506        0.0607       1.1883        -0.3521  \n",
       "369      1.6053       -0.8746      -0.4491        -0.3450  \n",
       "370      1.3713       -0.4365       2.3456        -0.3866  \n",
       "371      0.3981       -0.5976      -1.3822        -0.4157  \n",
       "372      3.2547       -0.1232       3.4519        -0.1962  \n",
       "\n",
       "[373 rows x 20395 columns]"
      ]
     },
     "execution_count": 68,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "genomic_features"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "if 'case_id' not in slide_data:\n",
    "    slide_data.index = slide_data.index.str[:12]\n",
    "    slide_data['case_id'] = slide_data.index\n",
    "    slide_data = slide_data.reset_index(drop=True)"
   ]
  },
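  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "### Sketch: the [:12] slice above keeps the 12-character TCGA patient barcode,\n",
    "### e.g. a slide index like 'TCGA-2F-A9KO-01Z-00-DX1...' collapses to the\n",
    "### case_id 'TCGA-2F-A9KO' (truncated suffix here is illustrative).\n",
    "pd.Index(['TCGA-2F-A9KO-01Z-00-DX1']).str[:12]\n",
    "# -> Index(['TCGA-2F-A9KO'], dtype='object')"
   ]
  },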
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "new_cols = list(slide_data.columns[-2:]) + list(slide_data.columns[:-2])\n",
    "slide_data = slide_data[new_cols]"
   ]
  },
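  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "### Sketch: inspect which columns the reorder moved to the front. The display\n",
    "### below shows ZZZ3_rnaseq and TPTEP1_rnaseq leading, i.e. columns[-2:] were\n",
    "### still genomic features at this point rather than the clinical fields.\n",
    "print(slide_data.columns[:5].tolist())"
   ]
  },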
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "              ZZZ3_rnaseq  TPTEP1_rnaseq  \\\n",
       "case_id                                    \n",
       "TCGA-2F-A9KO      -0.3179        -0.3633   \n",
       "TCGA-2F-A9KP       1.2766        -0.3982   \n",
       "TCGA-2F-A9KP       1.2766        -0.3982   \n",
       "TCGA-2F-A9KQ      -0.0486        -0.3962   \n",
       "TCGA-2F-A9KR       1.6913         1.7748   \n",
       "...                   ...            ...   \n",
       "TCGA-ZF-AA54       1.1883        -0.3521   \n",
       "TCGA-ZF-AA58      -0.4491        -0.3450   \n",
       "TCGA-ZF-AA5H       2.3456        -0.3866   \n",
       "TCGA-ZF-AA5N      -1.3822        -0.4157   \n",
       "TCGA-ZF-AA5P       3.4519        -0.1962   \n",
       "\n",
       "                                                       slide_id site  \\\n",
       "case_id                                                                \n",
       "TCGA-2F-A9KO  TCGA-2F-A9KO-01Z-00-DX1.195576CF-B739-4BD9-B15...   2F   \n",
       "TCGA-2F-A9KP  TCGA-2F-A9KP-01Z-00-DX1.3CDF534E-958F-4467-AA7...   2F   \n",
       "TCGA-2F-A9KP  TCGA-2F-A9KP-01Z-00-DX2.718C82A3-252B-498E-BFB...   2F   \n",
       "TCGA-2F-A9KQ  TCGA-2F-A9KQ-01Z-00-DX1.1C8CB2DD-5CC6-4E99-A0F...   2F   \n",
       "TCGA-2F-A9KR  TCGA-2F-A9KR-01Z-00-DX1.D6A4BD2D-18F3-4FA6-827...   2F   \n",
       "...                                                         ...  ...   \n",
       "TCGA-ZF-AA54  TCGA-ZF-AA54-01Z-00-DX1.9118BB51-333A-4257-A79...   ZF   \n",
       "TCGA-ZF-AA58  TCGA-ZF-AA58-01Z-00-DX1.85C3611E-11FA-4AAE-B88...   ZF   \n",
       "TCGA-ZF-AA5H  TCGA-ZF-AA5H-01Z-00-DX1.2B5DF00E-E0FD-4C58-A82...   ZF   \n",
       "TCGA-ZF-AA5N  TCGA-ZF-AA5N-01Z-00-DX1.A207E3EE-CC7D-4267-A77...   ZF   \n",
       "TCGA-ZF-AA5P  TCGA-ZF-AA5P-01Z-00-DX1.B91697A2-A186-4E67-A81...   ZF   \n",
       "\n",
       "              is_female oncotree_code  age  survival_months  censorship  \\\n",
       "case_id                                                                   \n",
       "TCGA-2F-A9KO          0          BLCA   63            24.11           0   \n",
       "TCGA-2F-A9KP          0          BLCA   66            11.96           0   \n",
       "TCGA-2F-A9KP          0          BLCA   66            11.96           0   \n",
       "TCGA-2F-A9KQ          0          BLCA   69            94.81           1   \n",
       "TCGA-2F-A9KR          1          BLCA   59           104.57           0   \n",
       "...                 ...           ...  ...              ...         ...   \n",
       "TCGA-ZF-AA54          0          BLCA   71            19.38           0   \n",
       "TCGA-ZF-AA58          1          BLCA   61            54.17           1   \n",
       "TCGA-ZF-AA5H          1          BLCA   60            29.47           1   \n",
       "TCGA-ZF-AA5N          1          BLCA   62             5.52           0   \n",
       "TCGA-ZF-AA5P          0          BLCA   65            12.22           1   \n",
       "\n",
       "              train  ...  ZW10_rnaseq  ZWILCH_rnaseq  ZWINT_rnaseq  \\\n",
       "case_id              ...                                             \n",
       "TCGA-2F-A9KO    1.0  ...      -0.7172         0.7409       -0.8388   \n",
       "TCGA-2F-A9KP    1.0  ...       0.6373         0.8559       -0.1083   \n",
       "TCGA-2F-A9KP    1.0  ...       0.6373         0.8559       -0.1083   \n",
       "TCGA-2F-A9KQ    1.0  ...      -0.5676        -0.0621       -0.4155   \n",
       "TCGA-2F-A9KR    1.0  ...      -1.3825         0.3550       -0.8143   \n",
       "...             ...  ...          ...            ...           ...   \n",
       "TCGA-ZF-AA54    1.0  ...      -0.0898         2.1092       -0.0291   \n",
       "TCGA-ZF-AA58    1.0  ...      -0.2075        -0.0617        0.0497   \n",
       "TCGA-ZF-AA5H    1.0  ...      -1.4118        -0.1236        0.3822   \n",
       "TCGA-ZF-AA5N    1.0  ...      -0.1733        -0.2397       -0.6853   \n",
       "TCGA-ZF-AA5P    1.0  ...      -1.1056        -0.6634        0.0517   \n",
       "\n",
       "              ZXDA_rnaseq  ZXDB_rnaseq  ZXDC_rnaseq  ZYG11A_rnaseq  \\\n",
       "case_id                                                              \n",
       "TCGA-2F-A9KO       4.1375       3.9664       1.8437        -0.3959   \n",
       "TCGA-2F-A9KP       0.3393       0.2769       1.7320        -0.0975   \n",
       "TCGA-2F-A9KP       0.3393       0.2769       1.7320        -0.0975   \n",
       "TCGA-2F-A9KQ       1.6846       0.7711      -0.3061        -0.5016   \n",
       "TCGA-2F-A9KR       0.8344       1.5075       3.6068        -0.5004   \n",
       "...                   ...          ...          ...            ...   \n",
       "TCGA-ZF-AA54      -0.1058      -0.6721       0.2802         1.9504   \n",
       "TCGA-ZF-AA58       0.3673      -0.2208       0.3034         3.2580   \n",
       "TCGA-ZF-AA5H      -0.7003      -0.7661      -1.7035        -0.5423   \n",
       "TCGA-ZF-AA5N      -1.0240      -1.2890      -1.5666        -0.1270   \n",
       "TCGA-ZF-AA5P      -0.3570      -0.4843      -0.3792        -0.1964   \n",
       "\n",
       "              ZYG11B_rnaseq  ZYX_rnaseq  ZZEF1_rnaseq  \n",
       "case_id                                                \n",
       "TCGA-2F-A9KO        -0.2561     -0.2866        1.8770  \n",
       "TCGA-2F-A9KP         2.6955     -0.6741        1.0323  \n",
       "TCGA-2F-A9KP         2.6955     -0.6741        1.0323  \n",
       "TCGA-2F-A9KQ         2.8548     -0.6171       -0.8608  \n",
       "TCGA-2F-A9KR        -0.0747     -0.2185       -0.4379  \n",
       "...                     ...         ...           ...  \n",
       "TCGA-ZF-AA54        -0.8784      0.9506        0.0607  \n",
       "TCGA-ZF-AA58        -0.2089      1.6053       -0.8746  \n",
       "TCGA-ZF-AA5H        -0.3488      1.3713       -0.4365  \n",
       "TCGA-ZF-AA5N        -1.4662      0.3981       -0.5976  \n",
       "TCGA-ZF-AA5P         0.4200      3.2547       -0.1232  \n",
       "\n",
       "[437 rows x 20403 columns]"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "slide_data"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}