Diff of /EGFR/EGFRv7.ipynb [000000] .. [d90d15]

a b/EGFR/EGFRv7.ipynb
1
{
2
 "cells": [
3
  {
4
   "cell_type": "code",
5
   "execution_count": 1,
6
   "metadata": {},
7
   "outputs": [
8
    {
9
     "name": "stderr",
10
     "output_type": "stream",
11
     "text": [
12
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/sklearn/utils/validation.py:475: DataConversionWarning: Data with input dtype object was converted to float64 by StandardScaler.\n",
13
      "  warnings.warn(msg, DataConversionWarning)\n",
14
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/sklearn/utils/validation.py:475: DataConversionWarning: Data with input dtype object was converted to float64 by StandardScaler.\n",
15
      "  warnings.warn(msg, DataConversionWarning)\n"
16
     ]
17
    },
18
    {
19
     "name": "stdout",
20
     "output_type": "stream",
21
     "text": [
22
      "0.9440567436313729\n",
23
      "0.7222222222222222\n",
24
      "0.8\n"
25
     ]
26
    }
27
   ],
28
   "source": [
29
    "import torch \n",
30
    "import torch.nn as nn\n",
31
    "import torch.nn.functional as F\n",
32
    "import torch.optim as optim\n",
33
    "import numpy as np\n",
34
    "import matplotlib.pyplot as plt\n",
35
    "import pandas as pd\n",
36
    "import math\n",
37
    "import sklearn.preprocessing as sk\n",
38
    "import seaborn as sns\n",
39
    "from sklearn import metrics\n",
40
    "from sklearn.feature_selection import VarianceThreshold\n",
41
    "from sklearn.model_selection import train_test_split\n",
42
    "from utils import AllTripletSelector,HardestNegativeTripletSelector, RandomNegativeTripletSelector, SemihardNegativeTripletSelector # Strategies for selecting triplets within a minibatch\n",
43
    "from metrics import AverageNonzeroTripletsMetric\n",
44
    "from torch.utils.data.sampler import WeightedRandomSampler\n",
45
    "from sklearn.metrics import roc_auc_score\n",
46
    "from sklearn.metrics import average_precision_score\n",
47
    "import random\n",
48
    "from random import randint\n",
49
    "from sklearn.model_selection import StratifiedKFold\n",
50
    "\n",
51
    "save_results_to = '/home/hnoghabi/EGFR/'\n",
52
    "torch.manual_seed(42)\n",
53
    "random.seed(42)\n",
54
    "\n",
55
    "GDSCE = pd.read_csv(\"GDSC_exprs.z.EGFRi.tsv\", \n",
56
    "                    sep = \"\\t\", index_col=0, decimal = \",\")\n",
57
    "GDSCE = pd.DataFrame.transpose(GDSCE)\n",
58
    "\n",
59
    "GDSCM = pd.read_csv(\"GDSC_mutations.EGFRi.tsv\", \n",
60
    "                    sep = \"\\t\", index_col=0, decimal = \".\")\n",
61
    "GDSCM = pd.DataFrame.transpose(GDSCM)\n",
62
    "GDSCM = GDSCM.loc[:,~GDSCM.columns.duplicated()]\n",
63
    "\n",
64
    "GDSCC = pd.read_csv(\"GDSC_CNA.EGFRi.tsv\", \n",
65
    "                    sep = \"\\t\", index_col=0, decimal = \".\")\n",
66
    "GDSCC.drop_duplicates(keep='last')\n",
67
    "GDSCC = pd.DataFrame.transpose(GDSCC)\n",
68
    "GDSCC = GDSCC.loc[:,~GDSCC.columns.duplicated()]\n",
69
    "\n",
70
    "PDXEerlo = pd.read_csv(\"PDX_exprs.Erlotinib.eb_with.GDSC_exprs.Erlotinib.tsv\", \n",
71
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
72
    "PDXEerlo = pd.DataFrame.transpose(PDXEerlo)\n",
73
    "PDXMerlo = pd.read_csv(\"PDX_mutations.Erlotinib.tsv\", \n",
74
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
75
    "PDXMerlo = pd.DataFrame.transpose(PDXMerlo)\n",
76
    "PDXCerlo = pd.read_csv(\"PDX_CNA.Erlotinib.tsv\", \n",
77
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
78
    "PDXCerlo.drop_duplicates(keep='last')\n",
79
    "PDXCerlo = pd.DataFrame.transpose(PDXCerlo)\n",
80
    "PDXCerlo = PDXCerlo.loc[:,~PDXCerlo.columns.duplicated()]\n",
81
    "\n",
82
    "PDXEcet = pd.read_csv(\"PDX_exprs.Cetuximab.eb_with.GDSC_exprs.Cetuximab.tsv\", \n",
83
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
84
    "PDXEcet = pd.DataFrame.transpose(PDXEcet)\n",
85
    "PDXMcet = pd.read_csv(\"PDX_mutations.Cetuximab.tsv\", \n",
86
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
87
    "PDXMcet = pd.DataFrame.transpose(PDXMcet)\n",
88
    "PDXCcet = pd.read_csv(\"PDX_CNA.Cetuximab.tsv\", \n",
89
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
90
    "PDXCcet.drop_duplicates(keep='last')\n",
91
    "PDXCcet = pd.DataFrame.transpose(PDXCcet)\n",
92
    "PDXCcet = PDXCcet.loc[:,~PDXCcet.columns.duplicated()]\n",
93
    "\n",
94
    "selector = VarianceThreshold(0.05)\n",
95
    "selector.fit_transform(GDSCE)\n",
96
    "GDSCE = GDSCE[GDSCE.columns[selector.get_support(indices=True)]]\n",
97
    "\n",
98
    "GDSCM = GDSCM.fillna(0)\n",
99
    "GDSCM[GDSCM != 0.0] = 1\n",
100
    "GDSCC = GDSCC.fillna(0)\n",
101
    "GDSCC[GDSCC != 0.0] = 1\n",
102
    "\n",
103
    "ls = GDSCE.columns.intersection(GDSCM.columns)\n",
104
    "ls = ls.intersection(GDSCC.columns)\n",
105
    "ls = ls.intersection(PDXEerlo.columns)\n",
106
    "ls = ls.intersection(PDXMerlo.columns)\n",
107
    "ls = ls.intersection(PDXCerlo.columns)\n",
108
    "ls = ls.intersection(PDXEcet.columns)\n",
109
    "ls = ls.intersection(PDXMcet.columns)\n",
110
    "ls = ls.intersection(PDXCcet.columns)\n",
111
    "ls2 = GDSCE.index.intersection(GDSCM.index)\n",
112
    "ls2 = ls2.intersection(GDSCC.index)\n",
113
    "ls3 = PDXEerlo.index.intersection(PDXMerlo.index)\n",
114
    "ls3 = ls3.intersection(PDXCerlo.index)\n",
115
    "ls4 = PDXEcet.index.intersection(PDXMcet.index)\n",
116
    "ls4 = ls4.intersection(PDXCcet.index)\n",
117
    "ls = pd.unique(ls)\n",
118
    "\n",
119
    "PDXEerlo = PDXEerlo.loc[ls3,ls]\n",
120
    "PDXMerlo = PDXMerlo.loc[ls3,ls]\n",
121
    "PDXCerlo = PDXCerlo.loc[ls3,ls]\n",
122
    "PDXEcet = PDXEcet.loc[ls4,ls]\n",
123
    "PDXMcet = PDXMcet.loc[ls4,ls]\n",
124
    "PDXCcet = PDXCcet.loc[ls4,ls]\n",
125
    "GDSCE = GDSCE.loc[:,ls]\n",
126
    "GDSCM = GDSCM.loc[:,ls]\n",
127
    "GDSCC = GDSCC.loc[:,ls]\n",
128
    "\n",
129
    "GDSCR = pd.read_csv(\"GDSC_response.EGFRi.tsv\", \n",
130
    "                    sep = \"\\t\", index_col=0, decimal = \",\")\n",
131
    "\n",
132
    "GDSCR.rename(mapper = str, axis = 'index', inplace = True)\n",
133
    "\n",
134
    "d = {\"R\":0,\"S\":1}\n",
135
    "GDSCR[\"response\"] = GDSCR.loc[:,\"response\"].apply(lambda x: d[x])\n",
136
    "\n",
137
    "responses = GDSCR\n",
138
    "drugs = set(responses[\"drug\"].values)\n",
139
    "exprs_z = GDSCE\n",
140
    "cna = GDSCC\n",
141
    "mut = GDSCM\n",
142
    "expression_zscores = []\n",
143
    "CNA=[]\n",
144
    "mutations = []\n",
145
    "for drug in drugs:\n",
146
    "    samples = responses.loc[responses[\"drug\"]==drug,:].index.values\n",
147
    "    e_z = exprs_z.loc[samples,:]\n",
148
    "    c = cna.loc[samples,:]\n",
149
    "    m = mut.loc[samples,:]\n",
151
    "    # next 3 rows if you want non-unique sample names\n",
152
    "    e_z.rename(lambda x : str(x)+\"_\"+drug, axis = \"index\", inplace=True)\n",
153
    "    c.rename(lambda x : str(x)+\"_\"+drug, axis = \"index\", inplace=True)\n",
154
    "    m.rename(lambda x : str(x)+\"_\"+drug, axis = \"index\", inplace=True)\n",
155
    "    expression_zscores.append(e_z)\n",
156
    "    CNA.append(c)\n",
157
    "    mutations.append(m)\n",
158
    "responses.index = responses.index.values +\"_\"+responses[\"drug\"].values\n",
159
    "GDSCEv2 = pd.concat(expression_zscores, axis =0 )\n",
160
    "GDSCCv2 = pd.concat(CNA, axis =0 )\n",
161
    "GDSCMv2 = pd.concat(mutations, axis =0 )\n",
162
    "GDSCRv2 = responses\n",
163
    "\n",
164
    "ls2 = GDSCEv2.index.intersection(GDSCMv2.index)\n",
165
    "ls2 = ls2.intersection(GDSCCv2.index)\n",
166
    "GDSCEv2 = GDSCEv2.loc[ls2,:]\n",
167
    "GDSCMv2 = GDSCMv2.loc[ls2,:]\n",
168
    "GDSCCv2 = GDSCCv2.loc[ls2,:]\n",
169
    "GDSCRv2 = GDSCRv2.loc[ls2,:]\n",
170
    "\n",
171
    "Y = GDSCRv2['response'].values\n",
172
    "\n",
173
    "PDXRcet = pd.read_csv(\"PDX_response.Cetuximab.tsv\", \n",
174
    "                       sep = \"\\t\", index_col=0, decimal = \",\")\n",
175
    "PDXRcet.loc[PDXRcet.iloc[:,0] == 'R'] = 0\n",
176
    "PDXRcet.loc[PDXRcet.iloc[:,0] == 'S'] = 1\n",
177
    "PDXRcet = PDXRcet.loc[ls4,:]\n",
178
    "Ytscet = PDXRcet['response'].values    \n",
179
    "\n",
180
    "PDXRerlo = pd.read_csv(\"PDX_response.Erlotinib.tsv\", \n",
181
    "                       sep = \"\\t\", index_col=0, decimal = \",\")\n",
182
    "PDXRerlo.loc[PDXRerlo.iloc[:,0] == 'R'] = 0\n",
183
    "PDXRerlo.loc[PDXRerlo.iloc[:,0] == 'S'] = 1\n",
184
    "PDXRerlo = PDXRerlo.loc[ls3,:]\n",
185
    "Ytserlo = PDXRerlo['response'].values  \n",
186
    "\n",
187
    "hdm1 = 32\n",
188
    "hdm2 = 16\n",
189
    "hdm3 = 256\n",
190
    "rate1 = 0.5\n",
191
    "rate2 = 0.8\n",
192
    "rate3 = 0.5\n",
193
    "rate4 = 0.3\n",
194
    "\n",
195
    "scalerGDSC = sk.StandardScaler()\n",
196
    "scalerGDSC.fit(GDSCEv2.values)\n",
197
    "X_trainE = scalerGDSC.transform(GDSCEv2.values)\n",
198
    "X_testEerlo = scalerGDSC.transform(PDXEerlo.values)    \n",
199
    "X_testEcet = scalerGDSC.transform(PDXEcet.values)    \n",
200
    "\n",
201
    "X_trainM = np.nan_to_num(GDSCMv2.values)\n",
202
    "X_trainC = np.nan_to_num(GDSCCv2.values)\n",
203
    "X_testMerlo = np.nan_to_num(PDXMerlo.values)\n",
204
    "X_testCerlo = np.nan_to_num(PDXCerlo.values)\n",
205
    "X_testMcet = np.nan_to_num(PDXMcet.values)\n",
206
    "X_testCcet = np.nan_to_num(PDXCcet.values)\n",
207
    "\n",
208
    "TX_testEerlo = torch.FloatTensor(X_testEerlo)\n",
209
    "TX_testMerlo = torch.FloatTensor(X_testMerlo)\n",
210
    "TX_testCerlo = torch.FloatTensor(X_testCerlo)\n",
211
    "ty_testEerlo = torch.FloatTensor(Ytserlo.astype(int))\n",
212
    "\n",
213
    "TX_testEcet = torch.FloatTensor(X_testEcet)\n",
214
    "TX_testMcet = torch.FloatTensor(X_testMcet)\n",
215
    "TX_testCcet = torch.FloatTensor(X_testCcet)\n",
216
    "ty_testEcet = torch.FloatTensor(Ytscet.astype(int))\n",
217
    "\n",
218
    "n_sampE, IE_dim = X_trainE.shape\n",
219
    "n_sampM, IM_dim = X_trainM.shape\n",
220
    "n_sampC, IC_dim = X_trainC.shape\n",
221
    "\n",
222
    "h_dim1 = hdm1\n",
223
    "h_dim2 = hdm2\n",
224
    "h_dim3 = hdm3        \n",
225
    "Z_in = h_dim1 + h_dim2 + h_dim3\n",
226
    "\n",
227
    "costtr = []\n",
228
    "auctr = []\n",
229
    "costts = []\n",
230
    "aucts = []\n",
231
    "\n",
232
    "class AEE(nn.Module):\n",
233
    "    def __init__(self):\n",
234
    "        super(AEE, self).__init__()\n",
235
    "        self.EnE = torch.nn.Sequential(\n",
236
    "            nn.Linear(IE_dim, h_dim1),\n",
237
    "            nn.BatchNorm1d(h_dim1),\n",
238
    "            nn.ReLU(),\n",
239
    "            nn.Dropout(rate1))\n",
240
    "    def forward(self, x):\n",
241
    "        output = self.EnE(x)\n",
242
    "        return output\n",
243
    "\n",
244
    "class AEM(nn.Module):\n",
245
    "    def __init__(self):\n",
246
    "        super(AEM, self).__init__()\n",
247
    "        self.EnM = torch.nn.Sequential(\n",
248
    "            nn.Linear(IM_dim, h_dim2),\n",
249
    "            nn.BatchNorm1d(h_dim2),\n",
250
    "            nn.ReLU(),\n",
251
    "            nn.Dropout(rate2))\n",
252
    "    def forward(self, x):\n",
253
    "        output = self.EnM(x)\n",
254
    "        return output    \n",
255
    "\n",
256
    "\n",
257
    "class AEC(nn.Module):\n",
258
    "    def __init__(self):\n",
259
    "        super(AEC, self).__init__()\n",
260
    "        self.EnC = torch.nn.Sequential(\n",
261
    "            nn.Linear(IM_dim, h_dim3),\n",
262
    "            nn.BatchNorm1d(h_dim3),\n",
263
    "            nn.ReLU(),\n",
264
    "            nn.Dropout(rate3))\n",
265
    "    def forward(self, x):\n",
266
    "        output = self.EnC(x)\n",
267
    "        return output       \n",
268
    "\n",
269
    "class Classifier(nn.Module):\n",
270
    "    def __init__(self):\n",
271
    "        super(Classifier, self).__init__()\n",
272
    "        self.FC = torch.nn.Sequential(\n",
273
    "            nn.Linear(Z_in, 1),\n",
274
    "            nn.Dropout(rate4),\n",
275
    "            nn.Sigmoid())\n",
276
    "    def forward(self, x):\n",
277
    "        return self.FC(x)\n",
278
    "\n",
279
    "torch.cuda.manual_seed_all(42)\n",
280
    "\n",
281
    "AutoencoderE = torch.load('EGFRv2Exprs.pt')\n",
282
    "AutoencoderM = torch.load('EGFRv2Mut.pt')\n",
283
    "AutoencoderC = torch.load('EGFRv2CNA.pt')\n",
284
    "\n",
285
    "Clas = torch.load('EGFRv2Class.pt')\n",
286
    "\n",
287
    "AutoencoderE.eval()\n",
288
    "AutoencoderM.eval()\n",
289
    "AutoencoderC.eval()\n",
290
    "Clas.eval()\n",
291
    "\n",
292
    "ZEX = AutoencoderE(torch.FloatTensor(X_trainE))\n",
293
    "ZMX = AutoencoderM(torch.FloatTensor(X_trainM))\n",
294
    "ZCX = AutoencoderC(torch.FloatTensor(X_trainC))\n",
295
    "ZTX = torch.cat((ZEX, ZMX, ZCX), 1)\n",
296
    "ZTX = F.normalize(ZTX, p=2, dim=0)\n",
297
    "PredX = Clas(ZTX)\n",
298
    "AUCt = roc_auc_score(Y, PredX.detach().numpy())\n",
299
    "print(AUCt)\n",
300
    "\n",
301
    "ZETerlo = AutoencoderE(TX_testEerlo)\n",
302
    "ZMTerlo = AutoencoderM(TX_testMerlo)\n",
303
    "ZCTerlo = AutoencoderC(TX_testCerlo)\n",
304
    "ZTTerlo = torch.cat((ZETerlo, ZMTerlo, ZCTerlo), 1)\n",
305
    "ZTTerlo = F.normalize(ZTTerlo, p=2, dim=0)\n",
306
    "PredTerlo = Clas(ZTTerlo)\n",
307
    "AUCterlo = roc_auc_score(Ytserlo, PredTerlo.detach().numpy())\n",
308
    "print(AUCterlo)\n",
309
    "\n",
310
    "ZETcet = AutoencoderE(TX_testEcet)\n",
311
    "ZMTcet = AutoencoderM(TX_testMcet)\n",
312
    "ZCTcet = AutoencoderC(TX_testCcet)\n",
313
    "ZTTcet = torch.cat((ZETcet, ZMTcet, ZCTcet), 1)\n",
314
    "ZTTcet = F.normalize(ZTTcet, p=2, dim=0)\n",
315
    "PredTcet = Clas(ZTTcet)\n",
316
    "AUCtcet = roc_auc_score(Ytscet, PredTcet.detach().numpy())\n",
317
    "print(AUCtcet)"
318
   ]
319
  },
320
  {
321
   "cell_type": "code",
322
   "execution_count": 2,
323
   "metadata": {},
324
   "outputs": [
325
    {
326
     "name": "stderr",
327
     "output_type": "stream",
328
     "text": [
329
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
330
      "Passing list-likes to .loc or [] with any missing label will raise\n",
331
      "KeyError in the future, you can use .reindex() as an alternative.\n",
332
      "\n",
333
      "See the documentation here:\n",
334
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
335
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
336
      "Passing list-likes to .loc or [] with any missing label will raise\n",
337
      "KeyError in the future, you can use .reindex() as an alternative.\n",
338
      "\n",
339
      "See the documentation here:\n",
340
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
341
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
342
      "Passing list-likes to .loc or [] with any missing label will raise\n",
343
      "KeyError in the future, you can use .reindex() as an alternative.\n",
344
      "\n",
345
      "See the documentation here:\n",
346
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
347
     ]
348
    },
349
    {
350
     "name": "stdout",
351
     "output_type": "stream",
352
     "text": [
353
      "(492, 13081)\n",
354
      "(492, 13081)\n",
355
      "(492, 13081)\n"
356
     ]
357
    }
358
   ],
359
   "source": [
360
    "PRADE = pd.read_csv(\"TCGA-PRAD_exprs.tsv\", \n",
361
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
362
    "PRADE = pd.DataFrame.transpose(PRADE)\n",
363
    "\n",
364
    "PRADM = pd.read_csv(\"TCGA-PRAD_mutations.tsv\", \n",
365
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
366
    "PRADM = pd.DataFrame.transpose(PRADM)\n",
367
    "PRADM = PRADM.loc[:,~PRADM.columns.duplicated()]\n",
368
    "\n",
369
    "PRADC = pd.read_csv(\"TCGA-PRAD_CNA.tsv\", \n",
370
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
371
    "PRADC = pd.DataFrame.transpose(PRADC)\n",
372
    "PRADC = PRADC.loc[:,~PRADC.columns.duplicated()]\n",
373
    "\n",
374
    "PRADM = PRADM.fillna(0)\n",
375
    "PRADM[PRADM != 0.0] = 1\n",
376
    "PRADC = PRADC.fillna(0)\n",
377
    "PRADC[PRADC != 0.0] = 1\n",
378
    "\n",
379
    "#PRADE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
380
    "#PRADM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
381
    "#PRADC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
382
    "\n",
383
    "lsPRAD = PRADE.index.intersection(PRADM.index)\n",
384
    "lsPRAD = lsPRAD.intersection(PRADC.index)\n",
385
    "lsPRAD = pd.unique(lsPRAD)\n",
386
    "\n",
387
    "PRADE = PRADE.loc[lsPRAD,ls]\n",
388
    "PRADM = PRADM.loc[lsPRAD,ls]\n",
389
    "PRADC = PRADC.loc[lsPRAD,ls]\n",
390
    "\n",
391
    "print(PRADE.shape)\n",
392
    "print(PRADM.shape)\n",
393
    "print(PRADC.shape)\n",
394
    "\n",
395
    "AutoencoderE.eval()\n",
396
    "AutoencoderM.eval()\n",
397
    "AutoencoderC.eval()\n",
398
    "Clas.eval()\n",
399
    "\n",
400
    "PRADE2 = np.nan_to_num(PRADE.values)\n",
401
    "PRADM2 = np.nan_to_num(PRADM.values)\n",
402
    "PRADC2 = np.nan_to_num(PRADC.values)\n",
403
    "\n",
404
    "NPRADE2 = scalerGDSC.transform(PRADE2)    \n",
405
    "\n",
406
    "PRADexprs = torch.FloatTensor(NPRADE2)\n",
407
    "PRADmut = torch.FloatTensor(PRADM2)\n",
408
    "PRADcna = torch.FloatTensor(PRADC2)\n",
409
    "\n",
410
    "PRADZE = AutoencoderE(PRADexprs)\n",
411
    "PRADZM = AutoencoderM(PRADmut)\n",
412
    "PRADZC = AutoencoderC(PRADcna)\n",
413
    "\n",
414
    "PRADZT = torch.cat((PRADZE, PRADZM, PRADZC), 1)\n",
415
    "PRADZTX = F.normalize(PRADZT, p=2, dim=0)\n",
416
    "PredPRAD = Clas(PRADZTX)\n",
417
    "\n",
418
    "#print(PredPRAD.detach().numpy())"
419
   ]
420
  },
421
  {
422
   "cell_type": "code",
423
   "execution_count": 3,
424
   "metadata": {},
425
   "outputs": [
426
    {
427
     "name": "stderr",
428
     "output_type": "stream",
429
     "text": [
430
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
431
      "Passing list-likes to .loc or [] with any missing label will raise\n",
432
      "KeyError in the future, you can use .reindex() as an alternative.\n",
433
      "\n",
434
      "See the documentation here:\n",
435
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
436
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
437
      "Passing list-likes to .loc or [] with any missing label will raise\n",
438
      "KeyError in the future, you can use .reindex() as an alternative.\n",
439
      "\n",
440
      "See the documentation here:\n",
441
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
442
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
443
      "Passing list-likes to .loc or [] with any missing label will raise\n",
444
      "KeyError in the future, you can use .reindex() as an alternative.\n",
445
      "\n",
446
      "See the documentation here:\n",
447
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
448
     ]
449
    },
450
    {
451
     "name": "stdout",
452
     "output_type": "stream",
453
     "text": [
454
      "(161, 13081)\n",
455
      "(161, 13081)\n",
456
      "(161, 13081)\n"
457
     ]
458
    }
459
   ],
460
   "source": [
461
    "KIRPE = pd.read_csv(\"TCGA-KIRP_exprs.tsv\", \n",
462
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
463
    "KIRPE = pd.DataFrame.transpose(KIRPE)\n",
464
    "\n",
465
    "KIRPM = pd.read_csv(\"TCGA-KIRP_mutations.tsv\", \n",
466
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
467
    "KIRPM = pd.DataFrame.transpose(KIRPM)\n",
468
    "KIRPM = KIRPM.loc[:,~KIRPM.columns.duplicated()]\n",
469
    "\n",
470
    "KIRPC = pd.read_csv(\"TCGA-KIRP_CNA.tsv\", \n",
471
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
472
    "KIRPC = pd.DataFrame.transpose(KIRPC)\n",
473
    "KIRPC = KIRPC.loc[:,~KIRPC.columns.duplicated()]\n",
474
    "\n",
475
    "KIRPM = KIRPM.fillna(0)\n",
476
    "KIRPM[KIRPM != 0.0] = 1\n",
477
    "KIRPC = KIRPC.fillna(0)\n",
478
    "KIRPC[KIRPC != 0.0] = 1\n",
479
    "\n",
480
    "#KIRPE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
481
    "#KIRPM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
482
    "#KIRPC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
483
    "\n",
484
    "lsKIRP = KIRPE.index.intersection(KIRPM.index)\n",
485
    "lsKIRP = lsKIRP.intersection(KIRPC.index)\n",
486
    "lsKIRP = pd.unique(lsKIRP)\n",
487
    "\n",
488
    "KIRPE = KIRPE.loc[lsKIRP,ls]\n",
489
    "KIRPM = KIRPM.loc[lsKIRP,ls]\n",
490
    "KIRPC = KIRPC.loc[lsKIRP,ls]\n",
491
    "\n",
492
    "print(KIRPE.shape)\n",
493
    "print(KIRPM.shape)\n",
494
    "print(KIRPC.shape)\n",
495
    "\n",
496
    "AutoencoderE.eval()\n",
497
    "AutoencoderM.eval()\n",
498
    "AutoencoderC.eval()\n",
499
    "Clas.eval()\n",
500
    "\n",
501
    "KIRPE2 = np.nan_to_num(KIRPE.values)\n",
502
    "KIRPM2 = np.nan_to_num(KIRPM.values)\n",
503
    "KIRPC2 = np.nan_to_num(KIRPC.values)\n",
504
    "\n",
505
    "NKIRPE2 = scalerGDSC.transform(KIRPE2)    \n",
506
    "\n",
507
    "KIRPexprs = torch.FloatTensor(NKIRPE2)\n",
508
    "KIRPmut = torch.FloatTensor(KIRPM2)\n",
509
    "KIRPcna = torch.FloatTensor(KIRPC2)\n",
510
    "\n",
511
    "KIRPZE = AutoencoderE(KIRPexprs)\n",
512
    "KIRPZM = AutoencoderM(KIRPmut)\n",
513
    "KIRPZC = AutoencoderC(KIRPcna)\n",
514
    "\n",
515
    "KIRPZT = torch.cat((KIRPZE, KIRPZM, KIRPZC), 1)\n",
516
    "KIRPZTX = F.normalize(KIRPZT, p=2, dim=0)\n",
517
    "PredKIRP = Clas(KIRPZTX)\n",
518
    "\n",
519
    "#print(PredKIRP.detach().numpy())"
520
   ]
521
  },
522
  {
523
   "cell_type": "code",
524
   "execution_count": 4,
525
   "metadata": {},
526
   "outputs": [
527
    {
528
     "name": "stderr",
529
     "output_type": "stream",
530
     "text": [
531
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
532
      "Passing list-likes to .loc or [] with any missing label will raise\n",
533
      "KeyError in the future, you can use .reindex() as an alternative.\n",
534
      "\n",
535
      "See the documentation here:\n",
536
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
537
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
538
      "Passing list-likes to .loc or [] with any missing label will raise\n",
539
      "KeyError in the future, you can use .reindex() as an alternative.\n",
540
      "\n",
541
      "See the documentation here:\n",
542
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
543
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
544
      "Passing list-likes to .loc or [] with any missing label will raise\n",
545
      "KeyError in the future, you can use .reindex() as an alternative.\n",
546
      "\n",
547
      "See the documentation here:\n",
548
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
549
     ]
550
    },
551
    {
552
     "name": "stdout",
553
     "output_type": "stream",
554
     "text": [
555
      "(123, 13081)\n",
556
      "(123, 13081)\n",
557
      "(123, 13081)\n"
558
     ]
559
    }
560
   ],
561
   "source": [
562
    "BLCAE = pd.read_csv(\"TCGA-BLCA_exprs.tsv\", \n",
563
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
564
    "BLCAE = pd.DataFrame.transpose(BLCAE)\n",
565
    "\n",
566
    "BLCAM = pd.read_csv(\"TCGA-BLCA_mutations.tsv\", \n",
567
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
568
    "BLCAM = pd.DataFrame.transpose(BLCAM)\n",
569
    "BLCAM = BLCAM.loc[:,~BLCAM.columns.duplicated()]\n",
570
    "\n",
571
    "BLCAC = pd.read_csv(\"TCGA-BLCA_CNA.tsv\", \n",
572
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
573
    "BLCAC = pd.DataFrame.transpose(BLCAC)\n",
574
    "BLCAC = BLCAC.loc[:,~BLCAC.columns.duplicated()]\n",
575
    "\n",
576
    "BLCAM = BLCAM.fillna(0)\n",
577
    "BLCAM[BLCAM != 0.0] = 1\n",
578
    "BLCAC = BLCAC.fillna(0)\n",
579
    "BLCAC[BLCAC != 0.0] = 1\n",
580
    "\n",
581
    "#BLCAE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
582
    "#BLCAM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
583
    "#BLCAC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
584
    "\n",
585
    "lsBLCA = BLCAE.index.intersection(BLCAM.index)\n",
586
    "lsBLCA = lsBLCA.intersection(BLCAC.index)\n",
587
    "lsBLCA = pd.unique(lsBLCA)\n",
588
    "\n",
589
    "BLCAE = BLCAE.loc[lsBLCA,ls]\n",
590
    "BLCAM = BLCAM.loc[lsBLCA,ls]\n",
591
    "BLCAC = BLCAC.loc[lsBLCA,ls]\n",
592
    "\n",
593
    "print(BLCAE.shape)\n",
594
    "print(BLCAM.shape)\n",
595
    "print(BLCAC.shape)\n",
596
    "\n",
597
    "AutoencoderE.eval()\n",
598
    "AutoencoderM.eval()\n",
599
    "AutoencoderC.eval()\n",
600
    "Clas.eval()\n",
601
    "\n",
602
    "BLCAE2 = np.nan_to_num(BLCAE.values)\n",
603
    "BLCAM2 = np.nan_to_num(BLCAM.values)\n",
604
    "BLCAC2 = np.nan_to_num(BLCAC.values)\n",
605
    "\n",
606
    "NBLCAE2 = scalerGDSC.transform(BLCAE2)    \n",
607
    "\n",
608
    "BLCAexprs = torch.FloatTensor(NBLCAE2)\n",
609
    "BLCAmut = torch.FloatTensor(BLCAM2)\n",
610
    "BLCAcna = torch.FloatTensor(BLCAC2)\n",
611
    "\n",
612
    "BLCAZE = AutoencoderE(BLCAexprs)\n",
613
    "BLCAZM = AutoencoderM(BLCAmut)\n",
614
    "BLCAZC = AutoencoderC(BLCAcna)\n",
615
    "\n",
616
    "BLCAZT = torch.cat((BLCAZE, BLCAZM, BLCAZC), 1)\n",
617
    "BLCAZTX = F.normalize(BLCAZT, p=2, dim=0)\n",
618
    "PredBLCA = Clas(BLCAZTX)\n",
619
    "\n",
620
    "#print(PredBLCA.detach().numpy())"
621
   ]
622
  },
623
  {
624
   "cell_type": "code",
625
   "execution_count": 5,
626
   "metadata": {},
627
   "outputs": [
628
    {
629
     "name": "stderr",
630
     "output_type": "stream",
631
     "text": [
632
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
633
      "Passing list-likes to .loc or [] with any missing label will raise\n",
634
      "KeyError in the future, you can use .reindex() as an alternative.\n",
635
      "\n",
636
      "See the documentation here:\n",
637
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
638
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
639
      "Passing list-likes to .loc or [] with any missing label will raise\n",
640
      "KeyError in the future, you can use .reindex() as an alternative.\n",
641
      "\n",
642
      "See the documentation here:\n",
643
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
644
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
645
      "Passing list-likes to .loc or [] with any missing label will raise\n",
646
      "KeyError in the future, you can use .reindex() as an alternative.\n",
647
      "\n",
648
      "See the documentation here:\n",
649
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
650
     ]
651
    },
652
    {
653
     "name": "stdout",
654
     "output_type": "stream",
655
     "text": [
656
      "(921, 13081)\n",
657
      "(921, 13081)\n",
658
      "(921, 13081)\n"
659
     ]
660
    }
661
   ],
662
   "source": [
663
    "BRCAE = pd.read_csv(\"TCGA-BRCA_exprs.tsv\", \n",
664
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
665
    "BRCAE = pd.DataFrame.transpose(BRCAE)\n",
666
    "\n",
667
    "BRCAM = pd.read_csv(\"TCGA-BRCA_mutations.tsv\", \n",
668
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
669
    "BRCAM = pd.DataFrame.transpose(BRCAM)\n",
670
    "BRCAM = BRCAM.loc[:,~BRCAM.columns.duplicated()]\n",
671
    "\n",
672
    "BRCAC = pd.read_csv(\"TCGA-BRCA_CNA.tsv\", \n",
673
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
674
    "BRCAC = pd.DataFrame.transpose(BRCAC)\n",
675
    "BRCAC = BRCAC.loc[:,~BRCAC.columns.duplicated()]\n",
676
    "\n",
677
    "BRCAM = BRCAM.fillna(0)\n",
678
    "BRCAM[BRCAM != 0.0] = 1\n",
679
    "BRCAC = BRCAC.fillna(0)\n",
680
    "BRCAC[BRCAC != 0.0] = 1\n",
681
    "\n",
682
    "#BRCAE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
683
    "#BRCAM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
684
    "#BRCAC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
685
    "\n",
686
    "lsBRCA = BRCAE.index.intersection(BRCAM.index)\n",
687
    "lsBRCA = lsBRCA.intersection(BRCAC.index)\n",
688
    "lsBRCA = pd.unique(lsBRCA)\n",
689
    "\n",
690
    "BRCAE = BRCAE.loc[lsBRCA,ls]\n",
691
    "BRCAM = BRCAM.loc[lsBRCA,ls]\n",
692
    "BRCAC = BRCAC.loc[lsBRCA,ls]\n",
693
    "\n",
694
    "print(BRCAE.shape)\n",
695
    "print(BRCAM.shape)\n",
696
    "print(BRCAC.shape)\n",
697
    "\n",
698
    "AutoencoderE.eval()\n",
699
    "AutoencoderM.eval()\n",
700
    "AutoencoderC.eval()\n",
701
    "Clas.eval()\n",
702
    "\n",
703
    "BRCAE2 = np.nan_to_num(BRCAE.values)\n",
704
    "BRCAM2 = np.nan_to_num(BRCAM.values)\n",
705
    "BRCAC2 = np.nan_to_num(BRCAC.values)\n",
706
    "\n",
707
    "NBRCAE2 = scalerGDSC.transform(BRCAE2)    \n",
708
    "\n",
709
    "BRCAexprs = torch.FloatTensor(NBRCAE2)\n",
710
    "BRCAmut = torch.FloatTensor(BRCAM2)\n",
711
    "BRCAcna = torch.FloatTensor(BRCAC2)\n",
712
    "\n",
713
    "BRCAZE = AutoencoderE(BRCAexprs)\n",
714
    "BRCAZM = AutoencoderM(BRCAmut)\n",
715
    "BRCAZC = AutoencoderC(BRCAcna)\n",
716
    "\n",
717
    "BRCAZT = torch.cat((BRCAZE, BRCAZM, BRCAZC), 1)\n",
718
    "BRCAZTX = F.normalize(BRCAZT, p=2, dim=0)\n",
719
    "PredBRCA = Clas(BRCAZTX)\n",
720
    "\n",
721
    "#print(PredBRCA.detach().numpy())"
722
   ]
723
  },
724
  {
725
   "cell_type": "code",
726
   "execution_count": 6,
727
   "metadata": {},
728
   "outputs": [
729
    {
730
     "name": "stdout",
731
     "output_type": "stream",
732
     "text": [
733
      "(130, 13081)\n",
734
      "(130, 13081)\n",
735
      "(130, 13081)\n"
736
     ]
737
    },
738
    {
739
     "name": "stderr",
740
     "output_type": "stream",
741
     "text": [
742
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
743
      "Passing list-likes to .loc or [] with any missing label will raise\n",
744
      "KeyError in the future, you can use .reindex() as an alternative.\n",
745
      "\n",
746
      "See the documentation here:\n",
747
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
748
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
749
      "Passing list-likes to .loc or [] with any missing label will raise\n",
750
      "KeyError in the future, you can use .reindex() as an alternative.\n",
751
      "\n",
752
      "See the documentation here:\n",
753
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
754
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
755
      "Passing list-likes to .loc or [] with any missing label will raise\n",
756
      "KeyError in the future, you can use .reindex() as an alternative.\n",
757
      "\n",
758
      "See the documentation here:\n",
759
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
760
     ]
761
    }
762
   ],
763
   "source": [
764
    "PAADE = pd.read_csv(\"TCGA-PAAD_exprs.tsv\", \n",
765
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
766
    "PAADE = pd.DataFrame.transpose(PAADE)\n",
767
    "\n",
768
    "PAADM = pd.read_csv(\"TCGA-PAAD_mutations.tsv\", \n",
769
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
770
    "PAADM = pd.DataFrame.transpose(PAADM)\n",
771
    "PAADM = PAADM.loc[:,~PAADM.columns.duplicated()]\n",
772
    "\n",
773
    "PAADC = pd.read_csv(\"TCGA-PAAD_CNA.tsv\", \n",
774
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
775
    "PAADC = pd.DataFrame.transpose(PAADC)\n",
776
    "PAADC = PAADC.loc[:,~PAADC.columns.duplicated()]\n",
777
    "\n",
778
    "PAADM = PAADM.fillna(0)\n",
779
    "PAADM[PAADM != 0.0] = 1\n",
780
    "PAADC = PAADC.fillna(0)\n",
781
    "PAADC[PAADC != 0.0] = 1\n",
782
    "\n",
783
    "#PAADE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
784
    "#PAADM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
785
    "#PAADC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
786
    "\n",
787
    "lsPAAD = PAADE.index.intersection(PAADM.index)\n",
788
    "lsPAAD = lsPAAD.intersection(PAADC.index)\n",
789
    "lsPAAD = pd.unique(lsPAAD)\n",
790
    "\n",
791
    "PAADE = PAADE.loc[lsPAAD,ls]\n",
792
    "PAADM = PAADM.loc[lsPAAD,ls]\n",
793
    "PAADC = PAADC.loc[lsPAAD,ls]\n",
794
    "\n",
795
    "print(PAADE.shape)\n",
796
    "print(PAADM.shape)\n",
797
    "print(PAADC.shape)\n",
798
    "\n",
799
    "AutoencoderE.eval()\n",
800
    "AutoencoderM.eval()\n",
801
    "AutoencoderC.eval()\n",
802
    "Clas.eval()\n",
803
    "\n",
804
    "PAADE2 = np.nan_to_num(PAADE.values)\n",
805
    "PAADM2 = np.nan_to_num(PAADM.values)\n",
806
    "PAADC2 = np.nan_to_num(PAADC.values)\n",
807
    "\n",
808
    "NPAADE2 = scalerGDSC.transform(PAADE2)    \n",
809
    "\n",
810
    "PAADexprs = torch.FloatTensor(NPAADE2)\n",
811
    "PAADmut = torch.FloatTensor(PAADM2)\n",
812
    "PAADcna = torch.FloatTensor(PAADC2)\n",
813
    "\n",
814
    "PAADZE = AutoencoderE(PAADexprs)\n",
815
    "PAADZM = AutoencoderM(PAADmut)\n",
816
    "PAADZC = AutoencoderC(PAADcna)\n",
817
    "\n",
818
    "PAADZT = torch.cat((PAADZE, PAADZM, PAADZC), 1)\n",
819
    "PAADZTX = F.normalize(PAADZT, p=2, dim=0)\n",
820
    "PredPAAD = Clas(PAADZTX)\n",
821
    "\n",
822
    "#print(PredPAAD.detach().numpy())"
823
   ]
824
  },
825
  {
826
   "cell_type": "code",
827
   "execution_count": 7,
828
   "metadata": {},
829
   "outputs": [
830
    {
831
     "name": "stderr",
832
     "output_type": "stream",
833
     "text": [
834
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
835
      "Passing list-likes to .loc or [] with any missing label will raise\n",
836
      "KeyError in the future, you can use .reindex() as an alternative.\n",
837
      "\n",
838
      "See the documentation here:\n",
839
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
840
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
841
      "Passing list-likes to .loc or [] with any missing label will raise\n",
842
      "KeyError in the future, you can use .reindex() as an alternative.\n",
843
      "\n",
844
      "See the documentation here:\n",
845
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
846
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
847
      "Passing list-likes to .loc or [] with any missing label will raise\n",
848
      "KeyError in the future, you can use .reindex() as an alternative.\n",
849
      "\n",
850
      "See the documentation here:\n",
851
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
852
     ]
853
    },
854
    {
855
     "name": "stdout",
856
     "output_type": "stream",
857
     "text": [
858
      "(475, 13081)\n",
859
      "(475, 13081)\n",
860
      "(475, 13081)\n"
861
     ]
862
    }
863
   ],
864
   "source": [
865
    "LUADE = pd.read_csv(\"TCGA-LUAD_exprs.tsv\", \n",
866
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
867
    "LUADE = pd.DataFrame.transpose(LUADE)\n",
868
    "\n",
869
    "LUADM = pd.read_csv(\"TCGA-LUAD_mutations.tsv\", \n",
870
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
871
    "LUADM = pd.DataFrame.transpose(LUADM)\n",
872
    "LUADM = LUADM.loc[:,~LUADM.columns.duplicated()]\n",
873
    "\n",
874
    "LUADC = pd.read_csv(\"TCGA-LUAD_CNA.tsv\", \n",
875
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
876
    "LUADC = pd.DataFrame.transpose(LUADC)\n",
877
    "LUADC = LUADC.loc[:,~LUADC.columns.duplicated()]\n",
878
    "\n",
879
    "LUADM = LUADM.fillna(0)\n",
880
    "LUADM[LUADM != 0.0] = 1\n",
881
    "LUADC = LUADC.fillna(0)\n",
882
    "LUADC[LUADC != 0.0] = 1\n",
883
    "\n",
884
    "#LUADE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
885
    "#LUADM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
886
    "#LUADC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
887
    "\n",
888
    "lsLUAD = LUADE.index.intersection(LUADM.index)\n",
889
    "lsLUAD = lsLUAD.intersection(LUADC.index)\n",
890
    "lsLUAD = pd.unique(lsLUAD)\n",
891
    "\n",
892
    "LUADE = LUADE.loc[lsLUAD,ls]\n",
893
    "LUADM = LUADM.loc[lsLUAD,ls]\n",
894
    "LUADC = LUADC.loc[lsLUAD,ls]\n",
895
    "\n",
896
    "print(LUADE.shape)\n",
897
    "print(LUADM.shape)\n",
898
    "print(LUADC.shape)\n",
899
    "\n",
900
    "AutoencoderE.eval()\n",
901
    "AutoencoderM.eval()\n",
902
    "AutoencoderC.eval()\n",
903
    "Clas.eval()\n",
904
    "\n",
905
    "LUADE2 = np.nan_to_num(LUADE.values)\n",
906
    "LUADM2 = np.nan_to_num(LUADM.values)\n",
907
    "LUADC2 = np.nan_to_num(LUADC.values)\n",
908
    "\n",
909
    "NLUADE2 = scalerGDSC.transform(LUADE2)    \n",
910
    "\n",
911
    "LUADexprs = torch.FloatTensor(NLUADE2)\n",
912
    "LUADmut = torch.FloatTensor(LUADM2)\n",
913
    "LUADcna = torch.FloatTensor(LUADC2)\n",
914
    "\n",
915
    "LUADZE = AutoencoderE(LUADexprs)\n",
916
    "LUADZM = AutoencoderM(LUADmut)\n",
917
    "LUADZC = AutoencoderC(LUADcna)\n",
918
    "\n",
919
    "LUADZT = torch.cat((LUADZE, LUADZM, LUADZC), 1)\n",
920
    "LUADZTX = F.normalize(LUADZT, p=2, dim=0)\n",
921
    "PredLUAD = Clas(LUADZTX)\n",
922
    "\n",
923
    "#print(PredLUAD.detach().numpy())"
924
   ]
925
  },
926
  {
927
   "cell_type": "code",
928
   "execution_count": 8,
929
   "metadata": {},
930
   "outputs": [],
931
   "source": [
932
    "from scipy.stats.stats import pearsonr\n",
933
    "from scipy.stats import spearmanr\n",
934
    "import statsmodels.api as sm\n",
935
    "from mne.stats import bonferroni_correction\n",
936
    "\n",
937
    "lsEGFR = [10000, 102, 10252, 10253,10254,1026,1027,107,108,109,111,11140,112,113,114,1147,115,117145,1173,1175,1211,1213,1385,1445,156,160,161,163,1950,1956,196883,2060,207,208,2308,2309,23239,2475,253260,2549,26018,2885,2931,29924,30011,3164,3265,3320,3709,3710,3845,4193,4303,4893,5136,5153,5170,5290,5295,5335,5566,5567,5568,5573,5575,5576,5577,5578,5580,5581,5582,55824,5594,5595,5604,5605,572,5728,57761,58513,5894,6199,6233,64223,6456,6464,6654,6714,6868,7249,728590,729120,730418,7311,731292,7529,79109,801,8027,8038,805,808,814,842,84335,867,9146,983,998]\n",
938
    "\n",
939
    "#lsEGFR = [10000,1026,1027,10298,10718,1398,1399,145957,1839,1950,1956,1978,2002,2064,2065,2066,2069,207,208,23533,23642,2475,25,2549,25759,27,2885,2932,3084,3265,369,3725,374,3845,399694,4609,4690,4893,5058,5062,5063,5290,5291,5293,5294,5295,5296,5335,53358,5336,5578,5579,5582,5594,5595,5599,5601,5602,5604,5605,5609,56924,57144,572,5747,5894,6198,6199,6416,6464,6654,6655,6714,673,6776,6777,685,7039,815,816,817,818,8440,8503,867,868,9542]"
940
   ]
941
  },
942
  {
943
   "cell_type": "code",
944
   "execution_count": 12,
945
   "metadata": {},
946
   "outputs": [],
947
   "source": [
948
    "listEGFR = PRADE.columns.intersection(lsEGFR)\n",
949
    "PRADEEGFR = PRADE[listEGFR]\n",
950
    "PRADMEGFR = PRADM[listEGFR]\n",
951
    "PRADCEGFR = PRADC[listEGFR]"
952
   ]
953
  },
954
  {
955
   "cell_type": "code",
956
   "execution_count": 13,
957
   "metadata": {},
958
   "outputs": [
959
    {
960
     "data": {
961
      "text/html": [
962
       "<table class=\"simpletable\">\n",
963
       "<caption>OLS Regression Results</caption>\n",
964
       "<tr>\n",
965
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.999</td>\n",
966
       "</tr>\n",
967
       "<tr>\n",
968
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.999</td>\n",
969
       "</tr>\n",
970
       "<tr>\n",
971
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   3331.</td>\n",
972
       "</tr>\n",
973
       "<tr>\n",
974
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th>  <td>  0.00</td> \n",
975
       "</tr>\n",
976
       "<tr>\n",
977
       "  <th>Time:</th>                 <td>18:16:35</td>     <th>  Log-Likelihood:    </th> <td>  1340.8</td>\n",
978
       "</tr>\n",
979
       "<tr>\n",
980
       "  <th>No. Observations:</th>      <td>   492</td>      <th>  AIC:               </th> <td>  -2480.</td>\n",
981
       "</tr>\n",
982
       "<tr>\n",
983
       "  <th>Df Residuals:</th>          <td>   391</td>      <th>  BIC:               </th> <td>  -2056.</td>\n",
984
       "</tr>\n",
985
       "<tr>\n",
986
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
987
       "</tr>\n",
988
       "<tr>\n",
989
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
990
       "</tr>\n",
991
       "</table>\n",
992
       "<table class=\"simpletable\">\n",
993
       "<tr>\n",
994
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
995
       "</tr>\n",
996
       "<tr>\n",
997
       "  <th>102</th>    <td>    0.0038</td> <td>    0.003</td> <td>    1.171</td> <td> 0.242</td> <td>   -0.003</td> <td>    0.010</td>\n",
998
       "</tr>\n",
999
       "<tr>\n",
1000
       "  <th>107</th>    <td>   -0.0015</td> <td>    0.001</td> <td>   -0.982</td> <td> 0.327</td> <td>   -0.004</td> <td>    0.001</td>\n",
1001
       "</tr>\n",
1002
       "<tr>\n",
1003
       "  <th>108</th>    <td>   -0.0006</td> <td>    0.002</td> <td>   -0.377</td> <td> 0.706</td> <td>   -0.004</td> <td>    0.003</td>\n",
1004
       "</tr>\n",
1005
       "<tr>\n",
1006
       "  <th>109</th>    <td>   -0.0021</td> <td>    0.004</td> <td>   -0.585</td> <td> 0.559</td> <td>   -0.009</td> <td>    0.005</td>\n",
1007
       "</tr>\n",
1008
       "<tr>\n",
1009
       "  <th>111</th>    <td>   -0.0038</td> <td>    0.003</td> <td>   -1.432</td> <td> 0.153</td> <td>   -0.009</td> <td>    0.001</td>\n",
1010
       "</tr>\n",
1011
       "<tr>\n",
1012
       "  <th>112</th>    <td>   -0.0092</td> <td>    0.004</td> <td>   -2.553</td> <td> 0.011</td> <td>   -0.016</td> <td>   -0.002</td>\n",
1013
       "</tr>\n",
1014
       "<tr>\n",
1015
       "  <th>113</th>    <td>    0.0046</td> <td>    0.003</td> <td>    1.325</td> <td> 0.186</td> <td>   -0.002</td> <td>    0.011</td>\n",
1016
       "</tr>\n",
1017
       "<tr>\n",
1018
       "  <th>114</th>    <td>    0.0184</td> <td>    0.021</td> <td>    0.868</td> <td> 0.386</td> <td>   -0.023</td> <td>    0.060</td>\n",
1019
       "</tr>\n",
1020
       "<tr>\n",
1021
       "  <th>115</th>    <td>   -0.0002</td> <td>    0.004</td> <td>   -0.047</td> <td> 0.963</td> <td>   -0.007</td> <td>    0.007</td>\n",
1022
       "</tr>\n",
1023
       "<tr>\n",
1024
       "  <th>160</th>    <td>    0.0020</td> <td>    0.004</td> <td>    0.564</td> <td> 0.573</td> <td>   -0.005</td> <td>    0.009</td>\n",
1025
       "</tr>\n",
1026
       "<tr>\n",
1027
       "  <th>161</th>    <td>    0.0081</td> <td>    0.006</td> <td>    1.424</td> <td> 0.155</td> <td>   -0.003</td> <td>    0.019</td>\n",
1028
       "</tr>\n",
1029
       "<tr>\n",
1030
       "  <th>163</th>    <td>    0.0006</td> <td>    0.004</td> <td>    0.142</td> <td> 0.887</td> <td>   -0.008</td> <td>    0.009</td>\n",
1031
       "</tr>\n",
1032
       "<tr>\n",
1033
       "  <th>207</th>    <td>    0.0157</td> <td>    0.004</td> <td>    3.698</td> <td> 0.000</td> <td>    0.007</td> <td>    0.024</td>\n",
1034
       "</tr>\n",
1035
       "<tr>\n",
1036
       "  <th>208</th>    <td>    0.0044</td> <td>    0.006</td> <td>    0.792</td> <td> 0.429</td> <td>   -0.007</td> <td>    0.015</td>\n",
1037
       "</tr>\n",
1038
       "<tr>\n",
1039
       "  <th>572</th>    <td>    0.0089</td> <td>    0.004</td> <td>    1.975</td> <td> 0.049</td> <td> 3.82e-05</td> <td>    0.018</td>\n",
1040
       "</tr>\n",
1041
       "<tr>\n",
1042
       "  <th>801</th>    <td>   -0.0057</td> <td>    0.003</td> <td>   -1.772</td> <td> 0.077</td> <td>   -0.012</td> <td>    0.001</td>\n",
1043
       "</tr>\n",
1044
       "<tr>\n",
1045
       "  <th>805</th>    <td>    0.0053</td> <td>    0.004</td> <td>    1.280</td> <td> 0.201</td> <td>   -0.003</td> <td>    0.013</td>\n",
1046
       "</tr>\n",
1047
       "<tr>\n",
1048
       "  <th>808</th>    <td>    0.0055</td> <td>    0.005</td> <td>    1.141</td> <td> 0.255</td> <td>   -0.004</td> <td>    0.015</td>\n",
1049
       "</tr>\n",
1050
       "<tr>\n",
1051
       "  <th>814</th>    <td>   -0.0012</td> <td>    0.003</td> <td>   -0.477</td> <td> 0.633</td> <td>   -0.006</td> <td>    0.004</td>\n",
1052
       "</tr>\n",
1053
       "<tr>\n",
1054
       "  <th>842</th>    <td>   -0.0072</td> <td>    0.004</td> <td>   -1.804</td> <td> 0.072</td> <td>   -0.015</td> <td>    0.001</td>\n",
1055
       "</tr>\n",
1056
       "<tr>\n",
1057
       "  <th>867</th>    <td>    0.0034</td> <td>    0.005</td> <td>    0.666</td> <td> 0.506</td> <td>   -0.007</td> <td>    0.014</td>\n",
1058
       "</tr>\n",
1059
       "<tr>\n",
1060
       "  <th>983</th>    <td>    0.0062</td> <td>    0.002</td> <td>    3.919</td> <td> 0.000</td> <td>    0.003</td> <td>    0.009</td>\n",
1061
       "</tr>\n",
1062
       "<tr>\n",
1063
       "  <th>998</th>    <td>   -0.0097</td> <td>    0.005</td> <td>   -1.854</td> <td> 0.064</td> <td>   -0.020</td> <td>    0.001</td>\n",
1064
       "</tr>\n",
1065
       "<tr>\n",
1066
       "  <th>1026</th>   <td>    0.0025</td> <td>    0.001</td> <td>    1.717</td> <td> 0.087</td> <td>   -0.000</td> <td>    0.005</td>\n",
1067
       "</tr>\n",
1068
       "<tr>\n",
1069
       "  <th>1027</th>   <td>   -0.0033</td> <td>    0.003</td> <td>   -1.224</td> <td> 0.222</td> <td>   -0.009</td> <td>    0.002</td>\n",
1070
       "</tr>\n",
1071
       "<tr>\n",
1072
       "  <th>1147</th>   <td>   -0.0025</td> <td>    0.004</td> <td>   -0.635</td> <td> 0.526</td> <td>   -0.010</td> <td>    0.005</td>\n",
1073
       "</tr>\n",
1074
       "<tr>\n",
1075
       "  <th>1173</th>   <td>    0.0166</td> <td>    0.005</td> <td>    3.472</td> <td> 0.001</td> <td>    0.007</td> <td>    0.026</td>\n",
1076
       "</tr>\n",
1077
       "<tr>\n",
1078
       "  <th>1175</th>   <td>   -0.0029</td> <td>    0.003</td> <td>   -0.927</td> <td> 0.355</td> <td>   -0.009</td> <td>    0.003</td>\n",
1079
       "</tr>\n",
1080
       "<tr>\n",
1081
       "  <th>1211</th>   <td>    0.0137</td> <td>    0.004</td> <td>    3.271</td> <td> 0.001</td> <td>    0.005</td> <td>    0.022</td>\n",
1082
       "</tr>\n",
1083
       "<tr>\n",
1084
       "  <th>1213</th>   <td>   -0.0176</td> <td>    0.005</td> <td>   -3.238</td> <td> 0.001</td> <td>   -0.028</td> <td>   -0.007</td>\n",
1085
       "</tr>\n",
1086
       "<tr>\n",
1087
       "  <th>1385</th>   <td>   -0.0272</td> <td>    0.006</td> <td>   -4.807</td> <td> 0.000</td> <td>   -0.038</td> <td>   -0.016</td>\n",
1088
       "</tr>\n",
1089
       "<tr>\n",
1090
       "  <th>1445</th>   <td>   -0.0190</td> <td>    0.005</td> <td>   -3.692</td> <td> 0.000</td> <td>   -0.029</td> <td>   -0.009</td>\n",
1091
       "</tr>\n",
1092
       "<tr>\n",
1093
       "  <th>1950</th>   <td>    0.0015</td> <td>    0.001</td> <td>    1.262</td> <td> 0.208</td> <td>   -0.001</td> <td>    0.004</td>\n",
1094
       "</tr>\n",
1095
       "<tr>\n",
1096
       "  <th>1956</th>   <td>    0.0091</td> <td>    0.003</td> <td>    3.223</td> <td> 0.001</td> <td>    0.004</td> <td>    0.015</td>\n",
1097
       "</tr>\n",
1098
       "<tr>\n",
1099
       "  <th>2060</th>   <td>   -0.0084</td> <td>    0.005</td> <td>   -1.800</td> <td> 0.073</td> <td>   -0.018</td> <td>    0.001</td>\n",
1100
       "</tr>\n",
1101
       "<tr>\n",
1102
       "  <th>2308</th>   <td>   -0.0026</td> <td>    0.004</td> <td>   -0.730</td> <td> 0.466</td> <td>   -0.010</td> <td>    0.004</td>\n",
1103
       "</tr>\n",
1104
       "<tr>\n",
1105
       "  <th>2309</th>   <td>    0.0015</td> <td>    0.002</td> <td>    0.613</td> <td> 0.540</td> <td>   -0.003</td> <td>    0.006</td>\n",
1106
       "</tr>\n",
1107
       "<tr>\n",
1108
       "  <th>2475</th>   <td>    0.0009</td> <td>    0.003</td> <td>    0.326</td> <td> 0.744</td> <td>   -0.005</td> <td>    0.007</td>\n",
1109
       "</tr>\n",
1110
       "<tr>\n",
1111
       "  <th>2549</th>   <td>    0.0126</td> <td>    0.004</td> <td>    2.997</td> <td> 0.003</td> <td>    0.004</td> <td>    0.021</td>\n",
1112
       "</tr>\n",
1113
       "<tr>\n",
1114
       "  <th>2885</th>   <td>    0.0143</td> <td>    0.006</td> <td>    2.293</td> <td> 0.022</td> <td>    0.002</td> <td>    0.027</td>\n",
1115
       "</tr>\n",
1116
       "<tr>\n",
1117
       "  <th>2931</th>   <td>    0.0105</td> <td>    0.003</td> <td>    3.025</td> <td> 0.003</td> <td>    0.004</td> <td>    0.017</td>\n",
1118
       "</tr>\n",
1119
       "<tr>\n",
1120
       "  <th>3164</th>   <td>    0.0011</td> <td>    0.001</td> <td>    0.984</td> <td> 0.325</td> <td>   -0.001</td> <td>    0.003</td>\n",
1121
       "</tr>\n",
1122
       "<tr>\n",
1123
       "  <th>3265</th>   <td>    0.0007</td> <td>    0.004</td> <td>    0.162</td> <td> 0.872</td> <td>   -0.008</td> <td>    0.009</td>\n",
1124
       "</tr>\n",
1125
       "<tr>\n",
1126
       "  <th>3320</th>   <td>    0.0039</td> <td>    0.003</td> <td>    1.407</td> <td> 0.160</td> <td>   -0.002</td> <td>    0.009</td>\n",
1127
       "</tr>\n",
1128
       "<tr>\n",
1129
       "  <th>3709</th>   <td>   -0.0004</td> <td>    0.002</td> <td>   -0.168</td> <td> 0.867</td> <td>   -0.005</td> <td>    0.004</td>\n",
1130
       "</tr>\n",
1131
       "<tr>\n",
1132
       "  <th>3710</th>   <td>    0.0032</td> <td>    0.002</td> <td>    1.278</td> <td> 0.202</td> <td>   -0.002</td> <td>    0.008</td>\n",
1133
       "</tr>\n",
1134
       "<tr>\n",
1135
       "  <th>3845</th>   <td>    0.0043</td> <td>    0.003</td> <td>    1.285</td> <td> 0.200</td> <td>   -0.002</td> <td>    0.011</td>\n",
1136
       "</tr>\n",
1137
       "<tr>\n",
1138
       "  <th>4193</th>   <td>    0.0049</td> <td>    0.003</td> <td>    1.479</td> <td> 0.140</td> <td>   -0.002</td> <td>    0.011</td>\n",
1139
       "</tr>\n",
1140
       "<tr>\n",
1141
       "  <th>4303</th>   <td>   -0.0028</td> <td>    0.004</td> <td>   -0.781</td> <td> 0.435</td> <td>   -0.010</td> <td>    0.004</td>\n",
1142
       "</tr>\n",
1143
       "<tr>\n",
1144
       "  <th>4893</th>   <td>   -0.0048</td> <td>    0.003</td> <td>   -1.744</td> <td> 0.082</td> <td>   -0.010</td> <td>    0.001</td>\n",
1145
       "</tr>\n",
1146
       "<tr>\n",
1147
       "  <th>5136</th>   <td>    0.0006</td> <td>    0.002</td> <td>    0.346</td> <td> 0.729</td> <td>   -0.003</td> <td>    0.004</td>\n",
1148
       "</tr>\n",
1149
       "<tr>\n",
1150
       "  <th>5170</th>   <td>    0.0062</td> <td>    0.005</td> <td>    1.193</td> <td> 0.234</td> <td>   -0.004</td> <td>    0.016</td>\n",
1151
       "</tr>\n",
1152
       "<tr>\n",
1153
       "  <th>5290</th>   <td>    0.0012</td> <td>    0.004</td> <td>    0.276</td> <td> 0.782</td> <td>   -0.007</td> <td>    0.010</td>\n",
1154
       "</tr>\n",
1155
       "<tr>\n",
1156
       "  <th>5295</th>   <td>   -0.0067</td> <td>    0.003</td> <td>   -2.376</td> <td> 0.018</td> <td>   -0.012</td> <td>   -0.001</td>\n",
1157
       "</tr>\n",
1158
       "<tr>\n",
1159
       "  <th>5335</th>   <td>    0.0026</td> <td>    0.004</td> <td>    0.607</td> <td> 0.544</td> <td>   -0.006</td> <td>    0.011</td>\n",
1160
       "</tr>\n",
1161
       "<tr>\n",
1162
       "  <th>5566</th>   <td>   -0.0074</td> <td>    0.005</td> <td>   -1.360</td> <td> 0.175</td> <td>   -0.018</td> <td>    0.003</td>\n",
1163
       "</tr>\n",
1164
       "<tr>\n",
1165
       "  <th>5567</th>   <td>   -0.0004</td> <td>    0.002</td> <td>   -0.188</td> <td> 0.851</td> <td>   -0.004</td> <td>    0.003</td>\n",
1166
       "</tr>\n",
1167
       "<tr>\n",
1168
       "  <th>5573</th>   <td>   -0.0023</td> <td>    0.005</td> <td>   -0.451</td> <td> 0.652</td> <td>   -0.012</td> <td>    0.008</td>\n",
1169
       "</tr>\n",
1170
       "<tr>\n",
1171
       "  <th>5575</th>   <td>    0.0043</td> <td>    0.002</td> <td>    1.820</td> <td> 0.070</td> <td>   -0.000</td> <td>    0.009</td>\n",
1172
       "</tr>\n",
1173
       "<tr>\n",
1174
       "  <th>5576</th>   <td>   -0.0015</td> <td>    0.004</td> <td>   -0.396</td> <td> 0.692</td> <td>   -0.009</td> <td>    0.006</td>\n",
1175
       "</tr>\n",
1176
       "<tr>\n",
1177
       "  <th>5577</th>   <td>    0.0034</td> <td>    0.002</td> <td>    1.822</td> <td> 0.069</td> <td>   -0.000</td> <td>    0.007</td>\n",
1178
       "</tr>\n",
1179
       "<tr>\n",
1180
       "  <th>5578</th>   <td>    0.0009</td> <td>    0.002</td> <td>    0.418</td> <td> 0.676</td> <td>   -0.003</td> <td>    0.005</td>\n",
1181
       "</tr>\n",
1182
       "<tr>\n",
1183
       "  <th>5580</th>   <td>   -0.0008</td> <td>    0.003</td> <td>   -0.276</td> <td> 0.783</td> <td>   -0.007</td> <td>    0.005</td>\n",
1184
       "</tr>\n",
1185
       "<tr>\n",
1186
       "  <th>5581</th>   <td>   -0.0089</td> <td>    0.005</td> <td>   -1.857</td> <td> 0.064</td> <td>   -0.018</td> <td>    0.001</td>\n",
1187
       "</tr>\n",
1188
       "<tr>\n",
1189
       "  <th>5582</th>   <td>    0.0085</td> <td>    0.017</td> <td>    0.491</td> <td> 0.624</td> <td>   -0.026</td> <td>    0.043</td>\n",
1190
       "</tr>\n",
1191
       "<tr>\n",
1192
       "  <th>5594</th>   <td>    0.0003</td> <td>    0.006</td> <td>    0.055</td> <td> 0.956</td> <td>   -0.012</td> <td>    0.012</td>\n",
1193
       "</tr>\n",
1194
       "<tr>\n",
1195
       "  <th>5595</th>   <td>    0.0037</td> <td>    0.004</td> <td>    0.935</td> <td> 0.350</td> <td>   -0.004</td> <td>    0.011</td>\n",
1196
       "</tr>\n",
1197
       "<tr>\n",
1198
       "  <th>5604</th>   <td>   -0.0115</td> <td>    0.004</td> <td>   -3.119</td> <td> 0.002</td> <td>   -0.019</td> <td>   -0.004</td>\n",
1199
       "</tr>\n",
1200
       "<tr>\n",
1201
       "  <th>5605</th>   <td>    0.0087</td> <td>    0.005</td> <td>    1.745</td> <td> 0.082</td> <td>   -0.001</td> <td>    0.018</td>\n",
1202
       "</tr>\n",
1203
       "<tr>\n",
1204
       "  <th>5728</th>   <td>-2.957e-05</td> <td>    0.002</td> <td>   -0.016</td> <td> 0.987</td> <td>   -0.004</td> <td>    0.004</td>\n",
1205
       "</tr>\n",
1206
       "<tr>\n",
1207
       "  <th>5894</th>   <td>    0.0119</td> <td>    0.006</td> <td>    2.114</td> <td> 0.035</td> <td>    0.001</td> <td>    0.023</td>\n",
1208
       "</tr>\n",
1209
       "<tr>\n",
1210
       "  <th>6199</th>   <td>   -0.0020</td> <td>    0.004</td> <td>   -0.486</td> <td> 0.627</td> <td>   -0.010</td> <td>    0.006</td>\n",
1211
       "</tr>\n",
1212
       "<tr>\n",
1213
       "  <th>6456</th>   <td>   -0.0035</td> <td>    0.001</td> <td>   -2.815</td> <td> 0.005</td> <td>   -0.006</td> <td>   -0.001</td>\n",
1214
       "</tr>\n",
1215
       "<tr>\n",
1216
       "  <th>6464</th>   <td>    0.0086</td> <td>    0.005</td> <td>    1.901</td> <td> 0.058</td> <td>   -0.000</td> <td>    0.018</td>\n",
1217
       "</tr>\n",
1218
       "<tr>\n",
1219
       "  <th>6654</th>   <td>   -0.0153</td> <td>    0.006</td> <td>   -2.782</td> <td> 0.006</td> <td>   -0.026</td> <td>   -0.004</td>\n",
1220
       "</tr>\n",
1221
       "<tr>\n",
1222
       "  <th>6714</th>   <td>    0.0003</td> <td>    0.003</td> <td>    0.118</td> <td> 0.906</td> <td>   -0.005</td> <td>    0.006</td>\n",
1223
       "</tr>\n",
1224
       "<tr>\n",
1225
       "  <th>6868</th>   <td>   -0.0008</td> <td>    0.005</td> <td>   -0.152</td> <td> 0.880</td> <td>   -0.011</td> <td>    0.009</td>\n",
1226
       "</tr>\n",
1227
       "<tr>\n",
1228
       "  <th>7249</th>   <td>    0.0062</td> <td>    0.006</td> <td>    1.129</td> <td> 0.259</td> <td>   -0.005</td> <td>    0.017</td>\n",
1229
       "</tr>\n",
1230
       "<tr>\n",
1231
       "  <th>7311</th>   <td>    0.0123</td> <td>    0.004</td> <td>    2.798</td> <td> 0.005</td> <td>    0.004</td> <td>    0.021</td>\n",
1232
       "</tr>\n",
1233
       "<tr>\n",
1234
       "  <th>7529</th>   <td>    0.0052</td> <td>    0.005</td> <td>    1.055</td> <td> 0.292</td> <td>   -0.004</td> <td>    0.015</td>\n",
1235
       "</tr>\n",
1236
       "<tr>\n",
1237
       "  <th>8027</th>   <td>    0.0059</td> <td>    0.005</td> <td>    1.259</td> <td> 0.209</td> <td>   -0.003</td> <td>    0.015</td>\n",
1238
       "</tr>\n",
1239
       "<tr>\n",
1240
       "  <th>8038</th>   <td>   -0.0037</td> <td>    0.003</td> <td>   -1.379</td> <td> 0.169</td> <td>   -0.009</td> <td>    0.002</td>\n",
1241
       "</tr>\n",
1242
       "<tr>\n",
1243
       "  <th>9146</th>   <td>   -0.0085</td> <td>    0.006</td> <td>   -1.370</td> <td> 0.172</td> <td>   -0.021</td> <td>    0.004</td>\n",
1244
       "</tr>\n",
1245
       "<tr>\n",
1246
       "  <th>10000</th>  <td>   -0.0011</td> <td>    0.003</td> <td>   -0.442</td> <td> 0.659</td> <td>   -0.006</td> <td>    0.004</td>\n",
1247
       "</tr>\n",
1248
       "<tr>\n",
1249
       "  <th>10252</th>  <td>    0.0025</td> <td>    0.002</td> <td>    1.088</td> <td> 0.277</td> <td>   -0.002</td> <td>    0.007</td>\n",
1250
       "</tr>\n",
1251
       "<tr>\n",
1252
       "  <th>10253</th>  <td>    0.0016</td> <td>    0.003</td> <td>    0.595</td> <td> 0.552</td> <td>   -0.004</td> <td>    0.007</td>\n",
1253
       "</tr>\n",
1254
       "<tr>\n",
1255
       "  <th>10254</th>  <td>    0.0036</td> <td>    0.004</td> <td>    0.861</td> <td> 0.390</td> <td>   -0.005</td> <td>    0.012</td>\n",
1256
       "</tr>\n",
1257
       "<tr>\n",
1258
       "  <th>11140</th>  <td>    0.0068</td> <td>    0.006</td> <td>    1.064</td> <td> 0.288</td> <td>   -0.006</td> <td>    0.019</td>\n",
1259
       "</tr>\n",
1260
       "<tr>\n",
1261
       "  <th>23239</th>  <td>    0.0050</td> <td>    0.003</td> <td>    1.664</td> <td> 0.097</td> <td>   -0.001</td> <td>    0.011</td>\n",
1262
       "</tr>\n",
1263
       "<tr>\n",
1264
       "  <th>26018</th>  <td>    0.0061</td> <td>    0.003</td> <td>    2.397</td> <td> 0.017</td> <td>    0.001</td> <td>    0.011</td>\n",
1265
       "</tr>\n",
1266
       "<tr>\n",
1267
       "  <th>29924</th>  <td>   -0.0105</td> <td>    0.005</td> <td>   -2.092</td> <td> 0.037</td> <td>   -0.020</td> <td>   -0.001</td>\n",
1268
       "</tr>\n",
1269
       "<tr>\n",
1270
       "  <th>30011</th>  <td>   -0.0037</td> <td>    0.003</td> <td>   -1.088</td> <td> 0.277</td> <td>   -0.010</td> <td>    0.003</td>\n",
1271
       "</tr>\n",
1272
       "<tr>\n",
1273
       "  <th>55824</th>  <td>    0.0007</td> <td>    0.003</td> <td>    0.277</td> <td> 0.782</td> <td>   -0.004</td> <td>    0.006</td>\n",
1274
       "</tr>\n",
1275
       "<tr>\n",
1276
       "  <th>57761</th>  <td>   -0.0018</td> <td>    0.002</td> <td>   -0.844</td> <td> 0.399</td> <td>   -0.006</td> <td>    0.002</td>\n",
1277
       "</tr>\n",
1278
       "<tr>\n",
1279
       "  <th>58513</th>  <td>   -0.0083</td> <td>    0.007</td> <td>   -1.252</td> <td> 0.211</td> <td>   -0.021</td> <td>    0.005</td>\n",
1280
       "</tr>\n",
1281
       "<tr>\n",
1282
       "  <th>64223</th>  <td>   -0.0173</td> <td>    0.006</td> <td>   -3.144</td> <td> 0.002</td> <td>   -0.028</td> <td>   -0.006</td>\n",
1283
       "</tr>\n",
1284
       "<tr>\n",
1285
       "  <th>79109</th>  <td>    0.0100</td> <td>    0.006</td> <td>    1.691</td> <td> 0.092</td> <td>   -0.002</td> <td>    0.022</td>\n",
1286
       "</tr>\n",
1287
       "<tr>\n",
1288
       "  <th>84335</th>  <td>   -0.0074</td> <td>    0.005</td> <td>   -1.453</td> <td> 0.147</td> <td>   -0.018</td> <td>    0.003</td>\n",
1289
       "</tr>\n",
1290
       "<tr>\n",
1291
       "  <th>117145</th> <td>    0.0006</td> <td>    0.003</td> <td>    0.199</td> <td> 0.843</td> <td>   -0.006</td> <td>    0.007</td>\n",
1292
       "</tr>\n",
1293
       "<tr>\n",
1294
       "  <th>196883</th> <td>   -0.0055</td> <td>    0.003</td> <td>   -1.924</td> <td> 0.055</td> <td>   -0.011</td> <td>    0.000</td>\n",
1295
       "</tr>\n",
1296
       "<tr>\n",
1297
       "  <th>253260</th> <td>    0.0202</td> <td>    0.005</td> <td>    3.858</td> <td> 0.000</td> <td>    0.010</td> <td>    0.030</td>\n",
1298
       "</tr>\n",
1299
       "</table>\n",
1300
       "<table class=\"simpletable\">\n",
1301
       "<tr>\n",
1302
       "  <th>Omnibus:</th>       <td>63.897</td> <th>  Durbin-Watson:     </th> <td>   1.907</td>\n",
1303
       "</tr>\n",
1304
       "<tr>\n",
1305
       "  <th>Prob(Omnibus):</th> <td> 0.000</td> <th>  Jarque-Bera (JB):  </th> <td> 218.126</td>\n",
1306
       "</tr>\n",
1307
       "<tr>\n",
1308
       "  <th>Skew:</th>          <td> 0.560</td> <th>  Prob(JB):          </th> <td>4.31e-48</td>\n",
1309
       "</tr>\n",
1310
       "<tr>\n",
1311
       "  <th>Kurtosis:</th>      <td> 6.063</td> <th>  Cond. No.          </th> <td>1.34e+03</td>\n",
1312
       "</tr>\n",
1313
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.<br/>[2] The condition number is large, 1.34e+03. This might indicate that there are<br/>strong multicollinearity or other numerical problems."
1314
      ],
1315
      "text/plain": [
1316
       "<class 'statsmodels.iolib.summary.Summary'>\n",
1317
       "\"\"\"\n",
1318
       "                            OLS Regression Results                            \n",
1319
       "==============================================================================\n",
1320
       "Dep. Variable:                      y   R-squared:                       0.999\n",
1321
       "Model:                            OLS   Adj. R-squared:                  0.999\n",
1322
       "Method:                 Least Squares   F-statistic:                     3331.\n",
1323
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):               0.00\n",
1324
       "Time:                        18:16:35   Log-Likelihood:                 1340.8\n",
1325
       "No. Observations:                 492   AIC:                            -2480.\n",
1326
       "Df Residuals:                     391   BIC:                            -2056.\n",
1327
       "Df Model:                         101                                         \n",
1328
       "Covariance Type:            nonrobust                                         \n",
1329
       "==============================================================================\n",
1330
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
1331
       "------------------------------------------------------------------------------\n",
1332
       "102            0.0038      0.003      1.171      0.242      -0.003       0.010\n",
1333
       "107           -0.0015      0.001     -0.982      0.327      -0.004       0.001\n",
1334
       "108           -0.0006      0.002     -0.377      0.706      -0.004       0.003\n",
1335
       "109           -0.0021      0.004     -0.585      0.559      -0.009       0.005\n",
1336
       "111           -0.0038      0.003     -1.432      0.153      -0.009       0.001\n",
1337
       "112           -0.0092      0.004     -2.553      0.011      -0.016      -0.002\n",
1338
       "113            0.0046      0.003      1.325      0.186      -0.002       0.011\n",
1339
       "114            0.0184      0.021      0.868      0.386      -0.023       0.060\n",
1340
       "115           -0.0002      0.004     -0.047      0.963      -0.007       0.007\n",
1341
       "160            0.0020      0.004      0.564      0.573      -0.005       0.009\n",
1342
       "161            0.0081      0.006      1.424      0.155      -0.003       0.019\n",
1343
       "163            0.0006      0.004      0.142      0.887      -0.008       0.009\n",
1344
       "207            0.0157      0.004      3.698      0.000       0.007       0.024\n",
1345
       "208            0.0044      0.006      0.792      0.429      -0.007       0.015\n",
1346
       "572            0.0089      0.004      1.975      0.049    3.82e-05       0.018\n",
1347
       "801           -0.0057      0.003     -1.772      0.077      -0.012       0.001\n",
1348
       "805            0.0053      0.004      1.280      0.201      -0.003       0.013\n",
1349
       "808            0.0055      0.005      1.141      0.255      -0.004       0.015\n",
1350
       "814           -0.0012      0.003     -0.477      0.633      -0.006       0.004\n",
1351
       "842           -0.0072      0.004     -1.804      0.072      -0.015       0.001\n",
1352
       "867            0.0034      0.005      0.666      0.506      -0.007       0.014\n",
1353
       "983            0.0062      0.002      3.919      0.000       0.003       0.009\n",
1354
       "998           -0.0097      0.005     -1.854      0.064      -0.020       0.001\n",
1355
       "1026           0.0025      0.001      1.717      0.087      -0.000       0.005\n",
1356
       "1027          -0.0033      0.003     -1.224      0.222      -0.009       0.002\n",
1357
       "1147          -0.0025      0.004     -0.635      0.526      -0.010       0.005\n",
1358
       "1173           0.0166      0.005      3.472      0.001       0.007       0.026\n",
1359
       "1175          -0.0029      0.003     -0.927      0.355      -0.009       0.003\n",
1360
       "1211           0.0137      0.004      3.271      0.001       0.005       0.022\n",
1361
       "1213          -0.0176      0.005     -3.238      0.001      -0.028      -0.007\n",
1362
       "1385          -0.0272      0.006     -4.807      0.000      -0.038      -0.016\n",
1363
       "1445          -0.0190      0.005     -3.692      0.000      -0.029      -0.009\n",
1364
       "1950           0.0015      0.001      1.262      0.208      -0.001       0.004\n",
1365
       "1956           0.0091      0.003      3.223      0.001       0.004       0.015\n",
1366
       "2060          -0.0084      0.005     -1.800      0.073      -0.018       0.001\n",
1367
       "2308          -0.0026      0.004     -0.730      0.466      -0.010       0.004\n",
1368
       "2309           0.0015      0.002      0.613      0.540      -0.003       0.006\n",
1369
       "2475           0.0009      0.003      0.326      0.744      -0.005       0.007\n",
1370
       "2549           0.0126      0.004      2.997      0.003       0.004       0.021\n",
1371
       "2885           0.0143      0.006      2.293      0.022       0.002       0.027\n",
1372
       "2931           0.0105      0.003      3.025      0.003       0.004       0.017\n",
1373
       "3164           0.0011      0.001      0.984      0.325      -0.001       0.003\n",
1374
       "3265           0.0007      0.004      0.162      0.872      -0.008       0.009\n",
1375
       "3320           0.0039      0.003      1.407      0.160      -0.002       0.009\n",
1376
       "3709          -0.0004      0.002     -0.168      0.867      -0.005       0.004\n",
1377
       "3710           0.0032      0.002      1.278      0.202      -0.002       0.008\n",
1378
       "3845           0.0043      0.003      1.285      0.200      -0.002       0.011\n",
1379
       "4193           0.0049      0.003      1.479      0.140      -0.002       0.011\n",
1380
       "4303          -0.0028      0.004     -0.781      0.435      -0.010       0.004\n",
1381
       "4893          -0.0048      0.003     -1.744      0.082      -0.010       0.001\n",
1382
       "5136           0.0006      0.002      0.346      0.729      -0.003       0.004\n",
1383
       "5170           0.0062      0.005      1.193      0.234      -0.004       0.016\n",
1384
       "5290           0.0012      0.004      0.276      0.782      -0.007       0.010\n",
1385
       "5295          -0.0067      0.003     -2.376      0.018      -0.012      -0.001\n",
1386
       "5335           0.0026      0.004      0.607      0.544      -0.006       0.011\n",
1387
       "5566          -0.0074      0.005     -1.360      0.175      -0.018       0.003\n",
1388
       "5567          -0.0004      0.002     -0.188      0.851      -0.004       0.003\n",
1389
       "5573          -0.0023      0.005     -0.451      0.652      -0.012       0.008\n",
1390
       "5575           0.0043      0.002      1.820      0.070      -0.000       0.009\n",
1391
       "5576          -0.0015      0.004     -0.396      0.692      -0.009       0.006\n",
1392
       "5577           0.0034      0.002      1.822      0.069      -0.000       0.007\n",
1393
       "5578           0.0009      0.002      0.418      0.676      -0.003       0.005\n",
1394
       "5580          -0.0008      0.003     -0.276      0.783      -0.007       0.005\n",
1395
       "5581          -0.0089      0.005     -1.857      0.064      -0.018       0.001\n",
1396
       "5582           0.0085      0.017      0.491      0.624      -0.026       0.043\n",
1397
       "5594           0.0003      0.006      0.055      0.956      -0.012       0.012\n",
1398
       "5595           0.0037      0.004      0.935      0.350      -0.004       0.011\n",
1399
       "5604          -0.0115      0.004     -3.119      0.002      -0.019      -0.004\n",
1400
       "5605           0.0087      0.005      1.745      0.082      -0.001       0.018\n",
1401
       "5728       -2.957e-05      0.002     -0.016      0.987      -0.004       0.004\n",
1402
       "5894           0.0119      0.006      2.114      0.035       0.001       0.023\n",
1403
       "6199          -0.0020      0.004     -0.486      0.627      -0.010       0.006\n",
1404
       "6456          -0.0035      0.001     -2.815      0.005      -0.006      -0.001\n",
1405
       "6464           0.0086      0.005      1.901      0.058      -0.000       0.018\n",
1406
       "6654          -0.0153      0.006     -2.782      0.006      -0.026      -0.004\n",
1407
       "6714           0.0003      0.003      0.118      0.906      -0.005       0.006\n",
1408
       "6868          -0.0008      0.005     -0.152      0.880      -0.011       0.009\n",
1409
       "7249           0.0062      0.006      1.129      0.259      -0.005       0.017\n",
1410
       "7311           0.0123      0.004      2.798      0.005       0.004       0.021\n",
1411
       "7529           0.0052      0.005      1.055      0.292      -0.004       0.015\n",
1412
       "8027           0.0059      0.005      1.259      0.209      -0.003       0.015\n",
1413
       "8038          -0.0037      0.003     -1.379      0.169      -0.009       0.002\n",
1414
       "9146          -0.0085      0.006     -1.370      0.172      -0.021       0.004\n",
1415
       "10000         -0.0011      0.003     -0.442      0.659      -0.006       0.004\n",
1416
       "10252          0.0025      0.002      1.088      0.277      -0.002       0.007\n",
1417
       "10253          0.0016      0.003      0.595      0.552      -0.004       0.007\n",
1418
       "10254          0.0036      0.004      0.861      0.390      -0.005       0.012\n",
1419
       "11140          0.0068      0.006      1.064      0.288      -0.006       0.019\n",
1420
       "23239          0.0050      0.003      1.664      0.097      -0.001       0.011\n",
1421
       "26018          0.0061      0.003      2.397      0.017       0.001       0.011\n",
1422
       "29924         -0.0105      0.005     -2.092      0.037      -0.020      -0.001\n",
1423
       "30011         -0.0037      0.003     -1.088      0.277      -0.010       0.003\n",
1424
       "55824          0.0007      0.003      0.277      0.782      -0.004       0.006\n",
1425
       "57761         -0.0018      0.002     -0.844      0.399      -0.006       0.002\n",
1426
       "58513         -0.0083      0.007     -1.252      0.211      -0.021       0.005\n",
1427
       "64223         -0.0173      0.006     -3.144      0.002      -0.028      -0.006\n",
1428
       "79109          0.0100      0.006      1.691      0.092      -0.002       0.022\n",
1429
       "84335         -0.0074      0.005     -1.453      0.147      -0.018       0.003\n",
1430
       "117145         0.0006      0.003      0.199      0.843      -0.006       0.007\n",
1431
       "196883        -0.0055      0.003     -1.924      0.055      -0.011       0.000\n",
1432
       "253260         0.0202      0.005      3.858      0.000       0.010       0.030\n",
1433
       "==============================================================================\n",
1434
       "Omnibus:                       63.897   Durbin-Watson:                   1.907\n",
1435
       "Prob(Omnibus):                  0.000   Jarque-Bera (JB):              218.126\n",
1436
       "Skew:                           0.560   Prob(JB):                     4.31e-48\n",
1437
       "Kurtosis:                       6.063   Cond. No.                     1.34e+03\n",
1438
       "==============================================================================\n",
1439
       "\n",
1440
       "Warnings:\n",
1441
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
1442
       "[2] The condition number is large, 1.34e+03. This might indicate that there are\n",
1443
       "strong multicollinearity or other numerical problems.\n",
1444
       "\"\"\""
1445
      ]
1446
     },
1447
     "execution_count": 13,
1448
     "metadata": {},
1449
     "output_type": "execute_result"
1450
    }
1451
   ],
1452
   "source": [
1453
    "X = PRADEEGFR\n",
1454
    "y = PredPRAD.detach().numpy()\n",
1455
    "\n",
1456
    "# Note the difference in argument order\n",
1457
    "model = sm.OLS(y, X).fit()\n",
1458
    "predictions = model.predict(X) # make the predictions by the model\n",
1459
    "\n",
1460
    "# Print out the statistics\n",
1461
    "model.summary()"
1462
   ]
1463
  },
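The cell above regresses what appear to be the model's predicted scores for the PRAD samples (PredPRAD) on the per-gene expression features (PRADEEGFR) to see which genes carry the prediction; the same pattern is repeated for KIRP further down. A minimal, self-contained sketch of that step with synthetic data (the shapes and variable names below are illustrative, not taken from the notebook):

import numpy as np
import statsmodels.api as sm

# Stand-ins for the notebook's PRADEEGFR (expression matrix) and
# PredPRAD (model predictions); sizes are illustrative only.
rng = np.random.default_rng(0)
n_samples, n_genes = 200, 10
X = rng.normal(size=(n_samples, n_genes))
y = X @ rng.normal(size=n_genes) + 0.01 * rng.normal(size=n_samples)

# statsmodels puts the response first: OLS(endog, exog) = OLS(y, X).
# No intercept is added here, matching the notebook (no sm.add_constant).
model = sm.OLS(y, X).fit()
print(model.summary())      # per-gene coefficients, t statistics, p-values
print(model.pvalues)        # the p-values later fed to the Bonferroni step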
1464
  {
1465
   "cell_type": "code",
1466
   "execution_count": 14,
1467
   "metadata": {},
1468
   "outputs": [
1469
    {
1470
     "name": "stdout",
1471
     "output_type": "stream",
1472
     "text": [
1473
      "(array([False, False, False, False, False, False, False, False, False,\n",
1474
      "       False, False, False,  True, False, False, False, False, False,\n",
1475
      "       False, False, False,  True, False, False, False, False, False,\n",
1476
      "       False, False, False,  True,  True, False, False, False, False,\n",
1477
      "       False, False, False, False, False, False, False, False, False,\n",
1478
      "       False, False, False, False, False, False, False, False, False,\n",
1479
      "       False, False, False, False, False, False, False, False, False,\n",
1480
      "       False, False, False, False, False, False, False, False, False,\n",
1481
      "       False, False, False, False, False, False, False, False, False,\n",
1482
      "       False, False, False, False, False, False, False, False, False,\n",
1483
      "       False, False, False, False, False, False, False, False, False,\n",
1484
      "       False,  True]), array([2.44677740e+01, 3.29985438e+01, 7.13092918e+01, 5.64211888e+01,\n",
1485
      "       1.54509011e+01, 1.11658624e+00, 1.87812262e+01, 3.89558813e+01,\n",
1486
      "       9.72150779e+01, 5.78711869e+01, 1.56777500e+01, 8.95682827e+01,\n",
1487
      "       2.51327007e-02, 4.33098320e+01, 4.95100431e+00, 7.78750423e+00,\n",
1488
      "       2.03159154e+01, 2.57117840e+01, 6.39829202e+01, 7.27759893e+00,\n",
1489
      "       5.10754128e+01, 1.05971240e-02, 6.51089314e+00, 8.75869838e+00,\n",
1490
      "       2.23897534e+01, 5.31341414e+01, 5.80062438e-02, 3.58073011e+01,\n",
1491
      "       1.18036918e-01, 1.32133918e-01, 2.21454179e-04, 2.56837981e-02,\n",
1492
      "       2.09656993e+01, 1.39029077e-01, 7.33409742e+00, 4.70189827e+01,\n",
1493
      "       5.45715861e+01, 7.51930189e+01, 2.93103927e-01, 2.25943396e+00,\n",
1494
      "       2.68121938e-01, 3.28754694e+01, 8.80486392e+01, 1.61682450e+01,\n",
1495
      "       8.75707363e+01, 2.03855259e+01, 2.01497555e+01, 1.41306446e+01,\n",
1496
      "       4.39405657e+01, 8.28053537e+00, 7.36785350e+01, 2.36031698e+01,\n",
1497
      "       7.90162057e+01, 1.81686393e+00, 5.49588676e+01, 1.76444324e+01,\n",
1498
      "       8.59214668e+01, 6.58684271e+01, 7.02399329e+00, 6.98945587e+01,\n",
1499
      "       6.99089901e+00, 6.83073512e+01, 7.90551006e+01, 6.46732160e+00,\n",
1500
      "       6.29964799e+01, 9.65470723e+01, 3.53987822e+01, 1.96703464e-01,\n",
1501
      "       8.26706688e+00, 9.97367957e+01, 3.54868575e+00, 6.33556532e+01,\n",
1502
      "       5.17631824e-01, 5.85916267e+00, 5.72527791e-01, 9.14875297e+01,\n",
1503
      "       8.88341003e+01, 2.62037891e+01, 5.44698589e-01, 2.94806683e+01,\n",
1504
      "       2.10871067e+01, 1.70404044e+01, 1.73259414e+01, 6.65603237e+01,\n",
1505
      "       2.79829894e+01, 5.57878182e+01, 3.93676302e+01, 2.90942330e+01,\n",
1506
      "       9.78555205e+00, 1.71681592e+00, 3.74347580e+00, 2.80121684e+01,\n",
1507
      "       7.89939136e+01, 4.03277635e+01, 2.13271697e+01, 1.81303993e-01,\n",
1508
      "       9.26249584e+00, 1.48573415e+01, 8.51061003e+01, 5.55844610e+00,\n",
1509
      "       1.34920701e-02]))\n"
1510
     ]
1511
    }
1512
   ],
1513
   "source": [
1514
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
1515
   ]
1516
  },
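bonferroni_correction is defined earlier in the notebook and is not shown in this diff. Judging from the printed output (a boolean reject mask plus p-values multiplied by the number of tests, not clipped at 1), it performs a plain Bonferroni adjustment; a hedged sketch of that behaviour, alongside the standard statsmodels equivalent, could look like this:

import numpy as np
from statsmodels.stats.multitest import multipletests

def bonferroni_correction_sketch(pvals, alpha=0.05):
    # Plain Bonferroni: scale each p-value by the number of tests.
    pvals = np.asarray(pvals)
    corrected = pvals * len(pvals)      # not clipped at 1, matching the output above
    reject = corrected < alpha
    return reject, corrected

pvals = np.array([0.0004, 0.03, 0.2, 0.8])
print(bonferroni_correction_sketch(pvals))

# Library equivalent (corrected p-values are clipped to [0, 1]):
reject, p_adj, _, _ = multipletests(pvals, alpha=0.05, method="bonferroni")
print(reject, p_adj)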
1517
  {
1518
   "cell_type": "code",
1519
   "execution_count": 15,
1520
   "metadata": {},
1521
   "outputs": [],
1522
   "source": [
1523
    "listEGFR = KIRPE.columns.intersection(lsEGFR)\n",
1524
    "KIRPEEGFR = KIRPE[listEGFR]\n",
1525
    "KIRPMEGFR = KIRPM[listEGFR]\n",
1526
    "KIRPCEGFR = KIRPC[listEGFR]   "
1527
   ]
1528
  },
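The cell above restricts the KIRP expression (KIRPE), mutation (KIRPM) and CNA (KIRPC) tables to the genes in lsEGFR before the OLS fit that follows. A small illustrative sketch of the same columns.intersection pattern (the DataFrames and gene IDs below are made up):

import pandas as pd

expr = pd.DataFrame(0.0, index=["s1", "s2"], columns=["1956", "207", "999"])
mut = pd.DataFrame(0, index=["s1", "s2"], columns=["1956", "207", "999"])
gene_list = ["1956", "207", "253260"]

shared = expr.columns.intersection(gene_list)   # genes present in both
expr_egfr = expr[shared]
mut_egfr = mut[shared]
print(list(shared))   # e.g. ['1956', '207']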
1529
  {
1530
   "cell_type": "code",
1531
   "execution_count": 16,
1532
   "metadata": {},
1533
   "outputs": [
1534
    {
1535
     "data": {
1536
      "text/html": [
1537
       "<table class=\"simpletable\">\n",
1538
       "<caption>OLS Regression Results</caption>\n",
1539
       "<tr>\n",
1540
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.998</td>\n",
1541
       "</tr>\n",
1542
       "<tr>\n",
1543
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.996</td>\n",
1544
       "</tr>\n",
1545
       "<tr>\n",
1546
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   356.2</td>\n",
1547
       "</tr>\n",
1548
       "<tr>\n",
1549
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th> <td>2.84e-62</td>\n",
1550
       "</tr>\n",
1551
       "<tr>\n",
1552
       "  <th>Time:</th>                 <td>18:16:45</td>     <th>  Log-Likelihood:    </th> <td>  409.04</td>\n",
1553
       "</tr>\n",
1554
       "<tr>\n",
1555
       "  <th>No. Observations:</th>      <td>   161</td>      <th>  AIC:               </th> <td>  -616.1</td>\n",
1556
       "</tr>\n",
1557
       "<tr>\n",
1558
       "  <th>Df Residuals:</th>          <td>    60</td>      <th>  BIC:               </th> <td>  -304.9</td>\n",
1559
       "</tr>\n",
1560
       "<tr>\n",
1561
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
1562
       "</tr>\n",
1563
       "<tr>\n",
1564
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
1565
       "</tr>\n",
1566
       "</table>\n",
1567
       "<table class=\"simpletable\">\n",
1568
       "<tr>\n",
1569
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
1570
       "</tr>\n",
1571
       "<tr>\n",
1572
       "  <th>102</th>    <td>   -0.0203</td> <td>    0.011</td> <td>   -1.884</td> <td> 0.064</td> <td>   -0.042</td> <td>    0.001</td>\n",
1573
       "</tr>\n",
1574
       "<tr>\n",
1575
       "  <th>107</th>    <td>    0.0050</td> <td>    0.015</td> <td>    0.340</td> <td> 0.735</td> <td>   -0.024</td> <td>    0.034</td>\n",
1576
       "</tr>\n",
1577
       "<tr>\n",
1578
       "  <th>108</th>    <td>   -0.0149</td> <td>    0.006</td> <td>   -2.486</td> <td> 0.016</td> <td>   -0.027</td> <td>   -0.003</td>\n",
1579
       "</tr>\n",
1580
       "<tr>\n",
1581
       "  <th>109</th>    <td>    0.0090</td> <td>    0.008</td> <td>    1.139</td> <td> 0.259</td> <td>   -0.007</td> <td>    0.025</td>\n",
1582
       "</tr>\n",
1583
       "<tr>\n",
1584
       "  <th>111</th>    <td>    0.0058</td> <td>    0.005</td> <td>    1.084</td> <td> 0.283</td> <td>   -0.005</td> <td>    0.016</td>\n",
1585
       "</tr>\n",
1586
       "<tr>\n",
1587
       "  <th>112</th>    <td>    0.0015</td> <td>    0.013</td> <td>    0.118</td> <td> 0.906</td> <td>   -0.024</td> <td>    0.027</td>\n",
1588
       "</tr>\n",
1589
       "<tr>\n",
1590
       "  <th>113</th>    <td>   -0.0260</td> <td>    0.012</td> <td>   -2.093</td> <td> 0.041</td> <td>   -0.051</td> <td>   -0.001</td>\n",
1591
       "</tr>\n",
1592
       "<tr>\n",
1593
       "  <th>114</th>    <td>   -0.0026</td> <td>    0.014</td> <td>   -0.180</td> <td> 0.858</td> <td>   -0.031</td> <td>    0.026</td>\n",
1594
       "</tr>\n",
1595
       "<tr>\n",
1596
       "  <th>115</th>    <td>    0.0049</td> <td>    0.012</td> <td>    0.412</td> <td> 0.682</td> <td>   -0.019</td> <td>    0.029</td>\n",
1597
       "</tr>\n",
1598
       "<tr>\n",
1599
       "  <th>160</th>    <td>    0.0032</td> <td>    0.018</td> <td>    0.179</td> <td> 0.859</td> <td>   -0.032</td> <td>    0.039</td>\n",
1600
       "</tr>\n",
1601
       "<tr>\n",
1602
       "  <th>161</th>    <td>   -0.0461</td> <td>    0.019</td> <td>   -2.433</td> <td> 0.018</td> <td>   -0.084</td> <td>   -0.008</td>\n",
1603
       "</tr>\n",
1604
       "<tr>\n",
1605
       "  <th>163</th>    <td>    0.0056</td> <td>    0.019</td> <td>    0.295</td> <td> 0.769</td> <td>   -0.032</td> <td>    0.044</td>\n",
1606
       "</tr>\n",
1607
       "<tr>\n",
1608
       "  <th>207</th>    <td>   -0.0264</td> <td>    0.016</td> <td>   -1.696</td> <td> 0.095</td> <td>   -0.058</td> <td>    0.005</td>\n",
1609
       "</tr>\n",
1610
       "<tr>\n",
1611
       "  <th>208</th>    <td>    0.0413</td> <td>    0.016</td> <td>    2.571</td> <td> 0.013</td> <td>    0.009</td> <td>    0.073</td>\n",
1612
       "</tr>\n",
1613
       "<tr>\n",
1614
       "  <th>572</th>    <td>    0.0455</td> <td>    0.019</td> <td>    2.357</td> <td> 0.022</td> <td>    0.007</td> <td>    0.084</td>\n",
1615
       "</tr>\n",
1616
       "<tr>\n",
1617
       "  <th>801</th>    <td>   -0.0083</td> <td>    0.013</td> <td>   -0.614</td> <td> 0.542</td> <td>   -0.035</td> <td>    0.019</td>\n",
1618
       "</tr>\n",
1619
       "<tr>\n",
1620
       "  <th>805</th>    <td>   -0.0226</td> <td>    0.023</td> <td>   -0.972</td> <td> 0.335</td> <td>   -0.069</td> <td>    0.024</td>\n",
1621
       "</tr>\n",
1622
       "<tr>\n",
1623
       "  <th>808</th>    <td>   -0.0373</td> <td>    0.018</td> <td>   -2.121</td> <td> 0.038</td> <td>   -0.072</td> <td>   -0.002</td>\n",
1624
       "</tr>\n",
1625
       "<tr>\n",
1626
       "  <th>814</th>    <td>    0.0115</td> <td>    0.020</td> <td>    0.561</td> <td> 0.577</td> <td>   -0.029</td> <td>    0.052</td>\n",
1627
       "</tr>\n",
1628
       "<tr>\n",
1629
       "  <th>842</th>    <td>   -0.0297</td> <td>    0.015</td> <td>   -2.021</td> <td> 0.048</td> <td>   -0.059</td> <td>   -0.000</td>\n",
1630
       "</tr>\n",
1631
       "<tr>\n",
1632
       "  <th>867</th>    <td>   -0.0059</td> <td>    0.024</td> <td>   -0.248</td> <td> 0.805</td> <td>   -0.053</td> <td>    0.042</td>\n",
1633
       "</tr>\n",
1634
       "<tr>\n",
1635
       "  <th>983</th>    <td>    0.0012</td> <td>    0.007</td> <td>    0.169</td> <td> 0.867</td> <td>   -0.013</td> <td>    0.016</td>\n",
1636
       "</tr>\n",
1637
       "<tr>\n",
1638
       "  <th>998</th>    <td>    0.0193</td> <td>    0.024</td> <td>    0.822</td> <td> 0.414</td> <td>   -0.028</td> <td>    0.066</td>\n",
1639
       "</tr>\n",
1640
       "<tr>\n",
1641
       "  <th>1026</th>   <td>   -0.0095</td> <td>    0.008</td> <td>   -1.213</td> <td> 0.230</td> <td>   -0.025</td> <td>    0.006</td>\n",
1642
       "</tr>\n",
1643
       "<tr>\n",
1644
       "  <th>1027</th>   <td>   -0.0049</td> <td>    0.012</td> <td>   -0.417</td> <td> 0.678</td> <td>   -0.028</td> <td>    0.019</td>\n",
1645
       "</tr>\n",
1646
       "<tr>\n",
1647
       "  <th>1147</th>   <td>    0.0307</td> <td>    0.020</td> <td>    1.518</td> <td> 0.134</td> <td>   -0.010</td> <td>    0.071</td>\n",
1648
       "</tr>\n",
1649
       "<tr>\n",
1650
       "  <th>1173</th>   <td>    0.0353</td> <td>    0.019</td> <td>    1.827</td> <td> 0.073</td> <td>   -0.003</td> <td>    0.074</td>\n",
1651
       "</tr>\n",
1652
       "<tr>\n",
1653
       "  <th>1175</th>   <td>    0.0282</td> <td>    0.024</td> <td>    1.194</td> <td> 0.237</td> <td>   -0.019</td> <td>    0.075</td>\n",
1654
       "</tr>\n",
1655
       "<tr>\n",
1656
       "  <th>1211</th>   <td>    0.0049</td> <td>    0.015</td> <td>    0.337</td> <td> 0.738</td> <td>   -0.024</td> <td>    0.034</td>\n",
1657
       "</tr>\n",
1658
       "<tr>\n",
1659
       "  <th>1213</th>   <td>   -0.0081</td> <td>    0.017</td> <td>   -0.484</td> <td> 0.630</td> <td>   -0.041</td> <td>    0.025</td>\n",
1660
       "</tr>\n",
1661
       "<tr>\n",
1662
       "  <th>1385</th>   <td>   -0.0046</td> <td>    0.020</td> <td>   -0.234</td> <td> 0.816</td> <td>   -0.044</td> <td>    0.035</td>\n",
1663
       "</tr>\n",
1664
       "<tr>\n",
1665
       "  <th>1445</th>   <td>    0.0122</td> <td>    0.019</td> <td>    0.658</td> <td> 0.513</td> <td>   -0.025</td> <td>    0.049</td>\n",
1666
       "</tr>\n",
1667
       "<tr>\n",
1668
       "  <th>1950</th>   <td>    0.0033</td> <td>    0.008</td> <td>    0.419</td> <td> 0.677</td> <td>   -0.012</td> <td>    0.019</td>\n",
1669
       "</tr>\n",
1670
       "<tr>\n",
1671
       "  <th>1956</th>   <td>    0.0367</td> <td>    0.010</td> <td>    3.749</td> <td> 0.000</td> <td>    0.017</td> <td>    0.056</td>\n",
1672
       "</tr>\n",
1673
       "<tr>\n",
1674
       "  <th>2060</th>   <td>   -0.0484</td> <td>    0.024</td> <td>   -2.035</td> <td> 0.046</td> <td>   -0.096</td> <td>   -0.001</td>\n",
1675
       "</tr>\n",
1676
       "<tr>\n",
1677
       "  <th>2308</th>   <td>   -0.0066</td> <td>    0.013</td> <td>   -0.519</td> <td> 0.606</td> <td>   -0.032</td> <td>    0.019</td>\n",
1678
       "</tr>\n",
1679
       "<tr>\n",
1680
       "  <th>2309</th>   <td>    0.0204</td> <td>    0.016</td> <td>    1.298</td> <td> 0.199</td> <td>   -0.011</td> <td>    0.052</td>\n",
1681
       "</tr>\n",
1682
       "<tr>\n",
1683
       "  <th>2475</th>   <td>    0.0076</td> <td>    0.019</td> <td>    0.407</td> <td> 0.685</td> <td>   -0.030</td> <td>    0.045</td>\n",
1684
       "</tr>\n",
1685
       "<tr>\n",
1686
       "  <th>2549</th>   <td>    0.0033</td> <td>    0.014</td> <td>    0.235</td> <td> 0.815</td> <td>   -0.025</td> <td>    0.031</td>\n",
1687
       "</tr>\n",
1688
       "<tr>\n",
1689
       "  <th>2885</th>   <td>    0.0503</td> <td>    0.023</td> <td>    2.180</td> <td> 0.033</td> <td>    0.004</td> <td>    0.097</td>\n",
1690
       "</tr>\n",
1691
       "<tr>\n",
1692
       "  <th>2931</th>   <td>   -0.0348</td> <td>    0.026</td> <td>   -1.337</td> <td> 0.186</td> <td>   -0.087</td> <td>    0.017</td>\n",
1693
       "</tr>\n",
1694
       "<tr>\n",
1695
       "  <th>3164</th>   <td>    0.0059</td> <td>    0.003</td> <td>    1.970</td> <td> 0.054</td> <td>-9.21e-05</td> <td>    0.012</td>\n",
1696
       "</tr>\n",
1697
       "<tr>\n",
1698
       "  <th>3265</th>   <td>   -0.0328</td> <td>    0.018</td> <td>   -1.859</td> <td> 0.068</td> <td>   -0.068</td> <td>    0.002</td>\n",
1699
       "</tr>\n",
1700
       "<tr>\n",
1701
       "  <th>3320</th>   <td>    0.0247</td> <td>    0.012</td> <td>    2.075</td> <td> 0.042</td> <td>    0.001</td> <td>    0.049</td>\n",
1702
       "</tr>\n",
1703
       "<tr>\n",
1704
       "  <th>3709</th>   <td>   -0.0169</td> <td>    0.011</td> <td>   -1.583</td> <td> 0.119</td> <td>   -0.038</td> <td>    0.004</td>\n",
1705
       "</tr>\n",
1706
       "<tr>\n",
1707
       "  <th>3710</th>   <td>    0.0055</td> <td>    0.006</td> <td>    0.935</td> <td> 0.354</td> <td>   -0.006</td> <td>    0.017</td>\n",
1708
       "</tr>\n",
1709
       "<tr>\n",
1710
       "  <th>3845</th>   <td>    0.0395</td> <td>    0.017</td> <td>    2.365</td> <td> 0.021</td> <td>    0.006</td> <td>    0.073</td>\n",
1711
       "</tr>\n",
1712
       "<tr>\n",
1713
       "  <th>4193</th>   <td>    0.0065</td> <td>    0.012</td> <td>    0.553</td> <td> 0.582</td> <td>   -0.017</td> <td>    0.030</td>\n",
1714
       "</tr>\n",
1715
       "<tr>\n",
1716
       "  <th>4303</th>   <td>   -0.0064</td> <td>    0.014</td> <td>   -0.460</td> <td> 0.647</td> <td>   -0.034</td> <td>    0.021</td>\n",
1717
       "</tr>\n",
1718
       "<tr>\n",
1719
       "  <th>4893</th>   <td>    0.0045</td> <td>    0.019</td> <td>    0.238</td> <td> 0.813</td> <td>   -0.033</td> <td>    0.042</td>\n",
1720
       "</tr>\n",
1721
       "<tr>\n",
1722
       "  <th>5136</th>   <td>   -0.0038</td> <td>    0.003</td> <td>   -1.194</td> <td> 0.237</td> <td>   -0.010</td> <td>    0.003</td>\n",
1723
       "</tr>\n",
1724
       "<tr>\n",
1725
       "  <th>5170</th>   <td>    0.0335</td> <td>    0.023</td> <td>    1.470</td> <td> 0.147</td> <td>   -0.012</td> <td>    0.079</td>\n",
1726
       "</tr>\n",
1727
       "<tr>\n",
1728
       "  <th>5290</th>   <td>   -0.0036</td> <td>    0.021</td> <td>   -0.171</td> <td> 0.865</td> <td>   -0.046</td> <td>    0.039</td>\n",
1729
       "</tr>\n",
1730
       "<tr>\n",
1731
       "  <th>5295</th>   <td>   -0.0123</td> <td>    0.010</td> <td>   -1.216</td> <td> 0.229</td> <td>   -0.033</td> <td>    0.008</td>\n",
1732
       "</tr>\n",
1733
       "<tr>\n",
1734
       "  <th>5335</th>   <td>    0.0016</td> <td>    0.013</td> <td>    0.119</td> <td> 0.906</td> <td>   -0.025</td> <td>    0.028</td>\n",
1735
       "</tr>\n",
1736
       "<tr>\n",
1737
       "  <th>5566</th>   <td>    0.0412</td> <td>    0.024</td> <td>    1.688</td> <td> 0.097</td> <td>   -0.008</td> <td>    0.090</td>\n",
1738
       "</tr>\n",
1739
       "<tr>\n",
1740
       "  <th>5567</th>   <td>    0.0210</td> <td>    0.013</td> <td>    1.658</td> <td> 0.102</td> <td>   -0.004</td> <td>    0.046</td>\n",
1741
       "</tr>\n",
1742
       "<tr>\n",
1743
       "  <th>5573</th>   <td>   -0.0320</td> <td>    0.020</td> <td>   -1.609</td> <td> 0.113</td> <td>   -0.072</td> <td>    0.008</td>\n",
1744
       "</tr>\n",
1745
       "<tr>\n",
1746
       "  <th>5575</th>   <td>   -0.0102</td> <td>    0.009</td> <td>   -1.115</td> <td> 0.269</td> <td>   -0.029</td> <td>    0.008</td>\n",
1747
       "</tr>\n",
1748
       "<tr>\n",
1749
       "  <th>5576</th>   <td>   -0.0010</td> <td>    0.013</td> <td>   -0.077</td> <td> 0.939</td> <td>   -0.027</td> <td>    0.025</td>\n",
1750
       "</tr>\n",
1751
       "<tr>\n",
1752
       "  <th>5577</th>   <td>    0.0011</td> <td>    0.005</td> <td>    0.212</td> <td> 0.833</td> <td>   -0.009</td> <td>    0.012</td>\n",
1753
       "</tr>\n",
1754
       "<tr>\n",
1755
       "  <th>5578</th>   <td>   -0.0180</td> <td>    0.013</td> <td>   -1.352</td> <td> 0.181</td> <td>   -0.045</td> <td>    0.009</td>\n",
1756
       "</tr>\n",
1757
       "<tr>\n",
1758
       "  <th>5580</th>   <td>   -0.0243</td> <td>    0.013</td> <td>   -1.828</td> <td> 0.073</td> <td>   -0.051</td> <td>    0.002</td>\n",
1759
       "</tr>\n",
1760
       "<tr>\n",
1761
       "  <th>5581</th>   <td>    0.0281</td> <td>    0.015</td> <td>    1.921</td> <td> 0.059</td> <td>   -0.001</td> <td>    0.057</td>\n",
1762
       "</tr>\n",
1763
       "<tr>\n",
1764
       "  <th>5582</th>   <td>   -0.0033</td> <td>    0.008</td> <td>   -0.436</td> <td> 0.665</td> <td>   -0.018</td> <td>    0.012</td>\n",
1765
       "</tr>\n",
1766
       "<tr>\n",
1767
       "  <th>5594</th>   <td>   -0.0107</td> <td>    0.018</td> <td>   -0.589</td> <td> 0.558</td> <td>   -0.047</td> <td>    0.026</td>\n",
1768
       "</tr>\n",
1769
       "<tr>\n",
1770
       "  <th>5595</th>   <td>   -0.0510</td> <td>    0.021</td> <td>   -2.426</td> <td> 0.018</td> <td>   -0.093</td> <td>   -0.009</td>\n",
1771
       "</tr>\n",
1772
       "<tr>\n",
1773
       "  <th>5604</th>   <td>   -0.0220</td> <td>    0.018</td> <td>   -1.243</td> <td> 0.219</td> <td>   -0.057</td> <td>    0.013</td>\n",
1774
       "</tr>\n",
1775
       "<tr>\n",
1776
       "  <th>5605</th>   <td>    0.0326</td> <td>    0.022</td> <td>    1.493</td> <td> 0.141</td> <td>   -0.011</td> <td>    0.076</td>\n",
1777
       "</tr>\n",
1778
       "<tr>\n",
1779
       "  <th>5728</th>   <td>   -0.0120</td> <td>    0.016</td> <td>   -0.770</td> <td> 0.444</td> <td>   -0.043</td> <td>    0.019</td>\n",
1780
       "</tr>\n",
1781
       "<tr>\n",
1782
       "  <th>5894</th>   <td>    0.0340</td> <td>    0.024</td> <td>    1.439</td> <td> 0.155</td> <td>   -0.013</td> <td>    0.081</td>\n",
1783
       "</tr>\n",
1784
       "<tr>\n",
1785
       "  <th>6199</th>   <td>   -0.0215</td> <td>    0.019</td> <td>   -1.106</td> <td> 0.273</td> <td>   -0.060</td> <td>    0.017</td>\n",
1786
       "</tr>\n",
1787
       "<tr>\n",
1788
       "  <th>6456</th>   <td>   -0.0011</td> <td>    0.004</td> <td>   -0.299</td> <td> 0.766</td> <td>   -0.009</td> <td>    0.007</td>\n",
1789
       "</tr>\n",
1790
       "<tr>\n",
1791
       "  <th>6464</th>   <td>    0.0065</td> <td>    0.016</td> <td>    0.405</td> <td> 0.687</td> <td>   -0.025</td> <td>    0.038</td>\n",
1792
       "</tr>\n",
1793
       "<tr>\n",
1794
       "  <th>6654</th>   <td>   -0.0203</td> <td>    0.022</td> <td>   -0.938</td> <td> 0.352</td> <td>   -0.063</td> <td>    0.023</td>\n",
1795
       "</tr>\n",
1796
       "<tr>\n",
1797
       "  <th>6714</th>   <td>   -0.0079</td> <td>    0.011</td> <td>   -0.735</td> <td> 0.465</td> <td>   -0.029</td> <td>    0.014</td>\n",
1798
       "</tr>\n",
1799
       "<tr>\n",
1800
       "  <th>6868</th>   <td>   -0.0096</td> <td>    0.015</td> <td>   -0.627</td> <td> 0.533</td> <td>   -0.040</td> <td>    0.021</td>\n",
1801
       "</tr>\n",
1802
       "<tr>\n",
1803
       "  <th>7249</th>   <td>    0.0322</td> <td>    0.020</td> <td>    1.614</td> <td> 0.112</td> <td>   -0.008</td> <td>    0.072</td>\n",
1804
       "</tr>\n",
1805
       "<tr>\n",
1806
       "  <th>7311</th>   <td>    0.0065</td> <td>    0.021</td> <td>    0.310</td> <td> 0.758</td> <td>   -0.036</td> <td>    0.049</td>\n",
1807
       "</tr>\n",
1808
       "<tr>\n",
1809
       "  <th>7529</th>   <td>    0.0649</td> <td>    0.022</td> <td>    2.910</td> <td> 0.005</td> <td>    0.020</td> <td>    0.109</td>\n",
1810
       "</tr>\n",
1811
       "<tr>\n",
1812
       "  <th>8027</th>   <td>   -0.0083</td> <td>    0.019</td> <td>   -0.436</td> <td> 0.665</td> <td>   -0.046</td> <td>    0.030</td>\n",
1813
       "</tr>\n",
1814
       "<tr>\n",
1815
       "  <th>8038</th>   <td>   -0.0063</td> <td>    0.006</td> <td>   -1.116</td> <td> 0.269</td> <td>   -0.018</td> <td>    0.005</td>\n",
1816
       "</tr>\n",
1817
       "<tr>\n",
1818
       "  <th>9146</th>   <td>    0.0263</td> <td>    0.022</td> <td>    1.217</td> <td> 0.228</td> <td>   -0.017</td> <td>    0.070</td>\n",
1819
       "</tr>\n",
1820
       "<tr>\n",
1821
       "  <th>10000</th>  <td>    0.0119</td> <td>    0.010</td> <td>    1.229</td> <td> 0.224</td> <td>   -0.007</td> <td>    0.031</td>\n",
1822
       "</tr>\n",
1823
       "<tr>\n",
1824
       "  <th>10252</th>  <td>    0.0047</td> <td>    0.008</td> <td>    0.564</td> <td> 0.575</td> <td>   -0.012</td> <td>    0.021</td>\n",
1825
       "</tr>\n",
1826
       "<tr>\n",
1827
       "  <th>10253</th>  <td>   -0.0065</td> <td>    0.009</td> <td>   -0.744</td> <td> 0.460</td> <td>   -0.024</td> <td>    0.011</td>\n",
1828
       "</tr>\n",
1829
       "<tr>\n",
1830
       "  <th>10254</th>  <td>   -0.0071</td> <td>    0.023</td> <td>   -0.305</td> <td> 0.761</td> <td>   -0.054</td> <td>    0.040</td>\n",
1831
       "</tr>\n",
1832
       "<tr>\n",
1833
       "  <th>11140</th>  <td>    0.0294</td> <td>    0.027</td> <td>    1.076</td> <td> 0.286</td> <td>   -0.025</td> <td>    0.084</td>\n",
1834
       "</tr>\n",
1835
       "<tr>\n",
1836
       "  <th>23239</th>  <td>    0.0164</td> <td>    0.012</td> <td>    1.338</td> <td> 0.186</td> <td>   -0.008</td> <td>    0.041</td>\n",
1837
       "</tr>\n",
1838
       "<tr>\n",
1839
       "  <th>26018</th>  <td>   -0.0269</td> <td>    0.011</td> <td>   -2.414</td> <td> 0.019</td> <td>   -0.049</td> <td>   -0.005</td>\n",
1840
       "</tr>\n",
1841
       "<tr>\n",
1842
       "  <th>29924</th>  <td>   -0.0294</td> <td>    0.020</td> <td>   -1.479</td> <td> 0.144</td> <td>   -0.069</td> <td>    0.010</td>\n",
1843
       "</tr>\n",
1844
       "<tr>\n",
1845
       "  <th>30011</th>  <td>   -0.0023</td> <td>    0.008</td> <td>   -0.285</td> <td> 0.777</td> <td>   -0.019</td> <td>    0.014</td>\n",
1846
       "</tr>\n",
1847
       "<tr>\n",
1848
       "  <th>55824</th>  <td>   -0.0012</td> <td>    0.013</td> <td>   -0.090</td> <td> 0.929</td> <td>   -0.028</td> <td>    0.026</td>\n",
1849
       "</tr>\n",
1850
       "<tr>\n",
1851
       "  <th>57761</th>  <td>    0.0011</td> <td>    0.005</td> <td>    0.213</td> <td> 0.832</td> <td>   -0.009</td> <td>    0.011</td>\n",
1852
       "</tr>\n",
1853
       "<tr>\n",
1854
       "  <th>58513</th>  <td>   -0.0452</td> <td>    0.015</td> <td>   -3.034</td> <td> 0.004</td> <td>   -0.075</td> <td>   -0.015</td>\n",
1855
       "</tr>\n",
1856
       "<tr>\n",
1857
       "  <th>64223</th>  <td>   -0.0160</td> <td>    0.022</td> <td>   -0.733</td> <td> 0.466</td> <td>   -0.060</td> <td>    0.028</td>\n",
1858
       "</tr>\n",
1859
       "<tr>\n",
1860
       "  <th>79109</th>  <td>    0.0001</td> <td>    0.017</td> <td>    0.006</td> <td> 0.995</td> <td>   -0.035</td> <td>    0.035</td>\n",
1861
       "</tr>\n",
1862
       "<tr>\n",
1863
       "  <th>84335</th>  <td>   -0.0121</td> <td>    0.025</td> <td>   -0.491</td> <td> 0.625</td> <td>   -0.061</td> <td>    0.037</td>\n",
1864
       "</tr>\n",
1865
       "<tr>\n",
1866
       "  <th>117145</th> <td>   -0.0002</td> <td>    0.017</td> <td>   -0.010</td> <td> 0.992</td> <td>   -0.034</td> <td>    0.034</td>\n",
1867
       "</tr>\n",
1868
       "<tr>\n",
1869
       "  <th>196883</th> <td>   -0.0140</td> <td>    0.013</td> <td>   -1.094</td> <td> 0.278</td> <td>   -0.040</td> <td>    0.012</td>\n",
1870
       "</tr>\n",
1871
       "<tr>\n",
1872
       "  <th>253260</th> <td>   -0.0265</td> <td>    0.018</td> <td>   -1.485</td> <td> 0.143</td> <td>   -0.062</td> <td>    0.009</td>\n",
1873
       "</tr>\n",
1874
       "</table>\n",
1875
       "<table class=\"simpletable\">\n",
1876
       "<tr>\n",
1877
       "  <th>Omnibus:</th>       <td> 0.349</td> <th>  Durbin-Watson:     </th> <td>   1.769</td>\n",
1878
       "</tr>\n",
1879
       "<tr>\n",
1880
       "  <th>Prob(Omnibus):</th> <td> 0.840</td> <th>  Jarque-Bera (JB):  </th> <td>   0.161</td>\n",
1881
       "</tr>\n",
1882
       "<tr>\n",
1883
       "  <th>Skew:</th>          <td>-0.067</td> <th>  Prob(JB):          </th> <td>   0.922</td>\n",
1884
       "</tr>\n",
1885
       "<tr>\n",
1886
       "  <th>Kurtosis:</th>      <td> 3.077</td> <th>  Cond. No.          </th> <td>    941.</td>\n",
1887
       "</tr>\n",
1888
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified."
1889
      ],
1890
      "text/plain": [
1891
       "<class 'statsmodels.iolib.summary.Summary'>\n",
1892
       "\"\"\"\n",
1893
       "                            OLS Regression Results                            \n",
1894
       "==============================================================================\n",
1895
       "Dep. Variable:                      y   R-squared:                       0.998\n",
1896
       "Model:                            OLS   Adj. R-squared:                  0.996\n",
1897
       "Method:                 Least Squares   F-statistic:                     356.2\n",
1898
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):           2.84e-62\n",
1899
       "Time:                        18:16:45   Log-Likelihood:                 409.04\n",
1900
       "No. Observations:                 161   AIC:                            -616.1\n",
1901
       "Df Residuals:                      60   BIC:                            -304.9\n",
1902
       "Df Model:                         101                                         \n",
1903
       "Covariance Type:            nonrobust                                         \n",
1904
       "==============================================================================\n",
1905
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
1906
       "------------------------------------------------------------------------------\n",
1907
       "102           -0.0203      0.011     -1.884      0.064      -0.042       0.001\n",
1908
       "107            0.0050      0.015      0.340      0.735      -0.024       0.034\n",
1909
       "108           -0.0149      0.006     -2.486      0.016      -0.027      -0.003\n",
1910
       "109            0.0090      0.008      1.139      0.259      -0.007       0.025\n",
1911
       "111            0.0058      0.005      1.084      0.283      -0.005       0.016\n",
1912
       "112            0.0015      0.013      0.118      0.906      -0.024       0.027\n",
1913
       "113           -0.0260      0.012     -2.093      0.041      -0.051      -0.001\n",
1914
       "114           -0.0026      0.014     -0.180      0.858      -0.031       0.026\n",
1915
       "115            0.0049      0.012      0.412      0.682      -0.019       0.029\n",
1916
       "160            0.0032      0.018      0.179      0.859      -0.032       0.039\n",
1917
       "161           -0.0461      0.019     -2.433      0.018      -0.084      -0.008\n",
1918
       "163            0.0056      0.019      0.295      0.769      -0.032       0.044\n",
1919
       "207           -0.0264      0.016     -1.696      0.095      -0.058       0.005\n",
1920
       "208            0.0413      0.016      2.571      0.013       0.009       0.073\n",
1921
       "572            0.0455      0.019      2.357      0.022       0.007       0.084\n",
1922
       "801           -0.0083      0.013     -0.614      0.542      -0.035       0.019\n",
1923
       "805           -0.0226      0.023     -0.972      0.335      -0.069       0.024\n",
1924
       "808           -0.0373      0.018     -2.121      0.038      -0.072      -0.002\n",
1925
       "814            0.0115      0.020      0.561      0.577      -0.029       0.052\n",
1926
       "842           -0.0297      0.015     -2.021      0.048      -0.059      -0.000\n",
1927
       "867           -0.0059      0.024     -0.248      0.805      -0.053       0.042\n",
1928
       "983            0.0012      0.007      0.169      0.867      -0.013       0.016\n",
1929
       "998            0.0193      0.024      0.822      0.414      -0.028       0.066\n",
1930
       "1026          -0.0095      0.008     -1.213      0.230      -0.025       0.006\n",
1931
       "1027          -0.0049      0.012     -0.417      0.678      -0.028       0.019\n",
1932
       "1147           0.0307      0.020      1.518      0.134      -0.010       0.071\n",
1933
       "1173           0.0353      0.019      1.827      0.073      -0.003       0.074\n",
1934
       "1175           0.0282      0.024      1.194      0.237      -0.019       0.075\n",
1935
       "1211           0.0049      0.015      0.337      0.738      -0.024       0.034\n",
1936
       "1213          -0.0081      0.017     -0.484      0.630      -0.041       0.025\n",
1937
       "1385          -0.0046      0.020     -0.234      0.816      -0.044       0.035\n",
1938
       "1445           0.0122      0.019      0.658      0.513      -0.025       0.049\n",
1939
       "1950           0.0033      0.008      0.419      0.677      -0.012       0.019\n",
1940
       "1956           0.0367      0.010      3.749      0.000       0.017       0.056\n",
1941
       "2060          -0.0484      0.024     -2.035      0.046      -0.096      -0.001\n",
1942
       "2308          -0.0066      0.013     -0.519      0.606      -0.032       0.019\n",
1943
       "2309           0.0204      0.016      1.298      0.199      -0.011       0.052\n",
1944
       "2475           0.0076      0.019      0.407      0.685      -0.030       0.045\n",
1945
       "2549           0.0033      0.014      0.235      0.815      -0.025       0.031\n",
1946
       "2885           0.0503      0.023      2.180      0.033       0.004       0.097\n",
1947
       "2931          -0.0348      0.026     -1.337      0.186      -0.087       0.017\n",
1948
       "3164           0.0059      0.003      1.970      0.054   -9.21e-05       0.012\n",
1949
       "3265          -0.0328      0.018     -1.859      0.068      -0.068       0.002\n",
1950
       "3320           0.0247      0.012      2.075      0.042       0.001       0.049\n",
1951
       "3709          -0.0169      0.011     -1.583      0.119      -0.038       0.004\n",
1952
       "3710           0.0055      0.006      0.935      0.354      -0.006       0.017\n",
1953
       "3845           0.0395      0.017      2.365      0.021       0.006       0.073\n",
1954
       "4193           0.0065      0.012      0.553      0.582      -0.017       0.030\n",
1955
       "4303          -0.0064      0.014     -0.460      0.647      -0.034       0.021\n",
1956
       "4893           0.0045      0.019      0.238      0.813      -0.033       0.042\n",
1957
       "5136          -0.0038      0.003     -1.194      0.237      -0.010       0.003\n",
1958
       "5170           0.0335      0.023      1.470      0.147      -0.012       0.079\n",
1959
       "5290          -0.0036      0.021     -0.171      0.865      -0.046       0.039\n",
1960
       "5295          -0.0123      0.010     -1.216      0.229      -0.033       0.008\n",
1961
       "5335           0.0016      0.013      0.119      0.906      -0.025       0.028\n",
1962
       "5566           0.0412      0.024      1.688      0.097      -0.008       0.090\n",
1963
       "5567           0.0210      0.013      1.658      0.102      -0.004       0.046\n",
1964
       "5573          -0.0320      0.020     -1.609      0.113      -0.072       0.008\n",
1965
       "5575          -0.0102      0.009     -1.115      0.269      -0.029       0.008\n",
1966
       "5576          -0.0010      0.013     -0.077      0.939      -0.027       0.025\n",
1967
       "5577           0.0011      0.005      0.212      0.833      -0.009       0.012\n",
1968
       "5578          -0.0180      0.013     -1.352      0.181      -0.045       0.009\n",
1969
       "5580          -0.0243      0.013     -1.828      0.073      -0.051       0.002\n",
1970
       "5581           0.0281      0.015      1.921      0.059      -0.001       0.057\n",
1971
       "5582          -0.0033      0.008     -0.436      0.665      -0.018       0.012\n",
1972
       "5594          -0.0107      0.018     -0.589      0.558      -0.047       0.026\n",
1973
       "5595          -0.0510      0.021     -2.426      0.018      -0.093      -0.009\n",
1974
       "5604          -0.0220      0.018     -1.243      0.219      -0.057       0.013\n",
1975
       "5605           0.0326      0.022      1.493      0.141      -0.011       0.076\n",
1976
       "5728          -0.0120      0.016     -0.770      0.444      -0.043       0.019\n",
1977
       "5894           0.0340      0.024      1.439      0.155      -0.013       0.081\n",
1978
       "6199          -0.0215      0.019     -1.106      0.273      -0.060       0.017\n",
1979
       "6456          -0.0011      0.004     -0.299      0.766      -0.009       0.007\n",
1980
       "6464           0.0065      0.016      0.405      0.687      -0.025       0.038\n",
1981
       "6654          -0.0203      0.022     -0.938      0.352      -0.063       0.023\n",
1982
       "6714          -0.0079      0.011     -0.735      0.465      -0.029       0.014\n",
1983
       "6868          -0.0096      0.015     -0.627      0.533      -0.040       0.021\n",
1984
       "7249           0.0322      0.020      1.614      0.112      -0.008       0.072\n",
1985
       "7311           0.0065      0.021      0.310      0.758      -0.036       0.049\n",
1986
       "7529           0.0649      0.022      2.910      0.005       0.020       0.109\n",
1987
       "8027          -0.0083      0.019     -0.436      0.665      -0.046       0.030\n",
1988
       "8038          -0.0063      0.006     -1.116      0.269      -0.018       0.005\n",
1989
       "9146           0.0263      0.022      1.217      0.228      -0.017       0.070\n",
1990
       "10000          0.0119      0.010      1.229      0.224      -0.007       0.031\n",
1991
       "10252          0.0047      0.008      0.564      0.575      -0.012       0.021\n",
1992
       "10253         -0.0065      0.009     -0.744      0.460      -0.024       0.011\n",
1993
       "10254         -0.0071      0.023     -0.305      0.761      -0.054       0.040\n",
1994
       "11140          0.0294      0.027      1.076      0.286      -0.025       0.084\n",
1995
       "23239          0.0164      0.012      1.338      0.186      -0.008       0.041\n",
1996
       "26018         -0.0269      0.011     -2.414      0.019      -0.049      -0.005\n",
1997
       "29924         -0.0294      0.020     -1.479      0.144      -0.069       0.010\n",
1998
       "30011         -0.0023      0.008     -0.285      0.777      -0.019       0.014\n",
1999
       "55824         -0.0012      0.013     -0.090      0.929      -0.028       0.026\n",
2000
       "57761          0.0011      0.005      0.213      0.832      -0.009       0.011\n",
2001
       "58513         -0.0452      0.015     -3.034      0.004      -0.075      -0.015\n",
2002
       "64223         -0.0160      0.022     -0.733      0.466      -0.060       0.028\n",
2003
       "79109          0.0001      0.017      0.006      0.995      -0.035       0.035\n",
2004
       "84335         -0.0121      0.025     -0.491      0.625      -0.061       0.037\n",
2005
       "117145        -0.0002      0.017     -0.010      0.992      -0.034       0.034\n",
2006
       "196883        -0.0140      0.013     -1.094      0.278      -0.040       0.012\n",
2007
       "253260        -0.0265      0.018     -1.485      0.143      -0.062       0.009\n",
2008
       "==============================================================================\n",
2009
       "Omnibus:                        0.349   Durbin-Watson:                   1.769\n",
2010
       "Prob(Omnibus):                  0.840   Jarque-Bera (JB):                0.161\n",
2011
       "Skew:                          -0.067   Prob(JB):                        0.922\n",
2012
       "Kurtosis:                       3.077   Cond. No.                         941.\n",
2013
       "==============================================================================\n",
2014
       "\n",
2015
       "Warnings:\n",
2016
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
2017
       "\"\"\""
2018
      ]
2019
     },
2020
     "execution_count": 16,
2021
     "metadata": {},
2022
     "output_type": "execute_result"
2023
    }
2024
   ],
2025
   "source": [
2026
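    "# Regress the predicted KIRP responses (PredKIRP is a torch tensor, hence .detach().numpy()) on the EGFR-pathway expression features\n",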
    "X = KIRPEEGFR\n",
2027
    "y = PredKIRP.detach().numpy()\n",
2028
    "\n",
2029
    "# Note the difference in argument order\n",
2030
    "model = sm.OLS(y, X).fit()\n",
2031
    "predictions = model.predict(X) # make the predictions by the model\n",
2032
    "\n",
2033
    "# Print out the statistics\n",
2034
    "model.summary()"
2035
   ]
2036
  },
2037
  {
2038
   "cell_type": "code",
2039
   "execution_count": 17,
2040
   "metadata": {},
2041
   "outputs": [
2042
    {
2043
     "name": "stdout",
2044
     "output_type": "stream",
2045
     "text": [
2046
      "(array([False, False, False, False, False, False, False, False, False,\n",
2047
      "       False, False, False, False, False, False, False, False, False,\n",
2048
      "       False, False, False, False, False, False, False, False, False,\n",
2049
      "       False, False, False, False, False, False,  True, False, False,\n",
2050
      "       False, False, False, False, False, False, False, False, False,\n",
2051
      "       False, False, False, False, False, False, False, False, False,\n",
2052
      "       False, False, False, False, False, False, False, False, False,\n",
2053
      "       False, False, False, False, False, False, False, False, False,\n",
2054
      "       False, False, False, False, False, False, False, False, False,\n",
2055
      "       False, False, False, False, False, False, False, False, False,\n",
2056
      "       False, False, False, False, False, False, False, False, False,\n",
2057
      "       False, False]), array([6.50924133e+00, 7.42508317e+01, 1.58961376e+00, 2.61829337e+01,\n",
2058
      "       2.85630855e+01, 9.15220271e+01, 4.09864261e+00, 8.66437911e+01,\n",
2059
      "       6.88992687e+01, 8.67118407e+01, 1.81588898e+00, 7.76483486e+01,\n",
2060
      "       9.60385004e+00, 1.27689793e+00, 2.19346945e+00, 5.46999451e+01,\n",
2061
      "       3.38503661e+01, 3.84427918e+00, 5.82378406e+01, 4.81885670e+00,\n",
2062
      "       8.13261753e+01, 8.75253594e+01, 4.18628277e+01, 2.32005412e+01,\n",
2063
      "       6.85096311e+01, 1.35581058e+01, 7.33793889e+00, 2.39558196e+01,\n",
2064
      "       7.44977002e+01, 6.36432682e+01, 8.23873035e+01, 5.18247252e+01,\n",
2065
      "       6.83479532e+01, 4.05302431e-02, 4.67644580e+00, 6.11696195e+01,\n",
2066
      "       2.01160277e+01, 6.92197855e+01, 8.23371412e+01, 3.35360038e+00,\n",
2067
      "       1.88156864e+01, 5.40424117e+00, 6.86220958e+00, 4.26728145e+00,\n",
2068
      "       1.19916210e+01, 3.57124009e+01, 2.14829823e+00, 5.88141982e+01,\n",
2069
      "       6.53761469e+01, 8.20869339e+01, 2.39499744e+01, 1.48225996e+01,\n",
2070
      "       8.73590669e+01, 2.31123957e+01, 9.14734677e+01, 9.75047102e+00,\n",
2071
      "       1.03462166e+01, 1.14103902e+01, 2.72031836e+01, 9.48489001e+01,\n",
2072
      "       8.41114074e+01, 1.83243560e+01, 7.32889060e+00, 6.00363356e+00,\n",
2073
      "       6.71240175e+01, 5.63404437e+01, 1.84848930e+00, 2.21054211e+01,\n",
2074
      "       1.42180451e+01, 4.48681168e+01, 1.57037134e+01, 2.76076095e+01,\n",
2075
      "       7.73776675e+01, 6.93695531e+01, 3.55455956e+01, 4.69821872e+01,\n",
2076
      "       5.38662274e+01, 1.12966032e+01, 7.65297883e+01, 5.11549906e-01,\n",
2077
      "       6.71327921e+01, 2.71551225e+01, 2.30738015e+01, 2.26089771e+01,\n",
2078
      "       5.80858590e+01, 4.64402309e+01, 7.68757282e+01, 2.89040346e+01,\n",
2079
      "       1.87755282e+01, 1.90200962e+00, 1.45689516e+01, 7.84330917e+01,\n",
2080
      "       9.38119545e+01, 8.40761596e+01, 3.60228247e-01, 4.70918026e+01,\n",
2081
      "       1.00494392e+02, 6.31281113e+01, 1.00236688e+02, 2.81050319e+01,\n",
2082
      "       1.44242354e+01]))\n"
2083
     ]
2084
    }
2085
   ],
2086
   "source": [
2087
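    "# Bonferroni-adjust the OLS p-values across all EGFR-pathway features; bonferroni_correction (defined/imported\n",
    "# earlier in the notebook) is assumed to return (reject_mask, corrected_pvalues), i.e. roughly pvalues * n_tests tested against alpha\n",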
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
2088
   ]
2089
  },
2090
  {
2091
   "cell_type": "code",
2092
   "execution_count": 18,
2093
   "metadata": {},
2094
   "outputs": [],
2095
   "source": [
2096
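    "# Restrict the BLCA expression, mutation and CNA matrices to the EGFR-related gene list lsEGFR\n",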
    "listEGFR = BLCAE.columns.intersection(lsEGFR)\n",
2097
    "BLCAEEGFR = BLCAE[listEGFR]\n",
2098
    "BLCAMEGFR = BLCAM[listEGFR]\n",
2099
    "BLCACEGFR = BLCAC[listEGFR]   "
2100
   ]
2101
  },
2102
  {
2103
   "cell_type": "code",
2104
   "execution_count": 19,
2105
   "metadata": {},
2106
   "outputs": [
2107
    {
2108
     "data": {
2109
      "text/html": [
2110
       "<table class=\"simpletable\">\n",
2111
       "<caption>OLS Regression Results</caption>\n",
2112
       "<tr>\n",
2113
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.998</td>\n",
2114
       "</tr>\n",
2115
       "<tr>\n",
2116
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.987</td>\n",
2117
       "</tr>\n",
2118
       "<tr>\n",
2119
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   91.05</td>\n",
2120
       "</tr>\n",
2121
       "<tr>\n",
2122
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th> <td>4.86e-18</td>\n",
2123
       "</tr>\n",
2124
       "<tr>\n",
2125
       "  <th>Time:</th>                 <td>18:16:52</td>     <th>  Log-Likelihood:    </th> <td>  291.96</td>\n",
2126
       "</tr>\n",
2127
       "<tr>\n",
2128
       "  <th>No. Observations:</th>      <td>   123</td>      <th>  AIC:               </th> <td>  -381.9</td>\n",
2129
       "</tr>\n",
2130
       "<tr>\n",
2131
       "  <th>Df Residuals:</th>          <td>    22</td>      <th>  BIC:               </th> <td>  -97.88</td>\n",
2132
       "</tr>\n",
2133
       "<tr>\n",
2134
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
2135
       "</tr>\n",
2136
       "<tr>\n",
2137
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
2138
       "</tr>\n",
2139
       "</table>\n",
2140
       "<table class=\"simpletable\">\n",
2141
       "<tr>\n",
2142
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
2143
       "</tr>\n",
2144
       "<tr>\n",
2145
       "  <th>102</th>    <td>    0.0508</td> <td>    0.025</td> <td>    2.072</td> <td> 0.050</td> <td>-4.41e-05</td> <td>    0.102</td>\n",
2146
       "</tr>\n",
2147
       "<tr>\n",
2148
       "  <th>107</th>    <td>    0.0081</td> <td>    0.011</td> <td>    0.733</td> <td> 0.471</td> <td>   -0.015</td> <td>    0.031</td>\n",
2149
       "</tr>\n",
2150
       "<tr>\n",
2151
       "  <th>108</th>    <td>    0.0587</td> <td>    0.039</td> <td>    1.503</td> <td> 0.147</td> <td>   -0.022</td> <td>    0.140</td>\n",
2152
       "</tr>\n",
2153
       "<tr>\n",
2154
       "  <th>109</th>    <td>    0.0054</td> <td>    0.021</td> <td>    0.253</td> <td> 0.803</td> <td>   -0.039</td> <td>    0.050</td>\n",
2155
       "</tr>\n",
2156
       "<tr>\n",
2157
       "  <th>111</th>    <td>    0.0114</td> <td>    0.018</td> <td>    0.620</td> <td> 0.541</td> <td>   -0.027</td> <td>    0.050</td>\n",
2158
       "</tr>\n",
2159
       "<tr>\n",
2160
       "  <th>112</th>    <td>   -0.0243</td> <td>    0.023</td> <td>   -1.037</td> <td> 0.311</td> <td>   -0.073</td> <td>    0.024</td>\n",
2161
       "</tr>\n",
2162
       "<tr>\n",
2163
       "  <th>113</th>    <td>    0.0279</td> <td>    0.022</td> <td>    1.292</td> <td> 0.210</td> <td>   -0.017</td> <td>    0.073</td>\n",
2164
       "</tr>\n",
2165
       "<tr>\n",
2166
       "  <th>114</th>    <td>    0.4896</td> <td>    0.285</td> <td>    1.715</td> <td> 0.100</td> <td>   -0.102</td> <td>    1.081</td>\n",
2167
       "</tr>\n",
2168
       "<tr>\n",
2169
       "  <th>115</th>    <td>   -0.0062</td> <td>    0.029</td> <td>   -0.215</td> <td> 0.831</td> <td>   -0.066</td> <td>    0.054</td>\n",
2170
       "</tr>\n",
2171
       "<tr>\n",
2172
       "  <th>160</th>    <td>   -0.0502</td> <td>    0.034</td> <td>   -1.455</td> <td> 0.160</td> <td>   -0.122</td> <td>    0.021</td>\n",
2173
       "</tr>\n",
2174
       "<tr>\n",
2175
       "  <th>161</th>    <td>    0.0540</td> <td>    0.036</td> <td>    1.521</td> <td> 0.143</td> <td>   -0.020</td> <td>    0.128</td>\n",
2176
       "</tr>\n",
2177
       "<tr>\n",
2178
       "  <th>163</th>    <td>   -0.0213</td> <td>    0.025</td> <td>   -0.852</td> <td> 0.403</td> <td>   -0.073</td> <td>    0.031</td>\n",
2179
       "</tr>\n",
2180
       "<tr>\n",
2181
       "  <th>207</th>    <td>    0.0241</td> <td>    0.034</td> <td>    0.714</td> <td> 0.483</td> <td>   -0.046</td> <td>    0.094</td>\n",
2182
       "</tr>\n",
2183
       "<tr>\n",
2184
       "  <th>208</th>    <td>   -0.0699</td> <td>    0.042</td> <td>   -1.659</td> <td> 0.111</td> <td>   -0.157</td> <td>    0.018</td>\n",
2185
       "</tr>\n",
2186
       "<tr>\n",
2187
       "  <th>572</th>    <td>    0.0345</td> <td>    0.029</td> <td>    1.182</td> <td> 0.250</td> <td>   -0.026</td> <td>    0.095</td>\n",
2188
       "</tr>\n",
2189
       "<tr>\n",
2190
       "  <th>801</th>    <td>    0.0148</td> <td>    0.033</td> <td>    0.448</td> <td> 0.659</td> <td>   -0.054</td> <td>    0.083</td>\n",
2191
       "</tr>\n",
2192
       "<tr>\n",
2193
       "  <th>805</th>    <td>    0.0813</td> <td>    0.027</td> <td>    3.000</td> <td> 0.007</td> <td>    0.025</td> <td>    0.138</td>\n",
2194
       "</tr>\n",
2195
       "<tr>\n",
2196
       "  <th>808</th>    <td>    0.0226</td> <td>    0.035</td> <td>    0.651</td> <td> 0.522</td> <td>   -0.049</td> <td>    0.095</td>\n",
2197
       "</tr>\n",
2198
       "<tr>\n",
2199
       "  <th>814</th>    <td>    0.0117</td> <td>    0.025</td> <td>    0.469</td> <td> 0.644</td> <td>   -0.040</td> <td>    0.063</td>\n",
2200
       "</tr>\n",
2201
       "<tr>\n",
2202
       "  <th>842</th>    <td>    0.0053</td> <td>    0.026</td> <td>    0.203</td> <td> 0.841</td> <td>   -0.049</td> <td>    0.060</td>\n",
2203
       "</tr>\n",
2204
       "<tr>\n",
2205
       "  <th>867</th>    <td>   -0.0095</td> <td>    0.034</td> <td>   -0.280</td> <td> 0.782</td> <td>   -0.080</td> <td>    0.061</td>\n",
2206
       "</tr>\n",
2207
       "<tr>\n",
2208
       "  <th>983</th>    <td>    0.0057</td> <td>    0.018</td> <td>    0.317</td> <td> 0.754</td> <td>   -0.031</td> <td>    0.043</td>\n",
2209
       "</tr>\n",
2210
       "<tr>\n",
2211
       "  <th>998</th>    <td>   -0.0248</td> <td>    0.042</td> <td>   -0.598</td> <td> 0.556</td> <td>   -0.111</td> <td>    0.061</td>\n",
2212
       "</tr>\n",
2213
       "<tr>\n",
2214
       "  <th>1026</th>   <td>    0.0170</td> <td>    0.011</td> <td>    1.504</td> <td> 0.147</td> <td>   -0.006</td> <td>    0.040</td>\n",
2215
       "</tr>\n",
2216
       "<tr>\n",
2217
       "  <th>1027</th>   <td>   -0.0315</td> <td>    0.023</td> <td>   -1.370</td> <td> 0.185</td> <td>   -0.079</td> <td>    0.016</td>\n",
2218
       "</tr>\n",
2219
       "<tr>\n",
2220
       "  <th>1147</th>   <td>   -0.0044</td> <td>    0.032</td> <td>   -0.136</td> <td> 0.893</td> <td>   -0.071</td> <td>    0.062</td>\n",
2221
       "</tr>\n",
2222
       "<tr>\n",
2223
       "  <th>1173</th>   <td>    0.0110</td> <td>    0.034</td> <td>    0.319</td> <td> 0.752</td> <td>   -0.061</td> <td>    0.083</td>\n",
2224
       "</tr>\n",
2225
       "<tr>\n",
2226
       "  <th>1175</th>   <td>    0.0015</td> <td>    0.050</td> <td>    0.029</td> <td> 0.977</td> <td>   -0.102</td> <td>    0.105</td>\n",
2227
       "</tr>\n",
2228
       "<tr>\n",
2229
       "  <th>1211</th>   <td>   -0.0113</td> <td>    0.024</td> <td>   -0.467</td> <td> 0.645</td> <td>   -0.062</td> <td>    0.039</td>\n",
2230
       "</tr>\n",
2231
       "<tr>\n",
2232
       "  <th>1213</th>   <td>   -0.0472</td> <td>    0.042</td> <td>   -1.116</td> <td> 0.277</td> <td>   -0.135</td> <td>    0.041</td>\n",
2233
       "</tr>\n",
2234
       "<tr>\n",
2235
       "  <th>1385</th>   <td>   -0.0493</td> <td>    0.043</td> <td>   -1.155</td> <td> 0.261</td> <td>   -0.138</td> <td>    0.039</td>\n",
2236
       "</tr>\n",
2237
       "<tr>\n",
2238
       "  <th>1445</th>   <td>    0.0032</td> <td>    0.033</td> <td>    0.096</td> <td> 0.924</td> <td>   -0.065</td> <td>    0.072</td>\n",
2239
       "</tr>\n",
2240
       "<tr>\n",
2241
       "  <th>1950</th>   <td>    0.0118</td> <td>    0.023</td> <td>    0.507</td> <td> 0.617</td> <td>   -0.037</td> <td>    0.060</td>\n",
2242
       "</tr>\n",
2243
       "<tr>\n",
2244
       "  <th>1956</th>   <td>   -0.0207</td> <td>    0.010</td> <td>   -2.088</td> <td> 0.049</td> <td>   -0.041</td> <td>   -0.000</td>\n",
2245
       "</tr>\n",
2246
       "<tr>\n",
2247
       "  <th>2060</th>   <td>   -0.1096</td> <td>    0.052</td> <td>   -2.090</td> <td> 0.048</td> <td>   -0.218</td> <td>   -0.001</td>\n",
2248
       "</tr>\n",
2249
       "<tr>\n",
2250
       "  <th>2308</th>   <td>    0.0061</td> <td>    0.021</td> <td>    0.298</td> <td> 0.769</td> <td>   -0.037</td> <td>    0.049</td>\n",
2251
       "</tr>\n",
2252
       "<tr>\n",
2253
       "  <th>2309</th>   <td>    0.0169</td> <td>    0.027</td> <td>    0.615</td> <td> 0.545</td> <td>   -0.040</td> <td>    0.074</td>\n",
2254
       "</tr>\n",
2255
       "<tr>\n",
2256
       "  <th>2475</th>   <td>   -0.0019</td> <td>    0.036</td> <td>   -0.054</td> <td> 0.957</td> <td>   -0.076</td> <td>    0.072</td>\n",
2257
       "</tr>\n",
2258
       "<tr>\n",
2259
       "  <th>2549</th>   <td>    0.0047</td> <td>    0.024</td> <td>    0.196</td> <td> 0.846</td> <td>   -0.045</td> <td>    0.055</td>\n",
2260
       "</tr>\n",
2261
       "<tr>\n",
2262
       "  <th>2885</th>   <td>   -0.0271</td> <td>    0.072</td> <td>   -0.378</td> <td> 0.709</td> <td>   -0.176</td> <td>    0.121</td>\n",
2263
       "</tr>\n",
2264
       "<tr>\n",
2265
       "  <th>2931</th>   <td>    0.0249</td> <td>    0.045</td> <td>    0.552</td> <td> 0.587</td> <td>   -0.069</td> <td>    0.118</td>\n",
2266
       "</tr>\n",
2267
       "<tr>\n",
2268
       "  <th>3164</th>   <td>   -0.0043</td> <td>    0.009</td> <td>   -0.454</td> <td> 0.654</td> <td>   -0.024</td> <td>    0.015</td>\n",
2269
       "</tr>\n",
2270
       "<tr>\n",
2271
       "  <th>3265</th>   <td>   -0.0165</td> <td>    0.020</td> <td>   -0.815</td> <td> 0.424</td> <td>   -0.059</td> <td>    0.026</td>\n",
2272
       "</tr>\n",
2273
       "<tr>\n",
2274
       "  <th>3320</th>   <td>   -0.0002</td> <td>    0.028</td> <td>   -0.007</td> <td> 0.994</td> <td>   -0.058</td> <td>    0.057</td>\n",
2275
       "</tr>\n",
2276
       "<tr>\n",
2277
       "  <th>3709</th>   <td>    0.0107</td> <td>    0.025</td> <td>    0.426</td> <td> 0.674</td> <td>   -0.041</td> <td>    0.063</td>\n",
2278
       "</tr>\n",
2279
       "<tr>\n",
2280
       "  <th>3710</th>   <td>   -0.0068</td> <td>    0.025</td> <td>   -0.278</td> <td> 0.784</td> <td>   -0.058</td> <td>    0.044</td>\n",
2281
       "</tr>\n",
2282
       "<tr>\n",
2283
       "  <th>3845</th>   <td>    0.0211</td> <td>    0.026</td> <td>    0.796</td> <td> 0.435</td> <td>   -0.034</td> <td>    0.076</td>\n",
2284
       "</tr>\n",
2285
       "<tr>\n",
2286
       "  <th>4193</th>   <td>    0.0029</td> <td>    0.013</td> <td>    0.218</td> <td> 0.829</td> <td>   -0.025</td> <td>    0.031</td>\n",
2287
       "</tr>\n",
2288
       "<tr>\n",
2289
       "  <th>4303</th>   <td>   -0.0276</td> <td>    0.019</td> <td>   -1.427</td> <td> 0.168</td> <td>   -0.068</td> <td>    0.012</td>\n",
2290
       "</tr>\n",
2291
       "<tr>\n",
2292
       "  <th>4893</th>   <td>    0.0211</td> <td>    0.028</td> <td>    0.753</td> <td> 0.460</td> <td>   -0.037</td> <td>    0.079</td>\n",
2293
       "</tr>\n",
2294
       "<tr>\n",
2295
       "  <th>5136</th>   <td>   -0.0105</td> <td>    0.022</td> <td>   -0.466</td> <td> 0.646</td> <td>   -0.057</td> <td>    0.036</td>\n",
2296
       "</tr>\n",
2297
       "<tr>\n",
2298
       "  <th>5170</th>   <td>    0.0493</td> <td>    0.043</td> <td>    1.140</td> <td> 0.267</td> <td>   -0.040</td> <td>    0.139</td>\n",
2299
       "</tr>\n",
2300
       "<tr>\n",
2301
       "  <th>5290</th>   <td>    0.0312</td> <td>    0.051</td> <td>    0.610</td> <td> 0.548</td> <td>   -0.075</td> <td>    0.137</td>\n",
2302
       "</tr>\n",
2303
       "<tr>\n",
2304
       "  <th>5295</th>   <td>    0.0059</td> <td>    0.016</td> <td>    0.369</td> <td> 0.715</td> <td>   -0.027</td> <td>    0.039</td>\n",
2305
       "</tr>\n",
2306
       "<tr>\n",
2307
       "  <th>5335</th>   <td>    0.0182</td> <td>    0.025</td> <td>    0.722</td> <td> 0.478</td> <td>   -0.034</td> <td>    0.071</td>\n",
2308
       "</tr>\n",
2309
       "<tr>\n",
2310
       "  <th>5566</th>   <td>    0.0261</td> <td>    0.035</td> <td>    0.738</td> <td> 0.468</td> <td>   -0.047</td> <td>    0.099</td>\n",
2311
       "</tr>\n",
2312
       "<tr>\n",
2313
       "  <th>5567</th>   <td>   -0.0521</td> <td>    0.022</td> <td>   -2.371</td> <td> 0.027</td> <td>   -0.098</td> <td>   -0.007</td>\n",
2314
       "</tr>\n",
2315
       "<tr>\n",
2316
       "  <th>5573</th>   <td>    0.0343</td> <td>    0.047</td> <td>    0.724</td> <td> 0.477</td> <td>   -0.064</td> <td>    0.133</td>\n",
2317
       "</tr>\n",
2318
       "<tr>\n",
2319
       "  <th>5575</th>   <td>    0.0220</td> <td>    0.018</td> <td>    1.251</td> <td> 0.224</td> <td>   -0.014</td> <td>    0.058</td>\n",
2320
       "</tr>\n",
2321
       "<tr>\n",
2322
       "  <th>5576</th>   <td>    0.0253</td> <td>    0.032</td> <td>    0.784</td> <td> 0.442</td> <td>   -0.042</td> <td>    0.092</td>\n",
2323
       "</tr>\n",
2324
       "<tr>\n",
2325
       "  <th>5577</th>   <td>   -0.0081</td> <td>    0.011</td> <td>   -0.739</td> <td> 0.468</td> <td>   -0.031</td> <td>    0.015</td>\n",
2326
       "</tr>\n",
2327
       "<tr>\n",
2328
       "  <th>5578</th>   <td>   -0.0108</td> <td>    0.021</td> <td>   -0.518</td> <td> 0.609</td> <td>   -0.054</td> <td>    0.033</td>\n",
2329
       "</tr>\n",
2330
       "<tr>\n",
2331
       "  <th>5580</th>   <td>    0.0522</td> <td>    0.019</td> <td>    2.714</td> <td> 0.013</td> <td>    0.012</td> <td>    0.092</td>\n",
2332
       "</tr>\n",
2333
       "<tr>\n",
2334
       "  <th>5581</th>   <td>   -0.0035</td> <td>    0.027</td> <td>   -0.132</td> <td> 0.896</td> <td>   -0.059</td> <td>    0.052</td>\n",
2335
       "</tr>\n",
2336
       "<tr>\n",
2337
       "  <th>5582</th>   <td>   -0.0975</td> <td>    0.153</td> <td>   -0.637</td> <td> 0.531</td> <td>   -0.415</td> <td>    0.220</td>\n",
2338
       "</tr>\n",
2339
       "<tr>\n",
2340
       "  <th>5594</th>   <td>   -0.0243</td> <td>    0.034</td> <td>   -0.714</td> <td> 0.483</td> <td>   -0.095</td> <td>    0.046</td>\n",
2341
       "</tr>\n",
2342
       "<tr>\n",
2343
       "  <th>5595</th>   <td>    0.0219</td> <td>    0.018</td> <td>    1.201</td> <td> 0.243</td> <td>   -0.016</td> <td>    0.060</td>\n",
2344
       "</tr>\n",
2345
       "<tr>\n",
2346
       "  <th>5604</th>   <td>   -0.0548</td> <td>    0.045</td> <td>   -1.231</td> <td> 0.231</td> <td>   -0.147</td> <td>    0.038</td>\n",
2347
       "</tr>\n",
2348
       "<tr>\n",
2349
       "  <th>5605</th>   <td>   -0.0198</td> <td>    0.042</td> <td>   -0.471</td> <td> 0.643</td> <td>   -0.107</td> <td>    0.067</td>\n",
2350
       "</tr>\n",
2351
       "<tr>\n",
2352
       "  <th>5728</th>   <td>    0.0233</td> <td>    0.025</td> <td>    0.939</td> <td> 0.358</td> <td>   -0.028</td> <td>    0.075</td>\n",
2353
       "</tr>\n",
2354
       "<tr>\n",
2355
       "  <th>5894</th>   <td>   -0.0257</td> <td>    0.021</td> <td>   -1.216</td> <td> 0.237</td> <td>   -0.069</td> <td>    0.018</td>\n",
2356
       "</tr>\n",
2357
       "<tr>\n",
2358
       "  <th>6199</th>   <td>    0.0482</td> <td>    0.036</td> <td>    1.332</td> <td> 0.196</td> <td>   -0.027</td> <td>    0.123</td>\n",
2359
       "</tr>\n",
2360
       "<tr>\n",
2361
       "  <th>6456</th>   <td>   -0.0083</td> <td>    0.010</td> <td>   -0.844</td> <td> 0.408</td> <td>   -0.029</td> <td>    0.012</td>\n",
2362
       "</tr>\n",
2363
       "<tr>\n",
2364
       "  <th>6464</th>   <td>   -0.0142</td> <td>    0.015</td> <td>   -0.923</td> <td> 0.366</td> <td>   -0.046</td> <td>    0.018</td>\n",
2365
       "</tr>\n",
2366
       "<tr>\n",
2367
       "  <th>6654</th>   <td>    0.0373</td> <td>    0.032</td> <td>    1.178</td> <td> 0.252</td> <td>   -0.028</td> <td>    0.103</td>\n",
2368
       "</tr>\n",
2369
       "<tr>\n",
2370
       "  <th>6714</th>   <td>   -0.0454</td> <td>    0.029</td> <td>   -1.579</td> <td> 0.129</td> <td>   -0.105</td> <td>    0.014</td>\n",
2371
       "</tr>\n",
2372
       "<tr>\n",
2373
       "  <th>6868</th>   <td>   -0.0322</td> <td>    0.026</td> <td>   -1.261</td> <td> 0.220</td> <td>   -0.085</td> <td>    0.021</td>\n",
2374
       "</tr>\n",
2375
       "<tr>\n",
2376
       "  <th>7249</th>   <td>   -0.0030</td> <td>    0.050</td> <td>   -0.060</td> <td> 0.953</td> <td>   -0.107</td> <td>    0.101</td>\n",
2377
       "</tr>\n",
2378
       "<tr>\n",
2379
       "  <th>7311</th>   <td>    0.0187</td> <td>    0.039</td> <td>    0.480</td> <td> 0.636</td> <td>   -0.062</td> <td>    0.099</td>\n",
2380
       "</tr>\n",
2381
       "<tr>\n",
2382
       "  <th>7529</th>   <td>    0.0140</td> <td>    0.033</td> <td>    0.427</td> <td> 0.674</td> <td>   -0.054</td> <td>    0.082</td>\n",
2383
       "</tr>\n",
2384
       "<tr>\n",
2385
       "  <th>8027</th>   <td>   -0.0421</td> <td>    0.026</td> <td>   -1.641</td> <td> 0.115</td> <td>   -0.095</td> <td>    0.011</td>\n",
2386
       "</tr>\n",
2387
       "<tr>\n",
2388
       "  <th>8038</th>   <td>   -0.0077</td> <td>    0.012</td> <td>   -0.620</td> <td> 0.542</td> <td>   -0.033</td> <td>    0.018</td>\n",
2389
       "</tr>\n",
2390
       "<tr>\n",
2391
       "  <th>9146</th>   <td>    0.0875</td> <td>    0.045</td> <td>    1.952</td> <td> 0.064</td> <td>   -0.005</td> <td>    0.180</td>\n",
2392
       "</tr>\n",
2393
       "<tr>\n",
2394
       "  <th>10000</th>  <td>   -0.0031</td> <td>    0.017</td> <td>   -0.184</td> <td> 0.856</td> <td>   -0.038</td> <td>    0.031</td>\n",
2395
       "</tr>\n",
2396
       "<tr>\n",
2397
       "  <th>10252</th>  <td>   -0.0044</td> <td>    0.022</td> <td>   -0.205</td> <td> 0.840</td> <td>   -0.049</td> <td>    0.040</td>\n",
2398
       "</tr>\n",
2399
       "<tr>\n",
2400
       "  <th>10253</th>  <td>   -0.0123</td> <td>    0.014</td> <td>   -0.861</td> <td> 0.398</td> <td>   -0.042</td> <td>    0.017</td>\n",
2401
       "</tr>\n",
2402
       "<tr>\n",
2403
       "  <th>10254</th>  <td>   -0.0406</td> <td>    0.037</td> <td>   -1.092</td> <td> 0.286</td> <td>   -0.118</td> <td>    0.036</td>\n",
2404
       "</tr>\n",
2405
       "<tr>\n",
2406
       "  <th>11140</th>  <td>   -0.0482</td> <td>    0.039</td> <td>   -1.249</td> <td> 0.225</td> <td>   -0.128</td> <td>    0.032</td>\n",
2407
       "</tr>\n",
2408
       "<tr>\n",
2409
       "  <th>23239</th>  <td>    0.0444</td> <td>    0.022</td> <td>    2.051</td> <td> 0.052</td> <td>   -0.000</td> <td>    0.089</td>\n",
2410
       "</tr>\n",
2411
       "<tr>\n",
2412
       "  <th>26018</th>  <td>   -0.0159</td> <td>    0.010</td> <td>   -1.601</td> <td> 0.124</td> <td>   -0.036</td> <td>    0.005</td>\n",
2413
       "</tr>\n",
2414
       "<tr>\n",
2415
       "  <th>29924</th>  <td>   -0.0028</td> <td>    0.038</td> <td>   -0.073</td> <td> 0.942</td> <td>   -0.083</td> <td>    0.077</td>\n",
2416
       "</tr>\n",
2417
       "<tr>\n",
2418
       "  <th>30011</th>  <td>    0.0035</td> <td>    0.011</td> <td>    0.316</td> <td> 0.755</td> <td>   -0.020</td> <td>    0.027</td>\n",
2419
       "</tr>\n",
2420
       "<tr>\n",
2421
       "  <th>55824</th>  <td>    0.0007</td> <td>    0.022</td> <td>    0.033</td> <td> 0.974</td> <td>   -0.045</td> <td>    0.047</td>\n",
2422
       "</tr>\n",
2423
       "<tr>\n",
2424
       "  <th>57761</th>  <td>   -0.0003</td> <td>    0.013</td> <td>   -0.020</td> <td> 0.984</td> <td>   -0.028</td> <td>    0.027</td>\n",
2425
       "</tr>\n",
2426
       "<tr>\n",
2427
       "  <th>58513</th>  <td>    0.0410</td> <td>    0.043</td> <td>    0.946</td> <td> 0.354</td> <td>   -0.049</td> <td>    0.131</td>\n",
2428
       "</tr>\n",
2429
       "<tr>\n",
2430
       "  <th>64223</th>  <td>   -0.1063</td> <td>    0.041</td> <td>   -2.564</td> <td> 0.018</td> <td>   -0.192</td> <td>   -0.020</td>\n",
2431
       "</tr>\n",
2432
       "<tr>\n",
2433
       "  <th>79109</th>  <td>    0.0476</td> <td>    0.033</td> <td>    1.462</td> <td> 0.158</td> <td>   -0.020</td> <td>    0.115</td>\n",
2434
       "</tr>\n",
2435
       "<tr>\n",
2436
       "  <th>84335</th>  <td>    0.0365</td> <td>    0.033</td> <td>    1.117</td> <td> 0.276</td> <td>   -0.031</td> <td>    0.104</td>\n",
2437
       "</tr>\n",
2438
       "<tr>\n",
2439
       "  <th>117145</th> <td>    0.0150</td> <td>    0.018</td> <td>    0.821</td> <td> 0.421</td> <td>   -0.023</td> <td>    0.053</td>\n",
2440
       "</tr>\n",
2441
       "<tr>\n",
2442
       "  <th>196883</th> <td>   -0.0293</td> <td>    0.029</td> <td>   -1.017</td> <td> 0.320</td> <td>   -0.089</td> <td>    0.030</td>\n",
2443
       "</tr>\n",
2444
       "<tr>\n",
2445
       "  <th>253260</th> <td>   -0.0124</td> <td>    0.029</td> <td>   -0.433</td> <td> 0.669</td> <td>   -0.072</td> <td>    0.047</td>\n",
2446
       "</tr>\n",
2447
       "</table>\n",
2448
       "<table class=\"simpletable\">\n",
2449
       "<tr>\n",
2450
       "  <th>Omnibus:</th>       <td> 7.830</td> <th>  Durbin-Watson:     </th> <td>   2.262</td>\n",
2451
       "</tr>\n",
2452
       "<tr>\n",
2453
       "  <th>Prob(Omnibus):</th> <td> 0.020</td> <th>  Jarque-Bera (JB):  </th> <td>   8.086</td>\n",
2454
       "</tr>\n",
2455
       "<tr>\n",
2456
       "  <th>Skew:</th>          <td> 0.465</td> <th>  Prob(JB):          </th> <td>  0.0175</td>\n",
2457
       "</tr>\n",
2458
       "<tr>\n",
2459
       "  <th>Kurtosis:</th>      <td> 3.845</td> <th>  Cond. No.          </th> <td>3.01e+03</td>\n",
2460
       "</tr>\n",
2461
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.<br/>[2] The condition number is large, 3.01e+03. This might indicate that there are<br/>strong multicollinearity or other numerical problems."
2462
      ],
2463
      "text/plain": [
2464
       "<class 'statsmodels.iolib.summary.Summary'>\n",
2465
       "\"\"\"\n",
2466
       "                            OLS Regression Results                            \n",
2467
       "==============================================================================\n",
2468
       "Dep. Variable:                      y   R-squared:                       0.998\n",
2469
       "Model:                            OLS   Adj. R-squared:                  0.987\n",
2470
       "Method:                 Least Squares   F-statistic:                     91.05\n",
2471
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):           4.86e-18\n",
2472
       "Time:                        18:16:52   Log-Likelihood:                 291.96\n",
2473
       "No. Observations:                 123   AIC:                            -381.9\n",
2474
       "Df Residuals:                      22   BIC:                            -97.88\n",
2475
       "Df Model:                         101                                         \n",
2476
       "Covariance Type:            nonrobust                                         \n",
2477
       "==============================================================================\n",
2478
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
2479
       "------------------------------------------------------------------------------\n",
2480
       "102            0.0508      0.025      2.072      0.050   -4.41e-05       0.102\n",
2481
       "107            0.0081      0.011      0.733      0.471      -0.015       0.031\n",
2482
       "108            0.0587      0.039      1.503      0.147      -0.022       0.140\n",
2483
       "109            0.0054      0.021      0.253      0.803      -0.039       0.050\n",
2484
       "111            0.0114      0.018      0.620      0.541      -0.027       0.050\n",
2485
       "112           -0.0243      0.023     -1.037      0.311      -0.073       0.024\n",
2486
       "113            0.0279      0.022      1.292      0.210      -0.017       0.073\n",
2487
       "114            0.4896      0.285      1.715      0.100      -0.102       1.081\n",
2488
       "115           -0.0062      0.029     -0.215      0.831      -0.066       0.054\n",
2489
       "160           -0.0502      0.034     -1.455      0.160      -0.122       0.021\n",
2490
       "161            0.0540      0.036      1.521      0.143      -0.020       0.128\n",
2491
       "163           -0.0213      0.025     -0.852      0.403      -0.073       0.031\n",
2492
       "207            0.0241      0.034      0.714      0.483      -0.046       0.094\n",
2493
       "208           -0.0699      0.042     -1.659      0.111      -0.157       0.018\n",
2494
       "572            0.0345      0.029      1.182      0.250      -0.026       0.095\n",
2495
       "801            0.0148      0.033      0.448      0.659      -0.054       0.083\n",
2496
       "805            0.0813      0.027      3.000      0.007       0.025       0.138\n",
2497
       "808            0.0226      0.035      0.651      0.522      -0.049       0.095\n",
2498
       "814            0.0117      0.025      0.469      0.644      -0.040       0.063\n",
2499
       "842            0.0053      0.026      0.203      0.841      -0.049       0.060\n",
2500
       "867           -0.0095      0.034     -0.280      0.782      -0.080       0.061\n",
2501
       "983            0.0057      0.018      0.317      0.754      -0.031       0.043\n",
2502
       "998           -0.0248      0.042     -0.598      0.556      -0.111       0.061\n",
2503
       "1026           0.0170      0.011      1.504      0.147      -0.006       0.040\n",
2504
       "1027          -0.0315      0.023     -1.370      0.185      -0.079       0.016\n",
2505
       "1147          -0.0044      0.032     -0.136      0.893      -0.071       0.062\n",
2506
       "1173           0.0110      0.034      0.319      0.752      -0.061       0.083\n",
2507
       "1175           0.0015      0.050      0.029      0.977      -0.102       0.105\n",
2508
       "1211          -0.0113      0.024     -0.467      0.645      -0.062       0.039\n",
2509
       "1213          -0.0472      0.042     -1.116      0.277      -0.135       0.041\n",
2510
       "1385          -0.0493      0.043     -1.155      0.261      -0.138       0.039\n",
2511
       "1445           0.0032      0.033      0.096      0.924      -0.065       0.072\n",
2512
       "1950           0.0118      0.023      0.507      0.617      -0.037       0.060\n",
2513
       "1956          -0.0207      0.010     -2.088      0.049      -0.041      -0.000\n",
2514
       "2060          -0.1096      0.052     -2.090      0.048      -0.218      -0.001\n",
2515
       "2308           0.0061      0.021      0.298      0.769      -0.037       0.049\n",
2516
       "2309           0.0169      0.027      0.615      0.545      -0.040       0.074\n",
2517
       "2475          -0.0019      0.036     -0.054      0.957      -0.076       0.072\n",
2518
       "2549           0.0047      0.024      0.196      0.846      -0.045       0.055\n",
2519
       "2885          -0.0271      0.072     -0.378      0.709      -0.176       0.121\n",
2520
       "2931           0.0249      0.045      0.552      0.587      -0.069       0.118\n",
2521
       "3164          -0.0043      0.009     -0.454      0.654      -0.024       0.015\n",
2522
       "3265          -0.0165      0.020     -0.815      0.424      -0.059       0.026\n",
2523
       "3320          -0.0002      0.028     -0.007      0.994      -0.058       0.057\n",
2524
       "3709           0.0107      0.025      0.426      0.674      -0.041       0.063\n",
2525
       "3710          -0.0068      0.025     -0.278      0.784      -0.058       0.044\n",
2526
       "3845           0.0211      0.026      0.796      0.435      -0.034       0.076\n",
2527
       "4193           0.0029      0.013      0.218      0.829      -0.025       0.031\n",
2528
       "4303          -0.0276      0.019     -1.427      0.168      -0.068       0.012\n",
2529
       "4893           0.0211      0.028      0.753      0.460      -0.037       0.079\n",
2530
       "5136          -0.0105      0.022     -0.466      0.646      -0.057       0.036\n",
2531
       "5170           0.0493      0.043      1.140      0.267      -0.040       0.139\n",
2532
       "5290           0.0312      0.051      0.610      0.548      -0.075       0.137\n",
2533
       "5295           0.0059      0.016      0.369      0.715      -0.027       0.039\n",
2534
       "5335           0.0182      0.025      0.722      0.478      -0.034       0.071\n",
2535
       "5566           0.0261      0.035      0.738      0.468      -0.047       0.099\n",
2536
       "5567          -0.0521      0.022     -2.371      0.027      -0.098      -0.007\n",
2537
       "5573           0.0343      0.047      0.724      0.477      -0.064       0.133\n",
2538
       "5575           0.0220      0.018      1.251      0.224      -0.014       0.058\n",
2539
       "5576           0.0253      0.032      0.784      0.442      -0.042       0.092\n",
2540
       "5577          -0.0081      0.011     -0.739      0.468      -0.031       0.015\n",
2541
       "5578          -0.0108      0.021     -0.518      0.609      -0.054       0.033\n",
2542
       "5580           0.0522      0.019      2.714      0.013       0.012       0.092\n",
2543
       "5581          -0.0035      0.027     -0.132      0.896      -0.059       0.052\n",
2544
       "5582          -0.0975      0.153     -0.637      0.531      -0.415       0.220\n",
2545
       "5594          -0.0243      0.034     -0.714      0.483      -0.095       0.046\n",
2546
       "5595           0.0219      0.018      1.201      0.243      -0.016       0.060\n",
2547
       "5604          -0.0548      0.045     -1.231      0.231      -0.147       0.038\n",
2548
       "5605          -0.0198      0.042     -0.471      0.643      -0.107       0.067\n",
2549
       "5728           0.0233      0.025      0.939      0.358      -0.028       0.075\n",
2550
       "5894          -0.0257      0.021     -1.216      0.237      -0.069       0.018\n",
2551
       "6199           0.0482      0.036      1.332      0.196      -0.027       0.123\n",
2552
       "6456          -0.0083      0.010     -0.844      0.408      -0.029       0.012\n",
2553
       "6464          -0.0142      0.015     -0.923      0.366      -0.046       0.018\n",
2554
       "6654           0.0373      0.032      1.178      0.252      -0.028       0.103\n",
2555
       "6714          -0.0454      0.029     -1.579      0.129      -0.105       0.014\n",
2556
       "6868          -0.0322      0.026     -1.261      0.220      -0.085       0.021\n",
2557
       "7249          -0.0030      0.050     -0.060      0.953      -0.107       0.101\n",
2558
       "7311           0.0187      0.039      0.480      0.636      -0.062       0.099\n",
2559
       "7529           0.0140      0.033      0.427      0.674      -0.054       0.082\n",
2560
       "8027          -0.0421      0.026     -1.641      0.115      -0.095       0.011\n",
2561
       "8038          -0.0077      0.012     -0.620      0.542      -0.033       0.018\n",
2562
       "9146           0.0875      0.045      1.952      0.064      -0.005       0.180\n",
2563
       "10000         -0.0031      0.017     -0.184      0.856      -0.038       0.031\n",
2564
       "10252         -0.0044      0.022     -0.205      0.840      -0.049       0.040\n",
2565
       "10253         -0.0123      0.014     -0.861      0.398      -0.042       0.017\n",
2566
       "10254         -0.0406      0.037     -1.092      0.286      -0.118       0.036\n",
2567
       "11140         -0.0482      0.039     -1.249      0.225      -0.128       0.032\n",
2568
       "23239          0.0444      0.022      2.051      0.052      -0.000       0.089\n",
2569
       "26018         -0.0159      0.010     -1.601      0.124      -0.036       0.005\n",
2570
       "29924         -0.0028      0.038     -0.073      0.942      -0.083       0.077\n",
2571
       "30011          0.0035      0.011      0.316      0.755      -0.020       0.027\n",
2572
       "55824          0.0007      0.022      0.033      0.974      -0.045       0.047\n",
2573
       "57761         -0.0003      0.013     -0.020      0.984      -0.028       0.027\n",
2574
       "58513          0.0410      0.043      0.946      0.354      -0.049       0.131\n",
2575
       "64223         -0.1063      0.041     -2.564      0.018      -0.192      -0.020\n",
2576
       "79109          0.0476      0.033      1.462      0.158      -0.020       0.115\n",
2577
       "84335          0.0365      0.033      1.117      0.276      -0.031       0.104\n",
2578
       "117145         0.0150      0.018      0.821      0.421      -0.023       0.053\n",
2579
       "196883        -0.0293      0.029     -1.017      0.320      -0.089       0.030\n",
2580
       "253260        -0.0124      0.029     -0.433      0.669      -0.072       0.047\n",
2581
       "==============================================================================\n",
2582
       "Omnibus:                        7.830   Durbin-Watson:                   2.262\n",
2583
       "Prob(Omnibus):                  0.020   Jarque-Bera (JB):                8.086\n",
2584
       "Skew:                           0.465   Prob(JB):                       0.0175\n",
2585
       "Kurtosis:                       3.845   Cond. No.                     3.01e+03\n",
2586
       "==============================================================================\n",
2587
       "\n",
2588
       "Warnings:\n",
2589
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
2590
       "[2] The condition number is large, 3.01e+03. This might indicate that there are\n",
2591
       "strong multicollinearity or other numerical problems.\n",
2592
       "\"\"\""
2593
      ]
2594
     },
2595
     "execution_count": 19,
2596
     "metadata": {},
2597
     "output_type": "execute_result"
2598
    }
2599
   ],
2600
   "source": [
2601
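    "# Repeat for BLCA: regress the predicted responses (PredBLCA) on the BLCA EGFR-pathway expression features\n",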
    "X = BLCAEEGFR\n",
2602
    "y = PredBLCA.detach().numpy()\n",
2603
    "\n",
2604
    "# Note the difference in argument order\n",
2605
    "model = sm.OLS(y, X).fit()\n",
2606
    "predictions = model.predict(X) # make the predictions by the model\n",
2607
    "\n",
2608
    "# Print out the statistics\n",
2609
    "model.summary()"
2610
   ]
2611
  },
2612
  {
2613
   "cell_type": "code",
2614
   "execution_count": 20,
2615
   "metadata": {},
2616
   "outputs": [
2617
    {
2618
     "name": "stdout",
2619
     "output_type": "stream",
2620
     "text": [
2621
      "(array([False, False, False, False, False, False, False, False, False,\n",
2622
      "       False, False, False, False, False, False, False, False, False,\n",
2623
      "       False, False, False, False, False, False, False, False, False,\n",
2624
      "       False, False, False, False, False, False, False, False, False,\n",
2625
      "       False, False, False, False, False, False, False, False, False,\n",
2626
      "       False, False, False, False, False, False, False, False, False,\n",
2627
      "       False, False, False, False, False, False, False, False, False,\n",
2628
      "       False, False, False, False, False, False, False, False, False,\n",
2629
      "       False, False, False, False, False, False, False, False, False,\n",
2630
      "       False, False, False, False, False, False, False, False, False,\n",
2631
      "       False, False, False, False, False, False, False, False, False,\n",
2632
      "       False, False]), array([  5.06840256,  47.60953626,  14.86290717,  81.05458141,\n",
2633
      "        54.68706095,  31.41998312,  21.17450173,  10.13547604,\n",
2634
      "        83.97572787,  16.15018927,  14.39431539,  40.7389554 ,\n",
2635
      "        48.7622678 ,  11.25163908,  25.24933182,  66.53217609,\n",
2636
      "         0.66653408,  52.68913068,  65.01624174,  84.97032154,\n",
2637
      "        78.96788527,  76.19414517,  56.17863377,  14.82209503,\n",
2638
      "        18.64538126,  90.23406241,  76.00057555,  98.65567388,\n",
2639
      "        65.17566821,  27.92891398,  26.31132821,  93.33782611,\n",
2640
      "        62.32498483,   4.90432369,   4.88402139,  77.6546395 ,\n",
2641
      "        55.05906614,  96.68173076,  85.47561601,  71.6146374 ,\n",
2642
      "        59.23824967,  66.09782514,  42.82351029, 100.43457182,\n",
2643
      "        68.10069573,  79.14434754,  43.89125917,  83.76063748,\n",
2644
      "        16.92229075,  46.42142942,  65.22891846,  26.92473721,\n",
2645
      "        55.37319615,  72.25861436,  48.27420002,  47.27537543,\n",
2646
      "         2.72175867,  48.17531057,  22.64234997,  44.59470743,\n",
2647
      "        47.22090177,  61.55844715,   1.28104209,  90.54064326,\n",
2648
      "        53.63107252,  48.75299788,  24.49973515,  23.37963417,\n",
2649
      "        64.89529953,  36.14780333,  23.93290514,  19.82990307,\n",
2650
      "        41.20579122,  36.98475504,  25.406636  ,  12.98970355,\n",
2651
      "        22.25623324,  96.23507665,  64.22040675,  68.02830777,\n",
2652
      "        11.62448639,  54.70568442,   6.43666445,  86.45827115,\n",
2653
      "        84.79664747,  40.22874239,  28.93374255,  22.700239  ,\n",
2654
      "         5.29036532,  12.49659856,  95.16948586,  76.25114624,\n",
2655
      "        98.35586247,  99.42202676,  35.79864472,   1.78787595,\n",
2656
      "        15.94299284,  27.89141517,  42.48126526,  32.32102167,\n",
2657
      "        67.60312619]))\n"
2658
     ]
2659
    }
2660
   ],
2661
   "source": [
2662
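    "# Bonferroni correction of the BLCA model's p-values (same helper as above)\n",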
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
2663
   ]
2664
  },
2665
  {
2666
   "cell_type": "code",
2667
   "execution_count": 21,
2668
   "metadata": {},
2669
   "outputs": [],
2670
   "source": [
2671
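    "# Restrict the BRCA expression, mutation and CNA matrices to the EGFR-related gene list lsEGFR\n",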
    "listEGFR = BRCAE.columns.intersection(lsEGFR)\n",
2672
    "BRCAEEGFR = BRCAE[listEGFR]\n",
2673
    "BRCAMEGFR = BRCAM[listEGFR]\n",
2674
    "BRCACEGFR = BRCAC[listEGFR]  "
2675
   ]
2676
  },
2677
  {
2678
   "cell_type": "code",
2679
   "execution_count": 22,
2680
   "metadata": {},
2681
   "outputs": [
2682
    {
2683
     "data": {
2684
      "text/html": [
2685
       "<table class=\"simpletable\">\n",
2686
       "<caption>OLS Regression Results</caption>\n",
2687
       "<tr>\n",
2688
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.999</td>\n",
2689
       "</tr>\n",
2690
       "<tr>\n",
2691
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.999</td>\n",
2692
       "</tr>\n",
2693
       "<tr>\n",
2694
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   6893.</td>\n",
2695
       "</tr>\n",
2696
       "<tr>\n",
2697
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th>  <td>  0.00</td> \n",
2698
       "</tr>\n",
2699
       "<tr>\n",
2700
       "  <th>Time:</th>                 <td>18:16:54</td>     <th>  Log-Likelihood:    </th> <td>  2466.9</td>\n",
2701
       "</tr>\n",
2702
       "<tr>\n",
2703
       "  <th>No. Observations:</th>      <td>   921</td>      <th>  AIC:               </th> <td>  -4732.</td>\n",
2704
       "</tr>\n",
2705
       "<tr>\n",
2706
       "  <th>Df Residuals:</th>          <td>   820</td>      <th>  BIC:               </th> <td>  -4245.</td>\n",
2707
       "</tr>\n",
2708
       "<tr>\n",
2709
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
2710
       "</tr>\n",
2711
       "<tr>\n",
2712
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
2713
       "</tr>\n",
2714
       "</table>\n",
2715
       "<table class=\"simpletable\">\n",
2716
       "<tr>\n",
2717
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
2718
       "</tr>\n",
2719
       "<tr>\n",
2720
       "  <th>102</th>    <td>    0.0011</td> <td>    0.002</td> <td>    0.672</td> <td> 0.502</td> <td>   -0.002</td> <td>    0.004</td>\n",
2721
       "</tr>\n",
2722
       "<tr>\n",
2723
       "  <th>107</th>    <td>   -0.0011</td> <td>    0.000</td> <td>   -2.468</td> <td> 0.014</td> <td>   -0.002</td> <td>   -0.000</td>\n",
2724
       "</tr>\n",
2725
       "<tr>\n",
2726
       "  <th>108</th>    <td>   -0.0013</td> <td>    0.001</td> <td>   -2.000</td> <td> 0.046</td> <td>   -0.003</td> <td>-2.48e-05</td>\n",
2727
       "</tr>\n",
2728
       "<tr>\n",
2729
       "  <th>109</th>    <td>    0.0002</td> <td>    0.001</td> <td>    0.183</td> <td> 0.855</td> <td>   -0.002</td> <td>    0.003</td>\n",
2730
       "</tr>\n",
2731
       "<tr>\n",
2732
       "  <th>111</th>    <td>   -0.0003</td> <td>    0.001</td> <td>   -0.611</td> <td> 0.541</td> <td>   -0.001</td> <td>    0.001</td>\n",
2733
       "</tr>\n",
2734
       "<tr>\n",
2735
       "  <th>112</th>    <td>    0.0015</td> <td>    0.001</td> <td>    1.019</td> <td> 0.309</td> <td>   -0.001</td> <td>    0.004</td>\n",
2736
       "</tr>\n",
2737
       "<tr>\n",
2738
       "  <th>113</th>    <td>   -0.0020</td> <td>    0.001</td> <td>   -1.351</td> <td> 0.177</td> <td>   -0.005</td> <td>    0.001</td>\n",
2739
       "</tr>\n",
2740
       "<tr>\n",
2741
       "  <th>114</th>    <td>   -0.0026</td> <td>    0.002</td> <td>   -1.382</td> <td> 0.167</td> <td>   -0.006</td> <td>    0.001</td>\n",
2742
       "</tr>\n",
2743
       "<tr>\n",
2744
       "  <th>115</th>    <td>    0.0008</td> <td>    0.001</td> <td>    0.651</td> <td> 0.515</td> <td>   -0.002</td> <td>    0.003</td>\n",
2745
       "</tr>\n",
2746
       "<tr>\n",
2747
       "  <th>160</th>    <td>    0.0067</td> <td>    0.002</td> <td>    3.978</td> <td> 0.000</td> <td>    0.003</td> <td>    0.010</td>\n",
2748
       "</tr>\n",
2749
       "<tr>\n",
2750
       "  <th>161</th>    <td>   -0.0014</td> <td>    0.002</td> <td>   -0.675</td> <td> 0.500</td> <td>   -0.006</td> <td>    0.003</td>\n",
2751
       "</tr>\n",
2752
       "<tr>\n",
2753
       "  <th>163</th>    <td> 5.495e-05</td> <td>    0.001</td> <td>    0.047</td> <td> 0.962</td> <td>   -0.002</td> <td>    0.002</td>\n",
2754
       "</tr>\n",
2755
       "<tr>\n",
2756
       "  <th>207</th>    <td>    0.0028</td> <td>    0.002</td> <td>    1.587</td> <td> 0.113</td> <td>   -0.001</td> <td>    0.006</td>\n",
2757
       "</tr>\n",
2758
       "<tr>\n",
2759
       "  <th>208</th>    <td>   -0.0023</td> <td>    0.002</td> <td>   -1.390</td> <td> 0.165</td> <td>   -0.006</td> <td>    0.001</td>\n",
2760
       "</tr>\n",
2761
       "<tr>\n",
2762
       "  <th>572</th>    <td>   -0.0023</td> <td>    0.002</td> <td>   -1.392</td> <td> 0.164</td> <td>   -0.005</td> <td>    0.001</td>\n",
2763
       "</tr>\n",
2764
       "<tr>\n",
2765
       "  <th>801</th>    <td>    0.0022</td> <td>    0.002</td> <td>    1.102</td> <td> 0.271</td> <td>   -0.002</td> <td>    0.006</td>\n",
2766
       "</tr>\n",
2767
       "<tr>\n",
2768
       "  <th>805</th>    <td>    0.0063</td> <td>    0.002</td> <td>    3.883</td> <td> 0.000</td> <td>    0.003</td> <td>    0.010</td>\n",
2769
       "</tr>\n",
2770
       "<tr>\n",
2771
       "  <th>808</th>    <td>    0.0019</td> <td>    0.002</td> <td>    0.939</td> <td> 0.348</td> <td>   -0.002</td> <td>    0.006</td>\n",
2772
       "</tr>\n",
2773
       "<tr>\n",
2774
       "  <th>814</th>    <td>    0.0010</td> <td>    0.002</td> <td>    0.553</td> <td> 0.580</td> <td>   -0.003</td> <td>    0.004</td>\n",
2775
       "</tr>\n",
2776
       "<tr>\n",
2777
       "  <th>842</th>    <td>   -0.0025</td> <td>    0.002</td> <td>   -1.337</td> <td> 0.182</td> <td>   -0.006</td> <td>    0.001</td>\n",
2778
       "</tr>\n",
2779
       "<tr>\n",
2780
       "  <th>867</th>    <td>   -0.0038</td> <td>    0.002</td> <td>   -1.849</td> <td> 0.065</td> <td>   -0.008</td> <td>    0.000</td>\n",
2781
       "</tr>\n",
2782
       "<tr>\n",
2783
       "  <th>983</th>    <td> 1.694e-05</td> <td>    0.001</td> <td>    0.019</td> <td> 0.985</td> <td>   -0.002</td> <td>    0.002</td>\n",
2784
       "</tr>\n",
2785
       "<tr>\n",
2786
       "  <th>998</th>    <td>    0.0023</td> <td>    0.002</td> <td>    0.999</td> <td> 0.318</td> <td>   -0.002</td> <td>    0.007</td>\n",
2787
       "</tr>\n",
2788
       "<tr>\n",
2789
       "  <th>1026</th>   <td>   -0.0022</td> <td>    0.001</td> <td>   -2.660</td> <td> 0.008</td> <td>   -0.004</td> <td>   -0.001</td>\n",
2790
       "</tr>\n",
2791
       "<tr>\n",
2792
       "  <th>1027</th>   <td>    0.0012</td> <td>    0.001</td> <td>    1.529</td> <td> 0.127</td> <td>   -0.000</td> <td>    0.003</td>\n",
2793
       "</tr>\n",
2794
       "<tr>\n",
2795
       "  <th>1147</th>   <td>    0.0039</td> <td>    0.002</td> <td>    2.011</td> <td> 0.045</td> <td> 9.45e-05</td> <td>    0.008</td>\n",
2796
       "</tr>\n",
2797
       "<tr>\n",
2798
       "  <th>1173</th>   <td>   -0.0016</td> <td>    0.002</td> <td>   -0.793</td> <td> 0.428</td> <td>   -0.006</td> <td>    0.002</td>\n",
2799
       "</tr>\n",
2800
       "<tr>\n",
2801
       "  <th>1175</th>   <td>    0.0050</td> <td>    0.002</td> <td>    2.424</td> <td> 0.016</td> <td>    0.001</td> <td>    0.009</td>\n",
2802
       "</tr>\n",
2803
       "<tr>\n",
2804
       "  <th>1211</th>   <td>    0.0079</td> <td>    0.002</td> <td>    4.775</td> <td> 0.000</td> <td>    0.005</td> <td>    0.011</td>\n",
2805
       "</tr>\n",
2806
       "<tr>\n",
2807
       "  <th>1213</th>   <td>    0.0011</td> <td>    0.001</td> <td>    0.815</td> <td> 0.415</td> <td>   -0.002</td> <td>    0.004</td>\n",
2808
       "</tr>\n",
2809
       "<tr>\n",
2810
       "  <th>1385</th>   <td>   -0.0048</td> <td>    0.003</td> <td>   -1.743</td> <td> 0.082</td> <td>   -0.010</td> <td>    0.001</td>\n",
2811
       "</tr>\n",
2812
       "<tr>\n",
2813
       "  <th>1445</th>   <td>   -0.0036</td> <td>    0.002</td> <td>   -1.679</td> <td> 0.094</td> <td>   -0.008</td> <td>    0.001</td>\n",
2814
       "</tr>\n",
2815
       "<tr>\n",
2816
       "  <th>1950</th>   <td>    0.0010</td> <td>    0.001</td> <td>    1.717</td> <td> 0.086</td> <td>   -0.000</td> <td>    0.002</td>\n",
2817
       "</tr>\n",
2818
       "<tr>\n",
2819
       "  <th>1956</th>   <td>    0.0041</td> <td>    0.001</td> <td>    5.366</td> <td> 0.000</td> <td>    0.003</td> <td>    0.006</td>\n",
2820
       "</tr>\n",
2821
       "<tr>\n",
2822
       "  <th>2060</th>   <td>   -0.0023</td> <td>    0.002</td> <td>   -1.028</td> <td> 0.304</td> <td>   -0.007</td> <td>    0.002</td>\n",
2823
       "</tr>\n",
2824
       "<tr>\n",
2825
       "  <th>2308</th>   <td>   -0.0014</td> <td>    0.002</td> <td>   -0.885</td> <td> 0.377</td> <td>   -0.004</td> <td>    0.002</td>\n",
2826
       "</tr>\n",
2827
       "<tr>\n",
2828
       "  <th>2309</th>   <td>    0.0012</td> <td>    0.001</td> <td>    0.939</td> <td> 0.348</td> <td>   -0.001</td> <td>    0.004</td>\n",
2829
       "</tr>\n",
2830
       "<tr>\n",
2831
       "  <th>2475</th>   <td>   -0.0078</td> <td>    0.002</td> <td>   -3.325</td> <td> 0.001</td> <td>   -0.012</td> <td>   -0.003</td>\n",
2832
       "</tr>\n",
2833
       "<tr>\n",
2834
       "  <th>2549</th>   <td>   -0.0019</td> <td>    0.001</td> <td>   -1.341</td> <td> 0.180</td> <td>   -0.005</td> <td>    0.001</td>\n",
2835
       "</tr>\n",
2836
       "<tr>\n",
2837
       "  <th>2885</th>   <td>   -0.0030</td> <td>    0.001</td> <td>   -2.034</td> <td> 0.042</td> <td>   -0.006</td> <td>   -0.000</td>\n",
2838
       "</tr>\n",
2839
       "<tr>\n",
2840
       "  <th>2931</th>   <td>    0.0036</td> <td>    0.002</td> <td>    1.483</td> <td> 0.138</td> <td>   -0.001</td> <td>    0.008</td>\n",
2841
       "</tr>\n",
2842
       "<tr>\n",
2843
       "  <th>3164</th>   <td>    0.0007</td> <td>    0.001</td> <td>    1.135</td> <td> 0.257</td> <td>   -0.000</td> <td>    0.002</td>\n",
2844
       "</tr>\n",
2845
       "<tr>\n",
2846
       "  <th>3265</th>   <td>    0.0008</td> <td>    0.001</td> <td>    0.563</td> <td> 0.574</td> <td>   -0.002</td> <td>    0.004</td>\n",
2847
       "</tr>\n",
2848
       "<tr>\n",
2849
       "  <th>3320</th>   <td>    0.0034</td> <td>    0.001</td> <td>    2.468</td> <td> 0.014</td> <td>    0.001</td> <td>    0.006</td>\n",
2850
       "</tr>\n",
2851
       "<tr>\n",
2852
       "  <th>3709</th>   <td>    0.0015</td> <td>    0.001</td> <td>    2.033</td> <td> 0.042</td> <td> 5.28e-05</td> <td>    0.003</td>\n",
2853
       "</tr>\n",
2854
       "<tr>\n",
2855
       "  <th>3710</th>   <td>   -0.0004</td> <td>    0.001</td> <td>   -0.382</td> <td> 0.703</td> <td>   -0.003</td> <td>    0.002</td>\n",
2856
       "</tr>\n",
2857
       "<tr>\n",
2858
       "  <th>3845</th>   <td>    0.0007</td> <td>    0.001</td> <td>    0.462</td> <td> 0.644</td> <td>   -0.002</td> <td>    0.004</td>\n",
2859
       "</tr>\n",
2860
       "<tr>\n",
2861
       "  <th>4193</th>   <td>   -0.0005</td> <td>    0.001</td> <td>   -0.443</td> <td> 0.658</td> <td>   -0.003</td> <td>    0.002</td>\n",
2862
       "</tr>\n",
2863
       "<tr>\n",
2864
       "  <th>4303</th>   <td>    0.0018</td> <td>    0.001</td> <td>    1.436</td> <td> 0.151</td> <td>   -0.001</td> <td>    0.004</td>\n",
2865
       "</tr>\n",
2866
       "<tr>\n",
2867
       "  <th>4893</th>   <td>   -0.0007</td> <td>    0.001</td> <td>   -0.608</td> <td> 0.544</td> <td>   -0.003</td> <td>    0.001</td>\n",
2868
       "</tr>\n",
2869
       "<tr>\n",
2870
       "  <th>5136</th>   <td>   -0.0017</td> <td>    0.001</td> <td>   -1.156</td> <td> 0.248</td> <td>   -0.004</td> <td>    0.001</td>\n",
2871
       "</tr>\n",
2872
       "<tr>\n",
2873
       "  <th>5170</th>   <td>   -0.0022</td> <td>    0.002</td> <td>   -0.930</td> <td> 0.352</td> <td>   -0.007</td> <td>    0.002</td>\n",
2874
       "</tr>\n",
2875
       "<tr>\n",
2876
       "  <th>5290</th>   <td>    0.0066</td> <td>    0.002</td> <td>    3.998</td> <td> 0.000</td> <td>    0.003</td> <td>    0.010</td>\n",
2877
       "</tr>\n",
2878
       "<tr>\n",
2879
       "  <th>5295</th>   <td>   -0.0001</td> <td>    0.001</td> <td>   -0.152</td> <td> 0.880</td> <td>   -0.002</td> <td>    0.002</td>\n",
2880
       "</tr>\n",
2881
       "<tr>\n",
2882
       "  <th>5335</th>   <td>    0.0002</td> <td>    0.002</td> <td>    0.112</td> <td> 0.911</td> <td>   -0.003</td> <td>    0.004</td>\n",
2883
       "</tr>\n",
2884
       "<tr>\n",
2885
       "  <th>5566</th>   <td>    0.0012</td> <td>    0.002</td> <td>    0.609</td> <td> 0.543</td> <td>   -0.003</td> <td>    0.005</td>\n",
2886
       "</tr>\n",
2887
       "<tr>\n",
2888
       "  <th>5567</th>   <td>   -0.0010</td> <td>    0.001</td> <td>   -1.916</td> <td> 0.056</td> <td>   -0.002</td> <td> 2.43e-05</td>\n",
2889
       "</tr>\n",
2890
       "<tr>\n",
2891
       "  <th>5573</th>   <td>    0.0001</td> <td>    0.002</td> <td>    0.059</td> <td> 0.953</td> <td>   -0.004</td> <td>    0.004</td>\n",
2892
       "</tr>\n",
2893
       "<tr>\n",
2894
       "  <th>5575</th>   <td>   -0.0016</td> <td>    0.001</td> <td>   -1.479</td> <td> 0.139</td> <td>   -0.004</td> <td>    0.001</td>\n",
2895
       "</tr>\n",
2896
       "<tr>\n",
2897
       "  <th>5576</th>   <td>   -0.0040</td> <td>    0.002</td> <td>   -2.427</td> <td> 0.015</td> <td>   -0.007</td> <td>   -0.001</td>\n",
2898
       "</tr>\n",
2899
       "<tr>\n",
2900
       "  <th>5577</th>   <td>    0.0004</td> <td>    0.001</td> <td>    0.627</td> <td> 0.531</td> <td>   -0.001</td> <td>    0.002</td>\n",
2901
       "</tr>\n",
2902
       "<tr>\n",
2903
       "  <th>5578</th>   <td>    0.0007</td> <td>    0.001</td> <td>    0.548</td> <td> 0.584</td> <td>   -0.002</td> <td>    0.003</td>\n",
2904
       "</tr>\n",
2905
       "<tr>\n",
2906
       "  <th>5580</th>   <td>   -0.0017</td> <td>    0.001</td> <td>   -1.395</td> <td> 0.163</td> <td>   -0.004</td> <td>    0.001</td>\n",
2907
       "</tr>\n",
2908
       "<tr>\n",
2909
       "  <th>5581</th>   <td>   -0.0006</td> <td>    0.001</td> <td>   -0.528</td> <td> 0.598</td> <td>   -0.003</td> <td>    0.002</td>\n",
2910
       "</tr>\n",
2911
       "<tr>\n",
2912
       "  <th>5582</th>   <td>   -0.0025</td> <td>    0.003</td> <td>   -0.781</td> <td> 0.435</td> <td>   -0.009</td> <td>    0.004</td>\n",
2913
       "</tr>\n",
2914
       "<tr>\n",
2915
       "  <th>5594</th>   <td>    0.0020</td> <td>    0.002</td> <td>    1.066</td> <td> 0.287</td> <td>   -0.002</td> <td>    0.006</td>\n",
2916
       "</tr>\n",
2917
       "<tr>\n",
2918
       "  <th>5595</th>   <td>    0.0047</td> <td>    0.002</td> <td>    2.902</td> <td> 0.004</td> <td>    0.002</td> <td>    0.008</td>\n",
2919
       "</tr>\n",
2920
       "<tr>\n",
2921
       "  <th>5604</th>   <td>    0.0006</td> <td>    0.002</td> <td>    0.311</td> <td> 0.756</td> <td>   -0.003</td> <td>    0.005</td>\n",
2922
       "</tr>\n",
2923
       "<tr>\n",
2924
       "  <th>5605</th>   <td>    0.0032</td> <td>    0.002</td> <td>    1.607</td> <td> 0.108</td> <td>   -0.001</td> <td>    0.007</td>\n",
2925
       "</tr>\n",
2926
       "<tr>\n",
2927
       "  <th>5728</th>   <td>    0.0010</td> <td>    0.001</td> <td>    0.796</td> <td> 0.426</td> <td>   -0.002</td> <td>    0.004</td>\n",
2928
       "</tr>\n",
2929
       "<tr>\n",
2930
       "  <th>5894</th>   <td>    0.0035</td> <td>    0.002</td> <td>    1.753</td> <td> 0.080</td> <td>   -0.000</td> <td>    0.007</td>\n",
2931
       "</tr>\n",
2932
       "<tr>\n",
2933
       "  <th>6199</th>   <td>   -0.0008</td> <td>    0.001</td> <td>   -0.650</td> <td> 0.516</td> <td>   -0.003</td> <td>    0.002</td>\n",
2934
       "</tr>\n",
2935
       "<tr>\n",
2936
       "  <th>6456</th>   <td>   -0.0005</td> <td>    0.001</td> <td>   -0.475</td> <td> 0.635</td> <td>   -0.003</td> <td>    0.002</td>\n",
2937
       "</tr>\n",
2938
       "<tr>\n",
2939
       "  <th>6464</th>   <td>    0.0028</td> <td>    0.002</td> <td>    1.581</td> <td> 0.114</td> <td>   -0.001</td> <td>    0.006</td>\n",
2940
       "</tr>\n",
2941
       "<tr>\n",
2942
       "  <th>6654</th>   <td>    0.0027</td> <td>    0.002</td> <td>    1.176</td> <td> 0.240</td> <td>   -0.002</td> <td>    0.007</td>\n",
2943
       "</tr>\n",
2944
       "<tr>\n",
2945
       "  <th>6714</th>   <td>   -0.0009</td> <td>    0.001</td> <td>   -0.742</td> <td> 0.458</td> <td>   -0.003</td> <td>    0.002</td>\n",
2946
       "</tr>\n",
2947
       "<tr>\n",
2948
       "  <th>6868</th>   <td>   -0.0019</td> <td>    0.002</td> <td>   -0.882</td> <td> 0.378</td> <td>   -0.006</td> <td>    0.002</td>\n",
2949
       "</tr>\n",
2950
       "<tr>\n",
2951
       "  <th>7249</th>   <td>    0.0059</td> <td>    0.002</td> <td>    2.794</td> <td> 0.005</td> <td>    0.002</td> <td>    0.010</td>\n",
2952
       "</tr>\n",
2953
       "<tr>\n",
2954
       "  <th>7311</th>   <td>    0.0100</td> <td>    0.002</td> <td>    5.557</td> <td> 0.000</td> <td>    0.006</td> <td>    0.014</td>\n",
2955
       "</tr>\n",
2956
       "<tr>\n",
2957
       "  <th>7529</th>   <td>    0.0060</td> <td>    0.002</td> <td>    3.019</td> <td> 0.003</td> <td>    0.002</td> <td>    0.010</td>\n",
2958
       "</tr>\n",
2959
       "<tr>\n",
2960
       "  <th>8027</th>   <td>    0.0003</td> <td>    0.002</td> <td>    0.130</td> <td> 0.897</td> <td>   -0.004</td> <td>    0.004</td>\n",
2961
       "</tr>\n",
2962
       "<tr>\n",
2963
       "  <th>8038</th>   <td>   -0.0018</td> <td>    0.001</td> <td>   -2.249</td> <td> 0.025</td> <td>   -0.003</td> <td>   -0.000</td>\n",
2964
       "</tr>\n",
2965
       "<tr>\n",
2966
       "  <th>9146</th>   <td>    0.0067</td> <td>    0.002</td> <td>    3.353</td> <td> 0.001</td> <td>    0.003</td> <td>    0.011</td>\n",
2967
       "</tr>\n",
2968
       "<tr>\n",
2969
       "  <th>10000</th>  <td>    0.0001</td> <td>    0.001</td> <td>    0.161</td> <td> 0.872</td> <td>   -0.001</td> <td>    0.002</td>\n",
2970
       "</tr>\n",
2971
       "<tr>\n",
2972
       "  <th>10252</th>  <td>    0.0016</td> <td>    0.001</td> <td>    1.389</td> <td> 0.165</td> <td>   -0.001</td> <td>    0.004</td>\n",
2973
       "</tr>\n",
2974
       "<tr>\n",
2975
       "  <th>10253</th>  <td>   -0.0026</td> <td>    0.001</td> <td>   -2.333</td> <td> 0.020</td> <td>   -0.005</td> <td>   -0.000</td>\n",
2976
       "</tr>\n",
2977
       "<tr>\n",
2978
       "  <th>10254</th>  <td>    0.0004</td> <td>    0.002</td> <td>    0.148</td> <td> 0.883</td> <td>   -0.004</td> <td>    0.005</td>\n",
2979
       "</tr>\n",
2980
       "<tr>\n",
2981
       "  <th>11140</th>  <td>    0.0034</td> <td>    0.002</td> <td>    1.598</td> <td> 0.111</td> <td>   -0.001</td> <td>    0.008</td>\n",
2982
       "</tr>\n",
2983
       "<tr>\n",
2984
       "  <th>23239</th>  <td>   -0.0026</td> <td>    0.001</td> <td>   -2.319</td> <td> 0.021</td> <td>   -0.005</td> <td>   -0.000</td>\n",
2985
       "</tr>\n",
2986
       "<tr>\n",
2987
       "  <th>26018</th>  <td>    0.0004</td> <td>    0.001</td> <td>    0.409</td> <td> 0.683</td> <td>   -0.002</td> <td>    0.002</td>\n",
2988
       "</tr>\n",
2989
       "<tr>\n",
2990
       "  <th>29924</th>  <td>    0.0002</td> <td>    0.002</td> <td>    0.084</td> <td> 0.933</td> <td>   -0.004</td> <td>    0.004</td>\n",
2991
       "</tr>\n",
2992
       "<tr>\n",
2993
       "  <th>30011</th>  <td>    0.0027</td> <td>    0.001</td> <td>    1.923</td> <td> 0.055</td> <td>-5.67e-05</td> <td>    0.006</td>\n",
2994
       "</tr>\n",
2995
       "<tr>\n",
2996
       "  <th>55824</th>  <td>   -0.0003</td> <td>    0.002</td> <td>   -0.210</td> <td> 0.833</td> <td>   -0.004</td> <td>    0.003</td>\n",
2997
       "</tr>\n",
2998
       "<tr>\n",
2999
       "  <th>57761</th>  <td>    0.0011</td> <td>    0.001</td> <td>    1.311</td> <td> 0.190</td> <td>   -0.001</td> <td>    0.003</td>\n",
3000
       "</tr>\n",
3001
       "<tr>\n",
3002
       "  <th>58513</th>  <td>   -0.0034</td> <td>    0.002</td> <td>   -1.942</td> <td> 0.052</td> <td>   -0.007</td> <td>  3.6e-05</td>\n",
3003
       "</tr>\n",
3004
       "<tr>\n",
3005
       "  <th>64223</th>  <td>    0.0007</td> <td>    0.002</td> <td>    0.316</td> <td> 0.752</td> <td>   -0.004</td> <td>    0.005</td>\n",
3006
       "</tr>\n",
3007
       "<tr>\n",
3008
       "  <th>79109</th>  <td>    0.0022</td> <td>    0.002</td> <td>    1.107</td> <td> 0.268</td> <td>   -0.002</td> <td>    0.006</td>\n",
3009
       "</tr>\n",
3010
       "<tr>\n",
3011
       "  <th>84335</th>  <td>   -0.0057</td> <td>    0.002</td> <td>   -2.607</td> <td> 0.009</td> <td>   -0.010</td> <td>   -0.001</td>\n",
3012
       "</tr>\n",
3013
       "<tr>\n",
3014
       "  <th>117145</th> <td>    0.0032</td> <td>    0.001</td> <td>    2.624</td> <td> 0.009</td> <td>    0.001</td> <td>    0.006</td>\n",
3015
       "</tr>\n",
3016
       "<tr>\n",
3017
       "  <th>196883</th> <td>   -0.0030</td> <td>    0.001</td> <td>   -2.182</td> <td> 0.029</td> <td>   -0.006</td> <td>   -0.000</td>\n",
3018
       "</tr>\n",
3019
       "<tr>\n",
3020
       "  <th>253260</th> <td>   -0.0004</td> <td>    0.002</td> <td>   -0.247</td> <td> 0.805</td> <td>   -0.004</td> <td>    0.003</td>\n",
3021
       "</tr>\n",
3022
       "</table>\n",
3023
       "<table class=\"simpletable\">\n",
3024
       "<tr>\n",
3025
       "  <th>Omnibus:</th>       <td>126.984</td> <th>  Durbin-Watson:     </th> <td>   1.943</td>\n",
3026
       "</tr>\n",
3027
       "<tr>\n",
3028
       "  <th>Prob(Omnibus):</th> <td> 0.000</td>  <th>  Jarque-Bera (JB):  </th> <td> 391.225</td>\n",
3029
       "</tr>\n",
3030
       "<tr>\n",
3031
       "  <th>Skew:</th>          <td> 0.676</td>  <th>  Prob(JB):          </th> <td>1.11e-85</td>\n",
3032
       "</tr>\n",
3033
       "<tr>\n",
3034
       "  <th>Kurtosis:</th>      <td> 5.893</td>  <th>  Cond. No.          </th> <td>    311.</td>\n",
3035
       "</tr>\n",
3036
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified."
3037
      ],
3038
      "text/plain": [
3039
       "<class 'statsmodels.iolib.summary.Summary'>\n",
3040
       "\"\"\"\n",
3041
       "                            OLS Regression Results                            \n",
3042
       "==============================================================================\n",
3043
       "Dep. Variable:                      y   R-squared:                       0.999\n",
3044
       "Model:                            OLS   Adj. R-squared:                  0.999\n",
3045
       "Method:                 Least Squares   F-statistic:                     6893.\n",
3046
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):               0.00\n",
3047
       "Time:                        18:16:54   Log-Likelihood:                 2466.9\n",
3048
       "No. Observations:                 921   AIC:                            -4732.\n",
3049
       "Df Residuals:                     820   BIC:                            -4245.\n",
3050
       "Df Model:                         101                                         \n",
3051
       "Covariance Type:            nonrobust                                         \n",
3052
       "==============================================================================\n",
3053
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
3054
       "------------------------------------------------------------------------------\n",
3055
       "102            0.0011      0.002      0.672      0.502      -0.002       0.004\n",
3056
       "107           -0.0011      0.000     -2.468      0.014      -0.002      -0.000\n",
3057
       "108           -0.0013      0.001     -2.000      0.046      -0.003   -2.48e-05\n",
3058
       "109            0.0002      0.001      0.183      0.855      -0.002       0.003\n",
3059
       "111           -0.0003      0.001     -0.611      0.541      -0.001       0.001\n",
3060
       "112            0.0015      0.001      1.019      0.309      -0.001       0.004\n",
3061
       "113           -0.0020      0.001     -1.351      0.177      -0.005       0.001\n",
3062
       "114           -0.0026      0.002     -1.382      0.167      -0.006       0.001\n",
3063
       "115            0.0008      0.001      0.651      0.515      -0.002       0.003\n",
3064
       "160            0.0067      0.002      3.978      0.000       0.003       0.010\n",
3065
       "161           -0.0014      0.002     -0.675      0.500      -0.006       0.003\n",
3066
       "163         5.495e-05      0.001      0.047      0.962      -0.002       0.002\n",
3067
       "207            0.0028      0.002      1.587      0.113      -0.001       0.006\n",
3068
       "208           -0.0023      0.002     -1.390      0.165      -0.006       0.001\n",
3069
       "572           -0.0023      0.002     -1.392      0.164      -0.005       0.001\n",
3070
       "801            0.0022      0.002      1.102      0.271      -0.002       0.006\n",
3071
       "805            0.0063      0.002      3.883      0.000       0.003       0.010\n",
3072
       "808            0.0019      0.002      0.939      0.348      -0.002       0.006\n",
3073
       "814            0.0010      0.002      0.553      0.580      -0.003       0.004\n",
3074
       "842           -0.0025      0.002     -1.337      0.182      -0.006       0.001\n",
3075
       "867           -0.0038      0.002     -1.849      0.065      -0.008       0.000\n",
3076
       "983         1.694e-05      0.001      0.019      0.985      -0.002       0.002\n",
3077
       "998            0.0023      0.002      0.999      0.318      -0.002       0.007\n",
3078
       "1026          -0.0022      0.001     -2.660      0.008      -0.004      -0.001\n",
3079
       "1027           0.0012      0.001      1.529      0.127      -0.000       0.003\n",
3080
       "1147           0.0039      0.002      2.011      0.045    9.45e-05       0.008\n",
3081
       "1173          -0.0016      0.002     -0.793      0.428      -0.006       0.002\n",
3082
       "1175           0.0050      0.002      2.424      0.016       0.001       0.009\n",
3083
       "1211           0.0079      0.002      4.775      0.000       0.005       0.011\n",
3084
       "1213           0.0011      0.001      0.815      0.415      -0.002       0.004\n",
3085
       "1385          -0.0048      0.003     -1.743      0.082      -0.010       0.001\n",
3086
       "1445          -0.0036      0.002     -1.679      0.094      -0.008       0.001\n",
3087
       "1950           0.0010      0.001      1.717      0.086      -0.000       0.002\n",
3088
       "1956           0.0041      0.001      5.366      0.000       0.003       0.006\n",
3089
       "2060          -0.0023      0.002     -1.028      0.304      -0.007       0.002\n",
3090
       "2308          -0.0014      0.002     -0.885      0.377      -0.004       0.002\n",
3091
       "2309           0.0012      0.001      0.939      0.348      -0.001       0.004\n",
3092
       "2475          -0.0078      0.002     -3.325      0.001      -0.012      -0.003\n",
3093
       "2549          -0.0019      0.001     -1.341      0.180      -0.005       0.001\n",
3094
       "2885          -0.0030      0.001     -2.034      0.042      -0.006      -0.000\n",
3095
       "2931           0.0036      0.002      1.483      0.138      -0.001       0.008\n",
3096
       "3164           0.0007      0.001      1.135      0.257      -0.000       0.002\n",
3097
       "3265           0.0008      0.001      0.563      0.574      -0.002       0.004\n",
3098
       "3320           0.0034      0.001      2.468      0.014       0.001       0.006\n",
3099
       "3709           0.0015      0.001      2.033      0.042    5.28e-05       0.003\n",
3100
       "3710          -0.0004      0.001     -0.382      0.703      -0.003       0.002\n",
3101
       "3845           0.0007      0.001      0.462      0.644      -0.002       0.004\n",
3102
       "4193          -0.0005      0.001     -0.443      0.658      -0.003       0.002\n",
3103
       "4303           0.0018      0.001      1.436      0.151      -0.001       0.004\n",
3104
       "4893          -0.0007      0.001     -0.608      0.544      -0.003       0.001\n",
3105
       "5136          -0.0017      0.001     -1.156      0.248      -0.004       0.001\n",
3106
       "5170          -0.0022      0.002     -0.930      0.352      -0.007       0.002\n",
3107
       "5290           0.0066      0.002      3.998      0.000       0.003       0.010\n",
3108
       "5295          -0.0001      0.001     -0.152      0.880      -0.002       0.002\n",
3109
       "5335           0.0002      0.002      0.112      0.911      -0.003       0.004\n",
3110
       "5566           0.0012      0.002      0.609      0.543      -0.003       0.005\n",
3111
       "5567          -0.0010      0.001     -1.916      0.056      -0.002    2.43e-05\n",
3112
       "5573           0.0001      0.002      0.059      0.953      -0.004       0.004\n",
3113
       "5575          -0.0016      0.001     -1.479      0.139      -0.004       0.001\n",
3114
       "5576          -0.0040      0.002     -2.427      0.015      -0.007      -0.001\n",
3115
       "5577           0.0004      0.001      0.627      0.531      -0.001       0.002\n",
3116
       "5578           0.0007      0.001      0.548      0.584      -0.002       0.003\n",
3117
       "5580          -0.0017      0.001     -1.395      0.163      -0.004       0.001\n",
3118
       "5581          -0.0006      0.001     -0.528      0.598      -0.003       0.002\n",
3119
       "5582          -0.0025      0.003     -0.781      0.435      -0.009       0.004\n",
3120
       "5594           0.0020      0.002      1.066      0.287      -0.002       0.006\n",
3121
       "5595           0.0047      0.002      2.902      0.004       0.002       0.008\n",
3122
       "5604           0.0006      0.002      0.311      0.756      -0.003       0.005\n",
3123
       "5605           0.0032      0.002      1.607      0.108      -0.001       0.007\n",
3124
       "5728           0.0010      0.001      0.796      0.426      -0.002       0.004\n",
3125
       "5894           0.0035      0.002      1.753      0.080      -0.000       0.007\n",
3126
       "6199          -0.0008      0.001     -0.650      0.516      -0.003       0.002\n",
3127
       "6456          -0.0005      0.001     -0.475      0.635      -0.003       0.002\n",
3128
       "6464           0.0028      0.002      1.581      0.114      -0.001       0.006\n",
3129
       "6654           0.0027      0.002      1.176      0.240      -0.002       0.007\n",
3130
       "6714          -0.0009      0.001     -0.742      0.458      -0.003       0.002\n",
3131
       "6868          -0.0019      0.002     -0.882      0.378      -0.006       0.002\n",
3132
       "7249           0.0059      0.002      2.794      0.005       0.002       0.010\n",
3133
       "7311           0.0100      0.002      5.557      0.000       0.006       0.014\n",
3134
       "7529           0.0060      0.002      3.019      0.003       0.002       0.010\n",
3135
       "8027           0.0003      0.002      0.130      0.897      -0.004       0.004\n",
3136
       "8038          -0.0018      0.001     -2.249      0.025      -0.003      -0.000\n",
3137
       "9146           0.0067      0.002      3.353      0.001       0.003       0.011\n",
3138
       "10000          0.0001      0.001      0.161      0.872      -0.001       0.002\n",
3139
       "10252          0.0016      0.001      1.389      0.165      -0.001       0.004\n",
3140
       "10253         -0.0026      0.001     -2.333      0.020      -0.005      -0.000\n",
3141
       "10254          0.0004      0.002      0.148      0.883      -0.004       0.005\n",
3142
       "11140          0.0034      0.002      1.598      0.111      -0.001       0.008\n",
3143
       "23239         -0.0026      0.001     -2.319      0.021      -0.005      -0.000\n",
3144
       "26018          0.0004      0.001      0.409      0.683      -0.002       0.002\n",
3145
       "29924          0.0002      0.002      0.084      0.933      -0.004       0.004\n",
3146
       "30011          0.0027      0.001      1.923      0.055   -5.67e-05       0.006\n",
3147
       "55824         -0.0003      0.002     -0.210      0.833      -0.004       0.003\n",
3148
       "57761          0.0011      0.001      1.311      0.190      -0.001       0.003\n",
3149
       "58513         -0.0034      0.002     -1.942      0.052      -0.007     3.6e-05\n",
3150
       "64223          0.0007      0.002      0.316      0.752      -0.004       0.005\n",
3151
       "79109          0.0022      0.002      1.107      0.268      -0.002       0.006\n",
3152
       "84335         -0.0057      0.002     -2.607      0.009      -0.010      -0.001\n",
3153
       "117145         0.0032      0.001      2.624      0.009       0.001       0.006\n",
3154
       "196883        -0.0030      0.001     -2.182      0.029      -0.006      -0.000\n",
3155
       "253260        -0.0004      0.002     -0.247      0.805      -0.004       0.003\n",
3156
       "==============================================================================\n",
3157
       "Omnibus:                      126.984   Durbin-Watson:                   1.943\n",
3158
       "Prob(Omnibus):                  0.000   Jarque-Bera (JB):              391.225\n",
3159
       "Skew:                           0.676   Prob(JB):                     1.11e-85\n",
3160
       "Kurtosis:                       5.893   Cond. No.                         311.\n",
3161
       "==============================================================================\n",
3162
       "\n",
3163
       "Warnings:\n",
3164
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
3165
       "\"\"\""
3166
      ]
3167
     },
3168
     "execution_count": 22,
3169
     "metadata": {},
3170
     "output_type": "execute_result"
3171
    }
3172
   ],
3173
   "source": [
3174
    "X = BRCAEEGFR\n",
3175
    "y = PredBRCA.detach().numpy()\n",
3176
    "\n",
3177
    "# Note the difference in argument order\n",
3178
    "model = sm.OLS(y, X).fit()\n",
3179
    "predictions = model.predict(X) # make the predictions by the model\n",
3180
    "\n",
3181
    "# Print out the statistics\n",
3182
    "model.summary()"
3183
   ]
3184
  },
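  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal follow-up sketch, assuming `BRCAEEGFR` and `PredBRCA` from the cell above: `sm.OLS` does not add an intercept unless a constant column is supplied, so the fit above is a no-intercept regression on the gene features. `sm.add_constant` adds one explicitly."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch only (assumes BRCAEEGFR and PredBRCA as defined above).\n",
    "# statsmodels OLS has no implicit intercept; add a constant column if one is wanted.\n",
    "import statsmodels.api as sm\n",
    "\n",
    "X_const = sm.add_constant(BRCAEEGFR)  # prepend a column of ones\n",
    "model_c = sm.OLS(PredBRCA.detach().numpy(), X_const).fit()  # kept separate from `model` above\n",
    "print(model_c.rsquared)  # centered R-squared once an intercept is present"
   ]
  },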
3185
  {
3186
   "cell_type": "code",
3187
   "execution_count": 23,
3188
   "metadata": {},
3189
   "outputs": [
3190
    {
3191
     "name": "stdout",
3192
     "output_type": "stream",
3193
     "text": [
3194
      "(array([False, False, False, False, False, False, False, False, False,\n",
3195
      "        True, False, False, False, False, False, False,  True, False,\n",
3196
      "       False, False, False, False, False, False, False, False, False,\n",
3197
      "       False,  True, False, False, False, False,  True, False, False,\n",
3198
      "       False, False, False, False, False, False, False, False, False,\n",
3199
      "       False, False, False, False, False, False, False,  True, False,\n",
3200
      "       False, False, False, False, False, False, False, False, False,\n",
3201
      "       False, False, False, False, False, False, False, False, False,\n",
3202
      "       False, False, False, False, False, False,  True, False, False,\n",
3203
      "       False, False, False, False, False, False, False, False, False,\n",
3204
      "       False, False, False, False, False, False, False, False, False,\n",
3205
      "       False, False]), array([5.06825579e+01, 1.39415496e+00, 4.62728727e+00, 8.63416692e+01,\n",
3206
      "       5.46703973e+01, 3.11693984e+01, 1.78878580e+01, 1.69076294e+01,\n",
3207
      "       5.20628937e+01, 7.63775258e-03, 5.05012625e+01, 9.71747798e+01,\n",
3208
      "       1.14047619e+01, 1.66547204e+01, 1.66002678e+01, 2.73452647e+01,\n",
3209
      "       1.12394342e-02, 3.51638004e+01, 5.86190456e+01, 1.83319447e+01,\n",
3210
      "       6.54695299e+00, 9.94635493e+01, 3.21067659e+01, 8.05601110e-01,\n",
3211
      "       1.27822910e+01, 4.50467121e+00, 4.32100404e+01, 1.57037777e+00,\n",
3212
      "       2.14790333e-04, 4.19184595e+01, 8.24890270e+00, 9.44769827e+00,\n",
3213
      "       8.71708891e+00, 1.05952842e-05, 3.07063472e+01, 3.80319057e+01,\n",
3214
      "       3.51583973e+01, 9.33166240e-02, 1.82235046e+01, 4.27457416e+00,\n",
3215
      "       1.39790238e+01, 2.59382965e+01, 5.79606884e+01, 1.39245484e+00,\n",
3216
      "       4.27958874e+00, 7.09755293e+01, 6.50415536e+01, 6.64791509e+01,\n",
3217
      "       1.52878177e+01, 5.49098096e+01, 2.50604528e+01, 3.55940643e+01,\n",
3218
      "       7.04010042e-03, 8.88385006e+01, 9.20001680e+01, 5.48126545e+01,\n",
3219
      "       5.62914348e+00, 9.62605127e+01, 1.40816890e+01, 1.55929716e+00,\n",
3220
      "       5.35888105e+01, 5.89525602e+01, 1.65058150e+01, 6.03911904e+01,\n",
3221
      "       4.39569903e+01, 2.89789530e+01, 3.84153378e-01, 7.63121342e+01,\n",
3222
      "       1.09480929e+01, 4.30656336e+01, 8.07928547e+00, 5.20798960e+01,\n",
3223
      "       6.41025395e+01, 1.15472161e+01, 2.42427222e+01, 4.62966468e+01,\n",
3224
      "       3.81851918e+01, 5.37540468e-01, 3.74789829e-06, 2.64334431e-01,\n",
3225
      "       9.05906863e+01, 2.50371075e+00, 8.43106091e-02, 8.80570964e+01,\n",
3226
      "       1.66905322e+01, 2.00775994e+00, 8.91590873e+01, 1.11621004e+01,\n",
3227
      "       2.08549764e+00, 6.89553451e+01, 9.42277767e+01, 5.53765760e+00,\n",
3228
      "       8.41771233e+01, 1.92219594e+01, 5.29466937e+00, 7.59562851e+01,\n",
3229
      "       2.71121774e+01, 9.38706483e-01, 8.93055121e-01, 2.97120494e+00,\n",
3230
      "       8.13250292e+01]))\n"
3231
     ]
3232
    }
3233
   ],
3234
   "source": [
3235
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
3236
   ]
3237
  },
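  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "One way to tie the corrected p-values above back to gene IDs, sketched with statsmodels' `multipletests` (an assumption; the `bonferroni_correction` helper used above may come from another package and, unlike `multipletests`, does not clip corrected p-values at 1)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch, not the original helper: Bonferroni correction via statsmodels,\n",
    "# with the reject mask and corrected p-values indexed by the gene columns of X.\n",
    "import pandas as pd\n",
    "from statsmodels.stats.multitest import multipletests\n",
    "\n",
    "reject, p_corr, _, _ = multipletests(model.pvalues, alpha=0.05, method=\"bonferroni\")\n",
    "signif = pd.DataFrame({\"p_corrected\": p_corr, \"reject\": reject}, index=X.columns)\n",
    "print(signif[signif[\"reject\"]])"
   ]
  },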
3238
  {
3239
   "cell_type": "code",
3240
   "execution_count": 24,
3241
   "metadata": {},
3242
   "outputs": [],
3243
   "source": [
3244
    "listEGFR = PAADE.columns.intersection(lsEGFR)\n",
3245
    "PAADEEGFR = PAADE[listEGFR]\n",
3246
    "PAADMEGFR = PAADM[listEGFR]\n",
3247
    "PAADCEGFR = PAADC[listEGFR]   "
3248
   ]
3249
  },
3250
  {
3251
   "cell_type": "code",
3252
   "execution_count": 25,
3253
   "metadata": {},
3254
   "outputs": [
3255
    {
3256
     "data": {
3257
      "text/html": [
3258
       "<table class=\"simpletable\">\n",
3259
       "<caption>OLS Regression Results</caption>\n",
3260
       "<tr>\n",
3261
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.999</td>\n",
3262
       "</tr>\n",
3263
       "<tr>\n",
3264
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.995</td>\n",
3265
       "</tr>\n",
3266
       "<tr>\n",
3267
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   251.5</td>\n",
3268
       "</tr>\n",
3269
       "<tr>\n",
3270
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th> <td>1.78e-29</td>\n",
3271
       "</tr>\n",
3272
       "<tr>\n",
3273
       "  <th>Time:</th>                 <td>18:17:00</td>     <th>  Log-Likelihood:    </th> <td>  364.21</td>\n",
3274
       "</tr>\n",
3275
       "<tr>\n",
3276
       "  <th>No. Observations:</th>      <td>   130</td>      <th>  AIC:               </th> <td>  -526.4</td>\n",
3277
       "</tr>\n",
3278
       "<tr>\n",
3279
       "  <th>Df Residuals:</th>          <td>    29</td>      <th>  BIC:               </th> <td>  -236.8</td>\n",
3280
       "</tr>\n",
3281
       "<tr>\n",
3282
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
3283
       "</tr>\n",
3284
       "<tr>\n",
3285
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
3286
       "</tr>\n",
3287
       "</table>\n",
3288
       "<table class=\"simpletable\">\n",
3289
       "<tr>\n",
3290
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
3291
       "</tr>\n",
3292
       "<tr>\n",
3293
       "  <th>102</th>    <td>   -0.0263</td> <td>    0.023</td> <td>   -1.162</td> <td> 0.255</td> <td>   -0.072</td> <td>    0.020</td>\n",
3294
       "</tr>\n",
3295
       "<tr>\n",
3296
       "  <th>107</th>    <td>    0.0055</td> <td>    0.013</td> <td>    0.414</td> <td> 0.682</td> <td>   -0.022</td> <td>    0.032</td>\n",
3297
       "</tr>\n",
3298
       "<tr>\n",
3299
       "  <th>108</th>    <td>   -0.0179</td> <td>    0.025</td> <td>   -0.717</td> <td> 0.479</td> <td>   -0.069</td> <td>    0.033</td>\n",
3300
       "</tr>\n",
3301
       "<tr>\n",
3302
       "  <th>109</th>    <td>    0.0112</td> <td>    0.019</td> <td>    0.600</td> <td> 0.553</td> <td>   -0.027</td> <td>    0.049</td>\n",
3303
       "</tr>\n",
3304
       "<tr>\n",
3305
       "  <th>111</th>    <td>    0.0037</td> <td>    0.013</td> <td>    0.282</td> <td> 0.780</td> <td>   -0.023</td> <td>    0.031</td>\n",
3306
       "</tr>\n",
3307
       "<tr>\n",
3308
       "  <th>112</th>    <td>   -0.0120</td> <td>    0.023</td> <td>   -0.527</td> <td> 0.602</td> <td>   -0.058</td> <td>    0.034</td>\n",
3309
       "</tr>\n",
3310
       "<tr>\n",
3311
       "  <th>113</th>    <td>    0.0143</td> <td>    0.014</td> <td>    1.031</td> <td> 0.311</td> <td>   -0.014</td> <td>    0.043</td>\n",
3312
       "</tr>\n",
3313
       "<tr>\n",
3314
       "  <th>114</th>    <td>   -0.0118</td> <td>    0.039</td> <td>   -0.302</td> <td> 0.765</td> <td>   -0.092</td> <td>    0.068</td>\n",
3315
       "</tr>\n",
3316
       "<tr>\n",
3317
       "  <th>115</th>    <td>    0.0008</td> <td>    0.036</td> <td>    0.022</td> <td> 0.983</td> <td>   -0.072</td> <td>    0.074</td>\n",
3318
       "</tr>\n",
3319
       "<tr>\n",
3320
       "  <th>160</th>    <td>   -0.0357</td> <td>    0.032</td> <td>   -1.114</td> <td> 0.274</td> <td>   -0.101</td> <td>    0.030</td>\n",
3321
       "</tr>\n",
3322
       "<tr>\n",
3323
       "  <th>161</th>    <td>   -0.0005</td> <td>    0.027</td> <td>   -0.017</td> <td> 0.987</td> <td>   -0.056</td> <td>    0.056</td>\n",
3324
       "</tr>\n",
3325
       "<tr>\n",
3326
       "  <th>163</th>    <td>    0.0254</td> <td>    0.031</td> <td>    0.828</td> <td> 0.415</td> <td>   -0.037</td> <td>    0.088</td>\n",
3327
       "</tr>\n",
3328
       "<tr>\n",
3329
       "  <th>207</th>    <td>    0.0099</td> <td>    0.031</td> <td>    0.323</td> <td> 0.749</td> <td>   -0.053</td> <td>    0.073</td>\n",
3330
       "</tr>\n",
3331
       "<tr>\n",
3332
       "  <th>208</th>    <td>   -0.0149</td> <td>    0.016</td> <td>   -0.959</td> <td> 0.346</td> <td>   -0.047</td> <td>    0.017</td>\n",
3333
       "</tr>\n",
3334
       "<tr>\n",
3335
       "  <th>572</th>    <td>    0.0064</td> <td>    0.024</td> <td>    0.267</td> <td> 0.791</td> <td>   -0.042</td> <td>    0.055</td>\n",
3336
       "</tr>\n",
3337
       "<tr>\n",
3338
       "  <th>801</th>    <td>    0.0163</td> <td>    0.040</td> <td>    0.408</td> <td> 0.686</td> <td>   -0.065</td> <td>    0.098</td>\n",
3339
       "</tr>\n",
3340
       "<tr>\n",
3341
       "  <th>805</th>    <td>    0.0394</td> <td>    0.032</td> <td>    1.232</td> <td> 0.228</td> <td>   -0.026</td> <td>    0.105</td>\n",
3342
       "</tr>\n",
3343
       "<tr>\n",
3344
       "  <th>808</th>    <td>    0.0136</td> <td>    0.037</td> <td>    0.367</td> <td> 0.716</td> <td>   -0.062</td> <td>    0.089</td>\n",
3345
       "</tr>\n",
3346
       "<tr>\n",
3347
       "  <th>814</th>    <td>   -0.0174</td> <td>    0.015</td> <td>   -1.129</td> <td> 0.268</td> <td>   -0.049</td> <td>    0.014</td>\n",
3348
       "</tr>\n",
3349
       "<tr>\n",
3350
       "  <th>842</th>    <td>   -0.0067</td> <td>    0.021</td> <td>   -0.327</td> <td> 0.746</td> <td>   -0.049</td> <td>    0.035</td>\n",
3351
       "</tr>\n",
3352
       "<tr>\n",
3353
       "  <th>867</th>    <td>   -0.0127</td> <td>    0.029</td> <td>   -0.442</td> <td> 0.661</td> <td>   -0.072</td> <td>    0.046</td>\n",
3354
       "</tr>\n",
3355
       "<tr>\n",
3356
       "  <th>983</th>    <td>    0.0013</td> <td>    0.015</td> <td>    0.089</td> <td> 0.930</td> <td>   -0.029</td> <td>    0.032</td>\n",
3357
       "</tr>\n",
3358
       "<tr>\n",
3359
       "  <th>998</th>    <td>   -0.0207</td> <td>    0.043</td> <td>   -0.482</td> <td> 0.633</td> <td>   -0.109</td> <td>    0.067</td>\n",
3360
       "</tr>\n",
3361
       "<tr>\n",
3362
       "  <th>1026</th>   <td>   -0.0120</td> <td>    0.014</td> <td>   -0.878</td> <td> 0.387</td> <td>   -0.040</td> <td>    0.016</td>\n",
3363
       "</tr>\n",
3364
       "<tr>\n",
3365
       "  <th>1027</th>   <td>    0.0094</td> <td>    0.025</td> <td>    0.382</td> <td> 0.705</td> <td>   -0.041</td> <td>    0.060</td>\n",
3366
       "</tr>\n",
3367
       "<tr>\n",
3368
       "  <th>1147</th>   <td>   -0.0039</td> <td>    0.035</td> <td>   -0.111</td> <td> 0.912</td> <td>   -0.075</td> <td>    0.068</td>\n",
3369
       "</tr>\n",
3370
       "<tr>\n",
3371
       "  <th>1173</th>   <td>    0.0050</td> <td>    0.041</td> <td>    0.121</td> <td> 0.905</td> <td>   -0.079</td> <td>    0.089</td>\n",
3372
       "</tr>\n",
3373
       "<tr>\n",
3374
       "  <th>1175</th>   <td>    0.0105</td> <td>    0.033</td> <td>    0.318</td> <td> 0.753</td> <td>   -0.057</td> <td>    0.078</td>\n",
3375
       "</tr>\n",
3376
       "<tr>\n",
3377
       "  <th>1211</th>   <td>   -0.0214</td> <td>    0.015</td> <td>   -1.443</td> <td> 0.160</td> <td>   -0.052</td> <td>    0.009</td>\n",
3378
       "</tr>\n",
3379
       "<tr>\n",
3380
       "  <th>1213</th>   <td>    0.0339</td> <td>    0.041</td> <td>    0.830</td> <td> 0.413</td> <td>   -0.050</td> <td>    0.117</td>\n",
3381
       "</tr>\n",
3382
       "<tr>\n",
3383
       "  <th>1385</th>   <td>   -0.0315</td> <td>    0.034</td> <td>   -0.915</td> <td> 0.367</td> <td>   -0.102</td> <td>    0.039</td>\n",
3384
       "</tr>\n",
3385
       "<tr>\n",
3386
       "  <th>1445</th>   <td>    0.0012</td> <td>    0.022</td> <td>    0.056</td> <td> 0.956</td> <td>   -0.044</td> <td>    0.046</td>\n",
3387
       "</tr>\n",
3388
       "<tr>\n",
3389
       "  <th>1950</th>   <td>  2.53e-05</td> <td>    0.011</td> <td>    0.002</td> <td> 0.998</td> <td>   -0.023</td> <td>    0.023</td>\n",
3390
       "</tr>\n",
3391
       "<tr>\n",
3392
       "  <th>1956</th>   <td>    0.0078</td> <td>    0.017</td> <td>    0.457</td> <td> 0.651</td> <td>   -0.027</td> <td>    0.043</td>\n",
3393
       "</tr>\n",
3394
       "<tr>\n",
3395
       "  <th>2060</th>   <td>    0.0024</td> <td>    0.044</td> <td>    0.054</td> <td> 0.958</td> <td>   -0.087</td> <td>    0.092</td>\n",
3396
       "</tr>\n",
3397
       "<tr>\n",
3398
       "  <th>2308</th>   <td>   -0.0160</td> <td>    0.020</td> <td>   -0.788</td> <td> 0.437</td> <td>   -0.057</td> <td>    0.025</td>\n",
3399
       "</tr>\n",
3400
       "<tr>\n",
3401
       "  <th>2309</th>   <td>    0.0524</td> <td>    0.019</td> <td>    2.699</td> <td> 0.011</td> <td>    0.013</td> <td>    0.092</td>\n",
3402
       "</tr>\n",
3403
       "<tr>\n",
3404
       "  <th>2475</th>   <td>   -0.0235</td> <td>    0.034</td> <td>   -0.688</td> <td> 0.497</td> <td>   -0.093</td> <td>    0.046</td>\n",
3405
       "</tr>\n",
3406
       "<tr>\n",
3407
       "  <th>2549</th>   <td>   -0.0339</td> <td>    0.028</td> <td>   -1.213</td> <td> 0.235</td> <td>   -0.091</td> <td>    0.023</td>\n",
3408
       "</tr>\n",
3409
       "<tr>\n",
3410
       "  <th>2885</th>   <td>    0.0448</td> <td>    0.041</td> <td>    1.094</td> <td> 0.283</td> <td>   -0.039</td> <td>    0.129</td>\n",
3411
       "</tr>\n",
3412
       "<tr>\n",
3413
       "  <th>2931</th>   <td>    0.0273</td> <td>    0.035</td> <td>    0.787</td> <td> 0.438</td> <td>   -0.044</td> <td>    0.098</td>\n",
3414
       "</tr>\n",
3415
       "<tr>\n",
3416
       "  <th>3164</th>   <td>    0.0046</td> <td>    0.006</td> <td>    0.796</td> <td> 0.433</td> <td>   -0.007</td> <td>    0.016</td>\n",
3417
       "</tr>\n",
3418
       "<tr>\n",
3419
       "  <th>3265</th>   <td>   -0.0047</td> <td>    0.023</td> <td>   -0.207</td> <td> 0.838</td> <td>   -0.051</td> <td>    0.042</td>\n",
3420
       "</tr>\n",
3421
       "<tr>\n",
3422
       "  <th>3320</th>   <td>    0.0023</td> <td>    0.023</td> <td>    0.100</td> <td> 0.921</td> <td>   -0.045</td> <td>    0.050</td>\n",
3423
       "</tr>\n",
3424
       "<tr>\n",
3425
       "  <th>3709</th>   <td>    0.0019</td> <td>    0.016</td> <td>    0.120</td> <td> 0.905</td> <td>   -0.030</td> <td>    0.034</td>\n",
3426
       "</tr>\n",
3427
       "<tr>\n",
3428
       "  <th>3710</th>   <td>    0.0056</td> <td>    0.017</td> <td>    0.323</td> <td> 0.749</td> <td>   -0.030</td> <td>    0.041</td>\n",
3429
       "</tr>\n",
3430
       "<tr>\n",
3431
       "  <th>3845</th>   <td>    0.0280</td> <td>    0.030</td> <td>    0.933</td> <td> 0.359</td> <td>   -0.033</td> <td>    0.090</td>\n",
3432
       "</tr>\n",
3433
       "<tr>\n",
3434
       "  <th>4193</th>   <td>    0.0034</td> <td>    0.016</td> <td>    0.206</td> <td> 0.838</td> <td>   -0.030</td> <td>    0.037</td>\n",
3435
       "</tr>\n",
3436
       "<tr>\n",
3437
       "  <th>4303</th>   <td>   -0.0149</td> <td>    0.020</td> <td>   -0.760</td> <td> 0.454</td> <td>   -0.055</td> <td>    0.025</td>\n",
3438
       "</tr>\n",
3439
       "<tr>\n",
3440
       "  <th>4893</th>   <td>   -0.0206</td> <td>    0.037</td> <td>   -0.560</td> <td> 0.579</td> <td>   -0.096</td> <td>    0.055</td>\n",
3441
       "</tr>\n",
3442
       "<tr>\n",
3443
       "  <th>5136</th>   <td>    0.0218</td> <td>    0.018</td> <td>    1.187</td> <td> 0.245</td> <td>   -0.016</td> <td>    0.059</td>\n",
3444
       "</tr>\n",
3445
       "<tr>\n",
3446
       "  <th>5170</th>   <td>    0.0066</td> <td>    0.041</td> <td>    0.160</td> <td> 0.874</td> <td>   -0.078</td> <td>    0.091</td>\n",
3447
       "</tr>\n",
3448
       "<tr>\n",
3449
       "  <th>5290</th>   <td>   -0.0134</td> <td>    0.039</td> <td>   -0.345</td> <td> 0.732</td> <td>   -0.093</td> <td>    0.066</td>\n",
3450
       "</tr>\n",
3451
       "<tr>\n",
3452
       "  <th>5295</th>   <td>    0.0044</td> <td>    0.022</td> <td>    0.196</td> <td> 0.846</td> <td>   -0.041</td> <td>    0.050</td>\n",
3453
       "</tr>\n",
3454
       "<tr>\n",
3455
       "  <th>5335</th>   <td>    0.0357</td> <td>    0.038</td> <td>    0.950</td> <td> 0.350</td> <td>   -0.041</td> <td>    0.113</td>\n",
3456
       "</tr>\n",
3457
       "<tr>\n",
3458
       "  <th>5566</th>   <td>    0.0261</td> <td>    0.033</td> <td>    0.804</td> <td> 0.428</td> <td>   -0.040</td> <td>    0.093</td>\n",
3459
       "</tr>\n",
3460
       "<tr>\n",
3461
       "  <th>5567</th>   <td>   -0.0081</td> <td>    0.014</td> <td>   -0.573</td> <td> 0.571</td> <td>   -0.037</td> <td>    0.021</td>\n",
3462
       "</tr>\n",
3463
       "<tr>\n",
3464
       "  <th>5573</th>   <td>   -0.0622</td> <td>    0.036</td> <td>   -1.748</td> <td> 0.091</td> <td>   -0.135</td> <td>    0.011</td>\n",
3465
       "</tr>\n",
3466
       "<tr>\n",
3467
       "  <th>5575</th>   <td>   -0.0046</td> <td>    0.018</td> <td>   -0.255</td> <td> 0.801</td> <td>   -0.042</td> <td>    0.032</td>\n",
3468
       "</tr>\n",
3469
       "<tr>\n",
3470
       "  <th>5576</th>   <td>    0.0045</td> <td>    0.022</td> <td>    0.205</td> <td> 0.839</td> <td>   -0.040</td> <td>    0.049</td>\n",
3471
       "</tr>\n",
3472
       "<tr>\n",
3473
       "  <th>5577</th>   <td>   -0.0068</td> <td>    0.012</td> <td>   -0.554</td> <td> 0.584</td> <td>   -0.032</td> <td>    0.018</td>\n",
3474
       "</tr>\n",
3475
       "<tr>\n",
3476
       "  <th>5578</th>   <td>   -0.0009</td> <td>    0.018</td> <td>   -0.047</td> <td> 0.963</td> <td>   -0.039</td> <td>    0.037</td>\n",
3477
       "</tr>\n",
3478
       "<tr>\n",
3479
       "  <th>5580</th>   <td>   -0.0330</td> <td>    0.019</td> <td>   -1.732</td> <td> 0.094</td> <td>   -0.072</td> <td>    0.006</td>\n",
3480
       "</tr>\n",
3481
       "<tr>\n",
3482
       "  <th>5581</th>   <td>   -0.0278</td> <td>    0.027</td> <td>   -1.015</td> <td> 0.319</td> <td>   -0.084</td> <td>    0.028</td>\n",
3483
       "</tr>\n",
3484
       "<tr>\n",
3485
       "  <th>5582</th>   <td>    0.0025</td> <td>    0.006</td> <td>    0.455</td> <td> 0.652</td> <td>   -0.009</td> <td>    0.014</td>\n",
3486
       "</tr>\n",
3487
       "<tr>\n",
3488
       "  <th>5594</th>   <td>   -0.0124</td> <td>    0.032</td> <td>   -0.386</td> <td> 0.702</td> <td>   -0.078</td> <td>    0.053</td>\n",
3489
       "</tr>\n",
3490
       "<tr>\n",
3491
       "  <th>5595</th>   <td>   -0.0193</td> <td>    0.024</td> <td>   -0.796</td> <td> 0.433</td> <td>   -0.069</td> <td>    0.030</td>\n",
3492
       "</tr>\n",
3493
       "<tr>\n",
3494
       "  <th>5604</th>   <td>   -0.0006</td> <td>    0.035</td> <td>   -0.018</td> <td> 0.986</td> <td>   -0.071</td> <td>    0.070</td>\n",
3495
       "</tr>\n",
3496
       "<tr>\n",
3497
       "  <th>5605</th>   <td>    0.0225</td> <td>    0.037</td> <td>    0.606</td> <td> 0.549</td> <td>   -0.053</td> <td>    0.098</td>\n",
3498
       "</tr>\n",
3499
       "<tr>\n",
3500
       "  <th>5728</th>   <td>   -0.0036</td> <td>    0.034</td> <td>   -0.106</td> <td> 0.917</td> <td>   -0.072</td> <td>    0.065</td>\n",
3501
       "</tr>\n",
3502
       "<tr>\n",
3503
       "  <th>5894</th>   <td>    0.0304</td> <td>    0.044</td> <td>    0.698</td> <td> 0.491</td> <td>   -0.059</td> <td>    0.120</td>\n",
3504
       "</tr>\n",
3505
       "<tr>\n",
3506
       "  <th>6199</th>   <td>   -0.0566</td> <td>    0.030</td> <td>   -1.915</td> <td> 0.065</td> <td>   -0.117</td> <td>    0.004</td>\n",
3507
       "</tr>\n",
3508
       "<tr>\n",
3509
       "  <th>6456</th>   <td>    0.0099</td> <td>    0.011</td> <td>    0.881</td> <td> 0.385</td> <td>   -0.013</td> <td>    0.033</td>\n",
3510
       "</tr>\n",
3511
       "<tr>\n",
3512
       "  <th>6464</th>   <td>   -0.0511</td> <td>    0.024</td> <td>   -2.113</td> <td> 0.043</td> <td>   -0.100</td> <td>   -0.002</td>\n",
3513
       "</tr>\n",
3514
       "<tr>\n",
3515
       "  <th>6654</th>   <td>   -0.0389</td> <td>    0.034</td> <td>   -1.147</td> <td> 0.261</td> <td>   -0.108</td> <td>    0.030</td>\n",
3516
       "</tr>\n",
3517
       "<tr>\n",
3518
       "  <th>6714</th>   <td>    0.0157</td> <td>    0.018</td> <td>    0.861</td> <td> 0.396</td> <td>   -0.022</td> <td>    0.053</td>\n",
3519
       "</tr>\n",
3520
       "<tr>\n",
3521
       "  <th>6868</th>   <td>    0.0148</td> <td>    0.031</td> <td>    0.477</td> <td> 0.637</td> <td>   -0.049</td> <td>    0.078</td>\n",
3522
       "</tr>\n",
3523
       "<tr>\n",
3524
       "  <th>7249</th>   <td>    0.0824</td> <td>    0.044</td> <td>    1.858</td> <td> 0.073</td> <td>   -0.008</td> <td>    0.173</td>\n",
3525
       "</tr>\n",
3526
       "<tr>\n",
3527
       "  <th>7311</th>   <td>    0.0085</td> <td>    0.031</td> <td>    0.270</td> <td> 0.789</td> <td>   -0.056</td> <td>    0.073</td>\n",
3528
       "</tr>\n",
3529
       "<tr>\n",
3530
       "  <th>7529</th>   <td>   -0.0009</td> <td>    0.039</td> <td>   -0.023</td> <td> 0.982</td> <td>   -0.082</td> <td>    0.080</td>\n",
3531
       "</tr>\n",
3532
       "<tr>\n",
3533
       "  <th>8027</th>   <td>   -0.0035</td> <td>    0.027</td> <td>   -0.133</td> <td> 0.895</td> <td>   -0.058</td> <td>    0.051</td>\n",
3534
       "</tr>\n",
3535
       "<tr>\n",
3536
       "  <th>8038</th>   <td>    0.0074</td> <td>    0.009</td> <td>    0.807</td> <td> 0.426</td> <td>   -0.011</td> <td>    0.026</td>\n",
3537
       "</tr>\n",
3538
       "<tr>\n",
3539
       "  <th>9146</th>   <td>    0.0232</td> <td>    0.033</td> <td>    0.706</td> <td> 0.486</td> <td>   -0.044</td> <td>    0.090</td>\n",
3540
       "</tr>\n",
3541
       "<tr>\n",
3542
       "  <th>10000</th>  <td>   -0.0273</td> <td>    0.024</td> <td>   -1.146</td> <td> 0.261</td> <td>   -0.076</td> <td>    0.021</td>\n",
3543
       "</tr>\n",
3544
       "<tr>\n",
3545
       "  <th>10252</th>  <td>    0.0124</td> <td>    0.015</td> <td>    0.815</td> <td> 0.422</td> <td>   -0.019</td> <td>    0.044</td>\n",
3546
       "</tr>\n",
3547
       "<tr>\n",
3548
       "  <th>10253</th>  <td>   -0.0064</td> <td>    0.016</td> <td>   -0.395</td> <td> 0.696</td> <td>   -0.040</td> <td>    0.027</td>\n",
3549
       "</tr>\n",
3550
       "<tr>\n",
3551
       "  <th>10254</th>  <td>    0.0475</td> <td>    0.034</td> <td>    1.379</td> <td> 0.178</td> <td>   -0.023</td> <td>    0.118</td>\n",
3552
       "</tr>\n",
3553
       "<tr>\n",
3554
       "  <th>11140</th>  <td>    0.0289</td> <td>    0.038</td> <td>    0.753</td> <td> 0.458</td> <td>   -0.050</td> <td>    0.108</td>\n",
3555
       "</tr>\n",
3556
       "<tr>\n",
3557
       "  <th>23239</th>  <td>    0.0138</td> <td>    0.026</td> <td>    0.539</td> <td> 0.594</td> <td>   -0.038</td> <td>    0.066</td>\n",
3558
       "</tr>\n",
3559
       "<tr>\n",
3560
       "  <th>26018</th>  <td>    0.0022</td> <td>    0.015</td> <td>    0.148</td> <td> 0.884</td> <td>   -0.029</td> <td>    0.033</td>\n",
3561
       "</tr>\n",
3562
       "<tr>\n",
3563
       "  <th>29924</th>  <td>    0.0047</td> <td>    0.033</td> <td>    0.143</td> <td> 0.887</td> <td>   -0.062</td> <td>    0.072</td>\n",
3564
       "</tr>\n",
3565
       "<tr>\n",
3566
       "  <th>30011</th>  <td>    0.0248</td> <td>    0.022</td> <td>    1.143</td> <td> 0.262</td> <td>   -0.020</td> <td>    0.069</td>\n",
3567
       "</tr>\n",
3568
       "<tr>\n",
3569
       "  <th>55824</th>  <td>    0.0005</td> <td>    0.020</td> <td>    0.025</td> <td> 0.980</td> <td>   -0.040</td> <td>    0.042</td>\n",
3570
       "</tr>\n",
3571
       "<tr>\n",
3572
       "  <th>57761</th>  <td>   -0.0018</td> <td>    0.013</td> <td>   -0.137</td> <td> 0.892</td> <td>   -0.029</td> <td>    0.025</td>\n",
3573
       "</tr>\n",
3574
       "<tr>\n",
3575
       "  <th>58513</th>  <td>   -0.0800</td> <td>    0.037</td> <td>   -2.155</td> <td> 0.040</td> <td>   -0.156</td> <td>   -0.004</td>\n",
3576
       "</tr>\n",
3577
       "<tr>\n",
3578
       "  <th>64223</th>  <td>   -0.0533</td> <td>    0.040</td> <td>   -1.339</td> <td> 0.191</td> <td>   -0.135</td> <td>    0.028</td>\n",
3579
       "</tr>\n",
3580
       "<tr>\n",
3581
       "  <th>79109</th>  <td>    0.0152</td> <td>    0.033</td> <td>    0.457</td> <td> 0.651</td> <td>   -0.053</td> <td>    0.083</td>\n",
3582
       "</tr>\n",
3583
       "<tr>\n",
3584
       "  <th>84335</th>  <td>   -0.0249</td> <td>    0.037</td> <td>   -0.670</td> <td> 0.508</td> <td>   -0.101</td> <td>    0.051</td>\n",
3585
       "</tr>\n",
3586
       "<tr>\n",
3587
       "  <th>117145</th> <td>    0.0240</td> <td>    0.025</td> <td>    0.961</td> <td> 0.344</td> <td>   -0.027</td> <td>    0.075</td>\n",
3588
       "</tr>\n",
3589
       "<tr>\n",
3590
       "  <th>196883</th> <td>   -0.0494</td> <td>    0.015</td> <td>   -3.296</td> <td> 0.003</td> <td>   -0.080</td> <td>   -0.019</td>\n",
3591
       "</tr>\n",
3592
       "<tr>\n",
3593
       "  <th>253260</th> <td>    0.0409</td> <td>    0.046</td> <td>    0.893</td> <td> 0.379</td> <td>   -0.053</td> <td>    0.135</td>\n",
3594
       "</tr>\n",
3595
       "</table>\n",
3596
       "<table class=\"simpletable\">\n",
3597
       "<tr>\n",
3598
       "  <th>Omnibus:</th>       <td> 0.520</td> <th>  Durbin-Watson:     </th> <td>   2.062</td>\n",
3599
       "</tr>\n",
3600
       "<tr>\n",
3601
       "  <th>Prob(Omnibus):</th> <td> 0.771</td> <th>  Jarque-Bera (JB):  </th> <td>   0.425</td>\n",
3602
       "</tr>\n",
3603
       "<tr>\n",
3604
       "  <th>Skew:</th>          <td> 0.140</td> <th>  Prob(JB):          </th> <td>   0.808</td>\n",
3605
       "</tr>\n",
3606
       "<tr>\n",
3607
       "  <th>Kurtosis:</th>      <td> 2.980</td> <th>  Cond. No.          </th> <td>1.84e+03</td>\n",
3608
       "</tr>\n",
3609
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.<br/>[2] The condition number is large, 1.84e+03. This might indicate that there are<br/>strong multicollinearity or other numerical problems."
3610
      ],
3611
      "text/plain": [
3612
       "<class 'statsmodels.iolib.summary.Summary'>\n",
3613
       "\"\"\"\n",
3614
       "                            OLS Regression Results                            \n",
3615
       "==============================================================================\n",
3616
       "Dep. Variable:                      y   R-squared:                       0.999\n",
3617
       "Model:                            OLS   Adj. R-squared:                  0.995\n",
3618
       "Method:                 Least Squares   F-statistic:                     251.5\n",
3619
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):           1.78e-29\n",
3620
       "Time:                        18:17:00   Log-Likelihood:                 364.21\n",
3621
       "No. Observations:                 130   AIC:                            -526.4\n",
3622
       "Df Residuals:                      29   BIC:                            -236.8\n",
3623
       "Df Model:                         101                                         \n",
3624
       "Covariance Type:            nonrobust                                         \n",
3625
       "==============================================================================\n",
3626
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
3627
       "------------------------------------------------------------------------------\n",
3628
       "102           -0.0263      0.023     -1.162      0.255      -0.072       0.020\n",
3629
       "107            0.0055      0.013      0.414      0.682      -0.022       0.032\n",
3630
       "108           -0.0179      0.025     -0.717      0.479      -0.069       0.033\n",
3631
       "109            0.0112      0.019      0.600      0.553      -0.027       0.049\n",
3632
       "111            0.0037      0.013      0.282      0.780      -0.023       0.031\n",
3633
       "112           -0.0120      0.023     -0.527      0.602      -0.058       0.034\n",
3634
       "113            0.0143      0.014      1.031      0.311      -0.014       0.043\n",
3635
       "114           -0.0118      0.039     -0.302      0.765      -0.092       0.068\n",
3636
       "115            0.0008      0.036      0.022      0.983      -0.072       0.074\n",
3637
       "160           -0.0357      0.032     -1.114      0.274      -0.101       0.030\n",
3638
       "161           -0.0005      0.027     -0.017      0.987      -0.056       0.056\n",
3639
       "163            0.0254      0.031      0.828      0.415      -0.037       0.088\n",
3640
       "207            0.0099      0.031      0.323      0.749      -0.053       0.073\n",
3641
       "208           -0.0149      0.016     -0.959      0.346      -0.047       0.017\n",
3642
       "572            0.0064      0.024      0.267      0.791      -0.042       0.055\n",
3643
       "801            0.0163      0.040      0.408      0.686      -0.065       0.098\n",
3644
       "805            0.0394      0.032      1.232      0.228      -0.026       0.105\n",
3645
       "808            0.0136      0.037      0.367      0.716      -0.062       0.089\n",
3646
       "814           -0.0174      0.015     -1.129      0.268      -0.049       0.014\n",
3647
       "842           -0.0067      0.021     -0.327      0.746      -0.049       0.035\n",
3648
       "867           -0.0127      0.029     -0.442      0.661      -0.072       0.046\n",
3649
       "983            0.0013      0.015      0.089      0.930      -0.029       0.032\n",
3650
       "998           -0.0207      0.043     -0.482      0.633      -0.109       0.067\n",
3651
       "1026          -0.0120      0.014     -0.878      0.387      -0.040       0.016\n",
3652
       "1027           0.0094      0.025      0.382      0.705      -0.041       0.060\n",
3653
       "1147          -0.0039      0.035     -0.111      0.912      -0.075       0.068\n",
3654
       "1173           0.0050      0.041      0.121      0.905      -0.079       0.089\n",
3655
       "1175           0.0105      0.033      0.318      0.753      -0.057       0.078\n",
3656
       "1211          -0.0214      0.015     -1.443      0.160      -0.052       0.009\n",
3657
       "1213           0.0339      0.041      0.830      0.413      -0.050       0.117\n",
3658
       "1385          -0.0315      0.034     -0.915      0.367      -0.102       0.039\n",
3659
       "1445           0.0012      0.022      0.056      0.956      -0.044       0.046\n",
3660
       "1950         2.53e-05      0.011      0.002      0.998      -0.023       0.023\n",
3661
       "1956           0.0078      0.017      0.457      0.651      -0.027       0.043\n",
3662
       "2060           0.0024      0.044      0.054      0.958      -0.087       0.092\n",
3663
       "2308          -0.0160      0.020     -0.788      0.437      -0.057       0.025\n",
3664
       "2309           0.0524      0.019      2.699      0.011       0.013       0.092\n",
3665
       "2475          -0.0235      0.034     -0.688      0.497      -0.093       0.046\n",
3666
       "2549          -0.0339      0.028     -1.213      0.235      -0.091       0.023\n",
3667
       "2885           0.0448      0.041      1.094      0.283      -0.039       0.129\n",
3668
       "2931           0.0273      0.035      0.787      0.438      -0.044       0.098\n",
3669
       "3164           0.0046      0.006      0.796      0.433      -0.007       0.016\n",
3670
       "3265          -0.0047      0.023     -0.207      0.838      -0.051       0.042\n",
3671
       "3320           0.0023      0.023      0.100      0.921      -0.045       0.050\n",
3672
       "3709           0.0019      0.016      0.120      0.905      -0.030       0.034\n",
3673
       "3710           0.0056      0.017      0.323      0.749      -0.030       0.041\n",
3674
       "3845           0.0280      0.030      0.933      0.359      -0.033       0.090\n",
3675
       "4193           0.0034      0.016      0.206      0.838      -0.030       0.037\n",
3676
       "4303          -0.0149      0.020     -0.760      0.454      -0.055       0.025\n",
3677
       "4893          -0.0206      0.037     -0.560      0.579      -0.096       0.055\n",
3678
       "5136           0.0218      0.018      1.187      0.245      -0.016       0.059\n",
3679
       "5170           0.0066      0.041      0.160      0.874      -0.078       0.091\n",
3680
       "5290          -0.0134      0.039     -0.345      0.732      -0.093       0.066\n",
3681
       "5295           0.0044      0.022      0.196      0.846      -0.041       0.050\n",
3682
       "5335           0.0357      0.038      0.950      0.350      -0.041       0.113\n",
3683
       "5566           0.0261      0.033      0.804      0.428      -0.040       0.093\n",
3684
       "5567          -0.0081      0.014     -0.573      0.571      -0.037       0.021\n",
3685
       "5573          -0.0622      0.036     -1.748      0.091      -0.135       0.011\n",
3686
       "5575          -0.0046      0.018     -0.255      0.801      -0.042       0.032\n",
3687
       "5576           0.0045      0.022      0.205      0.839      -0.040       0.049\n",
3688
       "5577          -0.0068      0.012     -0.554      0.584      -0.032       0.018\n",
3689
       "5578          -0.0009      0.018     -0.047      0.963      -0.039       0.037\n",
3690
       "5580          -0.0330      0.019     -1.732      0.094      -0.072       0.006\n",
3691
       "5581          -0.0278      0.027     -1.015      0.319      -0.084       0.028\n",
3692
       "5582           0.0025      0.006      0.455      0.652      -0.009       0.014\n",
3693
       "5594          -0.0124      0.032     -0.386      0.702      -0.078       0.053\n",
3694
       "5595          -0.0193      0.024     -0.796      0.433      -0.069       0.030\n",
3695
       "5604          -0.0006      0.035     -0.018      0.986      -0.071       0.070\n",
3696
       "5605           0.0225      0.037      0.606      0.549      -0.053       0.098\n",
3697
       "5728          -0.0036      0.034     -0.106      0.917      -0.072       0.065\n",
3698
       "5894           0.0304      0.044      0.698      0.491      -0.059       0.120\n",
3699
       "6199          -0.0566      0.030     -1.915      0.065      -0.117       0.004\n",
3700
       "6456           0.0099      0.011      0.881      0.385      -0.013       0.033\n",
3701
       "6464          -0.0511      0.024     -2.113      0.043      -0.100      -0.002\n",
3702
       "6654          -0.0389      0.034     -1.147      0.261      -0.108       0.030\n",
3703
       "6714           0.0157      0.018      0.861      0.396      -0.022       0.053\n",
3704
       "6868           0.0148      0.031      0.477      0.637      -0.049       0.078\n",
3705
       "7249           0.0824      0.044      1.858      0.073      -0.008       0.173\n",
3706
       "7311           0.0085      0.031      0.270      0.789      -0.056       0.073\n",
3707
       "7529          -0.0009      0.039     -0.023      0.982      -0.082       0.080\n",
3708
       "8027          -0.0035      0.027     -0.133      0.895      -0.058       0.051\n",
3709
       "8038           0.0074      0.009      0.807      0.426      -0.011       0.026\n",
3710
       "9146           0.0232      0.033      0.706      0.486      -0.044       0.090\n",
3711
       "10000         -0.0273      0.024     -1.146      0.261      -0.076       0.021\n",
3712
       "10252          0.0124      0.015      0.815      0.422      -0.019       0.044\n",
3713
       "10253         -0.0064      0.016     -0.395      0.696      -0.040       0.027\n",
3714
       "10254          0.0475      0.034      1.379      0.178      -0.023       0.118\n",
3715
       "11140          0.0289      0.038      0.753      0.458      -0.050       0.108\n",
3716
       "23239          0.0138      0.026      0.539      0.594      -0.038       0.066\n",
3717
       "26018          0.0022      0.015      0.148      0.884      -0.029       0.033\n",
3718
       "29924          0.0047      0.033      0.143      0.887      -0.062       0.072\n",
3719
       "30011          0.0248      0.022      1.143      0.262      -0.020       0.069\n",
3720
       "55824          0.0005      0.020      0.025      0.980      -0.040       0.042\n",
3721
       "57761         -0.0018      0.013     -0.137      0.892      -0.029       0.025\n",
3722
       "58513         -0.0800      0.037     -2.155      0.040      -0.156      -0.004\n",
3723
       "64223         -0.0533      0.040     -1.339      0.191      -0.135       0.028\n",
3724
       "79109          0.0152      0.033      0.457      0.651      -0.053       0.083\n",
3725
       "84335         -0.0249      0.037     -0.670      0.508      -0.101       0.051\n",
3726
       "117145         0.0240      0.025      0.961      0.344      -0.027       0.075\n",
3727
       "196883        -0.0494      0.015     -3.296      0.003      -0.080      -0.019\n",
3728
       "253260         0.0409      0.046      0.893      0.379      -0.053       0.135\n",
3729
       "==============================================================================\n",
3730
       "Omnibus:                        0.520   Durbin-Watson:                   2.062\n",
3731
       "Prob(Omnibus):                  0.771   Jarque-Bera (JB):                0.425\n",
3732
       "Skew:                           0.140   Prob(JB):                        0.808\n",
3733
       "Kurtosis:                       2.980   Cond. No.                     1.84e+03\n",
3734
       "==============================================================================\n",
3735
       "\n",
3736
       "Warnings:\n",
3737
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
3738
       "[2] The condition number is large, 1.84e+03. This might indicate that there are\n",
3739
       "strong multicollinearity or other numerical problems.\n",
3740
       "\"\"\""
3741
      ]
3742
     },
3743
     "execution_count": 25,
3744
     "metadata": {},
3745
     "output_type": "execute_result"
3746
    }
3747
   ],
3748
   "source": [
3749
    "X = PAADEEGFR\n",
3750
    "y = PredPAAD.detach().numpy()\n",
3751
    "\n",
3752
    "# Note the difference in argument order\n",
3753
    "model = sm.OLS(y, X).fit()\n",
3754
    "predictions = model.predict(X) # make the predictions by the model\n",
3755
    "\n",
3756
    "# Print out the statistics\n",
3757
    "model.summary()"
3758
   ]
3759
  },
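The cell above regresses the predicted scores (PredPAAD) on the PAAD EGFR feature matrix (PAADEEGFR) without an intercept, since statsmodels does not add a constant column automatically. If an intercept is desired, a minimal sketch of the same fit with an explicit constant follows; X_const and model_c are illustrative names, and X, y are the variables defined in that cell.

import statsmodels.api as sm

X_const = sm.add_constant(X)        # prepend a column of ones so the regression has an intercept
model_c = sm.OLS(y, X_const).fit()  # endog first, exog second, as in the cell above
print(model_c.summary())            # R-squared is then computed around the mean of y rather than around zero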
3760
  {
3761
   "cell_type": "code",
3762
   "execution_count": 26,
3763
   "metadata": {},
3764
   "outputs": [
3765
    {
3766
     "name": "stdout",
3767
     "output_type": "stream",
3768
     "text": [
3769
      "(array([False, False, False, False, False, False, False, False, False,\n",
3770
      "       False, False, False, False, False, False, False, False, False,\n",
3771
      "       False, False, False, False, False, False, False, False, False,\n",
3772
      "       False, False, False, False, False, False, False, False, False,\n",
3773
      "       False, False, False, False, False, False, False, False, False,\n",
3774
      "       False, False, False, False, False, False, False, False, False,\n",
3775
      "       False, False, False, False, False, False, False, False, False,\n",
3776
      "       False, False, False, False, False, False, False, False, False,\n",
3777
      "       False, False, False, False, False, False, False, False, False,\n",
3778
      "       False, False, False, False, False, False, False, False, False,\n",
3779
      "       False, False, False, False, False, False, False, False, False,\n",
3780
      "       False, False]), array([ 25.71654309,  68.88518558,  48.36811448,  55.88769998,\n",
3781
      "        78.77410576,  60.81762683,  31.41328896,  77.27234893,\n",
3782
      "        99.26457938,  27.69865112,  99.63968843,  41.88315265,\n",
3783
      "        75.66797196,  34.90617989,  79.8933991 ,  69.30354072,\n",
3784
      "        23.00476997,  72.33422978,  27.07452713,  75.35095038,\n",
3785
      "        66.80529175,  93.88769455,  63.96140325,  39.08209479,\n",
3786
      "        71.23367866,  92.15867969,  91.37186718,  76.01882484,\n",
3787
      "        16.14111894,  41.75472905,  37.11689276,  96.5460688 ,\n",
3788
      "       100.82224759,  65.73660253,  96.71260657,  44.13402207,\n",
3789
      "         1.15992322,  50.19810301,  23.73852166,  28.57323444,\n",
3790
      "        44.22337651,  43.70695825,  84.62151046,  92.99084428,\n",
3791
      "        91.44853418,  75.6610565 ,  36.23271223,  84.62829129,\n",
3792
      "        45.80441445,  58.5278806 ,  24.72551896,  88.26889264,\n",
3793
      "        73.97259467,  85.44107636,  35.35726029,  43.22913637,\n",
3794
      "        57.65320828,   9.1996007 ,  80.86879718,  84.77120041,\n",
3795
      "        58.95560148,  97.25785942,   9.48626631,  32.18986414,\n",
3796
      "        65.87683843,  70.92028156,  43.70345735,  99.56588916,\n",
3797
      "        55.4642251 ,  92.57057856,  49.57156958,   6.60461621,\n",
3798
      "        38.91499157,   4.37977584,  26.3190754 ,  40.00277853,\n",
3799
      "        64.33396747,   7.40933999,  79.70251399,  99.14239031,\n",
3800
      "        90.41462196,  43.03320452,  49.04616013,  26.37560835,\n",
3801
      "        42.58791361,  70.27675668,  18.01385415,  46.21681982,\n",
3802
      "        59.96442188,  89.24362686,  89.57925236,  26.51049056,\n",
3803
      "        99.00283406,  90.1284706 ,   4.00007563,  19.29804105,\n",
3804
      "        65.75781376,  51.32626421,  34.78151666,   0.26216152,\n",
3805
      "        38.29707195]))\n"
3806
     ]
3807
    }
3808
   ],
3809
   "source": [
3810
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
3811
   ]
3812
  },
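The tuple printed above is (reject mask, p-values scaled by the number of tests), which is why many of the "corrected" values exceed 1. A minimal, equivalent check using statsmodels' multiple-testing helper, assuming model is the fitted OLS model from the preceding cell (variable names here are illustrative):

import numpy as np
from statsmodels.stats.multitest import multipletests

# Bonferroni correction; corrected p-values are clipped at 1
reject, p_corrected, _, _ = multipletests(model.pvalues, alpha=0.05, method='bonferroni')
print(np.sum(reject), "coefficients pass the family-wise 0.05 threshold")
print(p_corrected.min())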
3813
  {
3814
   "cell_type": "code",
3815
   "execution_count": 27,
3816
   "metadata": {},
3817
   "outputs": [],
3818
   "source": [
3819
    "listEGFR = LUADE.columns.intersection(lsEGFR)\n",
3820
    "LUADEEGFR = LUADE[listEGFR]\n",
3821
    "LUADMEGFR = LUADM[listEGFR]\n",
3822
    "LUADCEGFR = LUADC[listEGFR]"
3823
   ]
3824
  },
3825
  {
3826
   "cell_type": "code",
3827
   "execution_count": 28,
3828
   "metadata": {},
3829
   "outputs": [
3830
    {
3831
     "data": {
3832
      "text/html": [
3833
       "<table class=\"simpletable\">\n",
3834
       "<caption>OLS Regression Results</caption>\n",
3835
       "<tr>\n",
3836
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.998</td>\n",
3837
       "</tr>\n",
3838
       "<tr>\n",
3839
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.998</td>\n",
3840
       "</tr>\n",
3841
       "<tr>\n",
3842
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   1895.</td>\n",
3843
       "</tr>\n",
3844
       "<tr>\n",
3845
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th>  <td>  0.00</td> \n",
3846
       "</tr>\n",
3847
       "<tr>\n",
3848
       "  <th>Time:</th>                 <td>18:17:07</td>     <th>  Log-Likelihood:    </th> <td>  1160.5</td>\n",
3849
       "</tr>\n",
3850
       "<tr>\n",
3851
       "  <th>No. Observations:</th>      <td>   475</td>      <th>  AIC:               </th> <td>  -2119.</td>\n",
3852
       "</tr>\n",
3853
       "<tr>\n",
3854
       "  <th>Df Residuals:</th>          <td>   374</td>      <th>  BIC:               </th> <td>  -1699.</td>\n",
3855
       "</tr>\n",
3856
       "<tr>\n",
3857
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
3858
       "</tr>\n",
3859
       "<tr>\n",
3860
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
3861
       "</tr>\n",
3862
       "</table>\n",
3863
       "<table class=\"simpletable\">\n",
3864
       "<tr>\n",
3865
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
3866
       "</tr>\n",
3867
       "<tr>\n",
3868
       "  <th>102</th>    <td>    0.0028</td> <td>    0.003</td> <td>    0.825</td> <td> 0.410</td> <td>   -0.004</td> <td>    0.010</td>\n",
3869
       "</tr>\n",
3870
       "<tr>\n",
3871
       "  <th>107</th>    <td>   -0.0021</td> <td>    0.002</td> <td>   -1.161</td> <td> 0.247</td> <td>   -0.006</td> <td>    0.001</td>\n",
3872
       "</tr>\n",
3873
       "<tr>\n",
3874
       "  <th>108</th>    <td>    0.0013</td> <td>    0.003</td> <td>    0.412</td> <td> 0.681</td> <td>   -0.005</td> <td>    0.007</td>\n",
3875
       "</tr>\n",
3876
       "<tr>\n",
3877
       "  <th>109</th>    <td>    0.0013</td> <td>    0.003</td> <td>    0.524</td> <td> 0.600</td> <td>   -0.004</td> <td>    0.006</td>\n",
3878
       "</tr>\n",
3879
       "<tr>\n",
3880
       "  <th>111</th>    <td>   -0.0006</td> <td>    0.002</td> <td>   -0.411</td> <td> 0.681</td> <td>   -0.004</td> <td>    0.002</td>\n",
3881
       "</tr>\n",
3882
       "<tr>\n",
3883
       "  <th>112</th>    <td>    0.0020</td> <td>    0.003</td> <td>    0.771</td> <td> 0.441</td> <td>   -0.003</td> <td>    0.007</td>\n",
3884
       "</tr>\n",
3885
       "<tr>\n",
3886
       "  <th>113</th>    <td>   -0.0034</td> <td>    0.002</td> <td>   -1.652</td> <td> 0.099</td> <td>   -0.007</td> <td>    0.001</td>\n",
3887
       "</tr>\n",
3888
       "<tr>\n",
3889
       "  <th>114</th>    <td>   -0.0046</td> <td>    0.004</td> <td>   -1.172</td> <td> 0.242</td> <td>   -0.012</td> <td>    0.003</td>\n",
3890
       "</tr>\n",
3891
       "<tr>\n",
3892
       "  <th>115</th>    <td>    0.0020</td> <td>    0.003</td> <td>    0.798</td> <td> 0.426</td> <td>   -0.003</td> <td>    0.007</td>\n",
3893
       "</tr>\n",
3894
       "<tr>\n",
3895
       "  <th>160</th>    <td>    0.0059</td> <td>    0.005</td> <td>    1.221</td> <td> 0.223</td> <td>   -0.004</td> <td>    0.015</td>\n",
3896
       "</tr>\n",
3897
       "<tr>\n",
3898
       "  <th>161</th>    <td>   -0.0005</td> <td>    0.004</td> <td>   -0.128</td> <td> 0.898</td> <td>   -0.007</td> <td>    0.007</td>\n",
3899
       "</tr>\n",
3900
       "<tr>\n",
3901
       "  <th>163</th>    <td>    0.0038</td> <td>    0.003</td> <td>    1.226</td> <td> 0.221</td> <td>   -0.002</td> <td>    0.010</td>\n",
3902
       "</tr>\n",
3903
       "<tr>\n",
3904
       "  <th>207</th>    <td>   -0.0066</td> <td>    0.004</td> <td>   -1.705</td> <td> 0.089</td> <td>   -0.014</td> <td>    0.001</td>\n",
3905
       "</tr>\n",
3906
       "<tr>\n",
3907
       "  <th>208</th>    <td>    0.0001</td> <td>    0.004</td> <td>    0.033</td> <td> 0.974</td> <td>   -0.008</td> <td>    0.008</td>\n",
3908
       "</tr>\n",
3909
       "<tr>\n",
3910
       "  <th>572</th>    <td>   -0.0035</td> <td>    0.003</td> <td>   -1.009</td> <td> 0.314</td> <td>   -0.010</td> <td>    0.003</td>\n",
3911
       "</tr>\n",
3912
       "<tr>\n",
3913
       "  <th>801</th>    <td>   -0.0123</td> <td>    0.004</td> <td>   -3.097</td> <td> 0.002</td> <td>   -0.020</td> <td>   -0.004</td>\n",
3914
       "</tr>\n",
3915
       "<tr>\n",
3916
       "  <th>805</th>    <td>    0.0078</td> <td>    0.004</td> <td>    2.175</td> <td> 0.030</td> <td>    0.001</td> <td>    0.015</td>\n",
3917
       "</tr>\n",
3918
       "<tr>\n",
3919
       "  <th>808</th>    <td>    0.0009</td> <td>    0.005</td> <td>    0.193</td> <td> 0.847</td> <td>   -0.008</td> <td>    0.010</td>\n",
3920
       "</tr>\n",
3921
       "<tr>\n",
3922
       "  <th>814</th>    <td>   -0.0018</td> <td>    0.004</td> <td>   -0.500</td> <td> 0.617</td> <td>   -0.009</td> <td>    0.005</td>\n",
3923
       "</tr>\n",
3924
       "<tr>\n",
3925
       "  <th>842</th>    <td>    0.0039</td> <td>    0.004</td> <td>    1.104</td> <td> 0.270</td> <td>   -0.003</td> <td>    0.011</td>\n",
3926
       "</tr>\n",
3927
       "<tr>\n",
3928
       "  <th>867</th>    <td>    0.0006</td> <td>    0.004</td> <td>    0.139</td> <td> 0.890</td> <td>   -0.008</td> <td>    0.009</td>\n",
3929
       "</tr>\n",
3930
       "<tr>\n",
3931
       "  <th>983</th>    <td>    0.0038</td> <td>    0.002</td> <td>    1.848</td> <td> 0.065</td> <td>   -0.000</td> <td>    0.008</td>\n",
3932
       "</tr>\n",
3933
       "<tr>\n",
3934
       "  <th>998</th>    <td>    0.0167</td> <td>    0.005</td> <td>    3.572</td> <td> 0.000</td> <td>    0.008</td> <td>    0.026</td>\n",
3935
       "</tr>\n",
3936
       "<tr>\n",
3937
       "  <th>1026</th>   <td>   -0.0011</td> <td>    0.002</td> <td>   -0.589</td> <td> 0.556</td> <td>   -0.005</td> <td>    0.003</td>\n",
3938
       "</tr>\n",
3939
       "<tr>\n",
3940
       "  <th>1027</th>   <td>    0.0030</td> <td>    0.003</td> <td>    1.039</td> <td> 0.300</td> <td>   -0.003</td> <td>    0.009</td>\n",
3941
       "</tr>\n",
3942
       "<tr>\n",
3943
       "  <th>1147</th>   <td>    0.0067</td> <td>    0.005</td> <td>    1.436</td> <td> 0.152</td> <td>   -0.002</td> <td>    0.016</td>\n",
3944
       "</tr>\n",
3945
       "<tr>\n",
3946
       "  <th>1173</th>   <td>    0.0047</td> <td>    0.004</td> <td>    1.070</td> <td> 0.285</td> <td>   -0.004</td> <td>    0.013</td>\n",
3947
       "</tr>\n",
3948
       "<tr>\n",
3949
       "  <th>1175</th>   <td>   -0.0014</td> <td>    0.005</td> <td>   -0.270</td> <td> 0.787</td> <td>   -0.012</td> <td>    0.009</td>\n",
3950
       "</tr>\n",
3951
       "<tr>\n",
3952
       "  <th>1211</th>   <td>    0.0088</td> <td>    0.003</td> <td>    2.973</td> <td> 0.003</td> <td>    0.003</td> <td>    0.015</td>\n",
3953
       "</tr>\n",
3954
       "<tr>\n",
3955
       "  <th>1213</th>   <td>   -0.0055</td> <td>    0.004</td> <td>   -1.259</td> <td> 0.209</td> <td>   -0.014</td> <td>    0.003</td>\n",
3956
       "</tr>\n",
3957
       "<tr>\n",
3958
       "  <th>1385</th>   <td>   -0.0124</td> <td>    0.005</td> <td>   -2.316</td> <td> 0.021</td> <td>   -0.023</td> <td>   -0.002</td>\n",
3959
       "</tr>\n",
3960
       "<tr>\n",
3961
       "  <th>1445</th>   <td>   -0.0057</td> <td>    0.004</td> <td>   -1.319</td> <td> 0.188</td> <td>   -0.014</td> <td>    0.003</td>\n",
3962
       "</tr>\n",
3963
       "<tr>\n",
3964
       "  <th>1950</th>   <td>    0.0016</td> <td>    0.001</td> <td>    1.287</td> <td> 0.199</td> <td>   -0.001</td> <td>    0.004</td>\n",
3965
       "</tr>\n",
3966
       "<tr>\n",
3967
       "  <th>1956</th>   <td>    0.0073</td> <td>    0.001</td> <td>    5.165</td> <td> 0.000</td> <td>    0.005</td> <td>    0.010</td>\n",
3968
       "</tr>\n",
3969
       "<tr>\n",
3970
       "  <th>2060</th>   <td>   -0.0115</td> <td>    0.005</td> <td>   -2.353</td> <td> 0.019</td> <td>   -0.021</td> <td>   -0.002</td>\n",
3971
       "</tr>\n",
3972
       "<tr>\n",
3973
       "  <th>2308</th>   <td>    0.0014</td> <td>    0.003</td> <td>    0.494</td> <td> 0.621</td> <td>   -0.004</td> <td>    0.007</td>\n",
3974
       "</tr>\n",
3975
       "<tr>\n",
3976
       "  <th>2309</th>   <td>    0.0060</td> <td>    0.003</td> <td>    1.801</td> <td> 0.072</td> <td>   -0.001</td> <td>    0.013</td>\n",
3977
       "</tr>\n",
3978
       "<tr>\n",
3979
       "  <th>2475</th>   <td>   -0.0080</td> <td>    0.005</td> <td>   -1.582</td> <td> 0.114</td> <td>   -0.018</td> <td>    0.002</td>\n",
3980
       "</tr>\n",
3981
       "<tr>\n",
3982
       "  <th>2549</th>   <td>   -0.0035</td> <td>    0.003</td> <td>   -1.021</td> <td> 0.308</td> <td>   -0.010</td> <td>    0.003</td>\n",
3983
       "</tr>\n",
3984
       "<tr>\n",
3985
       "  <th>2885</th>   <td>    0.0081</td> <td>    0.006</td> <td>    1.414</td> <td> 0.158</td> <td>   -0.003</td> <td>    0.019</td>\n",
3986
       "</tr>\n",
3987
       "<tr>\n",
3988
       "  <th>2931</th>   <td>    0.0025</td> <td>    0.005</td> <td>    0.479</td> <td> 0.632</td> <td>   -0.008</td> <td>    0.013</td>\n",
3989
       "</tr>\n",
3990
       "<tr>\n",
3991
       "  <th>3164</th>   <td>    0.0004</td> <td>    0.001</td> <td>    0.398</td> <td> 0.691</td> <td>   -0.002</td> <td>    0.002</td>\n",
3992
       "</tr>\n",
3993
       "<tr>\n",
3994
       "  <th>3265</th>   <td>    0.0018</td> <td>    0.003</td> <td>    0.574</td> <td> 0.566</td> <td>   -0.004</td> <td>    0.008</td>\n",
3995
       "</tr>\n",
3996
       "<tr>\n",
3997
       "  <th>3320</th>   <td>    0.0070</td> <td>    0.003</td> <td>    2.097</td> <td> 0.037</td> <td>    0.000</td> <td>    0.014</td>\n",
3998
       "</tr>\n",
3999
       "<tr>\n",
4000
       "  <th>3709</th>   <td>   -0.0031</td> <td>    0.002</td> <td>   -1.262</td> <td> 0.208</td> <td>   -0.008</td> <td>    0.002</td>\n",
4001
       "</tr>\n",
4002
       "<tr>\n",
4003
       "  <th>3710</th>   <td>    0.0024</td> <td>    0.002</td> <td>    1.164</td> <td> 0.245</td> <td>   -0.002</td> <td>    0.006</td>\n",
4004
       "</tr>\n",
4005
       "<tr>\n",
4006
       "  <th>3845</th>   <td>   -0.0020</td> <td>    0.002</td> <td>   -0.814</td> <td> 0.416</td> <td>   -0.007</td> <td>    0.003</td>\n",
4007
       "</tr>\n",
4008
       "<tr>\n",
4009
       "  <th>4193</th>   <td>    0.0026</td> <td>    0.002</td> <td>    1.360</td> <td> 0.175</td> <td>   -0.001</td> <td>    0.006</td>\n",
4010
       "</tr>\n",
4011
       "<tr>\n",
4012
       "  <th>4303</th>   <td>   -0.0058</td> <td>    0.003</td> <td>   -2.072</td> <td> 0.039</td> <td>   -0.011</td> <td>   -0.000</td>\n",
4013
       "</tr>\n",
4014
       "<tr>\n",
4015
       "  <th>4893</th>   <td>   -0.0029</td> <td>    0.003</td> <td>   -0.855</td> <td> 0.393</td> <td>   -0.010</td> <td>    0.004</td>\n",
4016
       "</tr>\n",
4017
       "<tr>\n",
4018
       "  <th>5136</th>   <td>   -0.0007</td> <td>    0.002</td> <td>   -0.346</td> <td> 0.729</td> <td>   -0.005</td> <td>    0.003</td>\n",
4019
       "</tr>\n",
4020
       "<tr>\n",
4021
       "  <th>5170</th>   <td>   -0.0020</td> <td>    0.005</td> <td>   -0.365</td> <td> 0.715</td> <td>   -0.013</td> <td>    0.009</td>\n",
4022
       "</tr>\n",
4023
       "<tr>\n",
4024
       "  <th>5290</th>   <td>   -0.0005</td> <td>    0.005</td> <td>   -0.108</td> <td> 0.914</td> <td>   -0.010</td> <td>    0.009</td>\n",
4025
       "</tr>\n",
4026
       "<tr>\n",
4027
       "  <th>5295</th>   <td>    0.0006</td> <td>    0.003</td> <td>    0.179</td> <td> 0.858</td> <td>   -0.006</td> <td>    0.007</td>\n",
4028
       "</tr>\n",
4029
       "<tr>\n",
4030
       "  <th>5335</th>   <td>    0.0037</td> <td>    0.003</td> <td>    1.104</td> <td> 0.270</td> <td>   -0.003</td> <td>    0.010</td>\n",
4031
       "</tr>\n",
4032
       "<tr>\n",
4033
       "  <th>5566</th>   <td>   -0.0012</td> <td>    0.006</td> <td>   -0.208</td> <td> 0.835</td> <td>   -0.012</td> <td>    0.010</td>\n",
4034
       "</tr>\n",
4035
       "<tr>\n",
4036
       "  <th>5567</th>   <td>   -0.0006</td> <td>    0.002</td> <td>   -0.311</td> <td> 0.756</td> <td>   -0.004</td> <td>    0.003</td>\n",
4037
       "</tr>\n",
4038
       "<tr>\n",
4039
       "  <th>5573</th>   <td>    0.0005</td> <td>    0.004</td> <td>    0.108</td> <td> 0.914</td> <td>   -0.008</td> <td>    0.009</td>\n",
4040
       "</tr>\n",
4041
       "<tr>\n",
4042
       "  <th>5575</th>   <td>    0.0013</td> <td>    0.002</td> <td>    0.627</td> <td> 0.531</td> <td>   -0.003</td> <td>    0.005</td>\n",
4043
       "</tr>\n",
4044
       "<tr>\n",
4045
       "  <th>5576</th>   <td>   -0.0134</td> <td>    0.003</td> <td>   -3.897</td> <td> 0.000</td> <td>   -0.020</td> <td>   -0.007</td>\n",
4046
       "</tr>\n",
4047
       "<tr>\n",
4048
       "  <th>5577</th>   <td>    0.0011</td> <td>    0.001</td> <td>    0.742</td> <td> 0.459</td> <td>   -0.002</td> <td>    0.004</td>\n",
4049
       "</tr>\n",
4050
       "<tr>\n",
4051
       "  <th>5578</th>   <td>   -0.0005</td> <td>    0.002</td> <td>   -0.249</td> <td> 0.804</td> <td>   -0.004</td> <td>    0.003</td>\n",
4052
       "</tr>\n",
4053
       "<tr>\n",
4054
       "  <th>5580</th>   <td>    0.0077</td> <td>    0.003</td> <td>    2.264</td> <td> 0.024</td> <td>    0.001</td> <td>    0.014</td>\n",
4055
       "</tr>\n",
4056
       "<tr>\n",
4057
       "  <th>5581</th>   <td>    0.0029</td> <td>    0.003</td> <td>    0.860</td> <td> 0.391</td> <td>   -0.004</td> <td>    0.009</td>\n",
4058
       "</tr>\n",
4059
       "<tr>\n",
4060
       "  <th>5582</th>   <td>    0.0045</td> <td>    0.004</td> <td>    1.147</td> <td> 0.252</td> <td>   -0.003</td> <td>    0.012</td>\n",
4061
       "</tr>\n",
4062
       "<tr>\n",
4063
       "  <th>5594</th>   <td>    0.0058</td> <td>    0.003</td> <td>    1.729</td> <td> 0.085</td> <td>   -0.001</td> <td>    0.012</td>\n",
4064
       "</tr>\n",
4065
       "<tr>\n",
4066
       "  <th>5595</th>   <td>   -0.0001</td> <td>    0.003</td> <td>   -0.038</td> <td> 0.970</td> <td>   -0.006</td> <td>    0.006</td>\n",
4067
       "</tr>\n",
4068
       "<tr>\n",
4069
       "  <th>5604</th>   <td>    0.0019</td> <td>    0.003</td> <td>    0.543</td> <td> 0.588</td> <td>   -0.005</td> <td>    0.009</td>\n",
4070
       "</tr>\n",
4071
       "<tr>\n",
4072
       "  <th>5605</th>   <td>    0.0131</td> <td>    0.004</td> <td>    2.988</td> <td> 0.003</td> <td>    0.004</td> <td>    0.022</td>\n",
4073
       "</tr>\n",
4074
       "<tr>\n",
4075
       "  <th>5728</th>   <td>   -0.0003</td> <td>    0.004</td> <td>   -0.063</td> <td> 0.950</td> <td>   -0.008</td> <td>    0.008</td>\n",
4076
       "</tr>\n",
4077
       "<tr>\n",
4078
       "  <th>5894</th>   <td>   -0.0067</td> <td>    0.005</td> <td>   -1.381</td> <td> 0.168</td> <td>   -0.016</td> <td>    0.003</td>\n",
4079
       "</tr>\n",
4080
       "<tr>\n",
4081
       "  <th>6199</th>   <td>    0.0016</td> <td>    0.004</td> <td>    0.425</td> <td> 0.671</td> <td>   -0.006</td> <td>    0.009</td>\n",
4082
       "</tr>\n",
4083
       "<tr>\n",
4084
       "  <th>6456</th>   <td>   -0.0059</td> <td>    0.002</td> <td>   -2.729</td> <td> 0.007</td> <td>   -0.010</td> <td>   -0.002</td>\n",
4085
       "</tr>\n",
4086
       "<tr>\n",
4087
       "  <th>6464</th>   <td>    0.0003</td> <td>    0.003</td> <td>    0.114</td> <td> 0.909</td> <td>   -0.005</td> <td>    0.006</td>\n",
4088
       "</tr>\n",
4089
       "<tr>\n",
4090
       "  <th>6654</th>   <td>    0.0022</td> <td>    0.005</td> <td>    0.410</td> <td> 0.682</td> <td>   -0.008</td> <td>    0.013</td>\n",
4091
       "</tr>\n",
4092
       "<tr>\n",
4093
       "  <th>6714</th>   <td>   -0.0010</td> <td>    0.003</td> <td>   -0.366</td> <td> 0.715</td> <td>   -0.006</td> <td>    0.004</td>\n",
4094
       "</tr>\n",
4095
       "<tr>\n",
4096
       "  <th>6868</th>   <td>    0.0093</td> <td>    0.003</td> <td>    2.710</td> <td> 0.007</td> <td>    0.003</td> <td>    0.016</td>\n",
4097
       "</tr>\n",
4098
       "<tr>\n",
4099
       "  <th>7249</th>   <td>    0.0050</td> <td>    0.005</td> <td>    1.030</td> <td> 0.304</td> <td>   -0.005</td> <td>    0.014</td>\n",
4100
       "</tr>\n",
4101
       "<tr>\n",
4102
       "  <th>7311</th>   <td>    0.0098</td> <td>    0.004</td> <td>    2.696</td> <td> 0.007</td> <td>    0.003</td> <td>    0.017</td>\n",
4103
       "</tr>\n",
4104
       "<tr>\n",
4105
       "  <th>7529</th>   <td>   -0.0035</td> <td>    0.005</td> <td>   -0.746</td> <td> 0.456</td> <td>   -0.013</td> <td>    0.006</td>\n",
4106
       "</tr>\n",
4107
       "<tr>\n",
4108
       "  <th>8027</th>   <td>    0.0026</td> <td>    0.004</td> <td>    0.731</td> <td> 0.465</td> <td>   -0.004</td> <td>    0.010</td>\n",
4109
       "</tr>\n",
4110
       "<tr>\n",
4111
       "  <th>8038</th>   <td>   -0.0044</td> <td>    0.001</td> <td>   -2.945</td> <td> 0.003</td> <td>   -0.007</td> <td>   -0.001</td>\n",
4112
       "</tr>\n",
4113
       "<tr>\n",
4114
       "  <th>9146</th>   <td>    0.0017</td> <td>    0.005</td> <td>    0.339</td> <td> 0.734</td> <td>   -0.008</td> <td>    0.011</td>\n",
4115
       "</tr>\n",
4116
       "<tr>\n",
4117
       "  <th>10000</th>  <td>    0.0011</td> <td>    0.002</td> <td>    0.525</td> <td> 0.600</td> <td>   -0.003</td> <td>    0.005</td>\n",
4118
       "</tr>\n",
4119
       "<tr>\n",
4120
       "  <th>10252</th>  <td>    0.0010</td> <td>    0.002</td> <td>    0.430</td> <td> 0.667</td> <td>   -0.004</td> <td>    0.006</td>\n",
4121
       "</tr>\n",
4122
       "<tr>\n",
4123
       "  <th>10253</th>  <td>   -0.0067</td> <td>    0.002</td> <td>   -2.954</td> <td> 0.003</td> <td>   -0.011</td> <td>   -0.002</td>\n",
4124
       "</tr>\n",
4125
       "<tr>\n",
4126
       "  <th>10254</th>  <td>   -0.0002</td> <td>    0.005</td> <td>   -0.035</td> <td> 0.972</td> <td>   -0.009</td> <td>    0.009</td>\n",
4127
       "</tr>\n",
4128
       "<tr>\n",
4129
       "  <th>11140</th>  <td>    0.0005</td> <td>    0.005</td> <td>    0.101</td> <td> 0.920</td> <td>   -0.010</td> <td>    0.011</td>\n",
4130
       "</tr>\n",
4131
       "<tr>\n",
4132
       "  <th>23239</th>  <td>    0.0006</td> <td>    0.002</td> <td>    0.252</td> <td> 0.801</td> <td>   -0.004</td> <td>    0.005</td>\n",
4133
       "</tr>\n",
4134
       "<tr>\n",
4135
       "  <th>26018</th>  <td>    0.0047</td> <td>    0.002</td> <td>    2.039</td> <td> 0.042</td> <td>    0.000</td> <td>    0.009</td>\n",
4136
       "</tr>\n",
4137
       "<tr>\n",
4138
       "  <th>29924</th>  <td>    0.0014</td> <td>    0.004</td> <td>    0.323</td> <td> 0.747</td> <td>   -0.007</td> <td>    0.010</td>\n",
4139
       "</tr>\n",
4140
       "<tr>\n",
4141
       "  <th>30011</th>  <td>   -0.0027</td> <td>    0.002</td> <td>   -1.093</td> <td> 0.275</td> <td>   -0.007</td> <td>    0.002</td>\n",
4142
       "</tr>\n",
4143
       "<tr>\n",
4144
       "  <th>55824</th>  <td>    0.0011</td> <td>    0.002</td> <td>    0.521</td> <td> 0.602</td> <td>   -0.003</td> <td>    0.005</td>\n",
4145
       "</tr>\n",
4146
       "<tr>\n",
4147
       "  <th>57761</th>  <td>   -0.0023</td> <td>    0.002</td> <td>   -1.336</td> <td> 0.182</td> <td>   -0.006</td> <td>    0.001</td>\n",
4148
       "</tr>\n",
4149
       "<tr>\n",
4150
       "  <th>58513</th>  <td>   -0.0008</td> <td>    0.005</td> <td>   -0.177</td> <td> 0.860</td> <td>   -0.010</td> <td>    0.008</td>\n",
4151
       "</tr>\n",
4152
       "<tr>\n",
4153
       "  <th>64223</th>  <td>    0.0029</td> <td>    0.005</td> <td>    0.623</td> <td> 0.534</td> <td>   -0.006</td> <td>    0.012</td>\n",
4154
       "</tr>\n",
4155
       "<tr>\n",
4156
       "  <th>79109</th>  <td>   -0.0021</td> <td>    0.004</td> <td>   -0.504</td> <td> 0.615</td> <td>   -0.010</td> <td>    0.006</td>\n",
4157
       "</tr>\n",
4158
       "<tr>\n",
4159
       "  <th>84335</th>  <td>   -0.0087</td> <td>    0.005</td> <td>   -1.845</td> <td> 0.066</td> <td>   -0.018</td> <td>    0.001</td>\n",
4160
       "</tr>\n",
4161
       "<tr>\n",
4162
       "  <th>117145</th> <td>    0.0064</td> <td>    0.003</td> <td>    2.461</td> <td> 0.014</td> <td>    0.001</td> <td>    0.012</td>\n",
4163
       "</tr>\n",
4164
       "<tr>\n",
4165
       "  <th>196883</th> <td>   -0.0042</td> <td>    0.003</td> <td>   -1.559</td> <td> 0.120</td> <td>   -0.010</td> <td>    0.001</td>\n",
4166
       "</tr>\n",
4167
       "<tr>\n",
4168
       "  <th>253260</th> <td>    0.0034</td> <td>    0.003</td> <td>    1.329</td> <td> 0.185</td> <td>   -0.002</td> <td>    0.009</td>\n",
4169
       "</tr>\n",
4170
       "</table>\n",
4171
       "<table class=\"simpletable\">\n",
4172
       "<tr>\n",
4173
       "  <th>Omnibus:</th>       <td>43.558</td> <th>  Durbin-Watson:     </th> <td>   2.088</td>\n",
4174
       "</tr>\n",
4175
       "<tr>\n",
4176
       "  <th>Prob(Omnibus):</th> <td> 0.000</td> <th>  Jarque-Bera (JB):  </th> <td>  91.380</td>\n",
4177
       "</tr>\n",
4178
       "<tr>\n",
4179
       "  <th>Skew:</th>          <td> 0.522</td> <th>  Prob(JB):          </th> <td>1.44e-20</td>\n",
4180
       "</tr>\n",
4181
       "<tr>\n",
4182
       "  <th>Kurtosis:</th>      <td> 4.878</td> <th>  Cond. No.          </th> <td>    366.</td>\n",
4183
       "</tr>\n",
4184
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified."
4185
      ],
4186
      "text/plain": [
4187
       "<class 'statsmodels.iolib.summary.Summary'>\n",
4188
       "\"\"\"\n",
4189
       "                            OLS Regression Results                            \n",
4190
       "==============================================================================\n",
4191
       "Dep. Variable:                      y   R-squared:                       0.998\n",
4192
       "Model:                            OLS   Adj. R-squared:                  0.998\n",
4193
       "Method:                 Least Squares   F-statistic:                     1895.\n",
4194
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):               0.00\n",
4195
       "Time:                        18:17:07   Log-Likelihood:                 1160.5\n",
4196
       "No. Observations:                 475   AIC:                            -2119.\n",
4197
       "Df Residuals:                     374   BIC:                            -1699.\n",
4198
       "Df Model:                         101                                         \n",
4199
       "Covariance Type:            nonrobust                                         \n",
4200
       "==============================================================================\n",
4201
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
4202
       "------------------------------------------------------------------------------\n",
4203
       "102            0.0028      0.003      0.825      0.410      -0.004       0.010\n",
4204
       "107           -0.0021      0.002     -1.161      0.247      -0.006       0.001\n",
4205
       "108            0.0013      0.003      0.412      0.681      -0.005       0.007\n",
4206
       "109            0.0013      0.003      0.524      0.600      -0.004       0.006\n",
4207
       "111           -0.0006      0.002     -0.411      0.681      -0.004       0.002\n",
4208
       "112            0.0020      0.003      0.771      0.441      -0.003       0.007\n",
4209
       "113           -0.0034      0.002     -1.652      0.099      -0.007       0.001\n",
4210
       "114           -0.0046      0.004     -1.172      0.242      -0.012       0.003\n",
4211
       "115            0.0020      0.003      0.798      0.426      -0.003       0.007\n",
4212
       "160            0.0059      0.005      1.221      0.223      -0.004       0.015\n",
4213
       "161           -0.0005      0.004     -0.128      0.898      -0.007       0.007\n",
4214
       "163            0.0038      0.003      1.226      0.221      -0.002       0.010\n",
4215
       "207           -0.0066      0.004     -1.705      0.089      -0.014       0.001\n",
4216
       "208            0.0001      0.004      0.033      0.974      -0.008       0.008\n",
4217
       "572           -0.0035      0.003     -1.009      0.314      -0.010       0.003\n",
4218
       "801           -0.0123      0.004     -3.097      0.002      -0.020      -0.004\n",
4219
       "805            0.0078      0.004      2.175      0.030       0.001       0.015\n",
4220
       "808            0.0009      0.005      0.193      0.847      -0.008       0.010\n",
4221
       "814           -0.0018      0.004     -0.500      0.617      -0.009       0.005\n",
4222
       "842            0.0039      0.004      1.104      0.270      -0.003       0.011\n",
4223
       "867            0.0006      0.004      0.139      0.890      -0.008       0.009\n",
4224
       "983            0.0038      0.002      1.848      0.065      -0.000       0.008\n",
4225
       "998            0.0167      0.005      3.572      0.000       0.008       0.026\n",
4226
       "1026          -0.0011      0.002     -0.589      0.556      -0.005       0.003\n",
4227
       "1027           0.0030      0.003      1.039      0.300      -0.003       0.009\n",
4228
       "1147           0.0067      0.005      1.436      0.152      -0.002       0.016\n",
4229
       "1173           0.0047      0.004      1.070      0.285      -0.004       0.013\n",
4230
       "1175          -0.0014      0.005     -0.270      0.787      -0.012       0.009\n",
4231
       "1211           0.0088      0.003      2.973      0.003       0.003       0.015\n",
4232
       "1213          -0.0055      0.004     -1.259      0.209      -0.014       0.003\n",
4233
       "1385          -0.0124      0.005     -2.316      0.021      -0.023      -0.002\n",
4234
       "1445          -0.0057      0.004     -1.319      0.188      -0.014       0.003\n",
4235
       "1950           0.0016      0.001      1.287      0.199      -0.001       0.004\n",
4236
       "1956           0.0073      0.001      5.165      0.000       0.005       0.010\n",
4237
       "2060          -0.0115      0.005     -2.353      0.019      -0.021      -0.002\n",
4238
       "2308           0.0014      0.003      0.494      0.621      -0.004       0.007\n",
4239
       "2309           0.0060      0.003      1.801      0.072      -0.001       0.013\n",
4240
       "2475          -0.0080      0.005     -1.582      0.114      -0.018       0.002\n",
4241
       "2549          -0.0035      0.003     -1.021      0.308      -0.010       0.003\n",
4242
       "2885           0.0081      0.006      1.414      0.158      -0.003       0.019\n",
4243
       "2931           0.0025      0.005      0.479      0.632      -0.008       0.013\n",
4244
       "3164           0.0004      0.001      0.398      0.691      -0.002       0.002\n",
4245
       "3265           0.0018      0.003      0.574      0.566      -0.004       0.008\n",
4246
       "3320           0.0070      0.003      2.097      0.037       0.000       0.014\n",
4247
       "3709          -0.0031      0.002     -1.262      0.208      -0.008       0.002\n",
4248
       "3710           0.0024      0.002      1.164      0.245      -0.002       0.006\n",
4249
       "3845          -0.0020      0.002     -0.814      0.416      -0.007       0.003\n",
4250
       "4193           0.0026      0.002      1.360      0.175      -0.001       0.006\n",
4251
       "4303          -0.0058      0.003     -2.072      0.039      -0.011      -0.000\n",
4252
       "4893          -0.0029      0.003     -0.855      0.393      -0.010       0.004\n",
4253
       "5136          -0.0007      0.002     -0.346      0.729      -0.005       0.003\n",
4254
       "5170          -0.0020      0.005     -0.365      0.715      -0.013       0.009\n",
4255
       "5290          -0.0005      0.005     -0.108      0.914      -0.010       0.009\n",
4256
       "5295           0.0006      0.003      0.179      0.858      -0.006       0.007\n",
4257
       "5335           0.0037      0.003      1.104      0.270      -0.003       0.010\n",
4258
       "5566          -0.0012      0.006     -0.208      0.835      -0.012       0.010\n",
4259
       "5567          -0.0006      0.002     -0.311      0.756      -0.004       0.003\n",
4260
       "5573           0.0005      0.004      0.108      0.914      -0.008       0.009\n",
4261
       "5575           0.0013      0.002      0.627      0.531      -0.003       0.005\n",
4262
       "5576          -0.0134      0.003     -3.897      0.000      -0.020      -0.007\n",
4263
       "5577           0.0011      0.001      0.742      0.459      -0.002       0.004\n",
4264
       "5578          -0.0005      0.002     -0.249      0.804      -0.004       0.003\n",
4265
       "5580           0.0077      0.003      2.264      0.024       0.001       0.014\n",
4266
       "5581           0.0029      0.003      0.860      0.391      -0.004       0.009\n",
4267
       "5582           0.0045      0.004      1.147      0.252      -0.003       0.012\n",
4268
       "5594           0.0058      0.003      1.729      0.085      -0.001       0.012\n",
4269
       "5595          -0.0001      0.003     -0.038      0.970      -0.006       0.006\n",
4270
       "5604           0.0019      0.003      0.543      0.588      -0.005       0.009\n",
4271
       "5605           0.0131      0.004      2.988      0.003       0.004       0.022\n",
4272
       "5728          -0.0003      0.004     -0.063      0.950      -0.008       0.008\n",
4273
       "5894          -0.0067      0.005     -1.381      0.168      -0.016       0.003\n",
4274
       "6199           0.0016      0.004      0.425      0.671      -0.006       0.009\n",
4275
       "6456          -0.0059      0.002     -2.729      0.007      -0.010      -0.002\n",
4276
       "6464           0.0003      0.003      0.114      0.909      -0.005       0.006\n",
4277
       "6654           0.0022      0.005      0.410      0.682      -0.008       0.013\n",
4278
       "6714          -0.0010      0.003     -0.366      0.715      -0.006       0.004\n",
4279
       "6868           0.0093      0.003      2.710      0.007       0.003       0.016\n",
4280
       "7249           0.0050      0.005      1.030      0.304      -0.005       0.014\n",
4281
       "7311           0.0098      0.004      2.696      0.007       0.003       0.017\n",
4282
       "7529          -0.0035      0.005     -0.746      0.456      -0.013       0.006\n",
4283
       "8027           0.0026      0.004      0.731      0.465      -0.004       0.010\n",
4284
       "8038          -0.0044      0.001     -2.945      0.003      -0.007      -0.001\n",
4285
       "9146           0.0017      0.005      0.339      0.734      -0.008       0.011\n",
4286
       "10000          0.0011      0.002      0.525      0.600      -0.003       0.005\n",
4287
       "10252          0.0010      0.002      0.430      0.667      -0.004       0.006\n",
4288
       "10253         -0.0067      0.002     -2.954      0.003      -0.011      -0.002\n",
4289
       "10254         -0.0002      0.005     -0.035      0.972      -0.009       0.009\n",
4290
       "11140          0.0005      0.005      0.101      0.920      -0.010       0.011\n",
4291
       "23239          0.0006      0.002      0.252      0.801      -0.004       0.005\n",
4292
       "26018          0.0047      0.002      2.039      0.042       0.000       0.009\n",
4293
       "29924          0.0014      0.004      0.323      0.747      -0.007       0.010\n",
4294
       "30011         -0.0027      0.002     -1.093      0.275      -0.007       0.002\n",
4295
       "55824          0.0011      0.002      0.521      0.602      -0.003       0.005\n",
4296
       "57761         -0.0023      0.002     -1.336      0.182      -0.006       0.001\n",
4297
       "58513         -0.0008      0.005     -0.177      0.860      -0.010       0.008\n",
4298
       "64223          0.0029      0.005      0.623      0.534      -0.006       0.012\n",
4299
       "79109         -0.0021      0.004     -0.504      0.615      -0.010       0.006\n",
4300
       "84335         -0.0087      0.005     -1.845      0.066      -0.018       0.001\n",
4301
       "117145         0.0064      0.003      2.461      0.014       0.001       0.012\n",
4302
       "196883        -0.0042      0.003     -1.559      0.120      -0.010       0.001\n",
4303
       "253260         0.0034      0.003      1.329      0.185      -0.002       0.009\n",
4304
       "==============================================================================\n",
4305
       "Omnibus:                       43.558   Durbin-Watson:                   2.088\n",
4306
       "Prob(Omnibus):                  0.000   Jarque-Bera (JB):               91.380\n",
4307
       "Skew:                           0.522   Prob(JB):                     1.44e-20\n",
4308
       "Kurtosis:                       4.878   Cond. No.                         366.\n",
4309
       "==============================================================================\n",
4310
       "\n",
4311
       "Warnings:\n",
4312
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
4313
       "\"\"\""
4314
      ]
4315
     },
4316
     "execution_count": 28,
4317
     "metadata": {},
4318
     "output_type": "execute_result"
4319
    }
4320
   ],
4321
   "source": [
4322
    "X = LUADEEGFR\n",
4323
    "y = PredLUAD.detach().numpy()\n",
4324
    "\n",
4325
    "# Note the difference in argument order\n",
4326
    "model = sm.OLS(y, X).fit()\n",
4327
    "predictions = model.predict(X) # make the predictions by the model\n",
4328
    "\n",
4329
    "# Print out the statistics\n",
4330
    "model.summary()"
4331
   ]
4332
  },
4333
  {
4334
   "cell_type": "code",
4335
   "execution_count": 29,
4336
   "metadata": {},
4337
   "outputs": [
4338
    {
4339
     "name": "stdout",
4340
     "output_type": "stream",
4341
     "text": [
4342
      "(array([False, False, False, False, False, False, False, False, False,\n",
4343
      "       False, False, False, False, False, False, False, False, False,\n",
4344
      "       False, False, False, False,  True, False, False, False, False,\n",
4345
      "       False, False, False, False, False, False,  True, False, False,\n",
4346
      "       False, False, False, False, False, False, False, False, False,\n",
4347
      "       False, False, False, False, False, False, False, False, False,\n",
4348
      "       False, False, False, False, False,  True, False, False, False,\n",
4349
      "       False, False, False, False, False, False, False, False, False,\n",
4350
      "       False, False, False, False, False, False, False, False, False,\n",
4351
      "       False, False, False, False, False, False, False, False, False,\n",
4352
      "       False, False, False, False, False, False, False, False, False,\n",
4353
      "       False, False]), array([4.14015235e+01, 2.48985067e+01, 6.87655871e+01, 6.06407957e+01,\n",
4354
      "       6.88273009e+01, 4.45728983e+01, 1.00290905e+01, 2.44236844e+01,\n",
4355
      "       4.29890891e+01, 2.25237889e+01, 9.06980328e+01, 2.23338545e+01,\n",
4356
      "       8.98385247e+00, 9.83573120e+01, 3.16968699e+01, 2.12121420e-01,\n",
4357
      "       3.05570011e+00, 8.55424721e+01, 6.23448783e+01, 2.72870372e+01,\n",
4358
      "       8.98633410e+01, 6.60488342e+00, 4.04124456e-02, 5.61667950e+01,\n",
4359
      "       3.02544778e+01, 1.53460327e+01, 2.88183240e+01, 7.95045115e+01,\n",
4360
      "       3.17136654e-01, 2.10836580e+01, 2.13318505e+00, 1.89824840e+01,\n",
4361
      "       2.01032828e+01, 3.95510978e-05, 1.93330041e+00, 6.27577203e+01,\n",
4362
      "       7.31548083e+00, 1.15576665e+01, 3.10839496e+01, 1.59815035e+01,\n",
4363
      "       6.38765252e+01, 6.98100511e+01, 5.71721313e+01, 3.70384856e+00,\n",
4364
      "       2.09784933e+01, 2.47574544e+01, 4.20584879e+01, 1.76263456e+01,\n",
4365
      "       3.93798091e+00, 3.97200959e+01, 7.36691578e+01, 7.22146703e+01,\n",
4366
      "       9.23540721e+01, 8.66810315e+01, 2.73081316e+01, 8.43743365e+01,\n",
4367
      "       7.63168572e+01, 9.22877149e+01, 5.36144076e+01, 1.16774259e-02,\n",
4368
      "       4.63226333e+01, 8.11664996e+01, 2.43831571e+00, 3.94516497e+01,\n",
4369
      "       2.54481798e+01, 8.54272406e+00, 9.79257383e+01, 5.93442425e+01,\n",
4370
      "       3.02023351e-01, 9.59097960e+01, 1.69827761e+01, 6.77638632e+01,\n",
4371
      "       6.71173946e-01, 9.18373294e+01, 6.88643524e+01, 7.21909359e+01,\n",
4372
      "       7.11695781e-01, 3.06722593e+01, 7.40430399e-01, 4.60684526e+01,\n",
4373
      "       4.69928395e+01, 3.46394030e-01, 7.41824356e+01, 6.06084806e+01,\n",
4374
      "       6.73821895e+01, 3.37003178e-01, 9.81702523e+01, 9.29017562e+01,\n",
4375
      "       8.09271567e+01, 4.26041583e+00, 7.54604810e+01, 2.77902496e+01,\n",
4376
      "       6.08401844e+01, 1.84323634e+01, 8.68103068e+01, 5.38850645e+01,\n",
4377
      "       6.21034660e+01, 6.64966356e+00, 1.44584009e+00, 1.21155444e+01,\n",
4378
      "       1.86469613e+01]))\n"
4379
     ]
4380
    }
4381
   ],
4382
   "source": [
4383
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
4384
   ]
4385
  },
4386
  {
4387
   "cell_type": "code",
4388
   "execution_count": null,
4389
   "metadata": {},
4390
   "outputs": [],
4391
   "source": []
4392
  }
4393
 ],
4394
 "metadata": {
4395
  "kernelspec": {
4396
   "display_name": "Python 3",
4397
   "language": "python",
4398
   "name": "python3"
4399
  },
4400
  "language_info": {
4401
   "codemirror_mode": {
4402
    "name": "ipython",
4403
    "version": 3
4404
   },
4405
   "file_extension": ".py",
4406
   "mimetype": "text/x-python",
4407
   "name": "python",
4408
   "nbconvert_exporter": "python",
4409
   "pygments_lexer": "ipython3",
4410
   "version": "3.6.5"
4411
  }
4412
 },
4413
 "nbformat": 4,
4414
 "nbformat_minor": 2
4415
}