[d90d15]: / EGFR / EGFRv7.ipynb

Download this file

4416 lines (4415 with data), 252.5 kB

{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/sklearn/utils/validation.py:475: DataConversionWarning: Data with input dtype object was converted to float64 by StandardScaler.\n",
      "  warnings.warn(msg, DataConversionWarning)\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/sklearn/utils/validation.py:475: DataConversionWarning: Data with input dtype object was converted to float64 by StandardScaler.\n",
      "  warnings.warn(msg, DataConversionWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9440567436313729\n",
      "0.7222222222222222\n",
      "0.8\n"
     ]
    }
   ],
   "source": [
    "import torch \n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "import torch.optim as optim\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import pandas as pd\n",
    "import math\n",
    "import sklearn.preprocessing as sk\n",
    "import seaborn as sns\n",
    "from sklearn import metrics\n",
    "from sklearn.feature_selection import VarianceThreshold\n",
    "from sklearn.model_selection import train_test_split\n",
    "from utils import AllTripletSelector,HardestNegativeTripletSelector, RandomNegativeTripletSelector, SemihardNegativeTripletSelector # Strategies for selecting triplets within a minibatch\n",
    "from metrics import AverageNonzeroTripletsMetric\n",
    "from torch.utils.data.sampler import WeightedRandomSampler\n",
    "from sklearn.metrics import roc_auc_score\n",
    "from sklearn.metrics import average_precision_score\n",
    "import random\n",
    "from random import randint\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "save_results_to = '/home/hnoghabi/EGFR/'\n",
    "torch.manual_seed(42)\n",
    "random.seed(42)\n",
    "\n",
    "GDSCE = pd.read_csv(\"GDSC_exprs.z.EGFRi.tsv\", \n",
    "                    sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "GDSCE = pd.DataFrame.transpose(GDSCE)\n",
    "\n",
    "GDSCM = pd.read_csv(\"GDSC_mutations.EGFRi.tsv\", \n",
    "                    sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "GDSCM = pd.DataFrame.transpose(GDSCM)\n",
    "GDSCM = GDSCM.loc[:,~GDSCM.columns.duplicated()]\n",
    "\n",
    "GDSCC = pd.read_csv(\"GDSC_CNA.EGFRi.tsv\", \n",
    "                    sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "# drop_duplicates returns a new frame (not in-place); the bare call was a no-op\n",
    "GDSCC = GDSCC.drop_duplicates(keep='last')\n",
    "GDSCC = pd.DataFrame.transpose(GDSCC)\n",
    "GDSCC = GDSCC.loc[:,~GDSCC.columns.duplicated()]\n",
    "\n",
    "PDXEerlo = pd.read_csv(\"PDX_exprs.Erlotinib.eb_with.GDSC_exprs.Erlotinib.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "PDXEerlo = pd.DataFrame.transpose(PDXEerlo)\n",
    "PDXMerlo = pd.read_csv(\"PDX_mutations.Erlotinib.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "PDXMerlo = pd.DataFrame.transpose(PDXMerlo)\n",
    "PDXCerlo = pd.read_csv(\"PDX_CNA.Erlotinib.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "# assign the result: DataFrame.drop_duplicates is not in-place by default\n",
    "PDXCerlo = PDXCerlo.drop_duplicates(keep='last')\n",
    "PDXCerlo = pd.DataFrame.transpose(PDXCerlo)\n",
    "PDXCerlo = PDXCerlo.loc[:,~PDXCerlo.columns.duplicated()]\n",
    "\n",
    "PDXEcet = pd.read_csv(\"PDX_exprs.Cetuximab.eb_with.GDSC_exprs.Cetuximab.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "PDXEcet = pd.DataFrame.transpose(PDXEcet)\n",
    "PDXMcet = pd.read_csv(\"PDX_mutations.Cetuximab.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "PDXMcet = pd.DataFrame.transpose(PDXMcet)\n",
    "PDXCcet = pd.read_csv(\"PDX_CNA.Cetuximab.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "# assign the result: DataFrame.drop_duplicates is not in-place by default\n",
    "PDXCcet = PDXCcet.drop_duplicates(keep='last')\n",
    "PDXCcet = pd.DataFrame.transpose(PDXCcet)\n",
    "PDXCcet = PDXCcet.loc[:,~PDXCcet.columns.duplicated()]\n",
    "\n",
    "selector = VarianceThreshold(0.05)\n",
    "selector.fit_transform(GDSCE)\n",
    "GDSCE = GDSCE[GDSCE.columns[selector.get_support(indices=True)]]\n",
    "\n",
    "GDSCM = GDSCM.fillna(0)\n",
    "GDSCM[GDSCM != 0.0] = 1\n",
    "GDSCC = GDSCC.fillna(0)\n",
    "GDSCC[GDSCC != 0.0] = 1\n",
    "\n",
    "ls = GDSCE.columns.intersection(GDSCM.columns)\n",
    "ls = ls.intersection(GDSCC.columns)\n",
    "ls = ls.intersection(PDXEerlo.columns)\n",
    "ls = ls.intersection(PDXMerlo.columns)\n",
    "ls = ls.intersection(PDXCerlo.columns)\n",
    "ls = ls.intersection(PDXEcet.columns)\n",
    "ls = ls.intersection(PDXMcet.columns)\n",
    "ls = ls.intersection(PDXCcet.columns)\n",
    "ls2 = GDSCE.index.intersection(GDSCM.index)\n",
    "ls2 = ls2.intersection(GDSCC.index)\n",
    "ls3 = PDXEerlo.index.intersection(PDXMerlo.index)\n",
    "ls3 = ls3.intersection(PDXCerlo.index)\n",
    "ls4 = PDXEcet.index.intersection(PDXMcet.index)\n",
    "ls4 = ls4.intersection(PDXCcet.index)\n",
    "ls = pd.unique(ls)\n",
    "\n",
    "PDXEerlo = PDXEerlo.loc[ls3,ls]\n",
    "PDXMerlo = PDXMerlo.loc[ls3,ls]\n",
    "PDXCerlo = PDXCerlo.loc[ls3,ls]\n",
    "PDXEcet = PDXEcet.loc[ls4,ls]\n",
    "PDXMcet = PDXMcet.loc[ls4,ls]\n",
    "PDXCcet = PDXCcet.loc[ls4,ls]\n",
    "GDSCE = GDSCE.loc[:,ls]\n",
    "GDSCM = GDSCM.loc[:,ls]\n",
    "GDSCC = GDSCC.loc[:,ls]\n",
    "\n",
    "GDSCR = pd.read_csv(\"GDSC_response.EGFRi.tsv\", \n",
    "                    sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "\n",
    "GDSCR.rename(mapper = str, axis = 'index', inplace = True)\n",
    "\n",
    "d = {\"R\":0,\"S\":1}\n",
    "GDSCR[\"response\"] = GDSCR.loc[:,\"response\"].apply(lambda x: d[x])\n",
    "\n",
    "responses = GDSCR\n",
    "drugs = set(responses[\"drug\"].values)\n",
    "exprs_z = GDSCE\n",
    "cna = GDSCC\n",
    "mut = GDSCM\n",
    "expression_zscores = []\n",
    "CNA=[]\n",
    "mutations = []\n",
    "# Slice each omics matrix per drug and make sample names unique by suffixing\n",
    "# the drug, so the same cell line can appear once per drug after concat.\n",
    "for drug in drugs:\n",
    "    samples = responses.loc[responses[\"drug\"]==drug,:].index.values\n",
    "    e_z = exprs_z.loc[samples,:]\n",
    "    c = cna.loc[samples,:]\n",
    "    m = mut.loc[samples,:]\n",
    "    # next 3 rows if you want non-unique sample names\n",
    "    e_z.rename(lambda x : str(x)+\"_\"+drug, axis = \"index\", inplace=True)\n",
    "    c.rename(lambda x : str(x)+\"_\"+drug, axis = \"index\", inplace=True)\n",
    "    m.rename(lambda x : str(x)+\"_\"+drug, axis = \"index\", inplace=True)\n",
    "    expression_zscores.append(e_z)\n",
    "    CNA.append(c)\n",
    "    mutations.append(m)\n",
    "responses.index = responses.index.values +\"_\"+responses[\"drug\"].values\n",
    "GDSCEv2 = pd.concat(expression_zscores, axis =0 )\n",
    "GDSCCv2 = pd.concat(CNA, axis =0 )\n",
    "GDSCMv2 = pd.concat(mutations, axis =0 )\n",
    "GDSCRv2 = responses\n",
    "\n",
    "ls2 = GDSCEv2.index.intersection(GDSCMv2.index)\n",
    "ls2 = ls2.intersection(GDSCCv2.index)\n",
    "GDSCEv2 = GDSCEv2.loc[ls2,:]\n",
    "GDSCMv2 = GDSCMv2.loc[ls2,:]\n",
    "GDSCCv2 = GDSCCv2.loc[ls2,:]\n",
    "GDSCRv2 = GDSCRv2.loc[ls2,:]\n",
    "\n",
    "Y = GDSCRv2['response'].values\n",
    "\n",
    "PDXRcet = pd.read_csv(\"PDX_response.Cetuximab.tsv\", \n",
    "                       sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "PDXRcet.loc[PDXRcet.iloc[:,0] == 'R'] = 0\n",
    "PDXRcet.loc[PDXRcet.iloc[:,0] == 'S'] = 1\n",
    "PDXRcet = PDXRcet.loc[ls4,:]\n",
    "Ytscet = PDXRcet['response'].values    \n",
    "\n",
    "PDXRerlo = pd.read_csv(\"PDX_response.Erlotinib.tsv\", \n",
    "                       sep = \"\\t\", index_col=0, decimal = \",\")\n",
    "PDXRerlo.loc[PDXRerlo.iloc[:,0] == 'R'] = 0\n",
    "PDXRerlo.loc[PDXRerlo.iloc[:,0] == 'S'] = 1\n",
    "PDXRerlo = PDXRerlo.loc[ls3,:]\n",
    "Ytserlo = PDXRerlo['response'].values  \n",
    "\n",
    "hdm1 = 32\n",
    "hdm2 = 16\n",
    "hdm3 = 256\n",
    "rate1 = 0.5\n",
    "rate2 = 0.8\n",
    "rate3 = 0.5\n",
    "rate4 = 0.3\n",
    "\n",
    "scalerGDSC = sk.StandardScaler()\n",
    "scalerGDSC.fit(GDSCEv2.values)\n",
    "X_trainE = scalerGDSC.transform(GDSCEv2.values)\n",
    "X_testEerlo = scalerGDSC.transform(PDXEerlo.values)    \n",
    "X_testEcet = scalerGDSC.transform(PDXEcet.values)    \n",
    "\n",
    "X_trainM = np.nan_to_num(GDSCMv2.values)\n",
    "X_trainC = np.nan_to_num(GDSCCv2.values)\n",
    "X_testMerlo = np.nan_to_num(PDXMerlo.values)\n",
    "X_testCerlo = np.nan_to_num(PDXCerlo.values)\n",
    "X_testMcet = np.nan_to_num(PDXMcet.values)\n",
    "X_testCcet = np.nan_to_num(PDXCcet.values)\n",
    "\n",
    "TX_testEerlo = torch.FloatTensor(X_testEerlo)\n",
    "TX_testMerlo = torch.FloatTensor(X_testMerlo)\n",
    "TX_testCerlo = torch.FloatTensor(X_testCerlo)\n",
    "ty_testEerlo = torch.FloatTensor(Ytserlo.astype(int))\n",
    "\n",
    "TX_testEcet = torch.FloatTensor(X_testEcet)\n",
    "TX_testMcet = torch.FloatTensor(X_testMcet)\n",
    "TX_testCcet = torch.FloatTensor(X_testCcet)\n",
    "ty_testEcet = torch.FloatTensor(Ytscet.astype(int))\n",
    "\n",
    "n_sampE, IE_dim = X_trainE.shape\n",
    "n_sampM, IM_dim = X_trainM.shape\n",
    "n_sampC, IC_dim = X_trainC.shape\n",
    "\n",
    "h_dim1 = hdm1\n",
    "h_dim2 = hdm2\n",
    "h_dim3 = hdm3        \n",
    "Z_in = h_dim1 + h_dim2 + h_dim3\n",
    "\n",
    "costtr = []\n",
    "auctr = []\n",
    "costts = []\n",
    "aucts = []\n",
    "\n",
    "class AEE(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(AEE, self).__init__()\n",
    "        self.EnE = torch.nn.Sequential(\n",
    "            nn.Linear(IE_dim, h_dim1),\n",
    "            nn.BatchNorm1d(h_dim1),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(rate1))\n",
    "    def forward(self, x):\n",
    "        output = self.EnE(x)\n",
    "        return output\n",
    "\n",
    "class AEM(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(AEM, self).__init__()\n",
    "        self.EnM = torch.nn.Sequential(\n",
    "            nn.Linear(IM_dim, h_dim2),\n",
    "            nn.BatchNorm1d(h_dim2),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(rate2))\n",
    "    def forward(self, x):\n",
    "        output = self.EnM(x)\n",
    "        return output    \n",
    "\n",
    "\n",
    "class AEC(nn.Module):\n",
    "    # CNA encoder. Input width is the CNA feature count IC_dim (the original\n",
    "    # used IM_dim; identical here only because all omics share the gene list\n",
    "    # `ls`). torch.load unpickles without calling __init__, so this does not\n",
    "    # affect the weights of the loaded model.\n",
    "    def __init__(self):\n",
    "        super(AEC, self).__init__()\n",
    "        self.EnC = torch.nn.Sequential(\n",
    "            nn.Linear(IC_dim, h_dim3),\n",
    "            nn.BatchNorm1d(h_dim3),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(rate3))\n",
    "    def forward(self, x):\n",
    "        output = self.EnC(x)\n",
    "        return output       \n",
    "\n",
    "class Classifier(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(Classifier, self).__init__()\n",
    "        self.FC = torch.nn.Sequential(\n",
    "            nn.Linear(Z_in, 1),\n",
    "            nn.Dropout(rate4),\n",
    "            nn.Sigmoid())\n",
    "    def forward(self, x):\n",
    "        return self.FC(x)\n",
    "\n",
    "torch.cuda.manual_seed_all(42)\n",
    "\n",
    "AutoencoderE = torch.load('EGFRv2Exprs.pt')\n",
    "AutoencoderM = torch.load('EGFRv2Mut.pt')\n",
    "AutoencoderC = torch.load('EGFRv2CNA.pt')\n",
    "\n",
    "Clas = torch.load('EGFRv2Class.pt')\n",
    "\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "ZEX = AutoencoderE(torch.FloatTensor(X_trainE))\n",
    "ZMX = AutoencoderM(torch.FloatTensor(X_trainM))\n",
    "ZCX = AutoencoderC(torch.FloatTensor(X_trainC))\n",
    "ZTX = torch.cat((ZEX, ZMX, ZCX), 1)\n",
    "ZTX = F.normalize(ZTX, p=2, dim=0)\n",
    "PredX = Clas(ZTX)\n",
    "AUCt = roc_auc_score(Y, PredX.detach().numpy())\n",
    "print(AUCt)\n",
    "\n",
    "ZETerlo = AutoencoderE(TX_testEerlo)\n",
    "ZMTerlo = AutoencoderM(TX_testMerlo)\n",
    "ZCTerlo = AutoencoderC(TX_testCerlo)\n",
    "ZTTerlo = torch.cat((ZETerlo, ZMTerlo, ZCTerlo), 1)\n",
    "ZTTerlo = F.normalize(ZTTerlo, p=2, dim=0)\n",
    "PredTerlo = Clas(ZTTerlo)\n",
    "AUCterlo = roc_auc_score(Ytserlo, PredTerlo.detach().numpy())\n",
    "print(AUCterlo)\n",
    "\n",
    "ZETcet = AutoencoderE(TX_testEcet)\n",
    "ZMTcet = AutoencoderM(TX_testMcet)\n",
    "ZCTcet = AutoencoderC(TX_testCcet)\n",
    "ZTTcet = torch.cat((ZETcet, ZMTcet, ZCTcet), 1)\n",
    "ZTTcet = F.normalize(ZTTcet, p=2, dim=0)\n",
    "PredTcet = Clas(ZTTcet)\n",
    "AUCtcet = roc_auc_score(Ytscet, PredTcet.detach().numpy())\n",
    "print(AUCtcet)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(492, 13081)\n",
      "(492, 13081)\n",
      "(492, 13081)\n"
     ]
    }
   ],
   "source": [
    "PRADE = pd.read_csv(\"TCGA-PRAD_exprs.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "PRADE = pd.DataFrame.transpose(PRADE)\n",
    "\n",
    "PRADM = pd.read_csv(\"TCGA-PRAD_mutations.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "PRADM = pd.DataFrame.transpose(PRADM)\n",
    "PRADM = PRADM.loc[:,~PRADM.columns.duplicated()]\n",
    "\n",
    "PRADC = pd.read_csv(\"TCGA-PRAD_CNA.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "PRADC = pd.DataFrame.transpose(PRADC)\n",
    "PRADC = PRADC.loc[:,~PRADC.columns.duplicated()]\n",
    "\n",
    "PRADM = PRADM.fillna(0)\n",
    "PRADM[PRADM != 0.0] = 1\n",
    "PRADC = PRADC.fillna(0)\n",
    "PRADC[PRADC != 0.0] = 1\n",
    "\n",
    "#PRADE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
    "#PRADM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "#PRADC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "\n",
    "lsPRAD = PRADE.index.intersection(PRADM.index)\n",
    "lsPRAD = lsPRAD.intersection(PRADC.index)\n",
    "lsPRAD = pd.unique(lsPRAD)\n",
    "\n",
    "PRADE = PRADE.loc[lsPRAD,ls]\n",
    "PRADM = PRADM.loc[lsPRAD,ls]\n",
    "PRADC = PRADC.loc[lsPRAD,ls]\n",
    "\n",
    "print(PRADE.shape)\n",
    "print(PRADM.shape)\n",
    "print(PRADC.shape)\n",
    "\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "PRADE2 = np.nan_to_num(PRADE.values)\n",
    "PRADM2 = np.nan_to_num(PRADM.values)\n",
    "PRADC2 = np.nan_to_num(PRADC.values)\n",
    "\n",
    "NPRADE2 = scalerGDSC.transform(PRADE2)    \n",
    "\n",
    "PRADexprs = torch.FloatTensor(NPRADE2)\n",
    "PRADmut = torch.FloatTensor(PRADM2)\n",
    "PRADcna = torch.FloatTensor(PRADC2)\n",
    "\n",
    "PRADZE = AutoencoderE(PRADexprs)\n",
    "PRADZM = AutoencoderM(PRADmut)\n",
    "PRADZC = AutoencoderC(PRADcna)\n",
    "\n",
    "PRADZT = torch.cat((PRADZE, PRADZM, PRADZC), 1)\n",
    "PRADZTX = F.normalize(PRADZT, p=2, dim=0)\n",
    "PredPRAD = Clas(PRADZTX)\n",
    "\n",
    "#print(PredPRAD.detach().numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(161, 13081)\n",
      "(161, 13081)\n",
      "(161, 13081)\n"
     ]
    }
   ],
   "source": [
    "KIRPE = pd.read_csv(\"TCGA-KIRP_exprs.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "KIRPE = pd.DataFrame.transpose(KIRPE)\n",
    "\n",
    "KIRPM = pd.read_csv(\"TCGA-KIRP_mutations.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "KIRPM = pd.DataFrame.transpose(KIRPM)\n",
    "KIRPM = KIRPM.loc[:,~KIRPM.columns.duplicated()]\n",
    "\n",
    "KIRPC = pd.read_csv(\"TCGA-KIRP_CNA.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "KIRPC = pd.DataFrame.transpose(KIRPC)\n",
    "KIRPC = KIRPC.loc[:,~KIRPC.columns.duplicated()]\n",
    "\n",
    "KIRPM = KIRPM.fillna(0)\n",
    "KIRPM[KIRPM != 0.0] = 1\n",
    "KIRPC = KIRPC.fillna(0)\n",
    "KIRPC[KIRPC != 0.0] = 1\n",
    "\n",
    "#KIRPE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
    "#KIRPM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "#KIRPC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "\n",
    "lsKIRP = KIRPE.index.intersection(KIRPM.index)\n",
    "lsKIRP = lsKIRP.intersection(KIRPC.index)\n",
    "lsKIRP = pd.unique(lsKIRP)\n",
    "\n",
    "KIRPE = KIRPE.loc[lsKIRP,ls]\n",
    "KIRPM = KIRPM.loc[lsKIRP,ls]\n",
    "KIRPC = KIRPC.loc[lsKIRP,ls]\n",
    "\n",
    "print(KIRPE.shape)\n",
    "print(KIRPM.shape)\n",
    "print(KIRPC.shape)\n",
    "\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "KIRPE2 = np.nan_to_num(KIRPE.values)\n",
    "KIRPM2 = np.nan_to_num(KIRPM.values)\n",
    "KIRPC2 = np.nan_to_num(KIRPC.values)\n",
    "\n",
    "NKIRPE2 = scalerGDSC.transform(KIRPE2)    \n",
    "\n",
    "KIRPexprs = torch.FloatTensor(NKIRPE2)\n",
    "KIRPmut = torch.FloatTensor(KIRPM2)\n",
    "KIRPcna = torch.FloatTensor(KIRPC2)\n",
    "\n",
    "KIRPZE = AutoencoderE(KIRPexprs)\n",
    "KIRPZM = AutoencoderM(KIRPmut)\n",
    "KIRPZC = AutoencoderC(KIRPcna)\n",
    "\n",
    "KIRPZT = torch.cat((KIRPZE, KIRPZM, KIRPZC), 1)\n",
    "KIRPZTX = F.normalize(KIRPZT, p=2, dim=0)\n",
    "PredKIRP = Clas(KIRPZTX)\n",
    "\n",
    "#print(PredKIRP.detach().numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(123, 13081)\n",
      "(123, 13081)\n",
      "(123, 13081)\n"
     ]
    }
   ],
   "source": [
    "BLCAE = pd.read_csv(\"TCGA-BLCA_exprs.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "BLCAE = pd.DataFrame.transpose(BLCAE)\n",
    "\n",
    "BLCAM = pd.read_csv(\"TCGA-BLCA_mutations.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "BLCAM = pd.DataFrame.transpose(BLCAM)\n",
    "BLCAM = BLCAM.loc[:,~BLCAM.columns.duplicated()]\n",
    "\n",
    "BLCAC = pd.read_csv(\"TCGA-BLCA_CNA.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "BLCAC = pd.DataFrame.transpose(BLCAC)\n",
    "BLCAC = BLCAC.loc[:,~BLCAC.columns.duplicated()]\n",
    "\n",
    "BLCAM = BLCAM.fillna(0)\n",
    "BLCAM[BLCAM != 0.0] = 1\n",
    "BLCAC = BLCAC.fillna(0)\n",
    "BLCAC[BLCAC != 0.0] = 1\n",
    "\n",
    "#BLCAE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
    "#BLCAM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "#BLCAC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "\n",
    "lsBLCA = BLCAE.index.intersection(BLCAM.index)\n",
    "lsBLCA = lsBLCA.intersection(BLCAC.index)\n",
    "lsBLCA = pd.unique(lsBLCA)\n",
    "\n",
    "BLCAE = BLCAE.loc[lsBLCA,ls]\n",
    "BLCAM = BLCAM.loc[lsBLCA,ls]\n",
    "BLCAC = BLCAC.loc[lsBLCA,ls]\n",
    "\n",
    "print(BLCAE.shape)\n",
    "print(BLCAM.shape)\n",
    "print(BLCAC.shape)\n",
    "\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "BLCAE2 = np.nan_to_num(BLCAE.values)\n",
    "BLCAM2 = np.nan_to_num(BLCAM.values)\n",
    "BLCAC2 = np.nan_to_num(BLCAC.values)\n",
    "\n",
    "NBLCAE2 = scalerGDSC.transform(BLCAE2)    \n",
    "\n",
    "BLCAexprs = torch.FloatTensor(NBLCAE2)\n",
    "BLCAmut = torch.FloatTensor(BLCAM2)\n",
    "BLCAcna = torch.FloatTensor(BLCAC2)\n",
    "\n",
    "BLCAZE = AutoencoderE(BLCAexprs)\n",
    "BLCAZM = AutoencoderM(BLCAmut)\n",
    "BLCAZC = AutoencoderC(BLCAcna)\n",
    "\n",
    "BLCAZT = torch.cat((BLCAZE, BLCAZM, BLCAZC), 1)\n",
    "BLCAZTX = F.normalize(BLCAZT, p=2, dim=0)\n",
    "PredBLCA = Clas(BLCAZTX)\n",
    "\n",
    "#print(PredBLCA.detach().numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(921, 13081)\n",
      "(921, 13081)\n",
      "(921, 13081)\n"
     ]
    }
   ],
   "source": [
    "BRCAE = pd.read_csv(\"TCGA-BRCA_exprs.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "BRCAE = pd.DataFrame.transpose(BRCAE)\n",
    "\n",
    "BRCAM = pd.read_csv(\"TCGA-BRCA_mutations.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "BRCAM = pd.DataFrame.transpose(BRCAM)\n",
    "BRCAM = BRCAM.loc[:,~BRCAM.columns.duplicated()]\n",
    "\n",
    "BRCAC = pd.read_csv(\"TCGA-BRCA_CNA.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "BRCAC = pd.DataFrame.transpose(BRCAC)\n",
    "BRCAC = BRCAC.loc[:,~BRCAC.columns.duplicated()]\n",
    "\n",
    "BRCAM = BRCAM.fillna(0)\n",
    "BRCAM[BRCAM != 0.0] = 1\n",
    "BRCAC = BRCAC.fillna(0)\n",
    "BRCAC[BRCAC != 0.0] = 1\n",
    "\n",
    "#BRCAE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
    "#BRCAM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "#BRCAC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "\n",
    "lsBRCA = BRCAE.index.intersection(BRCAM.index)\n",
    "lsBRCA = lsBRCA.intersection(BRCAC.index)\n",
    "lsBRCA = pd.unique(lsBRCA)\n",
    "\n",
    "BRCAE = BRCAE.loc[lsBRCA,ls]\n",
    "BRCAM = BRCAM.loc[lsBRCA,ls]\n",
    "BRCAC = BRCAC.loc[lsBRCA,ls]\n",
    "\n",
    "print(BRCAE.shape)\n",
    "print(BRCAM.shape)\n",
    "print(BRCAC.shape)\n",
    "\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "BRCAE2 = np.nan_to_num(BRCAE.values)\n",
    "BRCAM2 = np.nan_to_num(BRCAM.values)\n",
    "BRCAC2 = np.nan_to_num(BRCAC.values)\n",
    "\n",
    "NBRCAE2 = scalerGDSC.transform(BRCAE2)    \n",
    "\n",
    "BRCAexprs = torch.FloatTensor(NBRCAE2)\n",
    "BRCAmut = torch.FloatTensor(BRCAM2)\n",
    "BRCAcna = torch.FloatTensor(BRCAC2)\n",
    "\n",
    "BRCAZE = AutoencoderE(BRCAexprs)\n",
    "BRCAZM = AutoencoderM(BRCAmut)\n",
    "BRCAZC = AutoencoderC(BRCAcna)\n",
    "\n",
    "BRCAZT = torch.cat((BRCAZE, BRCAZM, BRCAZC), 1)\n",
    "BRCAZTX = F.normalize(BRCAZT, p=2, dim=0)\n",
    "PredBRCA = Clas(BRCAZTX)\n",
    "\n",
    "#print(PredBRCA.detach().numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(130, 13081)\n",
      "(130, 13081)\n",
      "(130, 13081)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
     ]
    }
   ],
   "source": [
    "PAADE = pd.read_csv(\"TCGA-PAAD_exprs.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "PAADE = pd.DataFrame.transpose(PAADE)\n",
    "\n",
    "PAADM = pd.read_csv(\"TCGA-PAAD_mutations.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "PAADM = pd.DataFrame.transpose(PAADM)\n",
    "PAADM = PAADM.loc[:,~PAADM.columns.duplicated()]\n",
    "\n",
    "PAADC = pd.read_csv(\"TCGA-PAAD_CNA.tsv\", \n",
    "                   sep = \"\\t\", index_col=0, decimal = \".\")\n",
    "PAADC = pd.DataFrame.transpose(PAADC)\n",
    "PAADC = PAADC.loc[:,~PAADC.columns.duplicated()]\n",
    "\n",
    "PAADM = PAADM.fillna(0)\n",
    "PAADM[PAADM != 0.0] = 1\n",
    "PAADC = PAADC.fillna(0)\n",
    "PAADC[PAADC != 0.0] = 1\n",
    "\n",
    "#PAADE.rename(lambda x : x[0:11], axis = \"index\", inplace=True)  \n",
    "#PAADM.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "#PAADC.rename(lambda x : x[0:11], axis = \"index\", inplace=True)   \n",
    "\n",
    "lsPAAD = PAADE.index.intersection(PAADM.index)\n",
    "lsPAAD = lsPAAD.intersection(PAADC.index)\n",
    "lsPAAD = pd.unique(lsPAAD)\n",
    "\n",
    "PAADE = PAADE.loc[lsPAAD,ls]\n",
    "PAADM = PAADM.loc[lsPAAD,ls]\n",
    "PAADC = PAADC.loc[lsPAAD,ls]\n",
    "\n",
    "print(PAADE.shape)\n",
    "print(PAADM.shape)\n",
    "print(PAADC.shape)\n",
    "\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "PAADE2 = np.nan_to_num(PAADE.values)\n",
    "PAADM2 = np.nan_to_num(PAADM.values)\n",
    "PAADC2 = np.nan_to_num(PAADC.values)\n",
    "\n",
    "NPAADE2 = scalerGDSC.transform(PAADE2)    \n",
    "\n",
    "PAADexprs = torch.FloatTensor(NPAADE2)\n",
    "PAADmut = torch.FloatTensor(PAADM2)\n",
    "PAADcna = torch.FloatTensor(PAADC2)\n",
    "\n",
    "PAADZE = AutoencoderE(PAADexprs)\n",
    "PAADZM = AutoencoderM(PAADmut)\n",
    "PAADZC = AutoencoderC(PAADcna)\n",
    "\n",
    "PAADZT = torch.cat((PAADZE, PAADZM, PAADZC), 1)\n",
    "PAADZTX = F.normalize(PAADZT, p=2, dim=0)\n",
    "PredPAAD = Clas(PAADZTX)\n",
    "\n",
    "#print(PredPAAD.detach().numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:28: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:29: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n",
      "/home/hnoghabi/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:30: FutureWarning: \n",
      "Passing list-likes to .loc or [] with any missing label will raise\n",
      "KeyError in the future, you can use .reindex() as an alternative.\n",
      "\n",
      "See the documentation here:\n",
      "https://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate-loc-reindex-listlike\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(475, 13081)\n",
      "(475, 13081)\n",
      "(475, 13081)\n"
     ]
    }
   ],
   "source": [
    "# Load TCGA-LUAD expression, mutation and CNA tables (genes in rows on disk),\n",
    "# transpose to samples x genes, and drop duplicated gene columns.\n",
    "LUADE = pd.read_csv(\"TCGA-LUAD_exprs.tsv\", sep=\"\\t\", index_col=0, decimal=\".\").T\n",
    "\n",
    "LUADM = pd.read_csv(\"TCGA-LUAD_mutations.tsv\", sep=\"\\t\", index_col=0, decimal=\".\").T\n",
    "LUADM = LUADM.loc[:, ~LUADM.columns.duplicated()]\n",
    "\n",
    "LUADC = pd.read_csv(\"TCGA-LUAD_CNA.tsv\", sep=\"\\t\", index_col=0, decimal=\".\").T\n",
    "LUADC = LUADC.loc[:, ~LUADC.columns.duplicated()]\n",
    "\n",
    "# Binarize mutation and CNA calls: missing -> 0, any non-zero entry -> 1.\n",
    "LUADM = LUADM.fillna(0)\n",
    "LUADM[LUADM != 0.0] = 1\n",
    "LUADC = LUADC.fillna(0)\n",
    "LUADC[LUADC != 0.0] = 1\n",
    "\n",
    "# Keep only samples present in all three modalities, restricted to gene list `ls`.\n",
    "lsLUAD = pd.unique(LUADE.index.intersection(LUADM.index).intersection(LUADC.index))\n",
    "\n",
    "LUADE = LUADE.loc[lsLUAD, ls]\n",
    "LUADM = LUADM.loc[lsLUAD, ls]\n",
    "LUADC = LUADC.loc[lsLUAD, ls]\n",
    "\n",
    "print(LUADE.shape)\n",
    "print(LUADM.shape)\n",
    "print(LUADC.shape)\n",
    "\n",
    "# Switch the trained networks to evaluation (inference) mode.\n",
    "AutoencoderE.eval()\n",
    "AutoencoderM.eval()\n",
    "AutoencoderC.eval()\n",
    "Clas.eval()\n",
    "\n",
    "LUADE2 = np.nan_to_num(LUADE.values)\n",
    "LUADM2 = np.nan_to_num(LUADM.values)\n",
    "LUADC2 = np.nan_to_num(LUADC.values)\n",
    "\n",
    "# Expression is standardized with the scaler fitted on the GDSC training data.\n",
    "NLUADE2 = scalerGDSC.transform(LUADE2)\n",
    "\n",
    "LUADexprs = torch.FloatTensor(NLUADE2)\n",
    "LUADmut = torch.FloatTensor(LUADM2)\n",
    "LUADcna = torch.FloatTensor(LUADC2)\n",
    "\n",
    "# Encode each modality, concatenate the latent codes, normalize, classify.\n",
    "LUADZE = AutoencoderE(LUADexprs)\n",
    "LUADZM = AutoencoderM(LUADmut)\n",
    "LUADZC = AutoencoderC(LUADcna)\n",
    "\n",
    "LUADZT = torch.cat((LUADZE, LUADZM, LUADZC), 1)\n",
    "LUADZTX = F.normalize(LUADZT, p=2, dim=0)\n",
    "PredLUAD = Clas(LUADZTX)\n",
    "\n",
    "#print(PredLUAD.detach().numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Statistical helpers for the downstream EGFR association analysis.\n",
    "from scipy.stats import pearsonr  # public API path; scipy.stats.stats is a deprecated alias\n",
    "from scipy.stats import spearmanr\n",
    "import statsmodels.api as sm\n",
    "from mne.stats import bonferroni_correction\n",
    "\n",
    "# Entrez gene IDs of the EGFR-pathway gene set used to subset the omics matrices.\n",
    "lsEGFR = [10000, 102, 10252, 10253,10254,1026,1027,107,108,109,111,11140,112,113,114,1147,115,117145,1173,1175,1211,1213,1385,1445,156,160,161,163,1950,1956,196883,2060,207,208,2308,2309,23239,2475,253260,2549,26018,2885,2931,29924,30011,3164,3265,3320,3709,3710,3845,4193,4303,4893,5136,5153,5170,5290,5295,5335,5566,5567,5568,5573,5575,5576,5577,5578,5580,5581,5582,55824,5594,5595,5604,5605,572,5728,57761,58513,5894,6199,6233,64223,6456,6464,6654,6714,6868,7249,728590,729120,730418,7311,731292,7529,79109,801,8027,8038,805,808,814,842,84335,867,9146,983,998]\n",
    "\n",
    "# Alternative EGFR gene set (kept for reference):\n",
    "#lsEGFR = [10000,1026,1027,10298,10718,1398,1399,145957,1839,1950,1956,1978,2002,2064,2065,2066,2069,207,208,23533,23642,2475,25,2549,25759,27,2885,2932,3084,3265,369,3725,374,3845,399694,4609,4690,4893,5058,5062,5063,5290,5291,5293,5294,5295,5296,5335,53358,5336,5578,5579,5582,5594,5595,5599,5601,5602,5604,5605,5609,56924,57144,572,5747,5894,6198,6199,6416,6464,6654,6655,6714,673,6776,6777,685,7039,815,816,817,818,8440,8503,867,868,9542]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Restrict the PRAD omics matrices to the EGFR gene set (only IDs present in the data).\n",
    "listEGFR = PRADE.columns.intersection(lsEGFR)\n",
    "PRADEEGFR = PRADE.loc[:, listEGFR]\n",
    "PRADMEGFR = PRADM.loc[:, listEGFR]\n",
    "PRADCEGFR = PRADC.loc[:, listEGFR]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table class=\"simpletable\">\n",
       "<caption>OLS Regression Results</caption>\n",
       "<tr>\n",
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.999</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.999</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   3331.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th>  <td>  0.00</td> \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Time:</th>                 <td>18:16:35</td>     <th>  Log-Likelihood:    </th> <td>  1340.8</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>No. Observations:</th>      <td>   492</td>      <th>  AIC:               </th> <td>  -2480.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Residuals:</th>          <td>   391</td>      <th>  BIC:               </th> <td>  -2056.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>102</th>    <td>    0.0038</td> <td>    0.003</td> <td>    1.171</td> <td> 0.242</td> <td>   -0.003</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>107</th>    <td>   -0.0015</td> <td>    0.001</td> <td>   -0.982</td> <td> 0.327</td> <td>   -0.004</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>108</th>    <td>   -0.0006</td> <td>    0.002</td> <td>   -0.377</td> <td> 0.706</td> <td>   -0.004</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>109</th>    <td>   -0.0021</td> <td>    0.004</td> <td>   -0.585</td> <td> 0.559</td> <td>   -0.009</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>111</th>    <td>   -0.0038</td> <td>    0.003</td> <td>   -1.432</td> <td> 0.153</td> <td>   -0.009</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>112</th>    <td>   -0.0092</td> <td>    0.004</td> <td>   -2.553</td> <td> 0.011</td> <td>   -0.016</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>113</th>    <td>    0.0046</td> <td>    0.003</td> <td>    1.325</td> <td> 0.186</td> <td>   -0.002</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>114</th>    <td>    0.0184</td> <td>    0.021</td> <td>    0.868</td> <td> 0.386</td> <td>   -0.023</td> <td>    0.060</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>115</th>    <td>   -0.0002</td> <td>    0.004</td> <td>   -0.047</td> <td> 0.963</td> <td>   -0.007</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>160</th>    <td>    0.0020</td> <td>    0.004</td> <td>    0.564</td> <td> 0.573</td> <td>   -0.005</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>161</th>    <td>    0.0081</td> <td>    0.006</td> <td>    1.424</td> <td> 0.155</td> <td>   -0.003</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>163</th>    <td>    0.0006</td> <td>    0.004</td> <td>    0.142</td> <td> 0.887</td> <td>   -0.008</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>207</th>    <td>    0.0157</td> <td>    0.004</td> <td>    3.698</td> <td> 0.000</td> <td>    0.007</td> <td>    0.024</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>208</th>    <td>    0.0044</td> <td>    0.006</td> <td>    0.792</td> <td> 0.429</td> <td>   -0.007</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>572</th>    <td>    0.0089</td> <td>    0.004</td> <td>    1.975</td> <td> 0.049</td> <td> 3.82e-05</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>801</th>    <td>   -0.0057</td> <td>    0.003</td> <td>   -1.772</td> <td> 0.077</td> <td>   -0.012</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>805</th>    <td>    0.0053</td> <td>    0.004</td> <td>    1.280</td> <td> 0.201</td> <td>   -0.003</td> <td>    0.013</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>808</th>    <td>    0.0055</td> <td>    0.005</td> <td>    1.141</td> <td> 0.255</td> <td>   -0.004</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>814</th>    <td>   -0.0012</td> <td>    0.003</td> <td>   -0.477</td> <td> 0.633</td> <td>   -0.006</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>842</th>    <td>   -0.0072</td> <td>    0.004</td> <td>   -1.804</td> <td> 0.072</td> <td>   -0.015</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>867</th>    <td>    0.0034</td> <td>    0.005</td> <td>    0.666</td> <td> 0.506</td> <td>   -0.007</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>983</th>    <td>    0.0062</td> <td>    0.002</td> <td>    3.919</td> <td> 0.000</td> <td>    0.003</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>998</th>    <td>   -0.0097</td> <td>    0.005</td> <td>   -1.854</td> <td> 0.064</td> <td>   -0.020</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1026</th>   <td>    0.0025</td> <td>    0.001</td> <td>    1.717</td> <td> 0.087</td> <td>   -0.000</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1027</th>   <td>   -0.0033</td> <td>    0.003</td> <td>   -1.224</td> <td> 0.222</td> <td>   -0.009</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1147</th>   <td>   -0.0025</td> <td>    0.004</td> <td>   -0.635</td> <td> 0.526</td> <td>   -0.010</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1173</th>   <td>    0.0166</td> <td>    0.005</td> <td>    3.472</td> <td> 0.001</td> <td>    0.007</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1175</th>   <td>   -0.0029</td> <td>    0.003</td> <td>   -0.927</td> <td> 0.355</td> <td>   -0.009</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1211</th>   <td>    0.0137</td> <td>    0.004</td> <td>    3.271</td> <td> 0.001</td> <td>    0.005</td> <td>    0.022</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1213</th>   <td>   -0.0176</td> <td>    0.005</td> <td>   -3.238</td> <td> 0.001</td> <td>   -0.028</td> <td>   -0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1385</th>   <td>   -0.0272</td> <td>    0.006</td> <td>   -4.807</td> <td> 0.000</td> <td>   -0.038</td> <td>   -0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1445</th>   <td>   -0.0190</td> <td>    0.005</td> <td>   -3.692</td> <td> 0.000</td> <td>   -0.029</td> <td>   -0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1950</th>   <td>    0.0015</td> <td>    0.001</td> <td>    1.262</td> <td> 0.208</td> <td>   -0.001</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1956</th>   <td>    0.0091</td> <td>    0.003</td> <td>    3.223</td> <td> 0.001</td> <td>    0.004</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2060</th>   <td>   -0.0084</td> <td>    0.005</td> <td>   -1.800</td> <td> 0.073</td> <td>   -0.018</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2308</th>   <td>   -0.0026</td> <td>    0.004</td> <td>   -0.730</td> <td> 0.466</td> <td>   -0.010</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2309</th>   <td>    0.0015</td> <td>    0.002</td> <td>    0.613</td> <td> 0.540</td> <td>   -0.003</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2475</th>   <td>    0.0009</td> <td>    0.003</td> <td>    0.326</td> <td> 0.744</td> <td>   -0.005</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2549</th>   <td>    0.0126</td> <td>    0.004</td> <td>    2.997</td> <td> 0.003</td> <td>    0.004</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2885</th>   <td>    0.0143</td> <td>    0.006</td> <td>    2.293</td> <td> 0.022</td> <td>    0.002</td> <td>    0.027</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2931</th>   <td>    0.0105</td> <td>    0.003</td> <td>    3.025</td> <td> 0.003</td> <td>    0.004</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3164</th>   <td>    0.0011</td> <td>    0.001</td> <td>    0.984</td> <td> 0.325</td> <td>   -0.001</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3265</th>   <td>    0.0007</td> <td>    0.004</td> <td>    0.162</td> <td> 0.872</td> <td>   -0.008</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3320</th>   <td>    0.0039</td> <td>    0.003</td> <td>    1.407</td> <td> 0.160</td> <td>   -0.002</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3709</th>   <td>   -0.0004</td> <td>    0.002</td> <td>   -0.168</td> <td> 0.867</td> <td>   -0.005</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3710</th>   <td>    0.0032</td> <td>    0.002</td> <td>    1.278</td> <td> 0.202</td> <td>   -0.002</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3845</th>   <td>    0.0043</td> <td>    0.003</td> <td>    1.285</td> <td> 0.200</td> <td>   -0.002</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4193</th>   <td>    0.0049</td> <td>    0.003</td> <td>    1.479</td> <td> 0.140</td> <td>   -0.002</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4303</th>   <td>   -0.0028</td> <td>    0.004</td> <td>   -0.781</td> <td> 0.435</td> <td>   -0.010</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4893</th>   <td>   -0.0048</td> <td>    0.003</td> <td>   -1.744</td> <td> 0.082</td> <td>   -0.010</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5136</th>   <td>    0.0006</td> <td>    0.002</td> <td>    0.346</td> <td> 0.729</td> <td>   -0.003</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5170</th>   <td>    0.0062</td> <td>    0.005</td> <td>    1.193</td> <td> 0.234</td> <td>   -0.004</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5290</th>   <td>    0.0012</td> <td>    0.004</td> <td>    0.276</td> <td> 0.782</td> <td>   -0.007</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5295</th>   <td>   -0.0067</td> <td>    0.003</td> <td>   -2.376</td> <td> 0.018</td> <td>   -0.012</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5335</th>   <td>    0.0026</td> <td>    0.004</td> <td>    0.607</td> <td> 0.544</td> <td>   -0.006</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5566</th>   <td>   -0.0074</td> <td>    0.005</td> <td>   -1.360</td> <td> 0.175</td> <td>   -0.018</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5567</th>   <td>   -0.0004</td> <td>    0.002</td> <td>   -0.188</td> <td> 0.851</td> <td>   -0.004</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5573</th>   <td>   -0.0023</td> <td>    0.005</td> <td>   -0.451</td> <td> 0.652</td> <td>   -0.012</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5575</th>   <td>    0.0043</td> <td>    0.002</td> <td>    1.820</td> <td> 0.070</td> <td>   -0.000</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5576</th>   <td>   -0.0015</td> <td>    0.004</td> <td>   -0.396</td> <td> 0.692</td> <td>   -0.009</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5577</th>   <td>    0.0034</td> <td>    0.002</td> <td>    1.822</td> <td> 0.069</td> <td>   -0.000</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5578</th>   <td>    0.0009</td> <td>    0.002</td> <td>    0.418</td> <td> 0.676</td> <td>   -0.003</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5580</th>   <td>   -0.0008</td> <td>    0.003</td> <td>   -0.276</td> <td> 0.783</td> <td>   -0.007</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5581</th>   <td>   -0.0089</td> <td>    0.005</td> <td>   -1.857</td> <td> 0.064</td> <td>   -0.018</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5582</th>   <td>    0.0085</td> <td>    0.017</td> <td>    0.491</td> <td> 0.624</td> <td>   -0.026</td> <td>    0.043</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5594</th>   <td>    0.0003</td> <td>    0.006</td> <td>    0.055</td> <td> 0.956</td> <td>   -0.012</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5595</th>   <td>    0.0037</td> <td>    0.004</td> <td>    0.935</td> <td> 0.350</td> <td>   -0.004</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5604</th>   <td>   -0.0115</td> <td>    0.004</td> <td>   -3.119</td> <td> 0.002</td> <td>   -0.019</td> <td>   -0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5605</th>   <td>    0.0087</td> <td>    0.005</td> <td>    1.745</td> <td> 0.082</td> <td>   -0.001</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5728</th>   <td>-2.957e-05</td> <td>    0.002</td> <td>   -0.016</td> <td> 0.987</td> <td>   -0.004</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5894</th>   <td>    0.0119</td> <td>    0.006</td> <td>    2.114</td> <td> 0.035</td> <td>    0.001</td> <td>    0.023</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6199</th>   <td>   -0.0020</td> <td>    0.004</td> <td>   -0.486</td> <td> 0.627</td> <td>   -0.010</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6456</th>   <td>   -0.0035</td> <td>    0.001</td> <td>   -2.815</td> <td> 0.005</td> <td>   -0.006</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6464</th>   <td>    0.0086</td> <td>    0.005</td> <td>    1.901</td> <td> 0.058</td> <td>   -0.000</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6654</th>   <td>   -0.0153</td> <td>    0.006</td> <td>   -2.782</td> <td> 0.006</td> <td>   -0.026</td> <td>   -0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6714</th>   <td>    0.0003</td> <td>    0.003</td> <td>    0.118</td> <td> 0.906</td> <td>   -0.005</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6868</th>   <td>   -0.0008</td> <td>    0.005</td> <td>   -0.152</td> <td> 0.880</td> <td>   -0.011</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7249</th>   <td>    0.0062</td> <td>    0.006</td> <td>    1.129</td> <td> 0.259</td> <td>   -0.005</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7311</th>   <td>    0.0123</td> <td>    0.004</td> <td>    2.798</td> <td> 0.005</td> <td>    0.004</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7529</th>   <td>    0.0052</td> <td>    0.005</td> <td>    1.055</td> <td> 0.292</td> <td>   -0.004</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8027</th>   <td>    0.0059</td> <td>    0.005</td> <td>    1.259</td> <td> 0.209</td> <td>   -0.003</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8038</th>   <td>   -0.0037</td> <td>    0.003</td> <td>   -1.379</td> <td> 0.169</td> <td>   -0.009</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>9146</th>   <td>   -0.0085</td> <td>    0.006</td> <td>   -1.370</td> <td> 0.172</td> <td>   -0.021</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10000</th>  <td>   -0.0011</td> <td>    0.003</td> <td>   -0.442</td> <td> 0.659</td> <td>   -0.006</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10252</th>  <td>    0.0025</td> <td>    0.002</td> <td>    1.088</td> <td> 0.277</td> <td>   -0.002</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10253</th>  <td>    0.0016</td> <td>    0.003</td> <td>    0.595</td> <td> 0.552</td> <td>   -0.004</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10254</th>  <td>    0.0036</td> <td>    0.004</td> <td>    0.861</td> <td> 0.390</td> <td>   -0.005</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>11140</th>  <td>    0.0068</td> <td>    0.006</td> <td>    1.064</td> <td> 0.288</td> <td>   -0.006</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>23239</th>  <td>    0.0050</td> <td>    0.003</td> <td>    1.664</td> <td> 0.097</td> <td>   -0.001</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>26018</th>  <td>    0.0061</td> <td>    0.003</td> <td>    2.397</td> <td> 0.017</td> <td>    0.001</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>29924</th>  <td>   -0.0105</td> <td>    0.005</td> <td>   -2.092</td> <td> 0.037</td> <td>   -0.020</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>30011</th>  <td>   -0.0037</td> <td>    0.003</td> <td>   -1.088</td> <td> 0.277</td> <td>   -0.010</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>55824</th>  <td>    0.0007</td> <td>    0.003</td> <td>    0.277</td> <td> 0.782</td> <td>   -0.004</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>57761</th>  <td>   -0.0018</td> <td>    0.002</td> <td>   -0.844</td> <td> 0.399</td> <td>   -0.006</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>58513</th>  <td>   -0.0083</td> <td>    0.007</td> <td>   -1.252</td> <td> 0.211</td> <td>   -0.021</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>64223</th>  <td>   -0.0173</td> <td>    0.006</td> <td>   -3.144</td> <td> 0.002</td> <td>   -0.028</td> <td>   -0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>79109</th>  <td>    0.0100</td> <td>    0.006</td> <td>    1.691</td> <td> 0.092</td> <td>   -0.002</td> <td>    0.022</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>84335</th>  <td>   -0.0074</td> <td>    0.005</td> <td>   -1.453</td> <td> 0.147</td> <td>   -0.018</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>117145</th> <td>    0.0006</td> <td>    0.003</td> <td>    0.199</td> <td> 0.843</td> <td>   -0.006</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>196883</th> <td>   -0.0055</td> <td>    0.003</td> <td>   -1.924</td> <td> 0.055</td> <td>   -0.011</td> <td>    0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>253260</th> <td>    0.0202</td> <td>    0.005</td> <td>    3.858</td> <td> 0.000</td> <td>    0.010</td> <td>    0.030</td>\n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "  <th>Omnibus:</th>       <td>63.897</td> <th>  Durbin-Watson:     </th> <td>   1.907</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Prob(Omnibus):</th> <td> 0.000</td> <th>  Jarque-Bera (JB):  </th> <td> 218.126</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Skew:</th>          <td> 0.560</td> <th>  Prob(JB):          </th> <td>4.31e-48</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Kurtosis:</th>      <td> 6.063</td> <th>  Cond. No.          </th> <td>1.34e+03</td>\n",
       "</tr>\n",
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.<br/>[2] The condition number is large, 1.34e+03. This might indicate that there are<br/>strong multicollinearity or other numerical problems."
      ],
      "text/plain": [
       "<class 'statsmodels.iolib.summary.Summary'>\n",
       "\"\"\"\n",
       "                            OLS Regression Results                            \n",
       "==============================================================================\n",
       "Dep. Variable:                      y   R-squared:                       0.999\n",
       "Model:                            OLS   Adj. R-squared:                  0.999\n",
       "Method:                 Least Squares   F-statistic:                     3331.\n",
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):               0.00\n",
       "Time:                        18:16:35   Log-Likelihood:                 1340.8\n",
       "No. Observations:                 492   AIC:                            -2480.\n",
       "Df Residuals:                     391   BIC:                            -2056.\n",
       "Df Model:                         101                                         \n",
       "Covariance Type:            nonrobust                                         \n",
       "==============================================================================\n",
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
       "------------------------------------------------------------------------------\n",
       "102            0.0038      0.003      1.171      0.242      -0.003       0.010\n",
       "107           -0.0015      0.001     -0.982      0.327      -0.004       0.001\n",
       "108           -0.0006      0.002     -0.377      0.706      -0.004       0.003\n",
       "109           -0.0021      0.004     -0.585      0.559      -0.009       0.005\n",
       "111           -0.0038      0.003     -1.432      0.153      -0.009       0.001\n",
       "112           -0.0092      0.004     -2.553      0.011      -0.016      -0.002\n",
       "113            0.0046      0.003      1.325      0.186      -0.002       0.011\n",
       "114            0.0184      0.021      0.868      0.386      -0.023       0.060\n",
       "115           -0.0002      0.004     -0.047      0.963      -0.007       0.007\n",
       "160            0.0020      0.004      0.564      0.573      -0.005       0.009\n",
       "161            0.0081      0.006      1.424      0.155      -0.003       0.019\n",
       "163            0.0006      0.004      0.142      0.887      -0.008       0.009\n",
       "207            0.0157      0.004      3.698      0.000       0.007       0.024\n",
       "208            0.0044      0.006      0.792      0.429      -0.007       0.015\n",
       "572            0.0089      0.004      1.975      0.049    3.82e-05       0.018\n",
       "801           -0.0057      0.003     -1.772      0.077      -0.012       0.001\n",
       "805            0.0053      0.004      1.280      0.201      -0.003       0.013\n",
       "808            0.0055      0.005      1.141      0.255      -0.004       0.015\n",
       "814           -0.0012      0.003     -0.477      0.633      -0.006       0.004\n",
       "842           -0.0072      0.004     -1.804      0.072      -0.015       0.001\n",
       "867            0.0034      0.005      0.666      0.506      -0.007       0.014\n",
       "983            0.0062      0.002      3.919      0.000       0.003       0.009\n",
       "998           -0.0097      0.005     -1.854      0.064      -0.020       0.001\n",
       "1026           0.0025      0.001      1.717      0.087      -0.000       0.005\n",
       "1027          -0.0033      0.003     -1.224      0.222      -0.009       0.002\n",
       "1147          -0.0025      0.004     -0.635      0.526      -0.010       0.005\n",
       "1173           0.0166      0.005      3.472      0.001       0.007       0.026\n",
       "1175          -0.0029      0.003     -0.927      0.355      -0.009       0.003\n",
       "1211           0.0137      0.004      3.271      0.001       0.005       0.022\n",
       "1213          -0.0176      0.005     -3.238      0.001      -0.028      -0.007\n",
       "1385          -0.0272      0.006     -4.807      0.000      -0.038      -0.016\n",
       "1445          -0.0190      0.005     -3.692      0.000      -0.029      -0.009\n",
       "1950           0.0015      0.001      1.262      0.208      -0.001       0.004\n",
       "1956           0.0091      0.003      3.223      0.001       0.004       0.015\n",
       "2060          -0.0084      0.005     -1.800      0.073      -0.018       0.001\n",
       "2308          -0.0026      0.004     -0.730      0.466      -0.010       0.004\n",
       "2309           0.0015      0.002      0.613      0.540      -0.003       0.006\n",
       "2475           0.0009      0.003      0.326      0.744      -0.005       0.007\n",
       "2549           0.0126      0.004      2.997      0.003       0.004       0.021\n",
       "2885           0.0143      0.006      2.293      0.022       0.002       0.027\n",
       "2931           0.0105      0.003      3.025      0.003       0.004       0.017\n",
       "3164           0.0011      0.001      0.984      0.325      -0.001       0.003\n",
       "3265           0.0007      0.004      0.162      0.872      -0.008       0.009\n",
       "3320           0.0039      0.003      1.407      0.160      -0.002       0.009\n",
       "3709          -0.0004      0.002     -0.168      0.867      -0.005       0.004\n",
       "3710           0.0032      0.002      1.278      0.202      -0.002       0.008\n",
       "3845           0.0043      0.003      1.285      0.200      -0.002       0.011\n",
       "4193           0.0049      0.003      1.479      0.140      -0.002       0.011\n",
       "4303          -0.0028      0.004     -0.781      0.435      -0.010       0.004\n",
       "4893          -0.0048      0.003     -1.744      0.082      -0.010       0.001\n",
       "5136           0.0006      0.002      0.346      0.729      -0.003       0.004\n",
       "5170           0.0062      0.005      1.193      0.234      -0.004       0.016\n",
       "5290           0.0012      0.004      0.276      0.782      -0.007       0.010\n",
       "5295          -0.0067      0.003     -2.376      0.018      -0.012      -0.001\n",
       "5335           0.0026      0.004      0.607      0.544      -0.006       0.011\n",
       "5566          -0.0074      0.005     -1.360      0.175      -0.018       0.003\n",
       "5567          -0.0004      0.002     -0.188      0.851      -0.004       0.003\n",
       "5573          -0.0023      0.005     -0.451      0.652      -0.012       0.008\n",
       "5575           0.0043      0.002      1.820      0.070      -0.000       0.009\n",
       "5576          -0.0015      0.004     -0.396      0.692      -0.009       0.006\n",
       "5577           0.0034      0.002      1.822      0.069      -0.000       0.007\n",
       "5578           0.0009      0.002      0.418      0.676      -0.003       0.005\n",
       "5580          -0.0008      0.003     -0.276      0.783      -0.007       0.005\n",
       "5581          -0.0089      0.005     -1.857      0.064      -0.018       0.001\n",
       "5582           0.0085      0.017      0.491      0.624      -0.026       0.043\n",
       "5594           0.0003      0.006      0.055      0.956      -0.012       0.012\n",
       "5595           0.0037      0.004      0.935      0.350      -0.004       0.011\n",
       "5604          -0.0115      0.004     -3.119      0.002      -0.019      -0.004\n",
       "5605           0.0087      0.005      1.745      0.082      -0.001       0.018\n",
       "5728       -2.957e-05      0.002     -0.016      0.987      -0.004       0.004\n",
       "5894           0.0119      0.006      2.114      0.035       0.001       0.023\n",
       "6199          -0.0020      0.004     -0.486      0.627      -0.010       0.006\n",
       "6456          -0.0035      0.001     -2.815      0.005      -0.006      -0.001\n",
       "6464           0.0086      0.005      1.901      0.058      -0.000       0.018\n",
       "6654          -0.0153      0.006     -2.782      0.006      -0.026      -0.004\n",
       "6714           0.0003      0.003      0.118      0.906      -0.005       0.006\n",
       "6868          -0.0008      0.005     -0.152      0.880      -0.011       0.009\n",
       "7249           0.0062      0.006      1.129      0.259      -0.005       0.017\n",
       "7311           0.0123      0.004      2.798      0.005       0.004       0.021\n",
       "7529           0.0052      0.005      1.055      0.292      -0.004       0.015\n",
       "8027           0.0059      0.005      1.259      0.209      -0.003       0.015\n",
       "8038          -0.0037      0.003     -1.379      0.169      -0.009       0.002\n",
       "9146          -0.0085      0.006     -1.370      0.172      -0.021       0.004\n",
       "10000         -0.0011      0.003     -0.442      0.659      -0.006       0.004\n",
       "10252          0.0025      0.002      1.088      0.277      -0.002       0.007\n",
       "10253          0.0016      0.003      0.595      0.552      -0.004       0.007\n",
       "10254          0.0036      0.004      0.861      0.390      -0.005       0.012\n",
       "11140          0.0068      0.006      1.064      0.288      -0.006       0.019\n",
       "23239          0.0050      0.003      1.664      0.097      -0.001       0.011\n",
       "26018          0.0061      0.003      2.397      0.017       0.001       0.011\n",
       "29924         -0.0105      0.005     -2.092      0.037      -0.020      -0.001\n",
       "30011         -0.0037      0.003     -1.088      0.277      -0.010       0.003\n",
       "55824          0.0007      0.003      0.277      0.782      -0.004       0.006\n",
       "57761         -0.0018      0.002     -0.844      0.399      -0.006       0.002\n",
       "58513         -0.0083      0.007     -1.252      0.211      -0.021       0.005\n",
       "64223         -0.0173      0.006     -3.144      0.002      -0.028      -0.006\n",
       "79109          0.0100      0.006      1.691      0.092      -0.002       0.022\n",
       "84335         -0.0074      0.005     -1.453      0.147      -0.018       0.003\n",
       "117145         0.0006      0.003      0.199      0.843      -0.006       0.007\n",
       "196883        -0.0055      0.003     -1.924      0.055      -0.011       0.000\n",
       "253260         0.0202      0.005      3.858      0.000       0.010       0.030\n",
       "==============================================================================\n",
       "Omnibus:                       63.897   Durbin-Watson:                   1.907\n",
       "Prob(Omnibus):                  0.000   Jarque-Bera (JB):              218.126\n",
       "Skew:                           0.560   Prob(JB):                     4.31e-48\n",
       "Kurtosis:                       6.063   Cond. No.                     1.34e+03\n",
       "==============================================================================\n",
       "\n",
       "Warnings:\n",
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
       "[2] The condition number is large, 1.34e+03. This might indicate that there are\n",
       "strong multicollinearity or other numerical problems.\n",
       "\"\"\""
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X = PRADEEGFR\n",
    "y = PredPRAD.detach().numpy()\n",
    "\n",
    "# Note the difference in argument order\n",
    "model = sm.OLS(y, X).fit()\n",
    "predictions = model.predict(X) # make the predictions by the model\n",
    "\n",
    "# Print out the statistics\n",
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(array([False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False,  True, False, False, False, False, False,\n",
      "       False, False, False,  True, False, False, False, False, False,\n",
      "       False, False, False,  True,  True, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False,  True]), array([2.44677740e+01, 3.29985438e+01, 7.13092918e+01, 5.64211888e+01,\n",
      "       1.54509011e+01, 1.11658624e+00, 1.87812262e+01, 3.89558813e+01,\n",
      "       9.72150779e+01, 5.78711869e+01, 1.56777500e+01, 8.95682827e+01,\n",
      "       2.51327007e-02, 4.33098320e+01, 4.95100431e+00, 7.78750423e+00,\n",
      "       2.03159154e+01, 2.57117840e+01, 6.39829202e+01, 7.27759893e+00,\n",
      "       5.10754128e+01, 1.05971240e-02, 6.51089314e+00, 8.75869838e+00,\n",
      "       2.23897534e+01, 5.31341414e+01, 5.80062438e-02, 3.58073011e+01,\n",
      "       1.18036918e-01, 1.32133918e-01, 2.21454179e-04, 2.56837981e-02,\n",
      "       2.09656993e+01, 1.39029077e-01, 7.33409742e+00, 4.70189827e+01,\n",
      "       5.45715861e+01, 7.51930189e+01, 2.93103927e-01, 2.25943396e+00,\n",
      "       2.68121938e-01, 3.28754694e+01, 8.80486392e+01, 1.61682450e+01,\n",
      "       8.75707363e+01, 2.03855259e+01, 2.01497555e+01, 1.41306446e+01,\n",
      "       4.39405657e+01, 8.28053537e+00, 7.36785350e+01, 2.36031698e+01,\n",
      "       7.90162057e+01, 1.81686393e+00, 5.49588676e+01, 1.76444324e+01,\n",
      "       8.59214668e+01, 6.58684271e+01, 7.02399329e+00, 6.98945587e+01,\n",
      "       6.99089901e+00, 6.83073512e+01, 7.90551006e+01, 6.46732160e+00,\n",
      "       6.29964799e+01, 9.65470723e+01, 3.53987822e+01, 1.96703464e-01,\n",
      "       8.26706688e+00, 9.97367957e+01, 3.54868575e+00, 6.33556532e+01,\n",
      "       5.17631824e-01, 5.85916267e+00, 5.72527791e-01, 9.14875297e+01,\n",
      "       8.88341003e+01, 2.62037891e+01, 5.44698589e-01, 2.94806683e+01,\n",
      "       2.10871067e+01, 1.70404044e+01, 1.73259414e+01, 6.65603237e+01,\n",
      "       2.79829894e+01, 5.57878182e+01, 3.93676302e+01, 2.90942330e+01,\n",
      "       9.78555205e+00, 1.71681592e+00, 3.74347580e+00, 2.80121684e+01,\n",
      "       7.89939136e+01, 4.03277635e+01, 2.13271697e+01, 1.81303993e-01,\n",
      "       9.26249584e+00, 1.48573415e+01, 8.51061003e+01, 5.55844610e+00,\n",
      "       1.34920701e-02]))\n"
     ]
    }
   ],
   "source": [
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "listEGFR = KIRPE.columns.intersection(lsEGFR)\n",
    "KIRPEEGFR = KIRPE[listEGFR]\n",
    "KIRPMEGFR = KIRPM[listEGFR]\n",
    "KIRPCEGFR = KIRPC[listEGFR]   "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table class=\"simpletable\">\n",
       "<caption>OLS Regression Results</caption>\n",
       "<tr>\n",
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.998</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.996</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   356.2</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th> <td>2.84e-62</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Time:</th>                 <td>18:16:45</td>     <th>  Log-Likelihood:    </th> <td>  409.04</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>No. Observations:</th>      <td>   161</td>      <th>  AIC:               </th> <td>  -616.1</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Residuals:</th>          <td>    60</td>      <th>  BIC:               </th> <td>  -304.9</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>102</th>    <td>   -0.0203</td> <td>    0.011</td> <td>   -1.884</td> <td> 0.064</td> <td>   -0.042</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>107</th>    <td>    0.0050</td> <td>    0.015</td> <td>    0.340</td> <td> 0.735</td> <td>   -0.024</td> <td>    0.034</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>108</th>    <td>   -0.0149</td> <td>    0.006</td> <td>   -2.486</td> <td> 0.016</td> <td>   -0.027</td> <td>   -0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>109</th>    <td>    0.0090</td> <td>    0.008</td> <td>    1.139</td> <td> 0.259</td> <td>   -0.007</td> <td>    0.025</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>111</th>    <td>    0.0058</td> <td>    0.005</td> <td>    1.084</td> <td> 0.283</td> <td>   -0.005</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>112</th>    <td>    0.0015</td> <td>    0.013</td> <td>    0.118</td> <td> 0.906</td> <td>   -0.024</td> <td>    0.027</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>113</th>    <td>   -0.0260</td> <td>    0.012</td> <td>   -2.093</td> <td> 0.041</td> <td>   -0.051</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>114</th>    <td>   -0.0026</td> <td>    0.014</td> <td>   -0.180</td> <td> 0.858</td> <td>   -0.031</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>115</th>    <td>    0.0049</td> <td>    0.012</td> <td>    0.412</td> <td> 0.682</td> <td>   -0.019</td> <td>    0.029</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>160</th>    <td>    0.0032</td> <td>    0.018</td> <td>    0.179</td> <td> 0.859</td> <td>   -0.032</td> <td>    0.039</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>161</th>    <td>   -0.0461</td> <td>    0.019</td> <td>   -2.433</td> <td> 0.018</td> <td>   -0.084</td> <td>   -0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>163</th>    <td>    0.0056</td> <td>    0.019</td> <td>    0.295</td> <td> 0.769</td> <td>   -0.032</td> <td>    0.044</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>207</th>    <td>   -0.0264</td> <td>    0.016</td> <td>   -1.696</td> <td> 0.095</td> <td>   -0.058</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>208</th>    <td>    0.0413</td> <td>    0.016</td> <td>    2.571</td> <td> 0.013</td> <td>    0.009</td> <td>    0.073</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>572</th>    <td>    0.0455</td> <td>    0.019</td> <td>    2.357</td> <td> 0.022</td> <td>    0.007</td> <td>    0.084</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>801</th>    <td>   -0.0083</td> <td>    0.013</td> <td>   -0.614</td> <td> 0.542</td> <td>   -0.035</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>805</th>    <td>   -0.0226</td> <td>    0.023</td> <td>   -0.972</td> <td> 0.335</td> <td>   -0.069</td> <td>    0.024</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>808</th>    <td>   -0.0373</td> <td>    0.018</td> <td>   -2.121</td> <td> 0.038</td> <td>   -0.072</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>814</th>    <td>    0.0115</td> <td>    0.020</td> <td>    0.561</td> <td> 0.577</td> <td>   -0.029</td> <td>    0.052</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>842</th>    <td>   -0.0297</td> <td>    0.015</td> <td>   -2.021</td> <td> 0.048</td> <td>   -0.059</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>867</th>    <td>   -0.0059</td> <td>    0.024</td> <td>   -0.248</td> <td> 0.805</td> <td>   -0.053</td> <td>    0.042</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>983</th>    <td>    0.0012</td> <td>    0.007</td> <td>    0.169</td> <td> 0.867</td> <td>   -0.013</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>998</th>    <td>    0.0193</td> <td>    0.024</td> <td>    0.822</td> <td> 0.414</td> <td>   -0.028</td> <td>    0.066</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1026</th>   <td>   -0.0095</td> <td>    0.008</td> <td>   -1.213</td> <td> 0.230</td> <td>   -0.025</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1027</th>   <td>   -0.0049</td> <td>    0.012</td> <td>   -0.417</td> <td> 0.678</td> <td>   -0.028</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1147</th>   <td>    0.0307</td> <td>    0.020</td> <td>    1.518</td> <td> 0.134</td> <td>   -0.010</td> <td>    0.071</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1173</th>   <td>    0.0353</td> <td>    0.019</td> <td>    1.827</td> <td> 0.073</td> <td>   -0.003</td> <td>    0.074</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1175</th>   <td>    0.0282</td> <td>    0.024</td> <td>    1.194</td> <td> 0.237</td> <td>   -0.019</td> <td>    0.075</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1211</th>   <td>    0.0049</td> <td>    0.015</td> <td>    0.337</td> <td> 0.738</td> <td>   -0.024</td> <td>    0.034</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1213</th>   <td>   -0.0081</td> <td>    0.017</td> <td>   -0.484</td> <td> 0.630</td> <td>   -0.041</td> <td>    0.025</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1385</th>   <td>   -0.0046</td> <td>    0.020</td> <td>   -0.234</td> <td> 0.816</td> <td>   -0.044</td> <td>    0.035</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1445</th>   <td>    0.0122</td> <td>    0.019</td> <td>    0.658</td> <td> 0.513</td> <td>   -0.025</td> <td>    0.049</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1950</th>   <td>    0.0033</td> <td>    0.008</td> <td>    0.419</td> <td> 0.677</td> <td>   -0.012</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1956</th>   <td>    0.0367</td> <td>    0.010</td> <td>    3.749</td> <td> 0.000</td> <td>    0.017</td> <td>    0.056</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2060</th>   <td>   -0.0484</td> <td>    0.024</td> <td>   -2.035</td> <td> 0.046</td> <td>   -0.096</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2308</th>   <td>   -0.0066</td> <td>    0.013</td> <td>   -0.519</td> <td> 0.606</td> <td>   -0.032</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2309</th>   <td>    0.0204</td> <td>    0.016</td> <td>    1.298</td> <td> 0.199</td> <td>   -0.011</td> <td>    0.052</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2475</th>   <td>    0.0076</td> <td>    0.019</td> <td>    0.407</td> <td> 0.685</td> <td>   -0.030</td> <td>    0.045</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2549</th>   <td>    0.0033</td> <td>    0.014</td> <td>    0.235</td> <td> 0.815</td> <td>   -0.025</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2885</th>   <td>    0.0503</td> <td>    0.023</td> <td>    2.180</td> <td> 0.033</td> <td>    0.004</td> <td>    0.097</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2931</th>   <td>   -0.0348</td> <td>    0.026</td> <td>   -1.337</td> <td> 0.186</td> <td>   -0.087</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3164</th>   <td>    0.0059</td> <td>    0.003</td> <td>    1.970</td> <td> 0.054</td> <td>-9.21e-05</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3265</th>   <td>   -0.0328</td> <td>    0.018</td> <td>   -1.859</td> <td> 0.068</td> <td>   -0.068</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3320</th>   <td>    0.0247</td> <td>    0.012</td> <td>    2.075</td> <td> 0.042</td> <td>    0.001</td> <td>    0.049</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3709</th>   <td>   -0.0169</td> <td>    0.011</td> <td>   -1.583</td> <td> 0.119</td> <td>   -0.038</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3710</th>   <td>    0.0055</td> <td>    0.006</td> <td>    0.935</td> <td> 0.354</td> <td>   -0.006</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3845</th>   <td>    0.0395</td> <td>    0.017</td> <td>    2.365</td> <td> 0.021</td> <td>    0.006</td> <td>    0.073</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4193</th>   <td>    0.0065</td> <td>    0.012</td> <td>    0.553</td> <td> 0.582</td> <td>   -0.017</td> <td>    0.030</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4303</th>   <td>   -0.0064</td> <td>    0.014</td> <td>   -0.460</td> <td> 0.647</td> <td>   -0.034</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4893</th>   <td>    0.0045</td> <td>    0.019</td> <td>    0.238</td> <td> 0.813</td> <td>   -0.033</td> <td>    0.042</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5136</th>   <td>   -0.0038</td> <td>    0.003</td> <td>   -1.194</td> <td> 0.237</td> <td>   -0.010</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5170</th>   <td>    0.0335</td> <td>    0.023</td> <td>    1.470</td> <td> 0.147</td> <td>   -0.012</td> <td>    0.079</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5290</th>   <td>   -0.0036</td> <td>    0.021</td> <td>   -0.171</td> <td> 0.865</td> <td>   -0.046</td> <td>    0.039</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5295</th>   <td>   -0.0123</td> <td>    0.010</td> <td>   -1.216</td> <td> 0.229</td> <td>   -0.033</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5335</th>   <td>    0.0016</td> <td>    0.013</td> <td>    0.119</td> <td> 0.906</td> <td>   -0.025</td> <td>    0.028</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5566</th>   <td>    0.0412</td> <td>    0.024</td> <td>    1.688</td> <td> 0.097</td> <td>   -0.008</td> <td>    0.090</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5567</th>   <td>    0.0210</td> <td>    0.013</td> <td>    1.658</td> <td> 0.102</td> <td>   -0.004</td> <td>    0.046</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5573</th>   <td>   -0.0320</td> <td>    0.020</td> <td>   -1.609</td> <td> 0.113</td> <td>   -0.072</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5575</th>   <td>   -0.0102</td> <td>    0.009</td> <td>   -1.115</td> <td> 0.269</td> <td>   -0.029</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5576</th>   <td>   -0.0010</td> <td>    0.013</td> <td>   -0.077</td> <td> 0.939</td> <td>   -0.027</td> <td>    0.025</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5577</th>   <td>    0.0011</td> <td>    0.005</td> <td>    0.212</td> <td> 0.833</td> <td>   -0.009</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5578</th>   <td>   -0.0180</td> <td>    0.013</td> <td>   -1.352</td> <td> 0.181</td> <td>   -0.045</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5580</th>   <td>   -0.0243</td> <td>    0.013</td> <td>   -1.828</td> <td> 0.073</td> <td>   -0.051</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5581</th>   <td>    0.0281</td> <td>    0.015</td> <td>    1.921</td> <td> 0.059</td> <td>   -0.001</td> <td>    0.057</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5582</th>   <td>   -0.0033</td> <td>    0.008</td> <td>   -0.436</td> <td> 0.665</td> <td>   -0.018</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5594</th>   <td>   -0.0107</td> <td>    0.018</td> <td>   -0.589</td> <td> 0.558</td> <td>   -0.047</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5595</th>   <td>   -0.0510</td> <td>    0.021</td> <td>   -2.426</td> <td> 0.018</td> <td>   -0.093</td> <td>   -0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5604</th>   <td>   -0.0220</td> <td>    0.018</td> <td>   -1.243</td> <td> 0.219</td> <td>   -0.057</td> <td>    0.013</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5605</th>   <td>    0.0326</td> <td>    0.022</td> <td>    1.493</td> <td> 0.141</td> <td>   -0.011</td> <td>    0.076</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5728</th>   <td>   -0.0120</td> <td>    0.016</td> <td>   -0.770</td> <td> 0.444</td> <td>   -0.043</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5894</th>   <td>    0.0340</td> <td>    0.024</td> <td>    1.439</td> <td> 0.155</td> <td>   -0.013</td> <td>    0.081</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6199</th>   <td>   -0.0215</td> <td>    0.019</td> <td>   -1.106</td> <td> 0.273</td> <td>   -0.060</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6456</th>   <td>   -0.0011</td> <td>    0.004</td> <td>   -0.299</td> <td> 0.766</td> <td>   -0.009</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6464</th>   <td>    0.0065</td> <td>    0.016</td> <td>    0.405</td> <td> 0.687</td> <td>   -0.025</td> <td>    0.038</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6654</th>   <td>   -0.0203</td> <td>    0.022</td> <td>   -0.938</td> <td> 0.352</td> <td>   -0.063</td> <td>    0.023</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6714</th>   <td>   -0.0079</td> <td>    0.011</td> <td>   -0.735</td> <td> 0.465</td> <td>   -0.029</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6868</th>   <td>   -0.0096</td> <td>    0.015</td> <td>   -0.627</td> <td> 0.533</td> <td>   -0.040</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7249</th>   <td>    0.0322</td> <td>    0.020</td> <td>    1.614</td> <td> 0.112</td> <td>   -0.008</td> <td>    0.072</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7311</th>   <td>    0.0065</td> <td>    0.021</td> <td>    0.310</td> <td> 0.758</td> <td>   -0.036</td> <td>    0.049</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7529</th>   <td>    0.0649</td> <td>    0.022</td> <td>    2.910</td> <td> 0.005</td> <td>    0.020</td> <td>    0.109</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8027</th>   <td>   -0.0083</td> <td>    0.019</td> <td>   -0.436</td> <td> 0.665</td> <td>   -0.046</td> <td>    0.030</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8038</th>   <td>   -0.0063</td> <td>    0.006</td> <td>   -1.116</td> <td> 0.269</td> <td>   -0.018</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>9146</th>   <td>    0.0263</td> <td>    0.022</td> <td>    1.217</td> <td> 0.228</td> <td>   -0.017</td> <td>    0.070</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10000</th>  <td>    0.0119</td> <td>    0.010</td> <td>    1.229</td> <td> 0.224</td> <td>   -0.007</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10252</th>  <td>    0.0047</td> <td>    0.008</td> <td>    0.564</td> <td> 0.575</td> <td>   -0.012</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10253</th>  <td>   -0.0065</td> <td>    0.009</td> <td>   -0.744</td> <td> 0.460</td> <td>   -0.024</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10254</th>  <td>   -0.0071</td> <td>    0.023</td> <td>   -0.305</td> <td> 0.761</td> <td>   -0.054</td> <td>    0.040</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>11140</th>  <td>    0.0294</td> <td>    0.027</td> <td>    1.076</td> <td> 0.286</td> <td>   -0.025</td> <td>    0.084</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>23239</th>  <td>    0.0164</td> <td>    0.012</td> <td>    1.338</td> <td> 0.186</td> <td>   -0.008</td> <td>    0.041</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>26018</th>  <td>   -0.0269</td> <td>    0.011</td> <td>   -2.414</td> <td> 0.019</td> <td>   -0.049</td> <td>   -0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>29924</th>  <td>   -0.0294</td> <td>    0.020</td> <td>   -1.479</td> <td> 0.144</td> <td>   -0.069</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>30011</th>  <td>   -0.0023</td> <td>    0.008</td> <td>   -0.285</td> <td> 0.777</td> <td>   -0.019</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>55824</th>  <td>   -0.0012</td> <td>    0.013</td> <td>   -0.090</td> <td> 0.929</td> <td>   -0.028</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>57761</th>  <td>    0.0011</td> <td>    0.005</td> <td>    0.213</td> <td> 0.832</td> <td>   -0.009</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>58513</th>  <td>   -0.0452</td> <td>    0.015</td> <td>   -3.034</td> <td> 0.004</td> <td>   -0.075</td> <td>   -0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>64223</th>  <td>   -0.0160</td> <td>    0.022</td> <td>   -0.733</td> <td> 0.466</td> <td>   -0.060</td> <td>    0.028</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>79109</th>  <td>    0.0001</td> <td>    0.017</td> <td>    0.006</td> <td> 0.995</td> <td>   -0.035</td> <td>    0.035</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>84335</th>  <td>   -0.0121</td> <td>    0.025</td> <td>   -0.491</td> <td> 0.625</td> <td>   -0.061</td> <td>    0.037</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>117145</th> <td>   -0.0002</td> <td>    0.017</td> <td>   -0.010</td> <td> 0.992</td> <td>   -0.034</td> <td>    0.034</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>196883</th> <td>   -0.0140</td> <td>    0.013</td> <td>   -1.094</td> <td> 0.278</td> <td>   -0.040</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>253260</th> <td>   -0.0265</td> <td>    0.018</td> <td>   -1.485</td> <td> 0.143</td> <td>   -0.062</td> <td>    0.009</td>\n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "  <th>Omnibus:</th>       <td> 0.349</td> <th>  Durbin-Watson:     </th> <td>   1.769</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Prob(Omnibus):</th> <td> 0.840</td> <th>  Jarque-Bera (JB):  </th> <td>   0.161</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Skew:</th>          <td>-0.067</td> <th>  Prob(JB):          </th> <td>   0.922</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Kurtosis:</th>      <td> 3.077</td> <th>  Cond. No.          </th> <td>    941.</td>\n",
       "</tr>\n",
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified."
      ],
      "text/plain": [
       "<class 'statsmodels.iolib.summary.Summary'>\n",
       "\"\"\"\n",
       "                            OLS Regression Results                            \n",
       "==============================================================================\n",
       "Dep. Variable:                      y   R-squared:                       0.998\n",
       "Model:                            OLS   Adj. R-squared:                  0.996\n",
       "Method:                 Least Squares   F-statistic:                     356.2\n",
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):           2.84e-62\n",
       "Time:                        18:16:45   Log-Likelihood:                 409.04\n",
       "No. Observations:                 161   AIC:                            -616.1\n",
       "Df Residuals:                      60   BIC:                            -304.9\n",
       "Df Model:                         101                                         \n",
       "Covariance Type:            nonrobust                                         \n",
       "==============================================================================\n",
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
       "------------------------------------------------------------------------------\n",
       "102           -0.0203      0.011     -1.884      0.064      -0.042       0.001\n",
       "107            0.0050      0.015      0.340      0.735      -0.024       0.034\n",
       "108           -0.0149      0.006     -2.486      0.016      -0.027      -0.003\n",
       "109            0.0090      0.008      1.139      0.259      -0.007       0.025\n",
       "111            0.0058      0.005      1.084      0.283      -0.005       0.016\n",
       "112            0.0015      0.013      0.118      0.906      -0.024       0.027\n",
       "113           -0.0260      0.012     -2.093      0.041      -0.051      -0.001\n",
       "114           -0.0026      0.014     -0.180      0.858      -0.031       0.026\n",
       "115            0.0049      0.012      0.412      0.682      -0.019       0.029\n",
       "160            0.0032      0.018      0.179      0.859      -0.032       0.039\n",
       "161           -0.0461      0.019     -2.433      0.018      -0.084      -0.008\n",
       "163            0.0056      0.019      0.295      0.769      -0.032       0.044\n",
       "207           -0.0264      0.016     -1.696      0.095      -0.058       0.005\n",
       "208            0.0413      0.016      2.571      0.013       0.009       0.073\n",
       "572            0.0455      0.019      2.357      0.022       0.007       0.084\n",
       "801           -0.0083      0.013     -0.614      0.542      -0.035       0.019\n",
       "805           -0.0226      0.023     -0.972      0.335      -0.069       0.024\n",
       "808           -0.0373      0.018     -2.121      0.038      -0.072      -0.002\n",
       "814            0.0115      0.020      0.561      0.577      -0.029       0.052\n",
       "842           -0.0297      0.015     -2.021      0.048      -0.059      -0.000\n",
       "867           -0.0059      0.024     -0.248      0.805      -0.053       0.042\n",
       "983            0.0012      0.007      0.169      0.867      -0.013       0.016\n",
       "998            0.0193      0.024      0.822      0.414      -0.028       0.066\n",
       "1026          -0.0095      0.008     -1.213      0.230      -0.025       0.006\n",
       "1027          -0.0049      0.012     -0.417      0.678      -0.028       0.019\n",
       "1147           0.0307      0.020      1.518      0.134      -0.010       0.071\n",
       "1173           0.0353      0.019      1.827      0.073      -0.003       0.074\n",
       "1175           0.0282      0.024      1.194      0.237      -0.019       0.075\n",
       "1211           0.0049      0.015      0.337      0.738      -0.024       0.034\n",
       "1213          -0.0081      0.017     -0.484      0.630      -0.041       0.025\n",
       "1385          -0.0046      0.020     -0.234      0.816      -0.044       0.035\n",
       "1445           0.0122      0.019      0.658      0.513      -0.025       0.049\n",
       "1950           0.0033      0.008      0.419      0.677      -0.012       0.019\n",
       "1956           0.0367      0.010      3.749      0.000       0.017       0.056\n",
       "2060          -0.0484      0.024     -2.035      0.046      -0.096      -0.001\n",
       "2308          -0.0066      0.013     -0.519      0.606      -0.032       0.019\n",
       "2309           0.0204      0.016      1.298      0.199      -0.011       0.052\n",
       "2475           0.0076      0.019      0.407      0.685      -0.030       0.045\n",
       "2549           0.0033      0.014      0.235      0.815      -0.025       0.031\n",
       "2885           0.0503      0.023      2.180      0.033       0.004       0.097\n",
       "2931          -0.0348      0.026     -1.337      0.186      -0.087       0.017\n",
       "3164           0.0059      0.003      1.970      0.054   -9.21e-05       0.012\n",
       "3265          -0.0328      0.018     -1.859      0.068      -0.068       0.002\n",
       "3320           0.0247      0.012      2.075      0.042       0.001       0.049\n",
       "3709          -0.0169      0.011     -1.583      0.119      -0.038       0.004\n",
       "3710           0.0055      0.006      0.935      0.354      -0.006       0.017\n",
       "3845           0.0395      0.017      2.365      0.021       0.006       0.073\n",
       "4193           0.0065      0.012      0.553      0.582      -0.017       0.030\n",
       "4303          -0.0064      0.014     -0.460      0.647      -0.034       0.021\n",
       "4893           0.0045      0.019      0.238      0.813      -0.033       0.042\n",
       "5136          -0.0038      0.003     -1.194      0.237      -0.010       0.003\n",
       "5170           0.0335      0.023      1.470      0.147      -0.012       0.079\n",
       "5290          -0.0036      0.021     -0.171      0.865      -0.046       0.039\n",
       "5295          -0.0123      0.010     -1.216      0.229      -0.033       0.008\n",
       "5335           0.0016      0.013      0.119      0.906      -0.025       0.028\n",
       "5566           0.0412      0.024      1.688      0.097      -0.008       0.090\n",
       "5567           0.0210      0.013      1.658      0.102      -0.004       0.046\n",
       "5573          -0.0320      0.020     -1.609      0.113      -0.072       0.008\n",
       "5575          -0.0102      0.009     -1.115      0.269      -0.029       0.008\n",
       "5576          -0.0010      0.013     -0.077      0.939      -0.027       0.025\n",
       "5577           0.0011      0.005      0.212      0.833      -0.009       0.012\n",
       "5578          -0.0180      0.013     -1.352      0.181      -0.045       0.009\n",
       "5580          -0.0243      0.013     -1.828      0.073      -0.051       0.002\n",
       "5581           0.0281      0.015      1.921      0.059      -0.001       0.057\n",
       "5582          -0.0033      0.008     -0.436      0.665      -0.018       0.012\n",
       "5594          -0.0107      0.018     -0.589      0.558      -0.047       0.026\n",
       "5595          -0.0510      0.021     -2.426      0.018      -0.093      -0.009\n",
       "5604          -0.0220      0.018     -1.243      0.219      -0.057       0.013\n",
       "5605           0.0326      0.022      1.493      0.141      -0.011       0.076\n",
       "5728          -0.0120      0.016     -0.770      0.444      -0.043       0.019\n",
       "5894           0.0340      0.024      1.439      0.155      -0.013       0.081\n",
       "6199          -0.0215      0.019     -1.106      0.273      -0.060       0.017\n",
       "6456          -0.0011      0.004     -0.299      0.766      -0.009       0.007\n",
       "6464           0.0065      0.016      0.405      0.687      -0.025       0.038\n",
       "6654          -0.0203      0.022     -0.938      0.352      -0.063       0.023\n",
       "6714          -0.0079      0.011     -0.735      0.465      -0.029       0.014\n",
       "6868          -0.0096      0.015     -0.627      0.533      -0.040       0.021\n",
       "7249           0.0322      0.020      1.614      0.112      -0.008       0.072\n",
       "7311           0.0065      0.021      0.310      0.758      -0.036       0.049\n",
       "7529           0.0649      0.022      2.910      0.005       0.020       0.109\n",
       "8027          -0.0083      0.019     -0.436      0.665      -0.046       0.030\n",
       "8038          -0.0063      0.006     -1.116      0.269      -0.018       0.005\n",
       "9146           0.0263      0.022      1.217      0.228      -0.017       0.070\n",
       "10000          0.0119      0.010      1.229      0.224      -0.007       0.031\n",
       "10252          0.0047      0.008      0.564      0.575      -0.012       0.021\n",
       "10253         -0.0065      0.009     -0.744      0.460      -0.024       0.011\n",
       "10254         -0.0071      0.023     -0.305      0.761      -0.054       0.040\n",
       "11140          0.0294      0.027      1.076      0.286      -0.025       0.084\n",
       "23239          0.0164      0.012      1.338      0.186      -0.008       0.041\n",
       "26018         -0.0269      0.011     -2.414      0.019      -0.049      -0.005\n",
       "29924         -0.0294      0.020     -1.479      0.144      -0.069       0.010\n",
       "30011         -0.0023      0.008     -0.285      0.777      -0.019       0.014\n",
       "55824         -0.0012      0.013     -0.090      0.929      -0.028       0.026\n",
       "57761          0.0011      0.005      0.213      0.832      -0.009       0.011\n",
       "58513         -0.0452      0.015     -3.034      0.004      -0.075      -0.015\n",
       "64223         -0.0160      0.022     -0.733      0.466      -0.060       0.028\n",
       "79109          0.0001      0.017      0.006      0.995      -0.035       0.035\n",
       "84335         -0.0121      0.025     -0.491      0.625      -0.061       0.037\n",
       "117145        -0.0002      0.017     -0.010      0.992      -0.034       0.034\n",
       "196883        -0.0140      0.013     -1.094      0.278      -0.040       0.012\n",
       "253260        -0.0265      0.018     -1.485      0.143      -0.062       0.009\n",
       "==============================================================================\n",
       "Omnibus:                        0.349   Durbin-Watson:                   1.769\n",
       "Prob(Omnibus):                  0.840   Jarque-Bera (JB):                0.161\n",
       "Skew:                          -0.067   Prob(JB):                        0.922\n",
       "Kurtosis:                       3.077   Cond. No.                         941.\n",
       "==============================================================================\n",
       "\n",
       "Warnings:\n",
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
       "\"\"\""
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X = KIRPEEGFR\n",
    "y = PredKIRP.detach().numpy()\n",
    "\n",
    "# Note the argument order: sm.OLS takes (endog, exog), i.e. (y, X) -- the reverse of sklearn's fit(X, y)\n",
    "model = sm.OLS(y, X).fit()\n",
    "predictions = model.predict(X) # in-sample predictions from the fitted model (not used below)\n",
    "\n",
    "# Print out the statistics\n",
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(array([False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False,  True, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False]), array([6.50924133e+00, 7.42508317e+01, 1.58961376e+00, 2.61829337e+01,\n",
      "       2.85630855e+01, 9.15220271e+01, 4.09864261e+00, 8.66437911e+01,\n",
      "       6.88992687e+01, 8.67118407e+01, 1.81588898e+00, 7.76483486e+01,\n",
      "       9.60385004e+00, 1.27689793e+00, 2.19346945e+00, 5.46999451e+01,\n",
      "       3.38503661e+01, 3.84427918e+00, 5.82378406e+01, 4.81885670e+00,\n",
      "       8.13261753e+01, 8.75253594e+01, 4.18628277e+01, 2.32005412e+01,\n",
      "       6.85096311e+01, 1.35581058e+01, 7.33793889e+00, 2.39558196e+01,\n",
      "       7.44977002e+01, 6.36432682e+01, 8.23873035e+01, 5.18247252e+01,\n",
      "       6.83479532e+01, 4.05302431e-02, 4.67644580e+00, 6.11696195e+01,\n",
      "       2.01160277e+01, 6.92197855e+01, 8.23371412e+01, 3.35360038e+00,\n",
      "       1.88156864e+01, 5.40424117e+00, 6.86220958e+00, 4.26728145e+00,\n",
      "       1.19916210e+01, 3.57124009e+01, 2.14829823e+00, 5.88141982e+01,\n",
      "       6.53761469e+01, 8.20869339e+01, 2.39499744e+01, 1.48225996e+01,\n",
      "       8.73590669e+01, 2.31123957e+01, 9.14734677e+01, 9.75047102e+00,\n",
      "       1.03462166e+01, 1.14103902e+01, 2.72031836e+01, 9.48489001e+01,\n",
      "       8.41114074e+01, 1.83243560e+01, 7.32889060e+00, 6.00363356e+00,\n",
      "       6.71240175e+01, 5.63404437e+01, 1.84848930e+00, 2.21054211e+01,\n",
      "       1.42180451e+01, 4.48681168e+01, 1.57037134e+01, 2.76076095e+01,\n",
      "       7.73776675e+01, 6.93695531e+01, 3.55455956e+01, 4.69821872e+01,\n",
      "       5.38662274e+01, 1.12966032e+01, 7.65297883e+01, 5.11549906e-01,\n",
      "       6.71327921e+01, 2.71551225e+01, 2.30738015e+01, 2.26089771e+01,\n",
      "       5.80858590e+01, 4.64402309e+01, 7.68757282e+01, 2.89040346e+01,\n",
      "       1.87755282e+01, 1.90200962e+00, 1.45689516e+01, 7.84330917e+01,\n",
      "       9.38119545e+01, 8.40761596e+01, 3.60228247e-01, 4.70918026e+01,\n",
      "       1.00494392e+02, 6.31281113e+01, 1.00236688e+02, 2.81050319e+01,\n",
      "       1.44242354e+01]))\n"
     ]
    }
   ],
   "source": [
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "listEGFR = BLCAE.columns.intersection(lsEGFR)\n",
    "BLCAEEGFR = BLCAE[listEGFR]\n",
    "BLCAMEGFR = BLCAM[listEGFR]\n",
    "BLCACEGFR = BLCAC[listEGFR]   "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table class=\"simpletable\">\n",
       "<caption>OLS Regression Results</caption>\n",
       "<tr>\n",
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.998</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.987</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   91.05</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th> <td>4.86e-18</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Time:</th>                 <td>18:16:52</td>     <th>  Log-Likelihood:    </th> <td>  291.96</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>No. Observations:</th>      <td>   123</td>      <th>  AIC:               </th> <td>  -381.9</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Residuals:</th>          <td>    22</td>      <th>  BIC:               </th> <td>  -97.88</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>102</th>    <td>    0.0508</td> <td>    0.025</td> <td>    2.072</td> <td> 0.050</td> <td>-4.41e-05</td> <td>    0.102</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>107</th>    <td>    0.0081</td> <td>    0.011</td> <td>    0.733</td> <td> 0.471</td> <td>   -0.015</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>108</th>    <td>    0.0587</td> <td>    0.039</td> <td>    1.503</td> <td> 0.147</td> <td>   -0.022</td> <td>    0.140</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>109</th>    <td>    0.0054</td> <td>    0.021</td> <td>    0.253</td> <td> 0.803</td> <td>   -0.039</td> <td>    0.050</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>111</th>    <td>    0.0114</td> <td>    0.018</td> <td>    0.620</td> <td> 0.541</td> <td>   -0.027</td> <td>    0.050</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>112</th>    <td>   -0.0243</td> <td>    0.023</td> <td>   -1.037</td> <td> 0.311</td> <td>   -0.073</td> <td>    0.024</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>113</th>    <td>    0.0279</td> <td>    0.022</td> <td>    1.292</td> <td> 0.210</td> <td>   -0.017</td> <td>    0.073</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>114</th>    <td>    0.4896</td> <td>    0.285</td> <td>    1.715</td> <td> 0.100</td> <td>   -0.102</td> <td>    1.081</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>115</th>    <td>   -0.0062</td> <td>    0.029</td> <td>   -0.215</td> <td> 0.831</td> <td>   -0.066</td> <td>    0.054</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>160</th>    <td>   -0.0502</td> <td>    0.034</td> <td>   -1.455</td> <td> 0.160</td> <td>   -0.122</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>161</th>    <td>    0.0540</td> <td>    0.036</td> <td>    1.521</td> <td> 0.143</td> <td>   -0.020</td> <td>    0.128</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>163</th>    <td>   -0.0213</td> <td>    0.025</td> <td>   -0.852</td> <td> 0.403</td> <td>   -0.073</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>207</th>    <td>    0.0241</td> <td>    0.034</td> <td>    0.714</td> <td> 0.483</td> <td>   -0.046</td> <td>    0.094</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>208</th>    <td>   -0.0699</td> <td>    0.042</td> <td>   -1.659</td> <td> 0.111</td> <td>   -0.157</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>572</th>    <td>    0.0345</td> <td>    0.029</td> <td>    1.182</td> <td> 0.250</td> <td>   -0.026</td> <td>    0.095</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>801</th>    <td>    0.0148</td> <td>    0.033</td> <td>    0.448</td> <td> 0.659</td> <td>   -0.054</td> <td>    0.083</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>805</th>    <td>    0.0813</td> <td>    0.027</td> <td>    3.000</td> <td> 0.007</td> <td>    0.025</td> <td>    0.138</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>808</th>    <td>    0.0226</td> <td>    0.035</td> <td>    0.651</td> <td> 0.522</td> <td>   -0.049</td> <td>    0.095</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>814</th>    <td>    0.0117</td> <td>    0.025</td> <td>    0.469</td> <td> 0.644</td> <td>   -0.040</td> <td>    0.063</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>842</th>    <td>    0.0053</td> <td>    0.026</td> <td>    0.203</td> <td> 0.841</td> <td>   -0.049</td> <td>    0.060</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>867</th>    <td>   -0.0095</td> <td>    0.034</td> <td>   -0.280</td> <td> 0.782</td> <td>   -0.080</td> <td>    0.061</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>983</th>    <td>    0.0057</td> <td>    0.018</td> <td>    0.317</td> <td> 0.754</td> <td>   -0.031</td> <td>    0.043</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>998</th>    <td>   -0.0248</td> <td>    0.042</td> <td>   -0.598</td> <td> 0.556</td> <td>   -0.111</td> <td>    0.061</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1026</th>   <td>    0.0170</td> <td>    0.011</td> <td>    1.504</td> <td> 0.147</td> <td>   -0.006</td> <td>    0.040</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1027</th>   <td>   -0.0315</td> <td>    0.023</td> <td>   -1.370</td> <td> 0.185</td> <td>   -0.079</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1147</th>   <td>   -0.0044</td> <td>    0.032</td> <td>   -0.136</td> <td> 0.893</td> <td>   -0.071</td> <td>    0.062</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1173</th>   <td>    0.0110</td> <td>    0.034</td> <td>    0.319</td> <td> 0.752</td> <td>   -0.061</td> <td>    0.083</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1175</th>   <td>    0.0015</td> <td>    0.050</td> <td>    0.029</td> <td> 0.977</td> <td>   -0.102</td> <td>    0.105</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1211</th>   <td>   -0.0113</td> <td>    0.024</td> <td>   -0.467</td> <td> 0.645</td> <td>   -0.062</td> <td>    0.039</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1213</th>   <td>   -0.0472</td> <td>    0.042</td> <td>   -1.116</td> <td> 0.277</td> <td>   -0.135</td> <td>    0.041</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1385</th>   <td>   -0.0493</td> <td>    0.043</td> <td>   -1.155</td> <td> 0.261</td> <td>   -0.138</td> <td>    0.039</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1445</th>   <td>    0.0032</td> <td>    0.033</td> <td>    0.096</td> <td> 0.924</td> <td>   -0.065</td> <td>    0.072</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1950</th>   <td>    0.0118</td> <td>    0.023</td> <td>    0.507</td> <td> 0.617</td> <td>   -0.037</td> <td>    0.060</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1956</th>   <td>   -0.0207</td> <td>    0.010</td> <td>   -2.088</td> <td> 0.049</td> <td>   -0.041</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2060</th>   <td>   -0.1096</td> <td>    0.052</td> <td>   -2.090</td> <td> 0.048</td> <td>   -0.218</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2308</th>   <td>    0.0061</td> <td>    0.021</td> <td>    0.298</td> <td> 0.769</td> <td>   -0.037</td> <td>    0.049</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2309</th>   <td>    0.0169</td> <td>    0.027</td> <td>    0.615</td> <td> 0.545</td> <td>   -0.040</td> <td>    0.074</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2475</th>   <td>   -0.0019</td> <td>    0.036</td> <td>   -0.054</td> <td> 0.957</td> <td>   -0.076</td> <td>    0.072</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2549</th>   <td>    0.0047</td> <td>    0.024</td> <td>    0.196</td> <td> 0.846</td> <td>   -0.045</td> <td>    0.055</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2885</th>   <td>   -0.0271</td> <td>    0.072</td> <td>   -0.378</td> <td> 0.709</td> <td>   -0.176</td> <td>    0.121</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2931</th>   <td>    0.0249</td> <td>    0.045</td> <td>    0.552</td> <td> 0.587</td> <td>   -0.069</td> <td>    0.118</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3164</th>   <td>   -0.0043</td> <td>    0.009</td> <td>   -0.454</td> <td> 0.654</td> <td>   -0.024</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3265</th>   <td>   -0.0165</td> <td>    0.020</td> <td>   -0.815</td> <td> 0.424</td> <td>   -0.059</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3320</th>   <td>   -0.0002</td> <td>    0.028</td> <td>   -0.007</td> <td> 0.994</td> <td>   -0.058</td> <td>    0.057</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3709</th>   <td>    0.0107</td> <td>    0.025</td> <td>    0.426</td> <td> 0.674</td> <td>   -0.041</td> <td>    0.063</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3710</th>   <td>   -0.0068</td> <td>    0.025</td> <td>   -0.278</td> <td> 0.784</td> <td>   -0.058</td> <td>    0.044</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3845</th>   <td>    0.0211</td> <td>    0.026</td> <td>    0.796</td> <td> 0.435</td> <td>   -0.034</td> <td>    0.076</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4193</th>   <td>    0.0029</td> <td>    0.013</td> <td>    0.218</td> <td> 0.829</td> <td>   -0.025</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4303</th>   <td>   -0.0276</td> <td>    0.019</td> <td>   -1.427</td> <td> 0.168</td> <td>   -0.068</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4893</th>   <td>    0.0211</td> <td>    0.028</td> <td>    0.753</td> <td> 0.460</td> <td>   -0.037</td> <td>    0.079</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5136</th>   <td>   -0.0105</td> <td>    0.022</td> <td>   -0.466</td> <td> 0.646</td> <td>   -0.057</td> <td>    0.036</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5170</th>   <td>    0.0493</td> <td>    0.043</td> <td>    1.140</td> <td> 0.267</td> <td>   -0.040</td> <td>    0.139</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5290</th>   <td>    0.0312</td> <td>    0.051</td> <td>    0.610</td> <td> 0.548</td> <td>   -0.075</td> <td>    0.137</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5295</th>   <td>    0.0059</td> <td>    0.016</td> <td>    0.369</td> <td> 0.715</td> <td>   -0.027</td> <td>    0.039</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5335</th>   <td>    0.0182</td> <td>    0.025</td> <td>    0.722</td> <td> 0.478</td> <td>   -0.034</td> <td>    0.071</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5566</th>   <td>    0.0261</td> <td>    0.035</td> <td>    0.738</td> <td> 0.468</td> <td>   -0.047</td> <td>    0.099</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5567</th>   <td>   -0.0521</td> <td>    0.022</td> <td>   -2.371</td> <td> 0.027</td> <td>   -0.098</td> <td>   -0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5573</th>   <td>    0.0343</td> <td>    0.047</td> <td>    0.724</td> <td> 0.477</td> <td>   -0.064</td> <td>    0.133</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5575</th>   <td>    0.0220</td> <td>    0.018</td> <td>    1.251</td> <td> 0.224</td> <td>   -0.014</td> <td>    0.058</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5576</th>   <td>    0.0253</td> <td>    0.032</td> <td>    0.784</td> <td> 0.442</td> <td>   -0.042</td> <td>    0.092</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5577</th>   <td>   -0.0081</td> <td>    0.011</td> <td>   -0.739</td> <td> 0.468</td> <td>   -0.031</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5578</th>   <td>   -0.0108</td> <td>    0.021</td> <td>   -0.518</td> <td> 0.609</td> <td>   -0.054</td> <td>    0.033</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5580</th>   <td>    0.0522</td> <td>    0.019</td> <td>    2.714</td> <td> 0.013</td> <td>    0.012</td> <td>    0.092</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5581</th>   <td>   -0.0035</td> <td>    0.027</td> <td>   -0.132</td> <td> 0.896</td> <td>   -0.059</td> <td>    0.052</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5582</th>   <td>   -0.0975</td> <td>    0.153</td> <td>   -0.637</td> <td> 0.531</td> <td>   -0.415</td> <td>    0.220</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5594</th>   <td>   -0.0243</td> <td>    0.034</td> <td>   -0.714</td> <td> 0.483</td> <td>   -0.095</td> <td>    0.046</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5595</th>   <td>    0.0219</td> <td>    0.018</td> <td>    1.201</td> <td> 0.243</td> <td>   -0.016</td> <td>    0.060</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5604</th>   <td>   -0.0548</td> <td>    0.045</td> <td>   -1.231</td> <td> 0.231</td> <td>   -0.147</td> <td>    0.038</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5605</th>   <td>   -0.0198</td> <td>    0.042</td> <td>   -0.471</td> <td> 0.643</td> <td>   -0.107</td> <td>    0.067</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5728</th>   <td>    0.0233</td> <td>    0.025</td> <td>    0.939</td> <td> 0.358</td> <td>   -0.028</td> <td>    0.075</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5894</th>   <td>   -0.0257</td> <td>    0.021</td> <td>   -1.216</td> <td> 0.237</td> <td>   -0.069</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6199</th>   <td>    0.0482</td> <td>    0.036</td> <td>    1.332</td> <td> 0.196</td> <td>   -0.027</td> <td>    0.123</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6456</th>   <td>   -0.0083</td> <td>    0.010</td> <td>   -0.844</td> <td> 0.408</td> <td>   -0.029</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6464</th>   <td>   -0.0142</td> <td>    0.015</td> <td>   -0.923</td> <td> 0.366</td> <td>   -0.046</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6654</th>   <td>    0.0373</td> <td>    0.032</td> <td>    1.178</td> <td> 0.252</td> <td>   -0.028</td> <td>    0.103</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6714</th>   <td>   -0.0454</td> <td>    0.029</td> <td>   -1.579</td> <td> 0.129</td> <td>   -0.105</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6868</th>   <td>   -0.0322</td> <td>    0.026</td> <td>   -1.261</td> <td> 0.220</td> <td>   -0.085</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7249</th>   <td>   -0.0030</td> <td>    0.050</td> <td>   -0.060</td> <td> 0.953</td> <td>   -0.107</td> <td>    0.101</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7311</th>   <td>    0.0187</td> <td>    0.039</td> <td>    0.480</td> <td> 0.636</td> <td>   -0.062</td> <td>    0.099</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7529</th>   <td>    0.0140</td> <td>    0.033</td> <td>    0.427</td> <td> 0.674</td> <td>   -0.054</td> <td>    0.082</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8027</th>   <td>   -0.0421</td> <td>    0.026</td> <td>   -1.641</td> <td> 0.115</td> <td>   -0.095</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8038</th>   <td>   -0.0077</td> <td>    0.012</td> <td>   -0.620</td> <td> 0.542</td> <td>   -0.033</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>9146</th>   <td>    0.0875</td> <td>    0.045</td> <td>    1.952</td> <td> 0.064</td> <td>   -0.005</td> <td>    0.180</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10000</th>  <td>   -0.0031</td> <td>    0.017</td> <td>   -0.184</td> <td> 0.856</td> <td>   -0.038</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10252</th>  <td>   -0.0044</td> <td>    0.022</td> <td>   -0.205</td> <td> 0.840</td> <td>   -0.049</td> <td>    0.040</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10253</th>  <td>   -0.0123</td> <td>    0.014</td> <td>   -0.861</td> <td> 0.398</td> <td>   -0.042</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10254</th>  <td>   -0.0406</td> <td>    0.037</td> <td>   -1.092</td> <td> 0.286</td> <td>   -0.118</td> <td>    0.036</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>11140</th>  <td>   -0.0482</td> <td>    0.039</td> <td>   -1.249</td> <td> 0.225</td> <td>   -0.128</td> <td>    0.032</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>23239</th>  <td>    0.0444</td> <td>    0.022</td> <td>    2.051</td> <td> 0.052</td> <td>   -0.000</td> <td>    0.089</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>26018</th>  <td>   -0.0159</td> <td>    0.010</td> <td>   -1.601</td> <td> 0.124</td> <td>   -0.036</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>29924</th>  <td>   -0.0028</td> <td>    0.038</td> <td>   -0.073</td> <td> 0.942</td> <td>   -0.083</td> <td>    0.077</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>30011</th>  <td>    0.0035</td> <td>    0.011</td> <td>    0.316</td> <td> 0.755</td> <td>   -0.020</td> <td>    0.027</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>55824</th>  <td>    0.0007</td> <td>    0.022</td> <td>    0.033</td> <td> 0.974</td> <td>   -0.045</td> <td>    0.047</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>57761</th>  <td>   -0.0003</td> <td>    0.013</td> <td>   -0.020</td> <td> 0.984</td> <td>   -0.028</td> <td>    0.027</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>58513</th>  <td>    0.0410</td> <td>    0.043</td> <td>    0.946</td> <td> 0.354</td> <td>   -0.049</td> <td>    0.131</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>64223</th>  <td>   -0.1063</td> <td>    0.041</td> <td>   -2.564</td> <td> 0.018</td> <td>   -0.192</td> <td>   -0.020</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>79109</th>  <td>    0.0476</td> <td>    0.033</td> <td>    1.462</td> <td> 0.158</td> <td>   -0.020</td> <td>    0.115</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>84335</th>  <td>    0.0365</td> <td>    0.033</td> <td>    1.117</td> <td> 0.276</td> <td>   -0.031</td> <td>    0.104</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>117145</th> <td>    0.0150</td> <td>    0.018</td> <td>    0.821</td> <td> 0.421</td> <td>   -0.023</td> <td>    0.053</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>196883</th> <td>   -0.0293</td> <td>    0.029</td> <td>   -1.017</td> <td> 0.320</td> <td>   -0.089</td> <td>    0.030</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>253260</th> <td>   -0.0124</td> <td>    0.029</td> <td>   -0.433</td> <td> 0.669</td> <td>   -0.072</td> <td>    0.047</td>\n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "  <th>Omnibus:</th>       <td> 7.830</td> <th>  Durbin-Watson:     </th> <td>   2.262</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Prob(Omnibus):</th> <td> 0.020</td> <th>  Jarque-Bera (JB):  </th> <td>   8.086</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Skew:</th>          <td> 0.465</td> <th>  Prob(JB):          </th> <td>  0.0175</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Kurtosis:</th>      <td> 3.845</td> <th>  Cond. No.          </th> <td>3.01e+03</td>\n",
       "</tr>\n",
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.<br/>[2] The condition number is large, 3.01e+03. This might indicate that there are<br/>strong multicollinearity or other numerical problems."
      ],
      "text/plain": [
       "<class 'statsmodels.iolib.summary.Summary'>\n",
       "\"\"\"\n",
       "                            OLS Regression Results                            \n",
       "==============================================================================\n",
       "Dep. Variable:                      y   R-squared:                       0.998\n",
       "Model:                            OLS   Adj. R-squared:                  0.987\n",
       "Method:                 Least Squares   F-statistic:                     91.05\n",
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):           4.86e-18\n",
       "Time:                        18:16:52   Log-Likelihood:                 291.96\n",
       "No. Observations:                 123   AIC:                            -381.9\n",
       "Df Residuals:                      22   BIC:                            -97.88\n",
       "Df Model:                         101                                         \n",
       "Covariance Type:            nonrobust                                         \n",
       "==============================================================================\n",
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
       "------------------------------------------------------------------------------\n",
       "102            0.0508      0.025      2.072      0.050   -4.41e-05       0.102\n",
       "107            0.0081      0.011      0.733      0.471      -0.015       0.031\n",
       "108            0.0587      0.039      1.503      0.147      -0.022       0.140\n",
       "109            0.0054      0.021      0.253      0.803      -0.039       0.050\n",
       "111            0.0114      0.018      0.620      0.541      -0.027       0.050\n",
       "112           -0.0243      0.023     -1.037      0.311      -0.073       0.024\n",
       "113            0.0279      0.022      1.292      0.210      -0.017       0.073\n",
       "114            0.4896      0.285      1.715      0.100      -0.102       1.081\n",
       "115           -0.0062      0.029     -0.215      0.831      -0.066       0.054\n",
       "160           -0.0502      0.034     -1.455      0.160      -0.122       0.021\n",
       "161            0.0540      0.036      1.521      0.143      -0.020       0.128\n",
       "163           -0.0213      0.025     -0.852      0.403      -0.073       0.031\n",
       "207            0.0241      0.034      0.714      0.483      -0.046       0.094\n",
       "208           -0.0699      0.042     -1.659      0.111      -0.157       0.018\n",
       "572            0.0345      0.029      1.182      0.250      -0.026       0.095\n",
       "801            0.0148      0.033      0.448      0.659      -0.054       0.083\n",
       "805            0.0813      0.027      3.000      0.007       0.025       0.138\n",
       "808            0.0226      0.035      0.651      0.522      -0.049       0.095\n",
       "814            0.0117      0.025      0.469      0.644      -0.040       0.063\n",
       "842            0.0053      0.026      0.203      0.841      -0.049       0.060\n",
       "867           -0.0095      0.034     -0.280      0.782      -0.080       0.061\n",
       "983            0.0057      0.018      0.317      0.754      -0.031       0.043\n",
       "998           -0.0248      0.042     -0.598      0.556      -0.111       0.061\n",
       "1026           0.0170      0.011      1.504      0.147      -0.006       0.040\n",
       "1027          -0.0315      0.023     -1.370      0.185      -0.079       0.016\n",
       "1147          -0.0044      0.032     -0.136      0.893      -0.071       0.062\n",
       "1173           0.0110      0.034      0.319      0.752      -0.061       0.083\n",
       "1175           0.0015      0.050      0.029      0.977      -0.102       0.105\n",
       "1211          -0.0113      0.024     -0.467      0.645      -0.062       0.039\n",
       "1213          -0.0472      0.042     -1.116      0.277      -0.135       0.041\n",
       "1385          -0.0493      0.043     -1.155      0.261      -0.138       0.039\n",
       "1445           0.0032      0.033      0.096      0.924      -0.065       0.072\n",
       "1950           0.0118      0.023      0.507      0.617      -0.037       0.060\n",
       "1956          -0.0207      0.010     -2.088      0.049      -0.041      -0.000\n",
       "2060          -0.1096      0.052     -2.090      0.048      -0.218      -0.001\n",
       "2308           0.0061      0.021      0.298      0.769      -0.037       0.049\n",
       "2309           0.0169      0.027      0.615      0.545      -0.040       0.074\n",
       "2475          -0.0019      0.036     -0.054      0.957      -0.076       0.072\n",
       "2549           0.0047      0.024      0.196      0.846      -0.045       0.055\n",
       "2885          -0.0271      0.072     -0.378      0.709      -0.176       0.121\n",
       "2931           0.0249      0.045      0.552      0.587      -0.069       0.118\n",
       "3164          -0.0043      0.009     -0.454      0.654      -0.024       0.015\n",
       "3265          -0.0165      0.020     -0.815      0.424      -0.059       0.026\n",
       "3320          -0.0002      0.028     -0.007      0.994      -0.058       0.057\n",
       "3709           0.0107      0.025      0.426      0.674      -0.041       0.063\n",
       "3710          -0.0068      0.025     -0.278      0.784      -0.058       0.044\n",
       "3845           0.0211      0.026      0.796      0.435      -0.034       0.076\n",
       "4193           0.0029      0.013      0.218      0.829      -0.025       0.031\n",
       "4303          -0.0276      0.019     -1.427      0.168      -0.068       0.012\n",
       "4893           0.0211      0.028      0.753      0.460      -0.037       0.079\n",
       "5136          -0.0105      0.022     -0.466      0.646      -0.057       0.036\n",
       "5170           0.0493      0.043      1.140      0.267      -0.040       0.139\n",
       "5290           0.0312      0.051      0.610      0.548      -0.075       0.137\n",
       "5295           0.0059      0.016      0.369      0.715      -0.027       0.039\n",
       "5335           0.0182      0.025      0.722      0.478      -0.034       0.071\n",
       "5566           0.0261      0.035      0.738      0.468      -0.047       0.099\n",
       "5567          -0.0521      0.022     -2.371      0.027      -0.098      -0.007\n",
       "5573           0.0343      0.047      0.724      0.477      -0.064       0.133\n",
       "5575           0.0220      0.018      1.251      0.224      -0.014       0.058\n",
       "5576           0.0253      0.032      0.784      0.442      -0.042       0.092\n",
       "5577          -0.0081      0.011     -0.739      0.468      -0.031       0.015\n",
       "5578          -0.0108      0.021     -0.518      0.609      -0.054       0.033\n",
       "5580           0.0522      0.019      2.714      0.013       0.012       0.092\n",
       "5581          -0.0035      0.027     -0.132      0.896      -0.059       0.052\n",
       "5582          -0.0975      0.153     -0.637      0.531      -0.415       0.220\n",
       "5594          -0.0243      0.034     -0.714      0.483      -0.095       0.046\n",
       "5595           0.0219      0.018      1.201      0.243      -0.016       0.060\n",
       "5604          -0.0548      0.045     -1.231      0.231      -0.147       0.038\n",
       "5605          -0.0198      0.042     -0.471      0.643      -0.107       0.067\n",
       "5728           0.0233      0.025      0.939      0.358      -0.028       0.075\n",
       "5894          -0.0257      0.021     -1.216      0.237      -0.069       0.018\n",
       "6199           0.0482      0.036      1.332      0.196      -0.027       0.123\n",
       "6456          -0.0083      0.010     -0.844      0.408      -0.029       0.012\n",
       "6464          -0.0142      0.015     -0.923      0.366      -0.046       0.018\n",
       "6654           0.0373      0.032      1.178      0.252      -0.028       0.103\n",
       "6714          -0.0454      0.029     -1.579      0.129      -0.105       0.014\n",
       "6868          -0.0322      0.026     -1.261      0.220      -0.085       0.021\n",
       "7249          -0.0030      0.050     -0.060      0.953      -0.107       0.101\n",
       "7311           0.0187      0.039      0.480      0.636      -0.062       0.099\n",
       "7529           0.0140      0.033      0.427      0.674      -0.054       0.082\n",
       "8027          -0.0421      0.026     -1.641      0.115      -0.095       0.011\n",
       "8038          -0.0077      0.012     -0.620      0.542      -0.033       0.018\n",
       "9146           0.0875      0.045      1.952      0.064      -0.005       0.180\n",
       "10000         -0.0031      0.017     -0.184      0.856      -0.038       0.031\n",
       "10252         -0.0044      0.022     -0.205      0.840      -0.049       0.040\n",
       "10253         -0.0123      0.014     -0.861      0.398      -0.042       0.017\n",
       "10254         -0.0406      0.037     -1.092      0.286      -0.118       0.036\n",
       "11140         -0.0482      0.039     -1.249      0.225      -0.128       0.032\n",
       "23239          0.0444      0.022      2.051      0.052      -0.000       0.089\n",
       "26018         -0.0159      0.010     -1.601      0.124      -0.036       0.005\n",
       "29924         -0.0028      0.038     -0.073      0.942      -0.083       0.077\n",
       "30011          0.0035      0.011      0.316      0.755      -0.020       0.027\n",
       "55824          0.0007      0.022      0.033      0.974      -0.045       0.047\n",
       "57761         -0.0003      0.013     -0.020      0.984      -0.028       0.027\n",
       "58513          0.0410      0.043      0.946      0.354      -0.049       0.131\n",
       "64223         -0.1063      0.041     -2.564      0.018      -0.192      -0.020\n",
       "79109          0.0476      0.033      1.462      0.158      -0.020       0.115\n",
       "84335          0.0365      0.033      1.117      0.276      -0.031       0.104\n",
       "117145         0.0150      0.018      0.821      0.421      -0.023       0.053\n",
       "196883        -0.0293      0.029     -1.017      0.320      -0.089       0.030\n",
       "253260        -0.0124      0.029     -0.433      0.669      -0.072       0.047\n",
       "==============================================================================\n",
       "Omnibus:                        7.830   Durbin-Watson:                   2.262\n",
       "Prob(Omnibus):                  0.020   Jarque-Bera (JB):                8.086\n",
       "Skew:                           0.465   Prob(JB):                       0.0175\n",
       "Kurtosis:                       3.845   Cond. No.                     3.01e+03\n",
       "==============================================================================\n",
       "\n",
       "Warnings:\n",
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
       "[2] The condition number is large, 3.01e+03. This might indicate that there are\n",
       "strong multicollinearity or other numerical problems.\n",
       "\"\"\""
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X = BLCAEEGFR\n",
    "y = PredBLCA.detach().numpy()\n",
    "\n",
    "# Note the difference in argument order\n",
    "model = sm.OLS(y, X).fit()\n",
    "predictions = model.predict(X) # make the predictions by the model\n",
    "\n",
    "# Print out the statistics\n",
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(array([False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False]), array([  5.06840256,  47.60953626,  14.86290717,  81.05458141,\n",
      "        54.68706095,  31.41998312,  21.17450173,  10.13547604,\n",
      "        83.97572787,  16.15018927,  14.39431539,  40.7389554 ,\n",
      "        48.7622678 ,  11.25163908,  25.24933182,  66.53217609,\n",
      "         0.66653408,  52.68913068,  65.01624174,  84.97032154,\n",
      "        78.96788527,  76.19414517,  56.17863377,  14.82209503,\n",
      "        18.64538126,  90.23406241,  76.00057555,  98.65567388,\n",
      "        65.17566821,  27.92891398,  26.31132821,  93.33782611,\n",
      "        62.32498483,   4.90432369,   4.88402139,  77.6546395 ,\n",
      "        55.05906614,  96.68173076,  85.47561601,  71.6146374 ,\n",
      "        59.23824967,  66.09782514,  42.82351029, 100.43457182,\n",
      "        68.10069573,  79.14434754,  43.89125917,  83.76063748,\n",
      "        16.92229075,  46.42142942,  65.22891846,  26.92473721,\n",
      "        55.37319615,  72.25861436,  48.27420002,  47.27537543,\n",
      "         2.72175867,  48.17531057,  22.64234997,  44.59470743,\n",
      "        47.22090177,  61.55844715,   1.28104209,  90.54064326,\n",
      "        53.63107252,  48.75299788,  24.49973515,  23.37963417,\n",
      "        64.89529953,  36.14780333,  23.93290514,  19.82990307,\n",
      "        41.20579122,  36.98475504,  25.406636  ,  12.98970355,\n",
      "        22.25623324,  96.23507665,  64.22040675,  68.02830777,\n",
      "        11.62448639,  54.70568442,   6.43666445,  86.45827115,\n",
      "        84.79664747,  40.22874239,  28.93374255,  22.700239  ,\n",
      "         5.29036532,  12.49659856,  95.16948586,  76.25114624,\n",
      "        98.35586247,  99.42202676,  35.79864472,   1.78787595,\n",
      "        15.94299284,  27.89141517,  42.48126526,  32.32102167,\n",
      "        67.60312619]))\n"
     ]
    }
   ],
   "source": [
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "listEGFR = BRCAE.columns.intersection(lsEGFR)\n",
    "BRCAEEGFR = BRCAE[listEGFR]\n",
    "BRCAMEGFR = BRCAM[listEGFR]\n",
    "BRCACEGFR = BRCAC[listEGFR]  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table class=\"simpletable\">\n",
       "<caption>OLS Regression Results</caption>\n",
       "<tr>\n",
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.999</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.999</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   6893.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th>  <td>  0.00</td> \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Time:</th>                 <td>18:16:54</td>     <th>  Log-Likelihood:    </th> <td>  2466.9</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>No. Observations:</th>      <td>   921</td>      <th>  AIC:               </th> <td>  -4732.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Residuals:</th>          <td>   820</td>      <th>  BIC:               </th> <td>  -4245.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>102</th>    <td>    0.0011</td> <td>    0.002</td> <td>    0.672</td> <td> 0.502</td> <td>   -0.002</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>107</th>    <td>   -0.0011</td> <td>    0.000</td> <td>   -2.468</td> <td> 0.014</td> <td>   -0.002</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>108</th>    <td>   -0.0013</td> <td>    0.001</td> <td>   -2.000</td> <td> 0.046</td> <td>   -0.003</td> <td>-2.48e-05</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>109</th>    <td>    0.0002</td> <td>    0.001</td> <td>    0.183</td> <td> 0.855</td> <td>   -0.002</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>111</th>    <td>   -0.0003</td> <td>    0.001</td> <td>   -0.611</td> <td> 0.541</td> <td>   -0.001</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>112</th>    <td>    0.0015</td> <td>    0.001</td> <td>    1.019</td> <td> 0.309</td> <td>   -0.001</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>113</th>    <td>   -0.0020</td> <td>    0.001</td> <td>   -1.351</td> <td> 0.177</td> <td>   -0.005</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>114</th>    <td>   -0.0026</td> <td>    0.002</td> <td>   -1.382</td> <td> 0.167</td> <td>   -0.006</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>115</th>    <td>    0.0008</td> <td>    0.001</td> <td>    0.651</td> <td> 0.515</td> <td>   -0.002</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>160</th>    <td>    0.0067</td> <td>    0.002</td> <td>    3.978</td> <td> 0.000</td> <td>    0.003</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>161</th>    <td>   -0.0014</td> <td>    0.002</td> <td>   -0.675</td> <td> 0.500</td> <td>   -0.006</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>163</th>    <td> 5.495e-05</td> <td>    0.001</td> <td>    0.047</td> <td> 0.962</td> <td>   -0.002</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>207</th>    <td>    0.0028</td> <td>    0.002</td> <td>    1.587</td> <td> 0.113</td> <td>   -0.001</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>208</th>    <td>   -0.0023</td> <td>    0.002</td> <td>   -1.390</td> <td> 0.165</td> <td>   -0.006</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>572</th>    <td>   -0.0023</td> <td>    0.002</td> <td>   -1.392</td> <td> 0.164</td> <td>   -0.005</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>801</th>    <td>    0.0022</td> <td>    0.002</td> <td>    1.102</td> <td> 0.271</td> <td>   -0.002</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>805</th>    <td>    0.0063</td> <td>    0.002</td> <td>    3.883</td> <td> 0.000</td> <td>    0.003</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>808</th>    <td>    0.0019</td> <td>    0.002</td> <td>    0.939</td> <td> 0.348</td> <td>   -0.002</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>814</th>    <td>    0.0010</td> <td>    0.002</td> <td>    0.553</td> <td> 0.580</td> <td>   -0.003</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>842</th>    <td>   -0.0025</td> <td>    0.002</td> <td>   -1.337</td> <td> 0.182</td> <td>   -0.006</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>867</th>    <td>   -0.0038</td> <td>    0.002</td> <td>   -1.849</td> <td> 0.065</td> <td>   -0.008</td> <td>    0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>983</th>    <td> 1.694e-05</td> <td>    0.001</td> <td>    0.019</td> <td> 0.985</td> <td>   -0.002</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>998</th>    <td>    0.0023</td> <td>    0.002</td> <td>    0.999</td> <td> 0.318</td> <td>   -0.002</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1026</th>   <td>   -0.0022</td> <td>    0.001</td> <td>   -2.660</td> <td> 0.008</td> <td>   -0.004</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1027</th>   <td>    0.0012</td> <td>    0.001</td> <td>    1.529</td> <td> 0.127</td> <td>   -0.000</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1147</th>   <td>    0.0039</td> <td>    0.002</td> <td>    2.011</td> <td> 0.045</td> <td> 9.45e-05</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1173</th>   <td>   -0.0016</td> <td>    0.002</td> <td>   -0.793</td> <td> 0.428</td> <td>   -0.006</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1175</th>   <td>    0.0050</td> <td>    0.002</td> <td>    2.424</td> <td> 0.016</td> <td>    0.001</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1211</th>   <td>    0.0079</td> <td>    0.002</td> <td>    4.775</td> <td> 0.000</td> <td>    0.005</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1213</th>   <td>    0.0011</td> <td>    0.001</td> <td>    0.815</td> <td> 0.415</td> <td>   -0.002</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1385</th>   <td>   -0.0048</td> <td>    0.003</td> <td>   -1.743</td> <td> 0.082</td> <td>   -0.010</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1445</th>   <td>   -0.0036</td> <td>    0.002</td> <td>   -1.679</td> <td> 0.094</td> <td>   -0.008</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1950</th>   <td>    0.0010</td> <td>    0.001</td> <td>    1.717</td> <td> 0.086</td> <td>   -0.000</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1956</th>   <td>    0.0041</td> <td>    0.001</td> <td>    5.366</td> <td> 0.000</td> <td>    0.003</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2060</th>   <td>   -0.0023</td> <td>    0.002</td> <td>   -1.028</td> <td> 0.304</td> <td>   -0.007</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2308</th>   <td>   -0.0014</td> <td>    0.002</td> <td>   -0.885</td> <td> 0.377</td> <td>   -0.004</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2309</th>   <td>    0.0012</td> <td>    0.001</td> <td>    0.939</td> <td> 0.348</td> <td>   -0.001</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2475</th>   <td>   -0.0078</td> <td>    0.002</td> <td>   -3.325</td> <td> 0.001</td> <td>   -0.012</td> <td>   -0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2549</th>   <td>   -0.0019</td> <td>    0.001</td> <td>   -1.341</td> <td> 0.180</td> <td>   -0.005</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2885</th>   <td>   -0.0030</td> <td>    0.001</td> <td>   -2.034</td> <td> 0.042</td> <td>   -0.006</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2931</th>   <td>    0.0036</td> <td>    0.002</td> <td>    1.483</td> <td> 0.138</td> <td>   -0.001</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3164</th>   <td>    0.0007</td> <td>    0.001</td> <td>    1.135</td> <td> 0.257</td> <td>   -0.000</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3265</th>   <td>    0.0008</td> <td>    0.001</td> <td>    0.563</td> <td> 0.574</td> <td>   -0.002</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3320</th>   <td>    0.0034</td> <td>    0.001</td> <td>    2.468</td> <td> 0.014</td> <td>    0.001</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3709</th>   <td>    0.0015</td> <td>    0.001</td> <td>    2.033</td> <td> 0.042</td> <td> 5.28e-05</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3710</th>   <td>   -0.0004</td> <td>    0.001</td> <td>   -0.382</td> <td> 0.703</td> <td>   -0.003</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3845</th>   <td>    0.0007</td> <td>    0.001</td> <td>    0.462</td> <td> 0.644</td> <td>   -0.002</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4193</th>   <td>   -0.0005</td> <td>    0.001</td> <td>   -0.443</td> <td> 0.658</td> <td>   -0.003</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4303</th>   <td>    0.0018</td> <td>    0.001</td> <td>    1.436</td> <td> 0.151</td> <td>   -0.001</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4893</th>   <td>   -0.0007</td> <td>    0.001</td> <td>   -0.608</td> <td> 0.544</td> <td>   -0.003</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5136</th>   <td>   -0.0017</td> <td>    0.001</td> <td>   -1.156</td> <td> 0.248</td> <td>   -0.004</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5170</th>   <td>   -0.0022</td> <td>    0.002</td> <td>   -0.930</td> <td> 0.352</td> <td>   -0.007</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5290</th>   <td>    0.0066</td> <td>    0.002</td> <td>    3.998</td> <td> 0.000</td> <td>    0.003</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5295</th>   <td>   -0.0001</td> <td>    0.001</td> <td>   -0.152</td> <td> 0.880</td> <td>   -0.002</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5335</th>   <td>    0.0002</td> <td>    0.002</td> <td>    0.112</td> <td> 0.911</td> <td>   -0.003</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5566</th>   <td>    0.0012</td> <td>    0.002</td> <td>    0.609</td> <td> 0.543</td> <td>   -0.003</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5567</th>   <td>   -0.0010</td> <td>    0.001</td> <td>   -1.916</td> <td> 0.056</td> <td>   -0.002</td> <td> 2.43e-05</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5573</th>   <td>    0.0001</td> <td>    0.002</td> <td>    0.059</td> <td> 0.953</td> <td>   -0.004</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5575</th>   <td>   -0.0016</td> <td>    0.001</td> <td>   -1.479</td> <td> 0.139</td> <td>   -0.004</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5576</th>   <td>   -0.0040</td> <td>    0.002</td> <td>   -2.427</td> <td> 0.015</td> <td>   -0.007</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5577</th>   <td>    0.0004</td> <td>    0.001</td> <td>    0.627</td> <td> 0.531</td> <td>   -0.001</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5578</th>   <td>    0.0007</td> <td>    0.001</td> <td>    0.548</td> <td> 0.584</td> <td>   -0.002</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5580</th>   <td>   -0.0017</td> <td>    0.001</td> <td>   -1.395</td> <td> 0.163</td> <td>   -0.004</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5581</th>   <td>   -0.0006</td> <td>    0.001</td> <td>   -0.528</td> <td> 0.598</td> <td>   -0.003</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5582</th>   <td>   -0.0025</td> <td>    0.003</td> <td>   -0.781</td> <td> 0.435</td> <td>   -0.009</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5594</th>   <td>    0.0020</td> <td>    0.002</td> <td>    1.066</td> <td> 0.287</td> <td>   -0.002</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5595</th>   <td>    0.0047</td> <td>    0.002</td> <td>    2.902</td> <td> 0.004</td> <td>    0.002</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5604</th>   <td>    0.0006</td> <td>    0.002</td> <td>    0.311</td> <td> 0.756</td> <td>   -0.003</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5605</th>   <td>    0.0032</td> <td>    0.002</td> <td>    1.607</td> <td> 0.108</td> <td>   -0.001</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5728</th>   <td>    0.0010</td> <td>    0.001</td> <td>    0.796</td> <td> 0.426</td> <td>   -0.002</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5894</th>   <td>    0.0035</td> <td>    0.002</td> <td>    1.753</td> <td> 0.080</td> <td>   -0.000</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6199</th>   <td>   -0.0008</td> <td>    0.001</td> <td>   -0.650</td> <td> 0.516</td> <td>   -0.003</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6456</th>   <td>   -0.0005</td> <td>    0.001</td> <td>   -0.475</td> <td> 0.635</td> <td>   -0.003</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6464</th>   <td>    0.0028</td> <td>    0.002</td> <td>    1.581</td> <td> 0.114</td> <td>   -0.001</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6654</th>   <td>    0.0027</td> <td>    0.002</td> <td>    1.176</td> <td> 0.240</td> <td>   -0.002</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6714</th>   <td>   -0.0009</td> <td>    0.001</td> <td>   -0.742</td> <td> 0.458</td> <td>   -0.003</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6868</th>   <td>   -0.0019</td> <td>    0.002</td> <td>   -0.882</td> <td> 0.378</td> <td>   -0.006</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7249</th>   <td>    0.0059</td> <td>    0.002</td> <td>    2.794</td> <td> 0.005</td> <td>    0.002</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7311</th>   <td>    0.0100</td> <td>    0.002</td> <td>    5.557</td> <td> 0.000</td> <td>    0.006</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7529</th>   <td>    0.0060</td> <td>    0.002</td> <td>    3.019</td> <td> 0.003</td> <td>    0.002</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8027</th>   <td>    0.0003</td> <td>    0.002</td> <td>    0.130</td> <td> 0.897</td> <td>   -0.004</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8038</th>   <td>   -0.0018</td> <td>    0.001</td> <td>   -2.249</td> <td> 0.025</td> <td>   -0.003</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>9146</th>   <td>    0.0067</td> <td>    0.002</td> <td>    3.353</td> <td> 0.001</td> <td>    0.003</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10000</th>  <td>    0.0001</td> <td>    0.001</td> <td>    0.161</td> <td> 0.872</td> <td>   -0.001</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10252</th>  <td>    0.0016</td> <td>    0.001</td> <td>    1.389</td> <td> 0.165</td> <td>   -0.001</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10253</th>  <td>   -0.0026</td> <td>    0.001</td> <td>   -2.333</td> <td> 0.020</td> <td>   -0.005</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10254</th>  <td>    0.0004</td> <td>    0.002</td> <td>    0.148</td> <td> 0.883</td> <td>   -0.004</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>11140</th>  <td>    0.0034</td> <td>    0.002</td> <td>    1.598</td> <td> 0.111</td> <td>   -0.001</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>23239</th>  <td>   -0.0026</td> <td>    0.001</td> <td>   -2.319</td> <td> 0.021</td> <td>   -0.005</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>26018</th>  <td>    0.0004</td> <td>    0.001</td> <td>    0.409</td> <td> 0.683</td> <td>   -0.002</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>29924</th>  <td>    0.0002</td> <td>    0.002</td> <td>    0.084</td> <td> 0.933</td> <td>   -0.004</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>30011</th>  <td>    0.0027</td> <td>    0.001</td> <td>    1.923</td> <td> 0.055</td> <td>-5.67e-05</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>55824</th>  <td>   -0.0003</td> <td>    0.002</td> <td>   -0.210</td> <td> 0.833</td> <td>   -0.004</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>57761</th>  <td>    0.0011</td> <td>    0.001</td> <td>    1.311</td> <td> 0.190</td> <td>   -0.001</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>58513</th>  <td>   -0.0034</td> <td>    0.002</td> <td>   -1.942</td> <td> 0.052</td> <td>   -0.007</td> <td>  3.6e-05</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>64223</th>  <td>    0.0007</td> <td>    0.002</td> <td>    0.316</td> <td> 0.752</td> <td>   -0.004</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>79109</th>  <td>    0.0022</td> <td>    0.002</td> <td>    1.107</td> <td> 0.268</td> <td>   -0.002</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>84335</th>  <td>   -0.0057</td> <td>    0.002</td> <td>   -2.607</td> <td> 0.009</td> <td>   -0.010</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>117145</th> <td>    0.0032</td> <td>    0.001</td> <td>    2.624</td> <td> 0.009</td> <td>    0.001</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>196883</th> <td>   -0.0030</td> <td>    0.001</td> <td>   -2.182</td> <td> 0.029</td> <td>   -0.006</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>253260</th> <td>   -0.0004</td> <td>    0.002</td> <td>   -0.247</td> <td> 0.805</td> <td>   -0.004</td> <td>    0.003</td>\n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "  <th>Omnibus:</th>       <td>126.984</td> <th>  Durbin-Watson:     </th> <td>   1.943</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Prob(Omnibus):</th> <td> 0.000</td>  <th>  Jarque-Bera (JB):  </th> <td> 391.225</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Skew:</th>          <td> 0.676</td>  <th>  Prob(JB):          </th> <td>1.11e-85</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Kurtosis:</th>      <td> 5.893</td>  <th>  Cond. No.          </th> <td>    311.</td>\n",
       "</tr>\n",
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified."
      ],
      "text/plain": [
       "<class 'statsmodels.iolib.summary.Summary'>\n",
       "\"\"\"\n",
       "                            OLS Regression Results                            \n",
       "==============================================================================\n",
       "Dep. Variable:                      y   R-squared:                       0.999\n",
       "Model:                            OLS   Adj. R-squared:                  0.999\n",
       "Method:                 Least Squares   F-statistic:                     6893.\n",
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):               0.00\n",
       "Time:                        18:16:54   Log-Likelihood:                 2466.9\n",
       "No. Observations:                 921   AIC:                            -4732.\n",
       "Df Residuals:                     820   BIC:                            -4245.\n",
       "Df Model:                         101                                         \n",
       "Covariance Type:            nonrobust                                         \n",
       "==============================================================================\n",
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
       "------------------------------------------------------------------------------\n",
       "102            0.0011      0.002      0.672      0.502      -0.002       0.004\n",
       "107           -0.0011      0.000     -2.468      0.014      -0.002      -0.000\n",
       "108           -0.0013      0.001     -2.000      0.046      -0.003   -2.48e-05\n",
       "109            0.0002      0.001      0.183      0.855      -0.002       0.003\n",
       "111           -0.0003      0.001     -0.611      0.541      -0.001       0.001\n",
       "112            0.0015      0.001      1.019      0.309      -0.001       0.004\n",
       "113           -0.0020      0.001     -1.351      0.177      -0.005       0.001\n",
       "114           -0.0026      0.002     -1.382      0.167      -0.006       0.001\n",
       "115            0.0008      0.001      0.651      0.515      -0.002       0.003\n",
       "160            0.0067      0.002      3.978      0.000       0.003       0.010\n",
       "161           -0.0014      0.002     -0.675      0.500      -0.006       0.003\n",
       "163         5.495e-05      0.001      0.047      0.962      -0.002       0.002\n",
       "207            0.0028      0.002      1.587      0.113      -0.001       0.006\n",
       "208           -0.0023      0.002     -1.390      0.165      -0.006       0.001\n",
       "572           -0.0023      0.002     -1.392      0.164      -0.005       0.001\n",
       "801            0.0022      0.002      1.102      0.271      -0.002       0.006\n",
       "805            0.0063      0.002      3.883      0.000       0.003       0.010\n",
       "808            0.0019      0.002      0.939      0.348      -0.002       0.006\n",
       "814            0.0010      0.002      0.553      0.580      -0.003       0.004\n",
       "842           -0.0025      0.002     -1.337      0.182      -0.006       0.001\n",
       "867           -0.0038      0.002     -1.849      0.065      -0.008       0.000\n",
       "983         1.694e-05      0.001      0.019      0.985      -0.002       0.002\n",
       "998            0.0023      0.002      0.999      0.318      -0.002       0.007\n",
       "1026          -0.0022      0.001     -2.660      0.008      -0.004      -0.001\n",
       "1027           0.0012      0.001      1.529      0.127      -0.000       0.003\n",
       "1147           0.0039      0.002      2.011      0.045    9.45e-05       0.008\n",
       "1173          -0.0016      0.002     -0.793      0.428      -0.006       0.002\n",
       "1175           0.0050      0.002      2.424      0.016       0.001       0.009\n",
       "1211           0.0079      0.002      4.775      0.000       0.005       0.011\n",
       "1213           0.0011      0.001      0.815      0.415      -0.002       0.004\n",
       "1385          -0.0048      0.003     -1.743      0.082      -0.010       0.001\n",
       "1445          -0.0036      0.002     -1.679      0.094      -0.008       0.001\n",
       "1950           0.0010      0.001      1.717      0.086      -0.000       0.002\n",
       "1956           0.0041      0.001      5.366      0.000       0.003       0.006\n",
       "2060          -0.0023      0.002     -1.028      0.304      -0.007       0.002\n",
       "2308          -0.0014      0.002     -0.885      0.377      -0.004       0.002\n",
       "2309           0.0012      0.001      0.939      0.348      -0.001       0.004\n",
       "2475          -0.0078      0.002     -3.325      0.001      -0.012      -0.003\n",
       "2549          -0.0019      0.001     -1.341      0.180      -0.005       0.001\n",
       "2885          -0.0030      0.001     -2.034      0.042      -0.006      -0.000\n",
       "2931           0.0036      0.002      1.483      0.138      -0.001       0.008\n",
       "3164           0.0007      0.001      1.135      0.257      -0.000       0.002\n",
       "3265           0.0008      0.001      0.563      0.574      -0.002       0.004\n",
       "3320           0.0034      0.001      2.468      0.014       0.001       0.006\n",
       "3709           0.0015      0.001      2.033      0.042    5.28e-05       0.003\n",
       "3710          -0.0004      0.001     -0.382      0.703      -0.003       0.002\n",
       "3845           0.0007      0.001      0.462      0.644      -0.002       0.004\n",
       "4193          -0.0005      0.001     -0.443      0.658      -0.003       0.002\n",
       "4303           0.0018      0.001      1.436      0.151      -0.001       0.004\n",
       "4893          -0.0007      0.001     -0.608      0.544      -0.003       0.001\n",
       "5136          -0.0017      0.001     -1.156      0.248      -0.004       0.001\n",
       "5170          -0.0022      0.002     -0.930      0.352      -0.007       0.002\n",
       "5290           0.0066      0.002      3.998      0.000       0.003       0.010\n",
       "5295          -0.0001      0.001     -0.152      0.880      -0.002       0.002\n",
       "5335           0.0002      0.002      0.112      0.911      -0.003       0.004\n",
       "5566           0.0012      0.002      0.609      0.543      -0.003       0.005\n",
       "5567          -0.0010      0.001     -1.916      0.056      -0.002    2.43e-05\n",
       "5573           0.0001      0.002      0.059      0.953      -0.004       0.004\n",
       "5575          -0.0016      0.001     -1.479      0.139      -0.004       0.001\n",
       "5576          -0.0040      0.002     -2.427      0.015      -0.007      -0.001\n",
       "5577           0.0004      0.001      0.627      0.531      -0.001       0.002\n",
       "5578           0.0007      0.001      0.548      0.584      -0.002       0.003\n",
       "5580          -0.0017      0.001     -1.395      0.163      -0.004       0.001\n",
       "5581          -0.0006      0.001     -0.528      0.598      -0.003       0.002\n",
       "5582          -0.0025      0.003     -0.781      0.435      -0.009       0.004\n",
       "5594           0.0020      0.002      1.066      0.287      -0.002       0.006\n",
       "5595           0.0047      0.002      2.902      0.004       0.002       0.008\n",
       "5604           0.0006      0.002      0.311      0.756      -0.003       0.005\n",
       "5605           0.0032      0.002      1.607      0.108      -0.001       0.007\n",
       "5728           0.0010      0.001      0.796      0.426      -0.002       0.004\n",
       "5894           0.0035      0.002      1.753      0.080      -0.000       0.007\n",
       "6199          -0.0008      0.001     -0.650      0.516      -0.003       0.002\n",
       "6456          -0.0005      0.001     -0.475      0.635      -0.003       0.002\n",
       "6464           0.0028      0.002      1.581      0.114      -0.001       0.006\n",
       "6654           0.0027      0.002      1.176      0.240      -0.002       0.007\n",
       "6714          -0.0009      0.001     -0.742      0.458      -0.003       0.002\n",
       "6868          -0.0019      0.002     -0.882      0.378      -0.006       0.002\n",
       "7249           0.0059      0.002      2.794      0.005       0.002       0.010\n",
       "7311           0.0100      0.002      5.557      0.000       0.006       0.014\n",
       "7529           0.0060      0.002      3.019      0.003       0.002       0.010\n",
       "8027           0.0003      0.002      0.130      0.897      -0.004       0.004\n",
       "8038          -0.0018      0.001     -2.249      0.025      -0.003      -0.000\n",
       "9146           0.0067      0.002      3.353      0.001       0.003       0.011\n",
       "10000          0.0001      0.001      0.161      0.872      -0.001       0.002\n",
       "10252          0.0016      0.001      1.389      0.165      -0.001       0.004\n",
       "10253         -0.0026      0.001     -2.333      0.020      -0.005      -0.000\n",
       "10254          0.0004      0.002      0.148      0.883      -0.004       0.005\n",
       "11140          0.0034      0.002      1.598      0.111      -0.001       0.008\n",
       "23239         -0.0026      0.001     -2.319      0.021      -0.005      -0.000\n",
       "26018          0.0004      0.001      0.409      0.683      -0.002       0.002\n",
       "29924          0.0002      0.002      0.084      0.933      -0.004       0.004\n",
       "30011          0.0027      0.001      1.923      0.055   -5.67e-05       0.006\n",
       "55824         -0.0003      0.002     -0.210      0.833      -0.004       0.003\n",
       "57761          0.0011      0.001      1.311      0.190      -0.001       0.003\n",
       "58513         -0.0034      0.002     -1.942      0.052      -0.007     3.6e-05\n",
       "64223          0.0007      0.002      0.316      0.752      -0.004       0.005\n",
       "79109          0.0022      0.002      1.107      0.268      -0.002       0.006\n",
       "84335         -0.0057      0.002     -2.607      0.009      -0.010      -0.001\n",
       "117145         0.0032      0.001      2.624      0.009       0.001       0.006\n",
       "196883        -0.0030      0.001     -2.182      0.029      -0.006      -0.000\n",
       "253260        -0.0004      0.002     -0.247      0.805      -0.004       0.003\n",
       "==============================================================================\n",
       "Omnibus:                      126.984   Durbin-Watson:                   1.943\n",
       "Prob(Omnibus):                  0.000   Jarque-Bera (JB):              391.225\n",
       "Skew:                           0.676   Prob(JB):                     1.11e-85\n",
       "Kurtosis:                       5.893   Cond. No.                         311.\n",
       "==============================================================================\n",
       "\n",
       "Warnings:\n",
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
       "\"\"\""
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X = BRCAEEGFR\n",
    "y = PredBRCA.detach().numpy()\n",
    "\n",
    "# Note: statsmodels OLS takes (endog, exog), i.e. (y, X) -- the reverse of sklearn's fit(X, y) order\n",
    "model = sm.OLS(y, X).fit()\n",
    "predictions = model.predict(X) # make predictions with the fitted model\n",
    "\n",
    "# Print out the statistics\n",
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(array([False, False, False, False, False, False, False, False, False,\n",
      "        True, False, False, False, False, False, False,  True, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False,  True, False, False, False, False,  True, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False,  True, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False,  True, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False]), array([5.06825579e+01, 1.39415496e+00, 4.62728727e+00, 8.63416692e+01,\n",
      "       5.46703973e+01, 3.11693984e+01, 1.78878580e+01, 1.69076294e+01,\n",
      "       5.20628937e+01, 7.63775258e-03, 5.05012625e+01, 9.71747798e+01,\n",
      "       1.14047619e+01, 1.66547204e+01, 1.66002678e+01, 2.73452647e+01,\n",
      "       1.12394342e-02, 3.51638004e+01, 5.86190456e+01, 1.83319447e+01,\n",
      "       6.54695299e+00, 9.94635493e+01, 3.21067659e+01, 8.05601110e-01,\n",
      "       1.27822910e+01, 4.50467121e+00, 4.32100404e+01, 1.57037777e+00,\n",
      "       2.14790333e-04, 4.19184595e+01, 8.24890270e+00, 9.44769827e+00,\n",
      "       8.71708891e+00, 1.05952842e-05, 3.07063472e+01, 3.80319057e+01,\n",
      "       3.51583973e+01, 9.33166240e-02, 1.82235046e+01, 4.27457416e+00,\n",
      "       1.39790238e+01, 2.59382965e+01, 5.79606884e+01, 1.39245484e+00,\n",
      "       4.27958874e+00, 7.09755293e+01, 6.50415536e+01, 6.64791509e+01,\n",
      "       1.52878177e+01, 5.49098096e+01, 2.50604528e+01, 3.55940643e+01,\n",
      "       7.04010042e-03, 8.88385006e+01, 9.20001680e+01, 5.48126545e+01,\n",
      "       5.62914348e+00, 9.62605127e+01, 1.40816890e+01, 1.55929716e+00,\n",
      "       5.35888105e+01, 5.89525602e+01, 1.65058150e+01, 6.03911904e+01,\n",
      "       4.39569903e+01, 2.89789530e+01, 3.84153378e-01, 7.63121342e+01,\n",
      "       1.09480929e+01, 4.30656336e+01, 8.07928547e+00, 5.20798960e+01,\n",
      "       6.41025395e+01, 1.15472161e+01, 2.42427222e+01, 4.62966468e+01,\n",
      "       3.81851918e+01, 5.37540468e-01, 3.74789829e-06, 2.64334431e-01,\n",
      "       9.05906863e+01, 2.50371075e+00, 8.43106091e-02, 8.80570964e+01,\n",
      "       1.66905322e+01, 2.00775994e+00, 8.91590873e+01, 1.11621004e+01,\n",
      "       2.08549764e+00, 6.89553451e+01, 9.42277767e+01, 5.53765760e+00,\n",
      "       8.41771233e+01, 1.92219594e+01, 5.29466937e+00, 7.59562851e+01,\n",
      "       2.71121774e+01, 9.38706483e-01, 8.93055121e-01, 2.97120494e+00,\n",
      "       8.13250292e+01]))\n"
     ]
    }
   ],
   "source": [
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "listEGFR = PAADE.columns.intersection(lsEGFR)\n",
    "PAADEEGFR = PAADE[listEGFR]\n",
    "PAADMEGFR = PAADM[listEGFR]\n",
    "PAADCEGFR = PAADC[listEGFR]   "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table class=\"simpletable\">\n",
       "<caption>OLS Regression Results</caption>\n",
       "<tr>\n",
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.999</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.995</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   251.5</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th> <td>1.78e-29</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Time:</th>                 <td>18:17:00</td>     <th>  Log-Likelihood:    </th> <td>  364.21</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>No. Observations:</th>      <td>   130</td>      <th>  AIC:               </th> <td>  -526.4</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Residuals:</th>          <td>    29</td>      <th>  BIC:               </th> <td>  -236.8</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>102</th>    <td>   -0.0263</td> <td>    0.023</td> <td>   -1.162</td> <td> 0.255</td> <td>   -0.072</td> <td>    0.020</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>107</th>    <td>    0.0055</td> <td>    0.013</td> <td>    0.414</td> <td> 0.682</td> <td>   -0.022</td> <td>    0.032</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>108</th>    <td>   -0.0179</td> <td>    0.025</td> <td>   -0.717</td> <td> 0.479</td> <td>   -0.069</td> <td>    0.033</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>109</th>    <td>    0.0112</td> <td>    0.019</td> <td>    0.600</td> <td> 0.553</td> <td>   -0.027</td> <td>    0.049</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>111</th>    <td>    0.0037</td> <td>    0.013</td> <td>    0.282</td> <td> 0.780</td> <td>   -0.023</td> <td>    0.031</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>112</th>    <td>   -0.0120</td> <td>    0.023</td> <td>   -0.527</td> <td> 0.602</td> <td>   -0.058</td> <td>    0.034</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>113</th>    <td>    0.0143</td> <td>    0.014</td> <td>    1.031</td> <td> 0.311</td> <td>   -0.014</td> <td>    0.043</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>114</th>    <td>   -0.0118</td> <td>    0.039</td> <td>   -0.302</td> <td> 0.765</td> <td>   -0.092</td> <td>    0.068</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>115</th>    <td>    0.0008</td> <td>    0.036</td> <td>    0.022</td> <td> 0.983</td> <td>   -0.072</td> <td>    0.074</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>160</th>    <td>   -0.0357</td> <td>    0.032</td> <td>   -1.114</td> <td> 0.274</td> <td>   -0.101</td> <td>    0.030</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>161</th>    <td>   -0.0005</td> <td>    0.027</td> <td>   -0.017</td> <td> 0.987</td> <td>   -0.056</td> <td>    0.056</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>163</th>    <td>    0.0254</td> <td>    0.031</td> <td>    0.828</td> <td> 0.415</td> <td>   -0.037</td> <td>    0.088</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>207</th>    <td>    0.0099</td> <td>    0.031</td> <td>    0.323</td> <td> 0.749</td> <td>   -0.053</td> <td>    0.073</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>208</th>    <td>   -0.0149</td> <td>    0.016</td> <td>   -0.959</td> <td> 0.346</td> <td>   -0.047</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>572</th>    <td>    0.0064</td> <td>    0.024</td> <td>    0.267</td> <td> 0.791</td> <td>   -0.042</td> <td>    0.055</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>801</th>    <td>    0.0163</td> <td>    0.040</td> <td>    0.408</td> <td> 0.686</td> <td>   -0.065</td> <td>    0.098</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>805</th>    <td>    0.0394</td> <td>    0.032</td> <td>    1.232</td> <td> 0.228</td> <td>   -0.026</td> <td>    0.105</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>808</th>    <td>    0.0136</td> <td>    0.037</td> <td>    0.367</td> <td> 0.716</td> <td>   -0.062</td> <td>    0.089</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>814</th>    <td>   -0.0174</td> <td>    0.015</td> <td>   -1.129</td> <td> 0.268</td> <td>   -0.049</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>842</th>    <td>   -0.0067</td> <td>    0.021</td> <td>   -0.327</td> <td> 0.746</td> <td>   -0.049</td> <td>    0.035</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>867</th>    <td>   -0.0127</td> <td>    0.029</td> <td>   -0.442</td> <td> 0.661</td> <td>   -0.072</td> <td>    0.046</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>983</th>    <td>    0.0013</td> <td>    0.015</td> <td>    0.089</td> <td> 0.930</td> <td>   -0.029</td> <td>    0.032</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>998</th>    <td>   -0.0207</td> <td>    0.043</td> <td>   -0.482</td> <td> 0.633</td> <td>   -0.109</td> <td>    0.067</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1026</th>   <td>   -0.0120</td> <td>    0.014</td> <td>   -0.878</td> <td> 0.387</td> <td>   -0.040</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1027</th>   <td>    0.0094</td> <td>    0.025</td> <td>    0.382</td> <td> 0.705</td> <td>   -0.041</td> <td>    0.060</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1147</th>   <td>   -0.0039</td> <td>    0.035</td> <td>   -0.111</td> <td> 0.912</td> <td>   -0.075</td> <td>    0.068</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1173</th>   <td>    0.0050</td> <td>    0.041</td> <td>    0.121</td> <td> 0.905</td> <td>   -0.079</td> <td>    0.089</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1175</th>   <td>    0.0105</td> <td>    0.033</td> <td>    0.318</td> <td> 0.753</td> <td>   -0.057</td> <td>    0.078</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1211</th>   <td>   -0.0214</td> <td>    0.015</td> <td>   -1.443</td> <td> 0.160</td> <td>   -0.052</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1213</th>   <td>    0.0339</td> <td>    0.041</td> <td>    0.830</td> <td> 0.413</td> <td>   -0.050</td> <td>    0.117</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1385</th>   <td>   -0.0315</td> <td>    0.034</td> <td>   -0.915</td> <td> 0.367</td> <td>   -0.102</td> <td>    0.039</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1445</th>   <td>    0.0012</td> <td>    0.022</td> <td>    0.056</td> <td> 0.956</td> <td>   -0.044</td> <td>    0.046</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1950</th>   <td>  2.53e-05</td> <td>    0.011</td> <td>    0.002</td> <td> 0.998</td> <td>   -0.023</td> <td>    0.023</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1956</th>   <td>    0.0078</td> <td>    0.017</td> <td>    0.457</td> <td> 0.651</td> <td>   -0.027</td> <td>    0.043</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2060</th>   <td>    0.0024</td> <td>    0.044</td> <td>    0.054</td> <td> 0.958</td> <td>   -0.087</td> <td>    0.092</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2308</th>   <td>   -0.0160</td> <td>    0.020</td> <td>   -0.788</td> <td> 0.437</td> <td>   -0.057</td> <td>    0.025</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2309</th>   <td>    0.0524</td> <td>    0.019</td> <td>    2.699</td> <td> 0.011</td> <td>    0.013</td> <td>    0.092</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2475</th>   <td>   -0.0235</td> <td>    0.034</td> <td>   -0.688</td> <td> 0.497</td> <td>   -0.093</td> <td>    0.046</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2549</th>   <td>   -0.0339</td> <td>    0.028</td> <td>   -1.213</td> <td> 0.235</td> <td>   -0.091</td> <td>    0.023</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2885</th>   <td>    0.0448</td> <td>    0.041</td> <td>    1.094</td> <td> 0.283</td> <td>   -0.039</td> <td>    0.129</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2931</th>   <td>    0.0273</td> <td>    0.035</td> <td>    0.787</td> <td> 0.438</td> <td>   -0.044</td> <td>    0.098</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3164</th>   <td>    0.0046</td> <td>    0.006</td> <td>    0.796</td> <td> 0.433</td> <td>   -0.007</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3265</th>   <td>   -0.0047</td> <td>    0.023</td> <td>   -0.207</td> <td> 0.838</td> <td>   -0.051</td> <td>    0.042</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3320</th>   <td>    0.0023</td> <td>    0.023</td> <td>    0.100</td> <td> 0.921</td> <td>   -0.045</td> <td>    0.050</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3709</th>   <td>    0.0019</td> <td>    0.016</td> <td>    0.120</td> <td> 0.905</td> <td>   -0.030</td> <td>    0.034</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3710</th>   <td>    0.0056</td> <td>    0.017</td> <td>    0.323</td> <td> 0.749</td> <td>   -0.030</td> <td>    0.041</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3845</th>   <td>    0.0280</td> <td>    0.030</td> <td>    0.933</td> <td> 0.359</td> <td>   -0.033</td> <td>    0.090</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4193</th>   <td>    0.0034</td> <td>    0.016</td> <td>    0.206</td> <td> 0.838</td> <td>   -0.030</td> <td>    0.037</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4303</th>   <td>   -0.0149</td> <td>    0.020</td> <td>   -0.760</td> <td> 0.454</td> <td>   -0.055</td> <td>    0.025</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4893</th>   <td>   -0.0206</td> <td>    0.037</td> <td>   -0.560</td> <td> 0.579</td> <td>   -0.096</td> <td>    0.055</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5136</th>   <td>    0.0218</td> <td>    0.018</td> <td>    1.187</td> <td> 0.245</td> <td>   -0.016</td> <td>    0.059</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5170</th>   <td>    0.0066</td> <td>    0.041</td> <td>    0.160</td> <td> 0.874</td> <td>   -0.078</td> <td>    0.091</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5290</th>   <td>   -0.0134</td> <td>    0.039</td> <td>   -0.345</td> <td> 0.732</td> <td>   -0.093</td> <td>    0.066</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5295</th>   <td>    0.0044</td> <td>    0.022</td> <td>    0.196</td> <td> 0.846</td> <td>   -0.041</td> <td>    0.050</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5335</th>   <td>    0.0357</td> <td>    0.038</td> <td>    0.950</td> <td> 0.350</td> <td>   -0.041</td> <td>    0.113</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5566</th>   <td>    0.0261</td> <td>    0.033</td> <td>    0.804</td> <td> 0.428</td> <td>   -0.040</td> <td>    0.093</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5567</th>   <td>   -0.0081</td> <td>    0.014</td> <td>   -0.573</td> <td> 0.571</td> <td>   -0.037</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5573</th>   <td>   -0.0622</td> <td>    0.036</td> <td>   -1.748</td> <td> 0.091</td> <td>   -0.135</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5575</th>   <td>   -0.0046</td> <td>    0.018</td> <td>   -0.255</td> <td> 0.801</td> <td>   -0.042</td> <td>    0.032</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5576</th>   <td>    0.0045</td> <td>    0.022</td> <td>    0.205</td> <td> 0.839</td> <td>   -0.040</td> <td>    0.049</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5577</th>   <td>   -0.0068</td> <td>    0.012</td> <td>   -0.554</td> <td> 0.584</td> <td>   -0.032</td> <td>    0.018</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5578</th>   <td>   -0.0009</td> <td>    0.018</td> <td>   -0.047</td> <td> 0.963</td> <td>   -0.039</td> <td>    0.037</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5580</th>   <td>   -0.0330</td> <td>    0.019</td> <td>   -1.732</td> <td> 0.094</td> <td>   -0.072</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5581</th>   <td>   -0.0278</td> <td>    0.027</td> <td>   -1.015</td> <td> 0.319</td> <td>   -0.084</td> <td>    0.028</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5582</th>   <td>    0.0025</td> <td>    0.006</td> <td>    0.455</td> <td> 0.652</td> <td>   -0.009</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5594</th>   <td>   -0.0124</td> <td>    0.032</td> <td>   -0.386</td> <td> 0.702</td> <td>   -0.078</td> <td>    0.053</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5595</th>   <td>   -0.0193</td> <td>    0.024</td> <td>   -0.796</td> <td> 0.433</td> <td>   -0.069</td> <td>    0.030</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5604</th>   <td>   -0.0006</td> <td>    0.035</td> <td>   -0.018</td> <td> 0.986</td> <td>   -0.071</td> <td>    0.070</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5605</th>   <td>    0.0225</td> <td>    0.037</td> <td>    0.606</td> <td> 0.549</td> <td>   -0.053</td> <td>    0.098</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5728</th>   <td>   -0.0036</td> <td>    0.034</td> <td>   -0.106</td> <td> 0.917</td> <td>   -0.072</td> <td>    0.065</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5894</th>   <td>    0.0304</td> <td>    0.044</td> <td>    0.698</td> <td> 0.491</td> <td>   -0.059</td> <td>    0.120</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6199</th>   <td>   -0.0566</td> <td>    0.030</td> <td>   -1.915</td> <td> 0.065</td> <td>   -0.117</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6456</th>   <td>    0.0099</td> <td>    0.011</td> <td>    0.881</td> <td> 0.385</td> <td>   -0.013</td> <td>    0.033</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6464</th>   <td>   -0.0511</td> <td>    0.024</td> <td>   -2.113</td> <td> 0.043</td> <td>   -0.100</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6654</th>   <td>   -0.0389</td> <td>    0.034</td> <td>   -1.147</td> <td> 0.261</td> <td>   -0.108</td> <td>    0.030</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6714</th>   <td>    0.0157</td> <td>    0.018</td> <td>    0.861</td> <td> 0.396</td> <td>   -0.022</td> <td>    0.053</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6868</th>   <td>    0.0148</td> <td>    0.031</td> <td>    0.477</td> <td> 0.637</td> <td>   -0.049</td> <td>    0.078</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7249</th>   <td>    0.0824</td> <td>    0.044</td> <td>    1.858</td> <td> 0.073</td> <td>   -0.008</td> <td>    0.173</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7311</th>   <td>    0.0085</td> <td>    0.031</td> <td>    0.270</td> <td> 0.789</td> <td>   -0.056</td> <td>    0.073</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7529</th>   <td>   -0.0009</td> <td>    0.039</td> <td>   -0.023</td> <td> 0.982</td> <td>   -0.082</td> <td>    0.080</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8027</th>   <td>   -0.0035</td> <td>    0.027</td> <td>   -0.133</td> <td> 0.895</td> <td>   -0.058</td> <td>    0.051</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8038</th>   <td>    0.0074</td> <td>    0.009</td> <td>    0.807</td> <td> 0.426</td> <td>   -0.011</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>9146</th>   <td>    0.0232</td> <td>    0.033</td> <td>    0.706</td> <td> 0.486</td> <td>   -0.044</td> <td>    0.090</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10000</th>  <td>   -0.0273</td> <td>    0.024</td> <td>   -1.146</td> <td> 0.261</td> <td>   -0.076</td> <td>    0.021</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10252</th>  <td>    0.0124</td> <td>    0.015</td> <td>    0.815</td> <td> 0.422</td> <td>   -0.019</td> <td>    0.044</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10253</th>  <td>   -0.0064</td> <td>    0.016</td> <td>   -0.395</td> <td> 0.696</td> <td>   -0.040</td> <td>    0.027</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10254</th>  <td>    0.0475</td> <td>    0.034</td> <td>    1.379</td> <td> 0.178</td> <td>   -0.023</td> <td>    0.118</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>11140</th>  <td>    0.0289</td> <td>    0.038</td> <td>    0.753</td> <td> 0.458</td> <td>   -0.050</td> <td>    0.108</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>23239</th>  <td>    0.0138</td> <td>    0.026</td> <td>    0.539</td> <td> 0.594</td> <td>   -0.038</td> <td>    0.066</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>26018</th>  <td>    0.0022</td> <td>    0.015</td> <td>    0.148</td> <td> 0.884</td> <td>   -0.029</td> <td>    0.033</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>29924</th>  <td>    0.0047</td> <td>    0.033</td> <td>    0.143</td> <td> 0.887</td> <td>   -0.062</td> <td>    0.072</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>30011</th>  <td>    0.0248</td> <td>    0.022</td> <td>    1.143</td> <td> 0.262</td> <td>   -0.020</td> <td>    0.069</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>55824</th>  <td>    0.0005</td> <td>    0.020</td> <td>    0.025</td> <td> 0.980</td> <td>   -0.040</td> <td>    0.042</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>57761</th>  <td>   -0.0018</td> <td>    0.013</td> <td>   -0.137</td> <td> 0.892</td> <td>   -0.029</td> <td>    0.025</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>58513</th>  <td>   -0.0800</td> <td>    0.037</td> <td>   -2.155</td> <td> 0.040</td> <td>   -0.156</td> <td>   -0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>64223</th>  <td>   -0.0533</td> <td>    0.040</td> <td>   -1.339</td> <td> 0.191</td> <td>   -0.135</td> <td>    0.028</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>79109</th>  <td>    0.0152</td> <td>    0.033</td> <td>    0.457</td> <td> 0.651</td> <td>   -0.053</td> <td>    0.083</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>84335</th>  <td>   -0.0249</td> <td>    0.037</td> <td>   -0.670</td> <td> 0.508</td> <td>   -0.101</td> <td>    0.051</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>117145</th> <td>    0.0240</td> <td>    0.025</td> <td>    0.961</td> <td> 0.344</td> <td>   -0.027</td> <td>    0.075</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>196883</th> <td>   -0.0494</td> <td>    0.015</td> <td>   -3.296</td> <td> 0.003</td> <td>   -0.080</td> <td>   -0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>253260</th> <td>    0.0409</td> <td>    0.046</td> <td>    0.893</td> <td> 0.379</td> <td>   -0.053</td> <td>    0.135</td>\n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "  <th>Omnibus:</th>       <td> 0.520</td> <th>  Durbin-Watson:     </th> <td>   2.062</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Prob(Omnibus):</th> <td> 0.771</td> <th>  Jarque-Bera (JB):  </th> <td>   0.425</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Skew:</th>          <td> 0.140</td> <th>  Prob(JB):          </th> <td>   0.808</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Kurtosis:</th>      <td> 2.980</td> <th>  Cond. No.          </th> <td>1.84e+03</td>\n",
       "</tr>\n",
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.<br/>[2] The condition number is large, 1.84e+03. This might indicate that there are<br/>strong multicollinearity or other numerical problems."
      ],
      "text/plain": [
       "<class 'statsmodels.iolib.summary.Summary'>\n",
       "\"\"\"\n",
       "                            OLS Regression Results                            \n",
       "==============================================================================\n",
       "Dep. Variable:                      y   R-squared:                       0.999\n",
       "Model:                            OLS   Adj. R-squared:                  0.995\n",
       "Method:                 Least Squares   F-statistic:                     251.5\n",
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):           1.78e-29\n",
       "Time:                        18:17:00   Log-Likelihood:                 364.21\n",
       "No. Observations:                 130   AIC:                            -526.4\n",
       "Df Residuals:                      29   BIC:                            -236.8\n",
       "Df Model:                         101                                         \n",
       "Covariance Type:            nonrobust                                         \n",
       "==============================================================================\n",
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
       "------------------------------------------------------------------------------\n",
       "102           -0.0263      0.023     -1.162      0.255      -0.072       0.020\n",
       "107            0.0055      0.013      0.414      0.682      -0.022       0.032\n",
       "108           -0.0179      0.025     -0.717      0.479      -0.069       0.033\n",
       "109            0.0112      0.019      0.600      0.553      -0.027       0.049\n",
       "111            0.0037      0.013      0.282      0.780      -0.023       0.031\n",
       "112           -0.0120      0.023     -0.527      0.602      -0.058       0.034\n",
       "113            0.0143      0.014      1.031      0.311      -0.014       0.043\n",
       "114           -0.0118      0.039     -0.302      0.765      -0.092       0.068\n",
       "115            0.0008      0.036      0.022      0.983      -0.072       0.074\n",
       "160           -0.0357      0.032     -1.114      0.274      -0.101       0.030\n",
       "161           -0.0005      0.027     -0.017      0.987      -0.056       0.056\n",
       "163            0.0254      0.031      0.828      0.415      -0.037       0.088\n",
       "207            0.0099      0.031      0.323      0.749      -0.053       0.073\n",
       "208           -0.0149      0.016     -0.959      0.346      -0.047       0.017\n",
       "572            0.0064      0.024      0.267      0.791      -0.042       0.055\n",
       "801            0.0163      0.040      0.408      0.686      -0.065       0.098\n",
       "805            0.0394      0.032      1.232      0.228      -0.026       0.105\n",
       "808            0.0136      0.037      0.367      0.716      -0.062       0.089\n",
       "814           -0.0174      0.015     -1.129      0.268      -0.049       0.014\n",
       "842           -0.0067      0.021     -0.327      0.746      -0.049       0.035\n",
       "867           -0.0127      0.029     -0.442      0.661      -0.072       0.046\n",
       "983            0.0013      0.015      0.089      0.930      -0.029       0.032\n",
       "998           -0.0207      0.043     -0.482      0.633      -0.109       0.067\n",
       "1026          -0.0120      0.014     -0.878      0.387      -0.040       0.016\n",
       "1027           0.0094      0.025      0.382      0.705      -0.041       0.060\n",
       "1147          -0.0039      0.035     -0.111      0.912      -0.075       0.068\n",
       "1173           0.0050      0.041      0.121      0.905      -0.079       0.089\n",
       "1175           0.0105      0.033      0.318      0.753      -0.057       0.078\n",
       "1211          -0.0214      0.015     -1.443      0.160      -0.052       0.009\n",
       "1213           0.0339      0.041      0.830      0.413      -0.050       0.117\n",
       "1385          -0.0315      0.034     -0.915      0.367      -0.102       0.039\n",
       "1445           0.0012      0.022      0.056      0.956      -0.044       0.046\n",
       "1950         2.53e-05      0.011      0.002      0.998      -0.023       0.023\n",
       "1956           0.0078      0.017      0.457      0.651      -0.027       0.043\n",
       "2060           0.0024      0.044      0.054      0.958      -0.087       0.092\n",
       "2308          -0.0160      0.020     -0.788      0.437      -0.057       0.025\n",
       "2309           0.0524      0.019      2.699      0.011       0.013       0.092\n",
       "2475          -0.0235      0.034     -0.688      0.497      -0.093       0.046\n",
       "2549          -0.0339      0.028     -1.213      0.235      -0.091       0.023\n",
       "2885           0.0448      0.041      1.094      0.283      -0.039       0.129\n",
       "2931           0.0273      0.035      0.787      0.438      -0.044       0.098\n",
       "3164           0.0046      0.006      0.796      0.433      -0.007       0.016\n",
       "3265          -0.0047      0.023     -0.207      0.838      -0.051       0.042\n",
       "3320           0.0023      0.023      0.100      0.921      -0.045       0.050\n",
       "3709           0.0019      0.016      0.120      0.905      -0.030       0.034\n",
       "3710           0.0056      0.017      0.323      0.749      -0.030       0.041\n",
       "3845           0.0280      0.030      0.933      0.359      -0.033       0.090\n",
       "4193           0.0034      0.016      0.206      0.838      -0.030       0.037\n",
       "4303          -0.0149      0.020     -0.760      0.454      -0.055       0.025\n",
       "4893          -0.0206      0.037     -0.560      0.579      -0.096       0.055\n",
       "5136           0.0218      0.018      1.187      0.245      -0.016       0.059\n",
       "5170           0.0066      0.041      0.160      0.874      -0.078       0.091\n",
       "5290          -0.0134      0.039     -0.345      0.732      -0.093       0.066\n",
       "5295           0.0044      0.022      0.196      0.846      -0.041       0.050\n",
       "5335           0.0357      0.038      0.950      0.350      -0.041       0.113\n",
       "5566           0.0261      0.033      0.804      0.428      -0.040       0.093\n",
       "5567          -0.0081      0.014     -0.573      0.571      -0.037       0.021\n",
       "5573          -0.0622      0.036     -1.748      0.091      -0.135       0.011\n",
       "5575          -0.0046      0.018     -0.255      0.801      -0.042       0.032\n",
       "5576           0.0045      0.022      0.205      0.839      -0.040       0.049\n",
       "5577          -0.0068      0.012     -0.554      0.584      -0.032       0.018\n",
       "5578          -0.0009      0.018     -0.047      0.963      -0.039       0.037\n",
       "5580          -0.0330      0.019     -1.732      0.094      -0.072       0.006\n",
       "5581          -0.0278      0.027     -1.015      0.319      -0.084       0.028\n",
       "5582           0.0025      0.006      0.455      0.652      -0.009       0.014\n",
       "5594          -0.0124      0.032     -0.386      0.702      -0.078       0.053\n",
       "5595          -0.0193      0.024     -0.796      0.433      -0.069       0.030\n",
       "5604          -0.0006      0.035     -0.018      0.986      -0.071       0.070\n",
       "5605           0.0225      0.037      0.606      0.549      -0.053       0.098\n",
       "5728          -0.0036      0.034     -0.106      0.917      -0.072       0.065\n",
       "5894           0.0304      0.044      0.698      0.491      -0.059       0.120\n",
       "6199          -0.0566      0.030     -1.915      0.065      -0.117       0.004\n",
       "6456           0.0099      0.011      0.881      0.385      -0.013       0.033\n",
       "6464          -0.0511      0.024     -2.113      0.043      -0.100      -0.002\n",
       "6654          -0.0389      0.034     -1.147      0.261      -0.108       0.030\n",
       "6714           0.0157      0.018      0.861      0.396      -0.022       0.053\n",
       "6868           0.0148      0.031      0.477      0.637      -0.049       0.078\n",
       "7249           0.0824      0.044      1.858      0.073      -0.008       0.173\n",
       "7311           0.0085      0.031      0.270      0.789      -0.056       0.073\n",
       "7529          -0.0009      0.039     -0.023      0.982      -0.082       0.080\n",
       "8027          -0.0035      0.027     -0.133      0.895      -0.058       0.051\n",
       "8038           0.0074      0.009      0.807      0.426      -0.011       0.026\n",
       "9146           0.0232      0.033      0.706      0.486      -0.044       0.090\n",
       "10000         -0.0273      0.024     -1.146      0.261      -0.076       0.021\n",
       "10252          0.0124      0.015      0.815      0.422      -0.019       0.044\n",
       "10253         -0.0064      0.016     -0.395      0.696      -0.040       0.027\n",
       "10254          0.0475      0.034      1.379      0.178      -0.023       0.118\n",
       "11140          0.0289      0.038      0.753      0.458      -0.050       0.108\n",
       "23239          0.0138      0.026      0.539      0.594      -0.038       0.066\n",
       "26018          0.0022      0.015      0.148      0.884      -0.029       0.033\n",
       "29924          0.0047      0.033      0.143      0.887      -0.062       0.072\n",
       "30011          0.0248      0.022      1.143      0.262      -0.020       0.069\n",
       "55824          0.0005      0.020      0.025      0.980      -0.040       0.042\n",
       "57761         -0.0018      0.013     -0.137      0.892      -0.029       0.025\n",
       "58513         -0.0800      0.037     -2.155      0.040      -0.156      -0.004\n",
       "64223         -0.0533      0.040     -1.339      0.191      -0.135       0.028\n",
       "79109          0.0152      0.033      0.457      0.651      -0.053       0.083\n",
       "84335         -0.0249      0.037     -0.670      0.508      -0.101       0.051\n",
       "117145         0.0240      0.025      0.961      0.344      -0.027       0.075\n",
       "196883        -0.0494      0.015     -3.296      0.003      -0.080      -0.019\n",
       "253260         0.0409      0.046      0.893      0.379      -0.053       0.135\n",
       "==============================================================================\n",
       "Omnibus:                        0.520   Durbin-Watson:                   2.062\n",
       "Prob(Omnibus):                  0.771   Jarque-Bera (JB):                0.425\n",
       "Skew:                           0.140   Prob(JB):                        0.808\n",
       "Kurtosis:                       2.980   Cond. No.                     1.84e+03\n",
       "==============================================================================\n",
       "\n",
       "Warnings:\n",
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
       "[2] The condition number is large, 1.84e+03. This might indicate that there are\n",
       "strong multicollinearity or other numerical problems.\n",
       "\"\"\""
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
    "source": [
     "# Fit an ordinary least squares model that regresses the network's PAAD\n",
     "# predictions (PredPAAD, a torch tensor detached to numpy) on the\n",
     "# EGFR-gene expression matrix PAADEEGFR built earlier in the notebook.\n",
     "# NOTE(review): no sm.add_constant(X) — the model is fit without an\n",
     "# intercept; presumably intentional, verify before interpreting R-squared.\n",
     "X = PAADEEGFR\n",
     "y = PredPAAD.detach().numpy()\n",
     "\n",
     "# Note the difference in argument order\n",
     "model = sm.OLS(y, X).fit()\n",
     "predictions = model.predict(X) # make the predictions by the model (in-sample; not used below)\n",
     "\n",
     "# Print out the statistics\n",
     "model.summary()"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(array([False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False]), array([ 25.71654309,  68.88518558,  48.36811448,  55.88769998,\n",
      "        78.77410576,  60.81762683,  31.41328896,  77.27234893,\n",
      "        99.26457938,  27.69865112,  99.63968843,  41.88315265,\n",
      "        75.66797196,  34.90617989,  79.8933991 ,  69.30354072,\n",
      "        23.00476997,  72.33422978,  27.07452713,  75.35095038,\n",
      "        66.80529175,  93.88769455,  63.96140325,  39.08209479,\n",
      "        71.23367866,  92.15867969,  91.37186718,  76.01882484,\n",
      "        16.14111894,  41.75472905,  37.11689276,  96.5460688 ,\n",
      "       100.82224759,  65.73660253,  96.71260657,  44.13402207,\n",
      "         1.15992322,  50.19810301,  23.73852166,  28.57323444,\n",
      "        44.22337651,  43.70695825,  84.62151046,  92.99084428,\n",
      "        91.44853418,  75.6610565 ,  36.23271223,  84.62829129,\n",
      "        45.80441445,  58.5278806 ,  24.72551896,  88.26889264,\n",
      "        73.97259467,  85.44107636,  35.35726029,  43.22913637,\n",
      "        57.65320828,   9.1996007 ,  80.86879718,  84.77120041,\n",
      "        58.95560148,  97.25785942,   9.48626631,  32.18986414,\n",
      "        65.87683843,  70.92028156,  43.70345735,  99.56588916,\n",
      "        55.4642251 ,  92.57057856,  49.57156958,   6.60461621,\n",
      "        38.91499157,   4.37977584,  26.3190754 ,  40.00277853,\n",
      "        64.33396747,   7.40933999,  79.70251399,  99.14239031,\n",
      "        90.41462196,  43.03320452,  49.04616013,  26.37560835,\n",
      "        42.58791361,  70.27675668,  18.01385415,  46.21681982,\n",
      "        59.96442188,  89.24362686,  89.57925236,  26.51049056,\n",
      "        99.00283406,  90.1284706 ,   4.00007563,  19.29804105,\n",
      "        65.75781376,  51.32626421,  34.78151666,   0.26216152,\n",
      "        38.29707195]))\n"
     ]
    }
   ],
    "source": [
     "# Bonferroni-correct the OLS coefficient p-values at alpha=0.05.\n",
     "# Prints a (reject-mask, corrected-p-values) tuple. The recorded output\n",
     "# shows corrected values above 1 (e.g. ~100), so the helper apparently\n",
     "# multiplies by the number of tests without clipping at 1.0 —\n",
     "# TODO confirm against the bonferroni_correction implementation.\n",
     "print(bonferroni_correction(model.pvalues, alpha=0.05))"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
    "source": [
     "# Restrict the three LUAD data matrices to the EGFR gene list:\n",
     "# keep only the columns present in both LUADE.columns and lsEGFR.\n",
     "# Naming suggests E = expression, M = methylation, C = copy number —\n",
     "# TODO confirm against the cells that load LUADE/LUADM/LUADC.\n",
     "# The three frames are assumed to share the same column set as LUADE;\n",
     "# a missing gene in LUADM/LUADC would raise a KeyError here.\n",
     "listEGFR = LUADE.columns.intersection(lsEGFR)\n",
     "LUADEEGFR = LUADE[listEGFR]\n",
     "LUADMEGFR = LUADM[listEGFR]\n",
     "LUADCEGFR = LUADC[listEGFR]"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table class=\"simpletable\">\n",
       "<caption>OLS Regression Results</caption>\n",
       "<tr>\n",
       "  <th>Dep. Variable:</th>            <td>y</td>        <th>  R-squared:         </th> <td>   0.998</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Model:</th>                   <td>OLS</td>       <th>  Adj. R-squared:    </th> <td>   0.998</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Method:</th>             <td>Least Squares</td>  <th>  F-statistic:       </th> <td>   1895.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Date:</th>             <td>Sat, 12 Jan 2019</td> <th>  Prob (F-statistic):</th>  <td>  0.00</td> \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Time:</th>                 <td>18:17:07</td>     <th>  Log-Likelihood:    </th> <td>  1160.5</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>No. Observations:</th>      <td>   475</td>      <th>  AIC:               </th> <td>  -2119.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Residuals:</th>          <td>   374</td>      <th>  BIC:               </th> <td>  -1699.</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Df Model:</th>              <td>   101</td>      <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Covariance Type:</th>      <td>nonrobust</td>    <th>                     </th>     <td> </td>   \n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "     <td></td>       <th>coef</th>     <th>std err</th>      <th>t</th>      <th>P>|t|</th>  <th>[0.025</th>    <th>0.975]</th>  \n",
       "</tr>\n",
       "<tr>\n",
       "  <th>102</th>    <td>    0.0028</td> <td>    0.003</td> <td>    0.825</td> <td> 0.410</td> <td>   -0.004</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>107</th>    <td>   -0.0021</td> <td>    0.002</td> <td>   -1.161</td> <td> 0.247</td> <td>   -0.006</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>108</th>    <td>    0.0013</td> <td>    0.003</td> <td>    0.412</td> <td> 0.681</td> <td>   -0.005</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>109</th>    <td>    0.0013</td> <td>    0.003</td> <td>    0.524</td> <td> 0.600</td> <td>   -0.004</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>111</th>    <td>   -0.0006</td> <td>    0.002</td> <td>   -0.411</td> <td> 0.681</td> <td>   -0.004</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>112</th>    <td>    0.0020</td> <td>    0.003</td> <td>    0.771</td> <td> 0.441</td> <td>   -0.003</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>113</th>    <td>   -0.0034</td> <td>    0.002</td> <td>   -1.652</td> <td> 0.099</td> <td>   -0.007</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>114</th>    <td>   -0.0046</td> <td>    0.004</td> <td>   -1.172</td> <td> 0.242</td> <td>   -0.012</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>115</th>    <td>    0.0020</td> <td>    0.003</td> <td>    0.798</td> <td> 0.426</td> <td>   -0.003</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>160</th>    <td>    0.0059</td> <td>    0.005</td> <td>    1.221</td> <td> 0.223</td> <td>   -0.004</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>161</th>    <td>   -0.0005</td> <td>    0.004</td> <td>   -0.128</td> <td> 0.898</td> <td>   -0.007</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>163</th>    <td>    0.0038</td> <td>    0.003</td> <td>    1.226</td> <td> 0.221</td> <td>   -0.002</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>207</th>    <td>   -0.0066</td> <td>    0.004</td> <td>   -1.705</td> <td> 0.089</td> <td>   -0.014</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>208</th>    <td>    0.0001</td> <td>    0.004</td> <td>    0.033</td> <td> 0.974</td> <td>   -0.008</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>572</th>    <td>   -0.0035</td> <td>    0.003</td> <td>   -1.009</td> <td> 0.314</td> <td>   -0.010</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>801</th>    <td>   -0.0123</td> <td>    0.004</td> <td>   -3.097</td> <td> 0.002</td> <td>   -0.020</td> <td>   -0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>805</th>    <td>    0.0078</td> <td>    0.004</td> <td>    2.175</td> <td> 0.030</td> <td>    0.001</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>808</th>    <td>    0.0009</td> <td>    0.005</td> <td>    0.193</td> <td> 0.847</td> <td>   -0.008</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>814</th>    <td>   -0.0018</td> <td>    0.004</td> <td>   -0.500</td> <td> 0.617</td> <td>   -0.009</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>842</th>    <td>    0.0039</td> <td>    0.004</td> <td>    1.104</td> <td> 0.270</td> <td>   -0.003</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>867</th>    <td>    0.0006</td> <td>    0.004</td> <td>    0.139</td> <td> 0.890</td> <td>   -0.008</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>983</th>    <td>    0.0038</td> <td>    0.002</td> <td>    1.848</td> <td> 0.065</td> <td>   -0.000</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>998</th>    <td>    0.0167</td> <td>    0.005</td> <td>    3.572</td> <td> 0.000</td> <td>    0.008</td> <td>    0.026</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1026</th>   <td>   -0.0011</td> <td>    0.002</td> <td>   -0.589</td> <td> 0.556</td> <td>   -0.005</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1027</th>   <td>    0.0030</td> <td>    0.003</td> <td>    1.039</td> <td> 0.300</td> <td>   -0.003</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1147</th>   <td>    0.0067</td> <td>    0.005</td> <td>    1.436</td> <td> 0.152</td> <td>   -0.002</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1173</th>   <td>    0.0047</td> <td>    0.004</td> <td>    1.070</td> <td> 0.285</td> <td>   -0.004</td> <td>    0.013</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1175</th>   <td>   -0.0014</td> <td>    0.005</td> <td>   -0.270</td> <td> 0.787</td> <td>   -0.012</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1211</th>   <td>    0.0088</td> <td>    0.003</td> <td>    2.973</td> <td> 0.003</td> <td>    0.003</td> <td>    0.015</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1213</th>   <td>   -0.0055</td> <td>    0.004</td> <td>   -1.259</td> <td> 0.209</td> <td>   -0.014</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1385</th>   <td>   -0.0124</td> <td>    0.005</td> <td>   -2.316</td> <td> 0.021</td> <td>   -0.023</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1445</th>   <td>   -0.0057</td> <td>    0.004</td> <td>   -1.319</td> <td> 0.188</td> <td>   -0.014</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1950</th>   <td>    0.0016</td> <td>    0.001</td> <td>    1.287</td> <td> 0.199</td> <td>   -0.001</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>1956</th>   <td>    0.0073</td> <td>    0.001</td> <td>    5.165</td> <td> 0.000</td> <td>    0.005</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2060</th>   <td>   -0.0115</td> <td>    0.005</td> <td>   -2.353</td> <td> 0.019</td> <td>   -0.021</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2308</th>   <td>    0.0014</td> <td>    0.003</td> <td>    0.494</td> <td> 0.621</td> <td>   -0.004</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2309</th>   <td>    0.0060</td> <td>    0.003</td> <td>    1.801</td> <td> 0.072</td> <td>   -0.001</td> <td>    0.013</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2475</th>   <td>   -0.0080</td> <td>    0.005</td> <td>   -1.582</td> <td> 0.114</td> <td>   -0.018</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2549</th>   <td>   -0.0035</td> <td>    0.003</td> <td>   -1.021</td> <td> 0.308</td> <td>   -0.010</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2885</th>   <td>    0.0081</td> <td>    0.006</td> <td>    1.414</td> <td> 0.158</td> <td>   -0.003</td> <td>    0.019</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>2931</th>   <td>    0.0025</td> <td>    0.005</td> <td>    0.479</td> <td> 0.632</td> <td>   -0.008</td> <td>    0.013</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3164</th>   <td>    0.0004</td> <td>    0.001</td> <td>    0.398</td> <td> 0.691</td> <td>   -0.002</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3265</th>   <td>    0.0018</td> <td>    0.003</td> <td>    0.574</td> <td> 0.566</td> <td>   -0.004</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3320</th>   <td>    0.0070</td> <td>    0.003</td> <td>    2.097</td> <td> 0.037</td> <td>    0.000</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3709</th>   <td>   -0.0031</td> <td>    0.002</td> <td>   -1.262</td> <td> 0.208</td> <td>   -0.008</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3710</th>   <td>    0.0024</td> <td>    0.002</td> <td>    1.164</td> <td> 0.245</td> <td>   -0.002</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>3845</th>   <td>   -0.0020</td> <td>    0.002</td> <td>   -0.814</td> <td> 0.416</td> <td>   -0.007</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4193</th>   <td>    0.0026</td> <td>    0.002</td> <td>    1.360</td> <td> 0.175</td> <td>   -0.001</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4303</th>   <td>   -0.0058</td> <td>    0.003</td> <td>   -2.072</td> <td> 0.039</td> <td>   -0.011</td> <td>   -0.000</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>4893</th>   <td>   -0.0029</td> <td>    0.003</td> <td>   -0.855</td> <td> 0.393</td> <td>   -0.010</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5136</th>   <td>   -0.0007</td> <td>    0.002</td> <td>   -0.346</td> <td> 0.729</td> <td>   -0.005</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5170</th>   <td>   -0.0020</td> <td>    0.005</td> <td>   -0.365</td> <td> 0.715</td> <td>   -0.013</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5290</th>   <td>   -0.0005</td> <td>    0.005</td> <td>   -0.108</td> <td> 0.914</td> <td>   -0.010</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5295</th>   <td>    0.0006</td> <td>    0.003</td> <td>    0.179</td> <td> 0.858</td> <td>   -0.006</td> <td>    0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5335</th>   <td>    0.0037</td> <td>    0.003</td> <td>    1.104</td> <td> 0.270</td> <td>   -0.003</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5566</th>   <td>   -0.0012</td> <td>    0.006</td> <td>   -0.208</td> <td> 0.835</td> <td>   -0.012</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5567</th>   <td>   -0.0006</td> <td>    0.002</td> <td>   -0.311</td> <td> 0.756</td> <td>   -0.004</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5573</th>   <td>    0.0005</td> <td>    0.004</td> <td>    0.108</td> <td> 0.914</td> <td>   -0.008</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5575</th>   <td>    0.0013</td> <td>    0.002</td> <td>    0.627</td> <td> 0.531</td> <td>   -0.003</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5576</th>   <td>   -0.0134</td> <td>    0.003</td> <td>   -3.897</td> <td> 0.000</td> <td>   -0.020</td> <td>   -0.007</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5577</th>   <td>    0.0011</td> <td>    0.001</td> <td>    0.742</td> <td> 0.459</td> <td>   -0.002</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5578</th>   <td>   -0.0005</td> <td>    0.002</td> <td>   -0.249</td> <td> 0.804</td> <td>   -0.004</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5580</th>   <td>    0.0077</td> <td>    0.003</td> <td>    2.264</td> <td> 0.024</td> <td>    0.001</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5581</th>   <td>    0.0029</td> <td>    0.003</td> <td>    0.860</td> <td> 0.391</td> <td>   -0.004</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5582</th>   <td>    0.0045</td> <td>    0.004</td> <td>    1.147</td> <td> 0.252</td> <td>   -0.003</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5594</th>   <td>    0.0058</td> <td>    0.003</td> <td>    1.729</td> <td> 0.085</td> <td>   -0.001</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5595</th>   <td>   -0.0001</td> <td>    0.003</td> <td>   -0.038</td> <td> 0.970</td> <td>   -0.006</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5604</th>   <td>    0.0019</td> <td>    0.003</td> <td>    0.543</td> <td> 0.588</td> <td>   -0.005</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5605</th>   <td>    0.0131</td> <td>    0.004</td> <td>    2.988</td> <td> 0.003</td> <td>    0.004</td> <td>    0.022</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5728</th>   <td>   -0.0003</td> <td>    0.004</td> <td>   -0.063</td> <td> 0.950</td> <td>   -0.008</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>5894</th>   <td>   -0.0067</td> <td>    0.005</td> <td>   -1.381</td> <td> 0.168</td> <td>   -0.016</td> <td>    0.003</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6199</th>   <td>    0.0016</td> <td>    0.004</td> <td>    0.425</td> <td> 0.671</td> <td>   -0.006</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6456</th>   <td>   -0.0059</td> <td>    0.002</td> <td>   -2.729</td> <td> 0.007</td> <td>   -0.010</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6464</th>   <td>    0.0003</td> <td>    0.003</td> <td>    0.114</td> <td> 0.909</td> <td>   -0.005</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6654</th>   <td>    0.0022</td> <td>    0.005</td> <td>    0.410</td> <td> 0.682</td> <td>   -0.008</td> <td>    0.013</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6714</th>   <td>   -0.0010</td> <td>    0.003</td> <td>   -0.366</td> <td> 0.715</td> <td>   -0.006</td> <td>    0.004</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>6868</th>   <td>    0.0093</td> <td>    0.003</td> <td>    2.710</td> <td> 0.007</td> <td>    0.003</td> <td>    0.016</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7249</th>   <td>    0.0050</td> <td>    0.005</td> <td>    1.030</td> <td> 0.304</td> <td>   -0.005</td> <td>    0.014</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7311</th>   <td>    0.0098</td> <td>    0.004</td> <td>    2.696</td> <td> 0.007</td> <td>    0.003</td> <td>    0.017</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>7529</th>   <td>   -0.0035</td> <td>    0.005</td> <td>   -0.746</td> <td> 0.456</td> <td>   -0.013</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8027</th>   <td>    0.0026</td> <td>    0.004</td> <td>    0.731</td> <td> 0.465</td> <td>   -0.004</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>8038</th>   <td>   -0.0044</td> <td>    0.001</td> <td>   -2.945</td> <td> 0.003</td> <td>   -0.007</td> <td>   -0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>9146</th>   <td>    0.0017</td> <td>    0.005</td> <td>    0.339</td> <td> 0.734</td> <td>   -0.008</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10000</th>  <td>    0.0011</td> <td>    0.002</td> <td>    0.525</td> <td> 0.600</td> <td>   -0.003</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10252</th>  <td>    0.0010</td> <td>    0.002</td> <td>    0.430</td> <td> 0.667</td> <td>   -0.004</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10253</th>  <td>   -0.0067</td> <td>    0.002</td> <td>   -2.954</td> <td> 0.003</td> <td>   -0.011</td> <td>   -0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>10254</th>  <td>   -0.0002</td> <td>    0.005</td> <td>   -0.035</td> <td> 0.972</td> <td>   -0.009</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>11140</th>  <td>    0.0005</td> <td>    0.005</td> <td>    0.101</td> <td> 0.920</td> <td>   -0.010</td> <td>    0.011</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>23239</th>  <td>    0.0006</td> <td>    0.002</td> <td>    0.252</td> <td> 0.801</td> <td>   -0.004</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>26018</th>  <td>    0.0047</td> <td>    0.002</td> <td>    2.039</td> <td> 0.042</td> <td>    0.000</td> <td>    0.009</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>29924</th>  <td>    0.0014</td> <td>    0.004</td> <td>    0.323</td> <td> 0.747</td> <td>   -0.007</td> <td>    0.010</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>30011</th>  <td>   -0.0027</td> <td>    0.002</td> <td>   -1.093</td> <td> 0.275</td> <td>   -0.007</td> <td>    0.002</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>55824</th>  <td>    0.0011</td> <td>    0.002</td> <td>    0.521</td> <td> 0.602</td> <td>   -0.003</td> <td>    0.005</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>57761</th>  <td>   -0.0023</td> <td>    0.002</td> <td>   -1.336</td> <td> 0.182</td> <td>   -0.006</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>58513</th>  <td>   -0.0008</td> <td>    0.005</td> <td>   -0.177</td> <td> 0.860</td> <td>   -0.010</td> <td>    0.008</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>64223</th>  <td>    0.0029</td> <td>    0.005</td> <td>    0.623</td> <td> 0.534</td> <td>   -0.006</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>79109</th>  <td>   -0.0021</td> <td>    0.004</td> <td>   -0.504</td> <td> 0.615</td> <td>   -0.010</td> <td>    0.006</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>84335</th>  <td>   -0.0087</td> <td>    0.005</td> <td>   -1.845</td> <td> 0.066</td> <td>   -0.018</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>117145</th> <td>    0.0064</td> <td>    0.003</td> <td>    2.461</td> <td> 0.014</td> <td>    0.001</td> <td>    0.012</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>196883</th> <td>   -0.0042</td> <td>    0.003</td> <td>   -1.559</td> <td> 0.120</td> <td>   -0.010</td> <td>    0.001</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>253260</th> <td>    0.0034</td> <td>    0.003</td> <td>    1.329</td> <td> 0.185</td> <td>   -0.002</td> <td>    0.009</td>\n",
       "</tr>\n",
       "</table>\n",
       "<table class=\"simpletable\">\n",
       "<tr>\n",
       "  <th>Omnibus:</th>       <td>43.558</td> <th>  Durbin-Watson:     </th> <td>   2.088</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Prob(Omnibus):</th> <td> 0.000</td> <th>  Jarque-Bera (JB):  </th> <td>  91.380</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Skew:</th>          <td> 0.522</td> <th>  Prob(JB):          </th> <td>1.44e-20</td>\n",
       "</tr>\n",
       "<tr>\n",
       "  <th>Kurtosis:</th>      <td> 4.878</td> <th>  Cond. No.          </th> <td>    366.</td>\n",
       "</tr>\n",
       "</table><br/><br/>Warnings:<br/>[1] Standard Errors assume that the covariance matrix of the errors is correctly specified."
      ],
      "text/plain": [
       "<class 'statsmodels.iolib.summary.Summary'>\n",
       "\"\"\"\n",
       "                            OLS Regression Results                            \n",
       "==============================================================================\n",
       "Dep. Variable:                      y   R-squared:                       0.998\n",
       "Model:                            OLS   Adj. R-squared:                  0.998\n",
       "Method:                 Least Squares   F-statistic:                     1895.\n",
       "Date:                Sat, 12 Jan 2019   Prob (F-statistic):               0.00\n",
       "Time:                        18:17:07   Log-Likelihood:                 1160.5\n",
       "No. Observations:                 475   AIC:                            -2119.\n",
       "Df Residuals:                     374   BIC:                            -1699.\n",
       "Df Model:                         101                                         \n",
       "Covariance Type:            nonrobust                                         \n",
       "==============================================================================\n",
       "                 coef    std err          t      P>|t|      [0.025      0.975]\n",
       "------------------------------------------------------------------------------\n",
       "102            0.0028      0.003      0.825      0.410      -0.004       0.010\n",
       "107           -0.0021      0.002     -1.161      0.247      -0.006       0.001\n",
       "108            0.0013      0.003      0.412      0.681      -0.005       0.007\n",
       "109            0.0013      0.003      0.524      0.600      -0.004       0.006\n",
       "111           -0.0006      0.002     -0.411      0.681      -0.004       0.002\n",
       "112            0.0020      0.003      0.771      0.441      -0.003       0.007\n",
       "113           -0.0034      0.002     -1.652      0.099      -0.007       0.001\n",
       "114           -0.0046      0.004     -1.172      0.242      -0.012       0.003\n",
       "115            0.0020      0.003      0.798      0.426      -0.003       0.007\n",
       "160            0.0059      0.005      1.221      0.223      -0.004       0.015\n",
       "161           -0.0005      0.004     -0.128      0.898      -0.007       0.007\n",
       "163            0.0038      0.003      1.226      0.221      -0.002       0.010\n",
       "207           -0.0066      0.004     -1.705      0.089      -0.014       0.001\n",
       "208            0.0001      0.004      0.033      0.974      -0.008       0.008\n",
       "572           -0.0035      0.003     -1.009      0.314      -0.010       0.003\n",
       "801           -0.0123      0.004     -3.097      0.002      -0.020      -0.004\n",
       "805            0.0078      0.004      2.175      0.030       0.001       0.015\n",
       "808            0.0009      0.005      0.193      0.847      -0.008       0.010\n",
       "814           -0.0018      0.004     -0.500      0.617      -0.009       0.005\n",
       "842            0.0039      0.004      1.104      0.270      -0.003       0.011\n",
       "867            0.0006      0.004      0.139      0.890      -0.008       0.009\n",
       "983            0.0038      0.002      1.848      0.065      -0.000       0.008\n",
       "998            0.0167      0.005      3.572      0.000       0.008       0.026\n",
       "1026          -0.0011      0.002     -0.589      0.556      -0.005       0.003\n",
       "1027           0.0030      0.003      1.039      0.300      -0.003       0.009\n",
       "1147           0.0067      0.005      1.436      0.152      -0.002       0.016\n",
       "1173           0.0047      0.004      1.070      0.285      -0.004       0.013\n",
       "1175          -0.0014      0.005     -0.270      0.787      -0.012       0.009\n",
       "1211           0.0088      0.003      2.973      0.003       0.003       0.015\n",
       "1213          -0.0055      0.004     -1.259      0.209      -0.014       0.003\n",
       "1385          -0.0124      0.005     -2.316      0.021      -0.023      -0.002\n",
       "1445          -0.0057      0.004     -1.319      0.188      -0.014       0.003\n",
       "1950           0.0016      0.001      1.287      0.199      -0.001       0.004\n",
       "1956           0.0073      0.001      5.165      0.000       0.005       0.010\n",
       "2060          -0.0115      0.005     -2.353      0.019      -0.021      -0.002\n",
       "2308           0.0014      0.003      0.494      0.621      -0.004       0.007\n",
       "2309           0.0060      0.003      1.801      0.072      -0.001       0.013\n",
       "2475          -0.0080      0.005     -1.582      0.114      -0.018       0.002\n",
       "2549          -0.0035      0.003     -1.021      0.308      -0.010       0.003\n",
       "2885           0.0081      0.006      1.414      0.158      -0.003       0.019\n",
       "2931           0.0025      0.005      0.479      0.632      -0.008       0.013\n",
       "3164           0.0004      0.001      0.398      0.691      -0.002       0.002\n",
       "3265           0.0018      0.003      0.574      0.566      -0.004       0.008\n",
       "3320           0.0070      0.003      2.097      0.037       0.000       0.014\n",
       "3709          -0.0031      0.002     -1.262      0.208      -0.008       0.002\n",
       "3710           0.0024      0.002      1.164      0.245      -0.002       0.006\n",
       "3845          -0.0020      0.002     -0.814      0.416      -0.007       0.003\n",
       "4193           0.0026      0.002      1.360      0.175      -0.001       0.006\n",
       "4303          -0.0058      0.003     -2.072      0.039      -0.011      -0.000\n",
       "4893          -0.0029      0.003     -0.855      0.393      -0.010       0.004\n",
       "5136          -0.0007      0.002     -0.346      0.729      -0.005       0.003\n",
       "5170          -0.0020      0.005     -0.365      0.715      -0.013       0.009\n",
       "5290          -0.0005      0.005     -0.108      0.914      -0.010       0.009\n",
       "5295           0.0006      0.003      0.179      0.858      -0.006       0.007\n",
       "5335           0.0037      0.003      1.104      0.270      -0.003       0.010\n",
       "5566          -0.0012      0.006     -0.208      0.835      -0.012       0.010\n",
       "5567          -0.0006      0.002     -0.311      0.756      -0.004       0.003\n",
       "5573           0.0005      0.004      0.108      0.914      -0.008       0.009\n",
       "5575           0.0013      0.002      0.627      0.531      -0.003       0.005\n",
       "5576          -0.0134      0.003     -3.897      0.000      -0.020      -0.007\n",
       "5577           0.0011      0.001      0.742      0.459      -0.002       0.004\n",
       "5578          -0.0005      0.002     -0.249      0.804      -0.004       0.003\n",
       "5580           0.0077      0.003      2.264      0.024       0.001       0.014\n",
       "5581           0.0029      0.003      0.860      0.391      -0.004       0.009\n",
       "5582           0.0045      0.004      1.147      0.252      -0.003       0.012\n",
       "5594           0.0058      0.003      1.729      0.085      -0.001       0.012\n",
       "5595          -0.0001      0.003     -0.038      0.970      -0.006       0.006\n",
       "5604           0.0019      0.003      0.543      0.588      -0.005       0.009\n",
       "5605           0.0131      0.004      2.988      0.003       0.004       0.022\n",
       "5728          -0.0003      0.004     -0.063      0.950      -0.008       0.008\n",
       "5894          -0.0067      0.005     -1.381      0.168      -0.016       0.003\n",
       "6199           0.0016      0.004      0.425      0.671      -0.006       0.009\n",
       "6456          -0.0059      0.002     -2.729      0.007      -0.010      -0.002\n",
       "6464           0.0003      0.003      0.114      0.909      -0.005       0.006\n",
       "6654           0.0022      0.005      0.410      0.682      -0.008       0.013\n",
       "6714          -0.0010      0.003     -0.366      0.715      -0.006       0.004\n",
       "6868           0.0093      0.003      2.710      0.007       0.003       0.016\n",
       "7249           0.0050      0.005      1.030      0.304      -0.005       0.014\n",
       "7311           0.0098      0.004      2.696      0.007       0.003       0.017\n",
       "7529          -0.0035      0.005     -0.746      0.456      -0.013       0.006\n",
       "8027           0.0026      0.004      0.731      0.465      -0.004       0.010\n",
       "8038          -0.0044      0.001     -2.945      0.003      -0.007      -0.001\n",
       "9146           0.0017      0.005      0.339      0.734      -0.008       0.011\n",
       "10000          0.0011      0.002      0.525      0.600      -0.003       0.005\n",
       "10252          0.0010      0.002      0.430      0.667      -0.004       0.006\n",
       "10253         -0.0067      0.002     -2.954      0.003      -0.011      -0.002\n",
       "10254         -0.0002      0.005     -0.035      0.972      -0.009       0.009\n",
       "11140          0.0005      0.005      0.101      0.920      -0.010       0.011\n",
       "23239          0.0006      0.002      0.252      0.801      -0.004       0.005\n",
       "26018          0.0047      0.002      2.039      0.042       0.000       0.009\n",
       "29924          0.0014      0.004      0.323      0.747      -0.007       0.010\n",
       "30011         -0.0027      0.002     -1.093      0.275      -0.007       0.002\n",
       "55824          0.0011      0.002      0.521      0.602      -0.003       0.005\n",
       "57761         -0.0023      0.002     -1.336      0.182      -0.006       0.001\n",
       "58513         -0.0008      0.005     -0.177      0.860      -0.010       0.008\n",
       "64223          0.0029      0.005      0.623      0.534      -0.006       0.012\n",
       "79109         -0.0021      0.004     -0.504      0.615      -0.010       0.006\n",
       "84335         -0.0087      0.005     -1.845      0.066      -0.018       0.001\n",
       "117145         0.0064      0.003      2.461      0.014       0.001       0.012\n",
       "196883        -0.0042      0.003     -1.559      0.120      -0.010       0.001\n",
       "253260         0.0034      0.003      1.329      0.185      -0.002       0.009\n",
       "==============================================================================\n",
       "Omnibus:                       43.558   Durbin-Watson:                   2.088\n",
       "Prob(Omnibus):                  0.000   Jarque-Bera (JB):               91.380\n",
       "Skew:                           0.522   Prob(JB):                     1.44e-20\n",
       "Kurtosis:                       4.878   Cond. No.                         366.\n",
       "==============================================================================\n",
       "\n",
       "Warnings:\n",
       "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n",
       "\"\"\""
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Fit an ordinary least squares regression of the network's LUAD predictions\n",
    "# on the EGFR feature matrix, then display the full coefficient summary.\n",
    "# NOTE(review): X (LUADEEGFR) and y (PredLUAD, a torch tensor) are defined in\n",
    "# earlier cells not shown here -- this cell is not runnable standalone.\n",
    "X = LUADEEGFR\n",
    "y = PredLUAD.detach().numpy()\n",
    "\n",
    "# Note the difference in argument order: sm.OLS takes (endog, exog), i.e. (y, X),\n",
    "# unlike sklearn's fit(X, y). No intercept is added (no sm.add_constant).\n",
    "model = sm.OLS(y, X).fit()\n",
    "predictions = model.predict(X) # make the predictions by the model (in-sample)\n",
    "\n",
    "# Print out the statistics (coefficient table, R-squared, diagnostics)\n",
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(array([False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False,  True, False, False, False, False,\n",
      "       False, False, False, False, False, False,  True, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False,  True, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False, False, False, False, False, False, False, False,\n",
      "       False, False]), array([4.14015235e+01, 2.48985067e+01, 6.87655871e+01, 6.06407957e+01,\n",
      "       6.88273009e+01, 4.45728983e+01, 1.00290905e+01, 2.44236844e+01,\n",
      "       4.29890891e+01, 2.25237889e+01, 9.06980328e+01, 2.23338545e+01,\n",
      "       8.98385247e+00, 9.83573120e+01, 3.16968699e+01, 2.12121420e-01,\n",
      "       3.05570011e+00, 8.55424721e+01, 6.23448783e+01, 2.72870372e+01,\n",
      "       8.98633410e+01, 6.60488342e+00, 4.04124456e-02, 5.61667950e+01,\n",
      "       3.02544778e+01, 1.53460327e+01, 2.88183240e+01, 7.95045115e+01,\n",
      "       3.17136654e-01, 2.10836580e+01, 2.13318505e+00, 1.89824840e+01,\n",
      "       2.01032828e+01, 3.95510978e-05, 1.93330041e+00, 6.27577203e+01,\n",
      "       7.31548083e+00, 1.15576665e+01, 3.10839496e+01, 1.59815035e+01,\n",
      "       6.38765252e+01, 6.98100511e+01, 5.71721313e+01, 3.70384856e+00,\n",
      "       2.09784933e+01, 2.47574544e+01, 4.20584879e+01, 1.76263456e+01,\n",
      "       3.93798091e+00, 3.97200959e+01, 7.36691578e+01, 7.22146703e+01,\n",
      "       9.23540721e+01, 8.66810315e+01, 2.73081316e+01, 8.43743365e+01,\n",
      "       7.63168572e+01, 9.22877149e+01, 5.36144076e+01, 1.16774259e-02,\n",
      "       4.63226333e+01, 8.11664996e+01, 2.43831571e+00, 3.94516497e+01,\n",
      "       2.54481798e+01, 8.54272406e+00, 9.79257383e+01, 5.93442425e+01,\n",
      "       3.02023351e-01, 9.59097960e+01, 1.69827761e+01, 6.77638632e+01,\n",
      "       6.71173946e-01, 9.18373294e+01, 6.88643524e+01, 7.21909359e+01,\n",
      "       7.11695781e-01, 3.06722593e+01, 7.40430399e-01, 4.60684526e+01,\n",
      "       4.69928395e+01, 3.46394030e-01, 7.41824356e+01, 6.06084806e+01,\n",
      "       6.73821895e+01, 3.37003178e-01, 9.81702523e+01, 9.29017562e+01,\n",
      "       8.09271567e+01, 4.26041583e+00, 7.54604810e+01, 2.77902496e+01,\n",
      "       6.08401844e+01, 1.84323634e+01, 8.68103068e+01, 5.38850645e+01,\n",
      "       6.21034660e+01, 6.64966356e+00, 1.44584009e+00, 1.21155444e+01,\n",
      "       1.86469613e+01]))\n"
     ]
    }
   ],
   "source": [
    "# Apply a Bonferroni multiple-testing correction to the OLS coefficient p-values\n",
    "# and print (reject_mask, corrected_pvalues).\n",
    "# NOTE(review): bonferroni_correction is imported in an earlier cell (not\n",
    "# statsmodels' multipletests). The printed corrected values exceed 1 (e.g. 41.4),\n",
    "# so they appear to be unclipped p * n_tests -- confirm against the helper's\n",
    "# definition; statsmodels would clip these at 1.\n",
    "print(bonferroni_correction(model.pvalues, alpha=0.05))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}