Switch to side-by-side view

--- a
+++ b/DEMO/DeepDTA_Reproduce_KIBA.ipynb
@@ -0,0 +1,652 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "# Move up to the repository root so the DeepPurpose package is importable\n",
+    "# when this notebook is run from the DEMO/ directory.\n",
+    "os.chdir('../')\n",
+    "\n",
+    "import DeepPurpose.DTI as models\n",
+    "# Import only the names this notebook actually uses (instead of wildcard\n",
+    "# imports) so readers can see where each function comes from.\n",
+    "from DeepPurpose.utils import data_process, generate_config\n",
+    "from DeepPurpose.dataset import load_process_KIBA"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Beginning Processing...\n",
+      "Beginning to extract zip file...\n",
+      "Done!\n",
+      "in total: 118254 drug-target pairs\n",
+      "encoding drug...\n",
+      "unique drugs: 2068\n",
+      "drug encoding finished...\n",
+      "encoding protein...\n",
+      "unique target sequence: 229\n",
+      "protein encoding finished...\n",
+      "splitting dataset...\n",
+      "Done.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Download and preprocess the KIBA benchmark; binary=False requests the\n",
+    "# non-binarized labels (regression setting, per the cell output: 118254 pairs).\n",
+    "X_drug, X_target, y = load_process_KIBA('./data/', binary=False)\n",
+    "\n",
+    "# DeepDTA encodes both drugs and protein targets with CNN encoders.\n",
+    "drug_encoding = 'CNN'\n",
+    "target_encoding = 'CNN'\n",
+    "# Random 70/10/20 train/val/test split of the encoded pairs.\n",
+    "train, val, test = data_process(X_drug, X_target, y, \n",
+    "                                drug_encoding, target_encoding, \n",
+    "                                split_method='random',frac=[0.7,0.1,0.2])\n",
+    "\n",
+    "# use the parameters setting provided in the paper: https://arxiv.org/abs/1801.10193\n",
+    "# (filter counts / kernel sizes below mirror the DeepDTA architecture).\n",
+    "config = generate_config(drug_encoding = drug_encoding, \n",
+    "                         target_encoding = target_encoding, \n",
+    "                         cls_hidden_dims = [1024,1024,512], \n",
+    "                         train_epoch = 100, \n",
+    "                         LR = 0.001, \n",
+    "                         batch_size = 256,\n",
+    "                         cnn_drug_filters = [32,64,96],\n",
+    "                         cnn_target_filters = [32,64,96],\n",
+    "                         cnn_drug_kernels = [4,6,8],\n",
+    "                         cnn_target_kernels = [4,8,12]\n",
+    "                        )"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Let's use 1 GPU/s!\n",
+      "--- Data Preparation ---\n",
+      "--- Go for Training ---\n",
+      "Training at Epoch 1 iteration 0 with loss 139.46227\n",
+      "Training at Epoch 1 iteration 100 with loss 0.8524759\n",
+      "Training at Epoch 1 iteration 200 with loss 0.7659789\n",
+      "Training at Epoch 1 iteration 300 with loss 0.77633584\n",
+      "Validation at Epoch 1 , MSE: 0.6252079559108886 , Pearson Correlation: 0.37120514588057757 with p-value: 0.0 , Concordance Index: 0.6476415418668936\n",
+      "Training at Epoch 2 iteration 0 with loss 0.6057545\n",
+      "Training at Epoch 2 iteration 100 with loss 0.7182188\n",
+      "Training at Epoch 2 iteration 200 with loss 0.5523121\n",
+      "Training at Epoch 2 iteration 300 with loss 0.7289833\n",
+      "Validation at Epoch 2 , MSE: 0.5057249716094536 , Pearson Correlation: 0.5271302441704545 with p-value: 0.0 , Concordance Index: 0.7108944244194332\n",
+      "Training at Epoch 3 iteration 0 with loss 0.3336088\n",
+      "Training at Epoch 3 iteration 100 with loss 0.55122924\n",
+      "Training at Epoch 3 iteration 200 with loss 0.9139983\n",
+      "Training at Epoch 3 iteration 300 with loss 0.3553281\n",
+      "Validation at Epoch 3 , MSE: 0.43898317224256905 , Pearson Correlation: 0.6078014781089629 with p-value: 0.0 , Concordance Index: 0.7420012120873392\n",
+      "Training at Epoch 4 iteration 0 with loss 0.6549053\n",
+      "Training at Epoch 4 iteration 100 with loss 0.28032207\n",
+      "Training at Epoch 4 iteration 200 with loss 0.5554817\n",
+      "Training at Epoch 4 iteration 300 with loss 0.4001999\n",
+      "Validation at Epoch 4 , MSE: 0.517115460522088 , Pearson Correlation: 0.6388425484955205 with p-value: 0.0 , Concordance Index: 0.7548923258982349\n",
+      "Training at Epoch 5 iteration 0 with loss 0.3249922\n",
+      "Training at Epoch 5 iteration 100 with loss 0.49228954\n",
+      "Training at Epoch 5 iteration 200 with loss 0.53248477\n",
+      "Training at Epoch 5 iteration 300 with loss 0.35868204\n",
+      "Validation at Epoch 5 , MSE: 0.43980806543009365 , Pearson Correlation: 0.6566629620181142 with p-value: 0.0 , Concordance Index: 0.7574936859223567\n",
+      "Training at Epoch 6 iteration 0 with loss 0.33518466\n",
+      "Training at Epoch 6 iteration 100 with loss 0.4694142\n",
+      "Training at Epoch 6 iteration 200 with loss 0.43182057\n",
+      "Training at Epoch 6 iteration 300 with loss 0.27362764\n",
+      "Validation at Epoch 6 , MSE: 0.40906901873856194 , Pearson Correlation: 0.6642595221495057 with p-value: 0.0 , Concordance Index: 0.7633581128907642\n",
+      "Training at Epoch 7 iteration 0 with loss 0.4240064\n",
+      "Training at Epoch 7 iteration 100 with loss 0.579832\n",
+      "Training at Epoch 7 iteration 200 with loss 0.29981527\n",
+      "Training at Epoch 7 iteration 300 with loss 0.37393552\n",
+      "Validation at Epoch 7 , MSE: 0.4669870538496263 , Pearson Correlation: 0.6709961444282361 with p-value: 0.0 , Concordance Index: 0.7629225786099979\n",
+      "Training at Epoch 8 iteration 0 with loss 0.49253836\n",
+      "Training at Epoch 8 iteration 100 with loss 0.43154156\n",
+      "Training at Epoch 8 iteration 200 with loss 0.8942315\n",
+      "Training at Epoch 8 iteration 300 with loss 0.36026978\n",
+      "Validation at Epoch 8 , MSE: 0.40576572643883924 , Pearson Correlation: 0.6698314683974463 with p-value: 0.0 , Concordance Index: 0.7633775503707003\n",
+      "Training at Epoch 9 iteration 0 with loss 0.54117924\n",
+      "Training at Epoch 9 iteration 100 with loss 0.4534474\n",
+      "Training at Epoch 9 iteration 200 with loss 0.2908664\n",
+      "Training at Epoch 9 iteration 300 with loss 0.40684277\n",
+      "Validation at Epoch 9 , MSE: 0.3866416384830573 , Pearson Correlation: 0.6740064110006907 with p-value: 0.0 , Concordance Index: 0.7670906313072465\n",
+      "Training at Epoch 10 iteration 0 with loss 0.5149473\n",
+      "Training at Epoch 10 iteration 100 with loss 0.3061192\n",
+      "Training at Epoch 10 iteration 200 with loss 0.39455894\n",
+      "Training at Epoch 10 iteration 300 with loss 0.41524413\n",
+      "Validation at Epoch 10 , MSE: 0.5004661130110899 , Pearson Correlation: 0.6746381122340862 with p-value: 0.0 , Concordance Index: 0.7636888073994913\n",
+      "Training at Epoch 11 iteration 0 with loss 0.50837636\n",
+      "Training at Epoch 11 iteration 100 with loss 0.34029388\n",
+      "Training at Epoch 11 iteration 200 with loss 0.4000504\n",
+      "Training at Epoch 11 iteration 300 with loss 0.43241504\n",
+      "Validation at Epoch 11 , MSE: 0.687117328399177 , Pearson Correlation: 0.6775114338921054 with p-value: 0.0 , Concordance Index: 0.768135395853816\n",
+      "Training at Epoch 12 iteration 0 with loss 0.61384964\n",
+      "Training at Epoch 12 iteration 100 with loss 0.4489369\n",
+      "Training at Epoch 12 iteration 200 with loss 0.41723198\n",
+      "Training at Epoch 12 iteration 300 with loss 0.52659756\n",
+      "Validation at Epoch 12 , MSE: 0.44284007519234136 , Pearson Correlation: 0.6815251759494406 with p-value: 0.0 , Concordance Index: 0.7671332681158448\n",
+      "Training at Epoch 13 iteration 0 with loss 0.4339065\n",
+      "Training at Epoch 13 iteration 100 with loss 0.43391797\n",
+      "Training at Epoch 13 iteration 200 with loss 0.44804138\n",
+      "Training at Epoch 13 iteration 300 with loss 0.29588428\n",
+      "Validation at Epoch 13 , MSE: 0.3794264554744155 , Pearson Correlation: 0.6824709263168197 with p-value: 0.0 , Concordance Index: 0.7648438539097174\n",
+      "Training at Epoch 14 iteration 0 with loss 0.35877725\n",
+      "Training at Epoch 14 iteration 100 with loss 0.40320885\n",
+      "Training at Epoch 14 iteration 200 with loss 0.6108784\n",
+      "Training at Epoch 14 iteration 300 with loss 0.4231795\n",
+      "Validation at Epoch 14 , MSE: 0.37035736481551695 , Pearson Correlation: 0.6886761842378898 with p-value: 0.0 , Concordance Index: 0.769635147299312\n",
+      "Training at Epoch 15 iteration 0 with loss 0.28133658\n",
+      "Training at Epoch 15 iteration 100 with loss 0.2723529\n",
+      "Training at Epoch 15 iteration 200 with loss 0.37842792\n",
+      "Training at Epoch 15 iteration 300 with loss 0.42256156\n",
+      "Validation at Epoch 15 , MSE: 0.39174580991934793 , Pearson Correlation: 0.6980164319885487 with p-value: 0.0 , Concordance Index: 0.7729698407516334\n",
+      "Training at Epoch 16 iteration 0 with loss 0.442822\n",
+      "Training at Epoch 16 iteration 100 with loss 0.29855317\n",
+      "Training at Epoch 16 iteration 200 with loss 0.44767448\n",
+      "Training at Epoch 16 iteration 300 with loss 0.3829686\n",
+      "Validation at Epoch 16 , MSE: 0.3587687227351904 , Pearson Correlation: 0.7275656094433479 with p-value: 0.0 , Concordance Index: 0.7817976053933012\n",
+      "Training at Epoch 17 iteration 0 with loss 0.39717722\n",
+      "Training at Epoch 17 iteration 100 with loss 0.34678438\n",
+      "Training at Epoch 17 iteration 200 with loss 0.30226183\n",
+      "Training at Epoch 17 iteration 300 with loss 0.42329437\n",
+      "Validation at Epoch 17 , MSE: 0.4687368140248108 , Pearson Correlation: 0.7304166804308427 with p-value: 0.0 , Concordance Index: 0.7828565923015529\n",
+      "Training at Epoch 18 iteration 0 with loss 0.3633739\n",
+      "Training at Epoch 18 iteration 100 with loss 0.28657123\n",
+      "Training at Epoch 18 iteration 200 with loss 0.32053304\n",
+      "Training at Epoch 18 iteration 300 with loss 0.32039723\n",
+      "Validation at Epoch 18 , MSE: 0.3276044065524828 , Pearson Correlation: 0.7382404349298669 with p-value: 0.0 , Concordance Index: 0.7846119602326115\n",
+      "Training at Epoch 19 iteration 0 with loss 0.17331107\n",
+      "Training at Epoch 19 iteration 100 with loss 0.43098885\n",
+      "Training at Epoch 19 iteration 200 with loss 0.34547406\n",
+      "Training at Epoch 19 iteration 300 with loss 0.29107505\n",
+      "Validation at Epoch 19 , MSE: 0.3122119465124941 , Pearson Correlation: 0.7406594366731748 with p-value: 0.0 , Concordance Index: 0.7859064449263984\n",
+      "Training at Epoch 20 iteration 0 with loss 0.3300522\n",
+      "Training at Epoch 20 iteration 100 with loss 0.23845857\n",
+      "Training at Epoch 20 iteration 200 with loss 0.3112779\n",
+      "Training at Epoch 20 iteration 300 with loss 0.2908824\n",
+      "Validation at Epoch 20 , MSE: 0.332362568609342 , Pearson Correlation: 0.7433144190944236 with p-value: 0.0 , Concordance Index: 0.779341019776788\n",
+      "Training at Epoch 21 iteration 0 with loss 0.21694058\n",
+      "Training at Epoch 21 iteration 100 with loss 0.31264272\n",
+      "Training at Epoch 21 iteration 200 with loss 0.291134\n",
+      "Training at Epoch 21 iteration 300 with loss 0.2443328\n",
+      "Validation at Epoch 21 , MSE: 0.32488440260458695 , Pearson Correlation: 0.7549241003756116 with p-value: 0.0 , Concordance Index: 0.7905469464642452\n",
+      "Training at Epoch 22 iteration 0 with loss 0.296259\n",
+      "Training at Epoch 22 iteration 100 with loss 0.32945767\n",
+      "Training at Epoch 22 iteration 200 with loss 0.28351027\n",
+      "Training at Epoch 22 iteration 300 with loss 0.27165496\n",
+      "Validation at Epoch 22 , MSE: 0.3556623065139081 , Pearson Correlation: 0.7558565473223899 with p-value: 0.0 , Concordance Index: 0.7933693699770804\n",
+      "Training at Epoch 23 iteration 0 with loss 0.3517964\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Training at Epoch 23 iteration 100 with loss 0.21723273\n",
+      "Training at Epoch 23 iteration 200 with loss 0.3215351\n",
+      "Training at Epoch 23 iteration 300 with loss 0.37353116\n",
+      "Validation at Epoch 23 , MSE: 0.2904944052104942 , Pearson Correlation: 0.7605299689697724 with p-value: 0.0 , Concordance Index: 0.7966217114894854\n",
+      "Training at Epoch 24 iteration 0 with loss 0.2798943\n",
+      "Training at Epoch 24 iteration 100 with loss 0.39643136\n",
+      "Training at Epoch 24 iteration 200 with loss 0.4295775\n",
+      "Training at Epoch 24 iteration 300 with loss 0.24249879\n",
+      "Validation at Epoch 24 , MSE: 0.3018565196551627 , Pearson Correlation: 0.7695774972212105 with p-value: 0.0 , Concordance Index: 0.7993369790819714\n",
+      "Training at Epoch 25 iteration 0 with loss 0.22099498\n",
+      "Training at Epoch 25 iteration 100 with loss 0.26581356\n",
+      "Training at Epoch 25 iteration 200 with loss 0.31408113\n",
+      "Training at Epoch 25 iteration 300 with loss 0.31087956\n",
+      "Validation at Epoch 25 , MSE: 0.28863161072602156 , Pearson Correlation: 0.762775379909726 with p-value: 0.0 , Concordance Index: 0.8012207040164403\n",
+      "Training at Epoch 26 iteration 0 with loss 0.2655613\n",
+      "Training at Epoch 26 iteration 100 with loss 0.25201494\n",
+      "Training at Epoch 26 iteration 200 with loss 0.2319586\n",
+      "Training at Epoch 26 iteration 300 with loss 0.2557767\n",
+      "Validation at Epoch 26 , MSE: 0.4930393249311044 , Pearson Correlation: 0.7699562360525237 with p-value: 0.0 , Concordance Index: 0.7963000393624107\n",
+      "Training at Epoch 27 iteration 0 with loss 0.44117013\n",
+      "Training at Epoch 27 iteration 100 with loss 0.3273319\n",
+      "Training at Epoch 27 iteration 200 with loss 0.26239604\n",
+      "Training at Epoch 27 iteration 300 with loss 0.2871576\n",
+      "Validation at Epoch 27 , MSE: 0.2763490526371625 , Pearson Correlation: 0.7732378079725977 with p-value: 0.0 , Concordance Index: 0.8040261651490042\n",
+      "Training at Epoch 28 iteration 0 with loss 0.2402317\n",
+      "Training at Epoch 28 iteration 100 with loss 0.2201614\n",
+      "Training at Epoch 28 iteration 200 with loss 0.29813662\n",
+      "Training at Epoch 28 iteration 300 with loss 0.36932343\n",
+      "Validation at Epoch 28 , MSE: 0.3163570835376237 , Pearson Correlation: 0.7757515692799253 with p-value: 0.0 , Concordance Index: 0.7992013178853609\n",
+      "Training at Epoch 29 iteration 0 with loss 0.30585524\n",
+      "Training at Epoch 29 iteration 100 with loss 0.29580683\n",
+      "Training at Epoch 29 iteration 200 with loss 0.26298445\n",
+      "Training at Epoch 29 iteration 300 with loss 0.29019976\n",
+      "Validation at Epoch 29 , MSE: 0.27718322610610213 , Pearson Correlation: 0.7781347032430801 with p-value: 0.0 , Concordance Index: 0.8021961809106205\n",
+      "Training at Epoch 30 iteration 0 with loss 0.2829763\n",
+      "Training at Epoch 30 iteration 100 with loss 0.28772902\n",
+      "Training at Epoch 30 iteration 200 with loss 0.2438101\n",
+      "Training at Epoch 30 iteration 300 with loss 0.2709101\n",
+      "Validation at Epoch 30 , MSE: 0.3025488108277402 , Pearson Correlation: 0.7766137013109087 with p-value: 0.0 , Concordance Index: 0.7978011305407529\n",
+      "Training at Epoch 31 iteration 0 with loss 0.2151288\n",
+      "Training at Epoch 31 iteration 100 with loss 0.27977476\n",
+      "Training at Epoch 31 iteration 200 with loss 0.32945055\n",
+      "Training at Epoch 31 iteration 300 with loss 0.30322352\n",
+      "Validation at Epoch 31 , MSE: 0.27950626900433845 , Pearson Correlation: 0.7768095876580197 with p-value: 0.0 , Concordance Index: 0.809581908586743\n",
+      "Training at Epoch 32 iteration 0 with loss 0.30916905\n",
+      "Training at Epoch 32 iteration 100 with loss 0.35691962\n",
+      "Training at Epoch 32 iteration 200 with loss 0.30594778\n",
+      "Training at Epoch 32 iteration 300 with loss 0.25673822\n",
+      "Validation at Epoch 32 , MSE: 0.28392810687019493 , Pearson Correlation: 0.7884139492934661 with p-value: 0.0 , Concordance Index: 0.8080715906607208\n",
+      "Training at Epoch 33 iteration 0 with loss 0.24682216\n",
+      "Training at Epoch 33 iteration 100 with loss 0.32019353\n",
+      "Training at Epoch 33 iteration 200 with loss 0.26248968\n",
+      "Training at Epoch 33 iteration 300 with loss 0.23284283\n",
+      "Validation at Epoch 33 , MSE: 0.28147583151983824 , Pearson Correlation: 0.7912324256450507 with p-value: 0.0 , Concordance Index: 0.806736921550664\n",
+      "Training at Epoch 34 iteration 0 with loss 0.26583454\n",
+      "Training at Epoch 34 iteration 100 with loss 0.27032578\n",
+      "Training at Epoch 34 iteration 200 with loss 0.2510938\n",
+      "Training at Epoch 34 iteration 300 with loss 0.18677746\n",
+      "Validation at Epoch 34 , MSE: 0.254715168591093 , Pearson Correlation: 0.7944845435587876 with p-value: 0.0 , Concordance Index: 0.8092169411383151\n",
+      "Training at Epoch 35 iteration 0 with loss 0.21087557\n",
+      "Training at Epoch 35 iteration 100 with loss 0.22810724\n",
+      "Training at Epoch 35 iteration 200 with loss 0.30516157\n",
+      "Training at Epoch 35 iteration 300 with loss 0.21417427\n",
+      "Validation at Epoch 35 , MSE: 0.2556761511172115 , Pearson Correlation: 0.7957341951817682 with p-value: 0.0 , Concordance Index: 0.8142363751364597\n",
+      "Training at Epoch 36 iteration 0 with loss 0.17365673\n",
+      "Training at Epoch 36 iteration 100 with loss 0.26811144\n",
+      "Training at Epoch 36 iteration 200 with loss 0.23328358\n",
+      "Training at Epoch 36 iteration 300 with loss 0.25731447\n",
+      "Validation at Epoch 36 , MSE: 0.2559457642380893 , Pearson Correlation: 0.799422136545758 with p-value: 0.0 , Concordance Index: 0.8127337474283564\n",
+      "Training at Epoch 37 iteration 0 with loss 0.17421441\n",
+      "Training at Epoch 37 iteration 100 with loss 0.2805039\n",
+      "Training at Epoch 37 iteration 200 with loss 0.16534641\n",
+      "Training at Epoch 37 iteration 300 with loss 0.22006656\n",
+      "Validation at Epoch 37 , MSE: 0.32952278645964184 , Pearson Correlation: 0.7934489740191146 with p-value: 0.0 , Concordance Index: 0.8010512391650809\n",
+      "Training at Epoch 38 iteration 0 with loss 0.2512241\n",
+      "Training at Epoch 38 iteration 100 with loss 0.18280888\n",
+      "Training at Epoch 38 iteration 200 with loss 0.16358562\n",
+      "Training at Epoch 38 iteration 300 with loss 0.2114552\n",
+      "Validation at Epoch 38 , MSE: 0.2469675475963409 , Pearson Correlation: 0.8041560299798638 with p-value: 0.0 , Concordance Index: 0.8143626506340358\n",
+      "Training at Epoch 39 iteration 0 with loss 0.17007281\n",
+      "Training at Epoch 39 iteration 100 with loss 0.2576394\n",
+      "Training at Epoch 39 iteration 200 with loss 0.25703746\n",
+      "Training at Epoch 39 iteration 300 with loss 0.30679893\n",
+      "Validation at Epoch 39 , MSE: 0.2814202224785629 , Pearson Correlation: 0.7986617531118729 with p-value: 0.0 , Concordance Index: 0.8123491078575165\n",
+      "Training at Epoch 40 iteration 0 with loss 0.17776462\n",
+      "Training at Epoch 40 iteration 100 with loss 0.23892398\n",
+      "Training at Epoch 40 iteration 200 with loss 0.3002319\n",
+      "Training at Epoch 40 iteration 300 with loss 0.21091235\n",
+      "Validation at Epoch 40 , MSE: 0.24471777008217963 , Pearson Correlation: 0.8029439370476041 with p-value: 0.0 , Concordance Index: 0.8139744687187216\n",
+      "Training at Epoch 41 iteration 0 with loss 0.18120411\n",
+      "Training at Epoch 41 iteration 100 with loss 0.1467629\n",
+      "Training at Epoch 41 iteration 200 with loss 0.18784216\n",
+      "Training at Epoch 41 iteration 300 with loss 0.39111277\n",
+      "Validation at Epoch 41 , MSE: 0.25024235414513113 , Pearson Correlation: 0.8065926609762567 with p-value: 0.0 , Concordance Index: 0.8126689861050176\n",
+      "Training at Epoch 42 iteration 0 with loss 0.18558024\n",
+      "Training at Epoch 42 iteration 100 with loss 0.22143383\n",
+      "Training at Epoch 42 iteration 200 with loss 0.17659724\n",
+      "Training at Epoch 42 iteration 300 with loss 0.21279082\n",
+      "Validation at Epoch 42 , MSE: 0.24589991385365795 , Pearson Correlation: 0.8058687406335383 with p-value: 0.0 , Concordance Index: 0.8180839667645736\n",
+      "Training at Epoch 43 iteration 0 with loss 0.18575062\n",
+      "Training at Epoch 43 iteration 100 with loss 0.14547281\n",
+      "Training at Epoch 43 iteration 200 with loss 0.21407829\n",
+      "Training at Epoch 43 iteration 300 with loss 0.25702018\n",
+      "Validation at Epoch 43 , MSE: 0.23597818335337695 , Pearson Correlation: 0.8131261358345395 with p-value: 0.0 , Concordance Index: 0.8214152843928875\n",
+      "Training at Epoch 44 iteration 0 with loss 0.1430232\n",
+      "Training at Epoch 44 iteration 100 with loss 0.23007198\n",
+      "Training at Epoch 44 iteration 200 with loss 0.23313954\n",
+      "Training at Epoch 44 iteration 300 with loss 0.21242057\n",
+      "Validation at Epoch 44 , MSE: 0.2410594394283686 , Pearson Correlation: 0.8093517312778701 with p-value: 0.0 , Concordance Index: 0.8200404838503456\n",
+      "Training at Epoch 45 iteration 0 with loss 0.19044279\n",
+      "Training at Epoch 45 iteration 100 with loss 0.22278084\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Training at Epoch 45 iteration 200 with loss 0.20137015\n",
+      "Training at Epoch 45 iteration 300 with loss 0.20637983\n",
+      "Validation at Epoch 45 , MSE: 0.2931538610613398 , Pearson Correlation: 0.8161000618379057 with p-value: 0.0 , Concordance Index: 0.8220322200148337\n",
+      "Training at Epoch 46 iteration 0 with loss 0.19920787\n",
+      "Training at Epoch 46 iteration 100 with loss 0.17406026\n",
+      "Training at Epoch 46 iteration 200 with loss 0.19329071\n",
+      "Training at Epoch 46 iteration 300 with loss 0.19324802\n",
+      "Validation at Epoch 46 , MSE: 0.27236348972361674 , Pearson Correlation: 0.8138519945205422 with p-value: 0.0 , Concordance Index: 0.8187842761565732\n",
+      "Training at Epoch 47 iteration 0 with loss 0.23279054\n",
+      "Training at Epoch 47 iteration 100 with loss 0.17864995\n",
+      "Training at Epoch 47 iteration 200 with loss 0.21056506\n",
+      "Training at Epoch 47 iteration 300 with loss 0.1824354\n",
+      "Validation at Epoch 47 , MSE: 0.27391643581608033 , Pearson Correlation: 0.8068993596091968 with p-value: 0.0 , Concordance Index: 0.8129289169844912\n",
+      "Training at Epoch 48 iteration 0 with loss 0.26312447\n",
+      "Training at Epoch 48 iteration 100 with loss 0.22185321\n",
+      "Training at Epoch 48 iteration 200 with loss 0.23431794\n",
+      "Training at Epoch 48 iteration 300 with loss 0.2589783\n",
+      "Validation at Epoch 48 , MSE: 0.23025026174260313 , Pearson Correlation: 0.8170136575139418 with p-value: 0.0 , Concordance Index: 0.8225616567006185\n",
+      "Training at Epoch 49 iteration 0 with loss 0.1223916\n",
+      "Training at Epoch 49 iteration 100 with loss 0.18264055\n",
+      "Training at Epoch 49 iteration 200 with loss 0.17989686\n",
+      "Training at Epoch 49 iteration 300 with loss 0.16855526\n",
+      "Validation at Epoch 49 , MSE: 0.2205644529408611 , Pearson Correlation: 0.8239693877895442 with p-value: 0.0 , Concordance Index: 0.8312742800086458\n",
+      "Training at Epoch 50 iteration 0 with loss 0.30569243\n",
+      "Training at Epoch 50 iteration 100 with loss 0.16925503\n",
+      "Training at Epoch 50 iteration 200 with loss 0.20570143\n",
+      "Training at Epoch 50 iteration 300 with loss 0.19284452\n",
+      "Validation at Epoch 50 , MSE: 0.23454742649197957 , Pearson Correlation: 0.8259606548256397 with p-value: 0.0 , Concordance Index: 0.8328887640557276\n",
+      "Training at Epoch 51 iteration 0 with loss 0.22376396\n",
+      "Training at Epoch 51 iteration 100 with loss 0.23242217\n",
+      "Training at Epoch 51 iteration 200 with loss 0.17831367\n",
+      "Training at Epoch 51 iteration 300 with loss 0.17776337\n",
+      "Validation at Epoch 51 , MSE: 0.22540333524221137 , Pearson Correlation: 0.8237001583277702 with p-value: 0.0 , Concordance Index: 0.8321182360105475\n",
+      "Training at Epoch 52 iteration 0 with loss 0.18440701\n",
+      "Training at Epoch 52 iteration 100 with loss 0.15421636\n",
+      "Training at Epoch 52 iteration 200 with loss 0.1854191\n",
+      "Training at Epoch 52 iteration 300 with loss 0.2379067\n",
+      "Validation at Epoch 52 , MSE: 0.20853173553592427 , Pearson Correlation: 0.8344220487922347 with p-value: 0.0 , Concordance Index: 0.8367795147611765\n",
+      "Training at Epoch 53 iteration 0 with loss 0.20401023\n",
+      "Training at Epoch 53 iteration 100 with loss 0.11404045\n",
+      "Training at Epoch 53 iteration 200 with loss 0.15747331\n",
+      "Training at Epoch 53 iteration 300 with loss 0.22042069\n",
+      "Validation at Epoch 53 , MSE: 0.21615491029018147 , Pearson Correlation: 0.8306163494334134 with p-value: 0.0 , Concordance Index: 0.83251870259484\n",
+      "Training at Epoch 54 iteration 0 with loss 0.14299648\n",
+      "Training at Epoch 54 iteration 100 with loss 0.23001932\n",
+      "Training at Epoch 54 iteration 200 with loss 0.1770986\n",
+      "Training at Epoch 54 iteration 300 with loss 0.2942966\n",
+      "Validation at Epoch 54 , MSE: 0.2501904187767297 , Pearson Correlation: 0.8292196336328985 with p-value: 0.0 , Concordance Index: 0.8316417604022989\n",
+      "Training at Epoch 55 iteration 0 with loss 0.19672471\n",
+      "Training at Epoch 55 iteration 100 with loss 0.142104\n",
+      "Training at Epoch 55 iteration 200 with loss 0.1882125\n",
+      "Training at Epoch 55 iteration 300 with loss 0.18341456\n",
+      "Validation at Epoch 55 , MSE: 0.22685709705865717 , Pearson Correlation: 0.8298601891084482 with p-value: 0.0 , Concordance Index: 0.8332577279953572\n",
+      "Training at Epoch 56 iteration 0 with loss 0.16769859\n",
+      "Training at Epoch 56 iteration 100 with loss 0.15354457\n",
+      "Training at Epoch 56 iteration 200 with loss 0.18837331\n",
+      "Training at Epoch 56 iteration 300 with loss 0.18643981\n",
+      "Validation at Epoch 56 , MSE: 0.23465381325878834 , Pearson Correlation: 0.8226322913072514 with p-value: 0.0 , Concordance Index: 0.8305899944270141\n",
+      "Training at Epoch 57 iteration 0 with loss 0.15828678\n",
+      "Training at Epoch 57 iteration 100 with loss 0.23880166\n",
+      "Training at Epoch 57 iteration 200 with loss 0.19497156\n",
+      "Training at Epoch 57 iteration 300 with loss 0.20533583\n",
+      "Validation at Epoch 57 , MSE: 0.23490776397150226 , Pearson Correlation: 0.8261580940950286 with p-value: 0.0 , Concordance Index: 0.828603049510501\n",
+      "Training at Epoch 58 iteration 0 with loss 0.14188379\n",
+      "Training at Epoch 58 iteration 100 with loss 0.16264115\n",
+      "Training at Epoch 58 iteration 200 with loss 0.27134517\n",
+      "Training at Epoch 58 iteration 300 with loss 0.16497058\n",
+      "Validation at Epoch 58 , MSE: 0.20634967969974802 , Pearson Correlation: 0.8372939570839266 with p-value: 0.0 , Concordance Index: 0.8338584354409082\n",
+      "Training at Epoch 59 iteration 0 with loss 0.13785982\n",
+      "Training at Epoch 59 iteration 100 with loss 0.16934662\n",
+      "Training at Epoch 59 iteration 200 with loss 0.14488962\n",
+      "Training at Epoch 59 iteration 300 with loss 0.24447058\n",
+      "Validation at Epoch 59 , MSE: 0.24024889528142337 , Pearson Correlation: 0.8281995247440275 with p-value: 0.0 , Concordance Index: 0.8348693811945046\n",
+      "Training at Epoch 60 iteration 0 with loss 0.20227404\n",
+      "Training at Epoch 60 iteration 100 with loss 0.15199661\n",
+      "Training at Epoch 60 iteration 200 with loss 0.21587655\n",
+      "Training at Epoch 60 iteration 300 with loss 0.19945133\n",
+      "Validation at Epoch 60 , MSE: 0.21013252771272414 , Pearson Correlation: 0.8365628781877509 with p-value: 0.0 , Concordance Index: 0.8368555691998053\n",
+      "Training at Epoch 61 iteration 0 with loss 0.19128245\n",
+      "Training at Epoch 61 iteration 100 with loss 0.11310287\n",
+      "Training at Epoch 61 iteration 200 with loss 0.19415411\n",
+      "Training at Epoch 61 iteration 300 with loss 0.15344377\n",
+      "Validation at Epoch 61 , MSE: 0.21382856500150502 , Pearson Correlation: 0.8418828074732749 with p-value: 0.0 , Concordance Index: 0.8403055856444582\n",
+      "Training at Epoch 62 iteration 0 with loss 0.16562407\n",
+      "Training at Epoch 62 iteration 100 with loss 0.15640946\n",
+      "Training at Epoch 62 iteration 200 with loss 0.11651558\n",
+      "Training at Epoch 62 iteration 300 with loss 0.1702704\n",
+      "Validation at Epoch 62 , MSE: 0.2162447095154239 , Pearson Correlation: 0.8374758847636822 with p-value: 0.0 , Concordance Index: 0.8369025658169874\n",
+      "Training at Epoch 63 iteration 0 with loss 0.15939891\n",
+      "Training at Epoch 63 iteration 100 with loss 0.15985921\n",
+      "Training at Epoch 63 iteration 200 with loss 0.16121107\n",
+      "Training at Epoch 63 iteration 300 with loss 0.16741844\n",
+      "Validation at Epoch 63 , MSE: 0.2756391213687168 , Pearson Correlation: 0.8423224386672058 with p-value: 0.0 , Concordance Index: 0.8402666501316889\n",
+      "Training at Epoch 64 iteration 0 with loss 0.17065248\n",
+      "Training at Epoch 64 iteration 100 with loss 0.16979195\n",
+      "Training at Epoch 64 iteration 200 with loss 0.1779838\n",
+      "Training at Epoch 64 iteration 300 with loss 0.1243289\n",
+      "Validation at Epoch 64 , MSE: 0.21367358112252594 , Pearson Correlation: 0.8412688018890631 with p-value: 0.0 , Concordance Index: 0.8431657033356744\n",
+      "Training at Epoch 65 iteration 0 with loss 0.10653531\n",
+      "Training at Epoch 65 iteration 100 with loss 0.123516455\n",
+      "Training at Epoch 65 iteration 200 with loss 0.14685814\n",
+      "Training at Epoch 65 iteration 300 with loss 0.16070805\n",
+      "Validation at Epoch 65 , MSE: 0.20601349676311673 , Pearson Correlation: 0.8427587170010579 with p-value: 0.0 , Concordance Index: 0.8434652358097844\n",
+      "Training at Epoch 66 iteration 0 with loss 0.13670954\n",
+      "Training at Epoch 66 iteration 100 with loss 0.1612285\n",
+      "Training at Epoch 66 iteration 200 with loss 0.13986418\n",
+      "Training at Epoch 66 iteration 300 with loss 0.20413469\n",
+      "Validation at Epoch 66 , MSE: 0.21948135905803182 , Pearson Correlation: 0.8405495493810491 with p-value: 0.0 , Concordance Index: 0.8415147058917031\n",
+      "Training at Epoch 67 iteration 0 with loss 0.12630597\n",
+      "Training at Epoch 67 iteration 100 with loss 0.15750438\n",
+      "Training at Epoch 67 iteration 200 with loss 0.10876533\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Training at Epoch 67 iteration 300 with loss 0.14193657\n",
+      "Validation at Epoch 67 , MSE: 0.20856750871228383 , Pearson Correlation: 0.8429001782451714 with p-value: 0.0 , Concordance Index: 0.8417360115920044\n",
+      "Training at Epoch 68 iteration 0 with loss 0.107186705\n",
+      "Training at Epoch 68 iteration 100 with loss 0.12097235\n",
+      "Training at Epoch 68 iteration 200 with loss 0.11111714\n",
+      "Training at Epoch 68 iteration 300 with loss 0.13749462\n",
+      "Validation at Epoch 68 , MSE: 0.2023187404043579 , Pearson Correlation: 0.8437123674403135 with p-value: 0.0 , Concordance Index: 0.8441180641614258\n",
+      "Training at Epoch 69 iteration 0 with loss 0.13140449\n",
+      "Training at Epoch 69 iteration 100 with loss 0.14743677\n",
+      "Training at Epoch 69 iteration 200 with loss 0.12306302\n",
+      "Training at Epoch 69 iteration 300 with loss 0.16115318\n",
+      "Validation at Epoch 69 , MSE: 0.20788437705473423 , Pearson Correlation: 0.8422315875620378 with p-value: 0.0 , Concordance Index: 0.8416793794950875\n",
+      "Training at Epoch 70 iteration 0 with loss 0.12666376\n",
+      "Training at Epoch 70 iteration 100 with loss 0.12379304\n",
+      "Training at Epoch 70 iteration 200 with loss 0.09857385\n",
+      "Training at Epoch 70 iteration 300 with loss 0.14105771\n",
+      "Validation at Epoch 70 , MSE: 0.2032973529984822 , Pearson Correlation: 0.8453484882542686 with p-value: 0.0 , Concordance Index: 0.8457166227965974\n",
+      "Training at Epoch 71 iteration 0 with loss 0.10225665\n",
+      "Training at Epoch 71 iteration 100 with loss 0.19021757\n",
+      "Training at Epoch 71 iteration 200 with loss 0.13823912\n",
+      "Training at Epoch 71 iteration 300 with loss 0.16305545\n",
+      "Validation at Epoch 71 , MSE: 0.21069843178497505 , Pearson Correlation: 0.8436724261637155 with p-value: 0.0 , Concordance Index: 0.8451032900720226\n",
+      "Training at Epoch 72 iteration 0 with loss 0.101149544\n",
+      "Training at Epoch 72 iteration 100 with loss 0.14553498\n",
+      "Training at Epoch 72 iteration 200 with loss 0.14325085\n",
+      "Training at Epoch 72 iteration 300 with loss 0.13576855\n",
+      "Validation at Epoch 72 , MSE: 0.20832088312351932 , Pearson Correlation: 0.8469731326052242 with p-value: 0.0 , Concordance Index: 0.8455185164249581\n",
+      "Training at Epoch 73 iteration 0 with loss 0.13514586\n",
+      "Training at Epoch 73 iteration 100 with loss 0.118497826\n",
+      "Training at Epoch 73 iteration 200 with loss 0.1443201\n",
+      "Training at Epoch 73 iteration 300 with loss 0.1113624\n",
+      "Validation at Epoch 73 , MSE: 0.20253388316067503 , Pearson Correlation: 0.8441805028660897 with p-value: 0.0 , Concordance Index: 0.8470269042273885\n",
+      "Training at Epoch 74 iteration 0 with loss 0.09950824\n",
+      "Training at Epoch 74 iteration 100 with loss 0.10273786\n",
+      "Training at Epoch 74 iteration 200 with loss 0.104656264\n",
+      "Training at Epoch 74 iteration 300 with loss 0.10339452\n",
+      "Validation at Epoch 74 , MSE: 0.21705914965444434 , Pearson Correlation: 0.8452427278756023 with p-value: 0.0 , Concordance Index: 0.8499502769886306\n",
+      "Training at Epoch 75 iteration 0 with loss 0.14215946\n",
+      "Training at Epoch 75 iteration 100 with loss 0.0961847\n",
+      "Training at Epoch 75 iteration 200 with loss 0.12331951\n",
+      "Training at Epoch 75 iteration 300 with loss 0.15130317\n",
+      "Validation at Epoch 75 , MSE: 0.19871417532173935 , Pearson Correlation: 0.8446179648633848 with p-value: 0.0 , Concordance Index: 0.8417817138910132\n",
+      "Training at Epoch 76 iteration 0 with loss 0.15954836\n",
+      "Training at Epoch 76 iteration 100 with loss 0.13232154\n",
+      "Training at Epoch 76 iteration 200 with loss 0.12701261\n",
+      "Training at Epoch 76 iteration 300 with loss 0.13210264\n",
+      "Validation at Epoch 76 , MSE: 0.2240879919971544 , Pearson Correlation: 0.8445331028002763 with p-value: 0.0 , Concordance Index: 0.8421498149518606\n",
+      "Training at Epoch 77 iteration 0 with loss 0.15750398\n",
+      "Training at Epoch 77 iteration 100 with loss 0.11341824\n",
+      "Training at Epoch 77 iteration 200 with loss 0.114693\n",
+      "Training at Epoch 77 iteration 300 with loss 0.13361749\n",
+      "Validation at Epoch 77 , MSE: 0.2152338539087079 , Pearson Correlation: 0.8478969625574723 with p-value: 0.0 , Concordance Index: 0.8455267289116134\n",
+      "Training at Epoch 78 iteration 0 with loss 0.10080556\n",
+      "Training at Epoch 78 iteration 100 with loss 0.12171855\n",
+      "Training at Epoch 78 iteration 200 with loss 0.11807407\n",
+      "Training at Epoch 78 iteration 300 with loss 0.11452898\n",
+      "Validation at Epoch 78 , MSE: 0.18960463618759138 , Pearson Correlation: 0.8518679693765236 with p-value: 0.0 , Concordance Index: 0.8496819519638107\n",
+      "Training at Epoch 79 iteration 0 with loss 0.11710593\n",
+      "Training at Epoch 79 iteration 100 with loss 0.1476192\n",
+      "Training at Epoch 79 iteration 200 with loss 0.13384058\n",
+      "Training at Epoch 79 iteration 300 with loss 0.12313308\n",
+      "Validation at Epoch 79 , MSE: 0.19822040337060512 , Pearson Correlation: 0.8503488534686926 with p-value: 0.0 , Concordance Index: 0.8491997843832444\n",
+      "Training at Epoch 80 iteration 0 with loss 0.087387584\n",
+      "Training at Epoch 80 iteration 100 with loss 0.10811463\n",
+      "Training at Epoch 80 iteration 200 with loss 0.18600343\n",
+      "Training at Epoch 80 iteration 300 with loss 0.10030829\n",
+      "Validation at Epoch 80 , MSE: 0.1932626936966475 , Pearson Correlation: 0.8513422745569593 with p-value: 0.0 , Concordance Index: 0.8515337428142256\n",
+      "Training at Epoch 81 iteration 0 with loss 0.098010935\n",
+      "Training at Epoch 81 iteration 100 with loss 0.14005096\n",
+      "Training at Epoch 81 iteration 200 with loss 0.08274831\n",
+      "Training at Epoch 81 iteration 300 with loss 0.11399409\n",
+      "Validation at Epoch 81 , MSE: 0.19758804520753495 , Pearson Correlation: 0.8527537063190721 with p-value: 0.0 , Concordance Index: 0.8509730986791506\n",
+      "Training at Epoch 82 iteration 0 with loss 0.10215923\n",
+      "Training at Epoch 82 iteration 100 with loss 0.088796765\n",
+      "Training at Epoch 82 iteration 200 with loss 0.1028263\n",
+      "Training at Epoch 82 iteration 300 with loss 0.11578086\n",
+      "Validation at Epoch 82 , MSE: 0.21772542272704945 , Pearson Correlation: 0.8529054553962128 with p-value: 0.0 , Concordance Index: 0.8475180563440468\n",
+      "Training at Epoch 83 iteration 0 with loss 0.1321531\n",
+      "Training at Epoch 83 iteration 100 with loss 0.09774816\n",
+      "Training at Epoch 83 iteration 200 with loss 0.10050143\n",
+      "Training at Epoch 83 iteration 300 with loss 0.09495833\n",
+      "Validation at Epoch 83 , MSE: 0.2136586002959394 , Pearson Correlation: 0.8508724011744425 with p-value: 0.0 , Concordance Index: 0.8516967663511669\n",
+      "Training at Epoch 84 iteration 0 with loss 0.12597932\n",
+      "Training at Epoch 84 iteration 100 with loss 0.124858126\n",
+      "Training at Epoch 84 iteration 200 with loss 0.12111996\n",
+      "Training at Epoch 84 iteration 300 with loss 0.08302106\n",
+      "Validation at Epoch 84 , MSE: 0.1990206659886085 , Pearson Correlation: 0.8477119170050185 with p-value: 0.0 , Concordance Index: 0.8468090878877765\n",
+      "Training at Epoch 85 iteration 0 with loss 0.07115703\n",
+      "Training at Epoch 85 iteration 100 with loss 0.1317057\n",
+      "Training at Epoch 85 iteration 200 with loss 0.09400135\n",
+      "Training at Epoch 85 iteration 300 with loss 0.08817591\n",
+      "Validation at Epoch 85 , MSE: 0.1880923648098855 , Pearson Correlation: 0.8524468842300589 with p-value: 0.0 , Concordance Index: 0.8530510470307388\n",
+      "Training at Epoch 86 iteration 0 with loss 0.090310104\n",
+      "Training at Epoch 86 iteration 100 with loss 0.123432584\n",
+      "Training at Epoch 86 iteration 200 with loss 0.0826193\n",
+      "Training at Epoch 86 iteration 300 with loss 0.15323539\n",
+      "Validation at Epoch 86 , MSE: 0.19529560739508625 , Pearson Correlation: 0.8476772162914558 with p-value: 0.0 , Concordance Index: 0.8502067033691905\n",
+      "Training at Epoch 87 iteration 0 with loss 0.080911815\n",
+      "Training at Epoch 87 iteration 100 with loss 0.13101102\n",
+      "Training at Epoch 87 iteration 200 with loss 0.08883078\n",
+      "Training at Epoch 87 iteration 300 with loss 0.12456667\n",
+      "Validation at Epoch 87 , MSE: 0.1981152364558615 , Pearson Correlation: 0.8516459469462152 with p-value: 0.0 , Concordance Index: 0.851878531009729\n",
+      "Training at Epoch 88 iteration 0 with loss 0.07875684\n",
+      "Training at Epoch 88 iteration 100 with loss 0.113229\n",
+      "Training at Epoch 88 iteration 200 with loss 0.09287766\n",
+      "Training at Epoch 88 iteration 300 with loss 0.09923973\n",
+      "Validation at Epoch 88 , MSE: 0.2791987337318223 , Pearson Correlation: 0.8473231716104132 with p-value: 0.0 , Concordance Index: 0.8484163358636674\n",
+      "Training at Epoch 89 iteration 0 with loss 0.1758956\n",
+      "Training at Epoch 89 iteration 100 with loss 0.15466869\n",
+      "Training at Epoch 89 iteration 200 with loss 0.06840886\n",
+      "Training at Epoch 89 iteration 300 with loss 0.09052813\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Validation at Epoch 89 , MSE: 0.18879584363341734 , Pearson Correlation: 0.8552581698425819 with p-value: 0.0 , Concordance Index: 0.8493616044313579\n",
+      "Training at Epoch 90 iteration 0 with loss 0.10148429\n",
+      "Training at Epoch 90 iteration 100 with loss 0.07839349\n",
+      "Training at Epoch 90 iteration 200 with loss 0.07834745\n",
+      "Training at Epoch 90 iteration 300 with loss 0.0998727\n",
+      "Validation at Epoch 90 , MSE: 0.19582125600456016 , Pearson Correlation: 0.8511291809184155 with p-value: 0.0 , Concordance Index: 0.8505655474059094\n",
+      "Training at Epoch 91 iteration 0 with loss 0.081786595\n",
+      "Training at Epoch 91 iteration 100 with loss 0.09030631\n",
+      "Training at Epoch 91 iteration 200 with loss 0.10174892\n",
+      "Training at Epoch 91 iteration 300 with loss 0.11394987\n",
+      "Validation at Epoch 91 , MSE: 0.20908388322739013 , Pearson Correlation: 0.8478519940615383 with p-value: 0.0 , Concordance Index: 0.8477589281991904\n",
+      "Training at Epoch 92 iteration 0 with loss 0.07235121\n",
+      "Training at Epoch 92 iteration 100 with loss 0.08341678\n",
+      "Training at Epoch 92 iteration 200 with loss 0.11056797\n",
+      "Training at Epoch 92 iteration 300 with loss 0.110210285\n",
+      "Validation at Epoch 92 , MSE: 0.19718609031082673 , Pearson Correlation: 0.8528774001655518 with p-value: 0.0 , Concordance Index: 0.851195925770989\n",
+      "Training at Epoch 93 iteration 0 with loss 0.08601681\n",
+      "Training at Epoch 93 iteration 100 with loss 0.08848184\n",
+      "Training at Epoch 93 iteration 200 with loss 0.14170763\n",
+      "Training at Epoch 93 iteration 300 with loss 0.14256622\n",
+      "Validation at Epoch 93 , MSE: 0.18961202081054754 , Pearson Correlation: 0.8550672175716182 with p-value: 0.0 , Concordance Index: 0.8546280564036914\n",
+      "Training at Epoch 94 iteration 0 with loss 0.08315914\n",
+      "Training at Epoch 94 iteration 100 with loss 0.07144792\n",
+      "Training at Epoch 94 iteration 200 with loss 0.109951824\n",
+      "Training at Epoch 94 iteration 300 with loss 0.07713781\n",
+      "Validation at Epoch 94 , MSE: 0.2025433794457135 , Pearson Correlation: 0.8565218271749488 with p-value: 0.0 , Concordance Index: 0.8569312236865494\n",
+      "Training at Epoch 95 iteration 0 with loss 0.085088074\n",
+      "Training at Epoch 95 iteration 100 with loss 0.09484902\n",
+      "Training at Epoch 95 iteration 200 with loss 0.07999084\n",
+      "Training at Epoch 95 iteration 300 with loss 0.11516863\n",
+      "Validation at Epoch 95 , MSE: 0.195291974748004 , Pearson Correlation: 0.8487014918161339 with p-value: 0.0 , Concordance Index: 0.8531190857796275\n",
+      "Training at Epoch 96 iteration 0 with loss 0.081413455\n",
+      "Training at Epoch 96 iteration 100 with loss 0.06010697\n",
+      "Training at Epoch 96 iteration 200 with loss 0.1060314\n",
+      "Training at Epoch 96 iteration 300 with loss 0.08848443\n",
+      "Validation at Epoch 96 , MSE: 0.1903152710427138 , Pearson Correlation: 0.8529740974876179 with p-value: 0.0 , Concordance Index: 0.8525666919767684\n",
+      "Training at Epoch 97 iteration 0 with loss 0.08045774\n",
+      "Training at Epoch 97 iteration 100 with loss 0.07858796\n",
+      "Training at Epoch 97 iteration 200 with loss 0.09438899\n",
+      "Training at Epoch 97 iteration 300 with loss 0.1323199\n",
+      "Validation at Epoch 97 , MSE: 0.1888422605033678 , Pearson Correlation: 0.8521231818633057 with p-value: 0.0 , Concordance Index: 0.8529498177251833\n",
+      "Training at Epoch 98 iteration 0 with loss 0.07730898\n",
+      "Training at Epoch 98 iteration 100 with loss 0.09733562\n",
+      "Training at Epoch 98 iteration 200 with loss 0.06377132\n",
+      "Training at Epoch 98 iteration 300 with loss 0.113953054\n",
+      "Validation at Epoch 98 , MSE: 0.21204893970889843 , Pearson Correlation: 0.8565689045993777 with p-value: 0.0 , Concordance Index: 0.8556206621986097\n",
+      "Training at Epoch 99 iteration 0 with loss 0.09585332\n",
+      "Training at Epoch 99 iteration 100 with loss 0.08413041\n",
+      "Training at Epoch 99 iteration 200 with loss 0.0944461\n",
+      "Training at Epoch 99 iteration 300 with loss 0.065783754\n",
+      "Validation at Epoch 99 , MSE: 0.20532750896585933 , Pearson Correlation: 0.8597496055395153 with p-value: 0.0 , Concordance Index: 0.8568210174140141\n",
+      "Training at Epoch 100 iteration 0 with loss 0.11175461\n",
+      "Training at Epoch 100 iteration 100 with loss 0.08406766\n",
+      "Training at Epoch 100 iteration 200 with loss 0.098233305\n",
+      "Training at Epoch 100 iteration 300 with loss 0.1574293\n",
+      "Validation at Epoch 100 , MSE: 0.1918805189450315 , Pearson Correlation: 0.8567659539114503 with p-value: 0.0 , Concordance Index: 0.8551461394212893\n",
+      "--- Go for Testing ---\n",
+      "Testing MSE: 0.19945655726244765 , Pearson Correlation: 0.8506343800999702 with p-value: 0.0 , Concordance Index: 0.8536301623899886\n",
+      "--- Training Finished ---\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYoAAAELCAYAAADHksFtAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAdiklEQVR4nO3de5ydVX3v8c+XcI3ISUIGSBOSAaQoIipOKYgHEXq4KQQRWnQK4dIzFfAoYE9BUzW2gFqsCKeIRgWDjCCCFopc5SKVAjrhEkDEhFuIhGQAQSXcQn7nj7U22TPZ88yeZN9m9vf9eu3Xfp713H57ZzK/edZaz1qKCMzMzIayXrMDMDOz1uZEYWZmhZwozMyskBOFmZkVcqIwM7NC6zc7gFqbPHlydHZ2NjsMM7NRZf78+c9EREelbWMuUXR2dtLX19fsMMzMRhVJTwy1zVVPZmZWyInCzMwKOVGYmVkhJwozMyvU0EQh6QJJyyU9UGHbP0gKSZPzuiSdK2mRpAWSdmlkrGZmljT6juJ7wP6DCyVtDfwvYHFZ8QHA9vnVA5xfr6B6e6GzE9ZbL7339tbrSmZmo09DE0VE3AY8V2HT2cA/AuVD2c4ELorkTmCCpCm1jqm3F3p64IknICK99/Q4WZiZlTS9jULSwcDvIuK+QZumAk+WrS/JZZXO0SOpT1Jff3//iK4/ezasWDGwbMWKVG5mZk1OFJLGA7OBz1faXKGs4uQZETE3Iroioqujo+KDhUNavHhk5WZm7abZdxTbAdsA90l6HJgG3C1pK9IdxNZl+04Dnqp1ANOnj6zczKzdNDVRRMT9EbFFRHRGRCcpOewSEU8DVwFH5d5PuwEvRMTSWsdwxhkwfvzAsvHjU7mZmTW+e+wlwB3ADpKWSDquYPdrgEeBRcC3gRPqEVN3N8yalXo8QXqfNSuVm5lZgwcFjIiPDrO9s2w5gBPrHVNvL8ybB6tWpfVVq9L6Hns4WZiZQfPbKJrOvZ7MzIq1faJwryczs2Jtnyjc68nMrFjbJwr3ejIzK9b2iaK7G+bOhcmT0/qUKWndDdlmZsmYmwp1bXR3w0YbweGHww03wE47NTsiM7PW0fZ3FGZmVsyJwszMCjlRmJlZIScK0tPZxx+flvfd13NRmJmVa/vG7NLERaWns5cuTevgnk9mZuA7Cg/hYWY2jLZPFB7Cw8ysWNsnCg/hYWZWrO0ThYfwMDMr1vaJwkN4mJkVa/teT5CSwsYbw2GHwfXXwzve0eyIzMxaR9vfUZiZWTEnCjMzK+REYWZmhRqaKCRdIGm5pAfKys6S9BtJCyT9RNKEsm2fkbRI0sOS9mtEjBGNuIqZ2ejR6DuK7wH7Dyq7EdgpInYGfgt8BkDSjsARwNvzMd+QNK5egUn1OrOZ2ejW0EQREbcBzw0quyEiVubVO4FpeXkmcGlEvBIRjwGLgF0bFqyZmQGt10ZxLHBtXp4KPFm2bUkuW4OkHkl9kvr6+/vrHKKZWXtpmUQhaTawEigN8l2pMqhiC0JEzI2Irojo6ujoqFeIZmZtqSUeuJM0C/gQsE/EG83JS4Cty3abBjzV6NjMzNpd0+8oJO0PnAocHBHlA35fBRwhaSNJ2wDbA79sRoxmZu2soXcUki4B9gImS1oCfIHUy2kj4Ealrkd3RsTHI+JBSZcBvyZVSZ0YEa83Ml4zMwPFGHtwoKurK/r6+kZ0TG8vnHQSPPNMGhTwrLM8KKCZtRdJ8yOiq9K2lmijaCZPhWpmVqzpbRTN5qlQzcyKtX2i8FSoZmbF2j5ReCpUM7NibZ8oPBWqmVmxtk8UpalQSw90eypUM7OB2r7XE6SkMH48HHooXHstvPOdzY7IzKx1tP0dhZmZFXOiGGSMPX9oZrbOnCgyT1xkZlaZE4WZmRVyojAzs0JOFGZmVsiJwszMCjlRmJlZIScKMzMr5ERhZmaFnCgG8QN3ZmY
DOVFkfuDOzKyyhiYKSRdIWi7pgbKySZJulLQwv0/M5ZJ0rqRFkhZI2qWRsZqZWdLoO4rvAfsPKjsNuCkitgduyusABwDb51cPcH6DYjQzszINTRQRcRvw3KDimcC8vDwPOKSs/KJI7gQmSJrSmEjNzKykFdootoyIpQD5fYtcPhV4smy/JblsDZJ6JPVJ6uvv769rsGZm7aYVEsVQKjUvV+yTFBFzI6IrIro6SlPVmZlZTbRColhWqlLK78tz+RJg67L9pgFPNTg2M7O21wqJ4ipgVl6eBVxZVn5U7v20G/BCqYqqnvwchZnZQA2dM1vSJcBewGRJS4AvAF8GLpN0HLAYODzvfg1wILAIWAEcU9/Y6nl2M7PRq6GJIiI+OsSmfSrsG8CJ9Y3IzMyGU3XVk6R3S/qxpGckrSw9ACfpTEmDn40wM7MxoqpEIel9wB3AW4EfDDpuFfDx2odmZmatoNo7ii8D1wNvB04ZtO1uwMNrmJmNUdW2UewCHBoRIWlwv6BnAD+8YGY2RlV7R/EyMH6IbVOAF2oTjpmZtZpqE8UvgJMkjSsrK91ZHAfcXNOozMysZVRb9fQ54HbgPuByUpKYJelrwHuAv6hPeI3nB+7MzAaq6o4iIu4D9gSWAbNJ4zB9Im9+f0Q8XJ/wGscP3JmZVVb1A3cRcTewj6SNgUnA8xGxom6RmZlZSxjxk9kR8TIenM/MrG1UlSgkfX6YXSIi/qUG8ZiZWYup9o5iTsG2UvOvE4WZ2RhUbWP2eoNfwObA0cADwFvqGKOZmTXRWo8eGxG/By6StDlwHmlIcDMzG2NqMXFRqeusmZmNQbVIFB8C+mtwnpbgB+7MzAaqttfTBRWKNwR2At5BmqluVPMDd2ZmlVXbRrE3q3s3lbwMPAF8HZhXy6DMzKx1VJUoIqKzznGYmVmLqkUbRU1IOlnSg5IekHSJpI0lbSPpLkkLJf1Q0obNjtPMrN0MeUchaUQ9mSLitrUNQtJU4JPAjhHxkqTLgCNIXW7PjohLJX2TNKT5+Wt7HTMzG7miqqdbWbNdohLl/cYNt2MVsWwi6TXSJElLSW0jH8vb55GeEK95oujthZNPTssHHQRf/Sp0d9f6KmZmo1NRovhAo4KIiN9J+iqwGHgJuAGYTxqhdmXebQkwtdbX7u2Fnh5YkcfBffrptA5OFmZmUJAoIuLnjQpC0kRgJrAN8DzwI+CASmENcXwP0AMwffr0EV179uzVSaJkxYpU7kRhZtY6jdl/BTwWEf0R8RrwY+C9wARJpWQ2jSGGN4+IuRHRFRFdHR0dI7rw4sUjKzczazdVj/UkaSdSY/IOwMaDNkdE7LMOcSwGdpM0nlT1tA/QB9wCHAZcCswCrlyHa1Q0fTo88UTlcjMzq/KOQtJfkn5xHwDsB0wEtgX2Io0cu07PNUfEXaS5uO8G7s9xzQVOBU6RtIg0Wu131+U6lZxxBowfP7Bs/PhUbmZmoKhicCNJN5Hmyz4SeA3oioi7Je0NfB84MiJurmukVerq6oq+vr4RHVPq9dTfD1tt5V5PZtZ+JM2PiK5K26qtetqZVPVTyirjACLiZkmnA18C/nJdA22W7m6YOBE++EG48krYdddmR2Rm1jqqbczeAHgxIlYBzwFTyrY9TBoc0MzMxqBqE8UjrH6GYQFwrKT1JK0HHAM8XY/gzMys+aqtevpPUsP1D4AzgZ8CfwBeBzYlDb8xJng+CjOzgaodPXZO2fLPJO0GfIQ01MZ1EXFDfcJrHM9HYWZW2VrNmR0R9wD31DgWMzNrQdU+R/FjSYdI2qDeATWbq57MzAaqtjH7raRhNZZKOi9XPY0prnoyM6usqkQRETsCfwFcDBwK3J4nE/qcpG3rGaCZmTVX1YMCRsT8iDiJNDjfQcCvSENsLJT0X3WKz8zMmmzEo8dGxOsRcU1EfIx0d/EUaaTXMcFtFGZmA404UUjaTtIXJP0WuJY0IOC/1TyyBurthVmz0vKHP5zWzcwsqap
7bJ5Y6G9IgwLuBqwAfgKcCPwsqhlZsEUNnuFu2TLPcGdmVq7a0WNfIQ0EeDNptNgrImJF8VHNMdLRYzs7K89HMWMGPP54zcIyM2tptRg99p+AiyNiae3Cag2e4c7MrFi13WPPGotJAoaeyc4z3JmZJa0yZ3bTeIY7M7NibZ8ourth7lzYYou0vuWWad0N2WZmyVoNCjjWdHenRLHvvnDFFbDHHs2OyMysdbT9HYWZmRVrmUQhaYKkyyX9RtJDknaXNEnSjXlcqRvz8xx1NXqfCDEzq49qhxmfKemYsvUZku6Q9Mf8y33TGsRyDmkSpLcC7wQeAk4DboqI7YGb8npdePRYM7PKqr2j+Cego2z9a6TBAecCewJz1iUISZvl83wXICJejYjngZnAvLzbPOCQdbmOmZmNXLWJYjtgAYCkTYADgVMi4tPAZ4EPr2Mc2wL9wIWS7pH0HUlvArYsPb+R37eodLCkHkl9kvr6+/vXMRQzMytXbaLYGHgpL7+X1FuqNE/2w8CfrWMc6wO7AOdHxLuBFxlBNVNEzI2Irojo6ujoGP6AwnOt0+FmZmNOtYniceB9eXkmMD8iXsjrWwAvVDpoBJYASyLirrx+OSlxLJM0BSC/L1/H6wzJbRRmZpVVmyi+BcyR1AecQG5LyHYHfr0uQUTE08CTknbIRfvkc14F5AHAmQVcuS7XMTOzkavqgbuIOEfSM6Qhxs+NiIvKNr8ZuLAGsfwfoFfShsCjwDGkRHaZpOOAxcDhNbhOIVc9mZkNVPWT2RHRC6wxpU9E/H0tAomIe4FKQ9zuU4vzD8dVT2ZmlVX7HMWfS9q1bH0TSV+S9J+SPlG/8MzMrNmqbaP4d+CwsvUzgE+TejudLenEWgdmZmatodpEsTNwO4Ck9YCjgFMj4j3A6UBPfcJrPLdRmJkNVG2imAA8m5ffDUwkdWEFuJX0wNyo5jYKM7PKqk0Uy4C35OV9gUci4sm8vimwstaBmZlZa6i219NVwJck7QQcTXquouQdpO6sZmY2BlWbKE4jDeOxHylpnFm27WBWD+cx6rmNwsxsoGofuHsR+N9DbHtvTSNqErdRmJlVNqKpUCVNIg3ZMYnUuH1nRDxXj8DMzKw1VJ0oJJ1OenZio7LiVyR9NSI+V/PImsRVT2ZmA1X7ZPZJpHknLgY+ALwtv18MfFbSJ+sWYYO46snMrLJq7yg+DpwTESeXlT0M/FzSn0gjyp5b6+DMzKz5qn2OohP46RDbfpq3m5nZGFRtongW2GmIbW9n9VPbo57bKMzMBqo2UfwE+BdJR0raAEDS+pI+CvwzcEW9AmwUt1GYmVVWbaL4DHAvMA9YIWkZaQ7tXuA+UkO3mZmNQdU+cPdHSXsCHwT+J+k5iueAnwPXRoydCpux80nMzGpjJDPcBXB1fo05rnoyM6us2qonMzNrU0MmCkmrJL1e5asmw4xLGifpHklX5/VtJN0laaGkH0rasBbXMTOz6hVVPf0z0Oga+08BDwGb5fWvAGdHxKWSvgkcB5xfzwDcRmFmNtCQiSIi5jQwDiRNIzWWnwGcIknA3sDH8i7zgDnUKVG4jcLMrLJWaqP4OvCPwKq8vjnwfESUqrWWAFMrHSipR1KfpL7+/v76R2pm1kZaIlFI+hCwPCLmlxdX2LVixVBEzI2Irojo6ujoWKdYXPVkZjbQiOajqKM9gIMlHUiaSW8z0h3GBEnr57uKacBT9QrAVU9mZpW1xB1FRHwmIqZFRCdwBHBzRHQDtwCH5d1mAVc2KUQzs7bVEomiwKmkhu1FpDaL7zY5HjOzttMqVU9viIhbgVvz8qPAro29fiOvZmbW+lr9jqJh3EZhZlaZE4WZmRVyojAzs0JOFEBvLxx6aFo+8si0bmZmScs1Zjdaby/09MCKFWl9+fK0DtDd3by4zMxaRdvfUcyevTpJlKxYkcrNzMyJgsWLR1ZuZtZu2j5RTJ8+snIzs3bT9onijDNg/PiBZePHp3IzM3OioLs
b5s6FzTZbXbbJJs2Lx8ys1bR9oih59dXVy88+m3o+uZusmZkTBZB6OL388sAy93wyM0ucKHDPJzOzIk4UrNmYPVy5mVk7caIAXnppZOVmZu3EiQJYtWpk5WZm7cSJAhg3bmTlZmbtxImC1YMAVltuZtZOnCiAb3wDPvKR1evjxsHxx6dyM7N21xKJQtLWkm6R9JCkByV9KpdPknSjpIX5fWK9Yjj11PR+9dWwcqWThJlZSUskCmAl8OmIeBuwG3CipB2B04CbImJ74Ka8XhfXXZfeDzoIOjv9VLaZWUlLJIqIWBoRd+flPwIPAVOBmcC8vNs84JB6XL+3F848sxQLPPGEh/AwMytpiURRTlIn8G7gLmDLiFgKKZkAW9Tjmh7Cw8xsaC2VKCRtClwBnBQRfxjBcT2S+iT19ff3j/i6HsLDzGxoLZMoJG1AShK9EfHjXLxM0pS8fQqwvNKxETE3Iroioqujo2PE1/bkRWZmQ2uJRCFJwHeBhyLia2WbrgJm5eVZwJX1uP4ZZ8AGGwws23BDT15kZgawfrMDyPYAjgTul3RvLvss8GXgMknHAYuBw+sVQETxuplZu1KMsd+IXV1d0dfXN6JjOjtTT6fBZsyAxx+vSVhmZi1N0vyI6Kq0rSWqnprNjdlmZkNzosCN2WZmRZwogAMPHFm5mVk7caIArrlmZOVmZu3EiYLKDdlF5WZm7aTtE0XReE7rtf23Y2bmRFE4npOnQjUzc6JwF1gzs2G0faIo6gK7+eaNi8PMrFW1faKoNM4TpOlQzzmn8fGYmbWaVhnrqWm6u9P7UUcNbJPYa6/V28zM2lnb31EA3H77mg3XN90EJ5zQnHjMzFqJEwXwrW+NrNzMrJ04UTB0N1h3jzUzc6IwM7NhOFGYmVmhtk8URUN4mJmZE0XhEB5mZuZEMewQHhJsskkaILCz03cgZtZ+2j5RVDOL3csvQ0Qadvxv/zYlD7/a47X++ul5mt7e9IeClP5oKG0fNy69T54Mm266unzy5DX/qCidY/AfHSeckK5T6XpD/YEy3HazWlJENDuGYUnaHzgHGAd8JyK+PNS+XV1d0dfXV/W5e3vTL38zs7FkpL/aJc2PiK5K21r+jkLSOOA84ABgR+Cjknas1fk9TIeZjUVS7c7V8okC2BVYFBGPRsSrwKXAzCbHZGbWNkZDopgKPFm2viSXvUFSj6Q+SX39/f0NDc7MbKwbDYmi0g3UgNq3iJgbEV0R0dXR0dGgsMzM2sNoSBRLgK3L1qcBT9XyAqOgPd/MrGlGQ6L4FbC9pG0kbQgcAVxV64tEVH5dfDHMmFHrq5mZ1Vct/wBu+YmLImKlpE8A15O6x14QEQ826vrd3e4ZZWbtreUTBUBEXANc0+w4zMza0WioejIzsyZyojAzs0JOFGZmVsiJwszMCo2KQQFHQlI/8MRaHj4ZeKaG4TSSY28Ox94cjr32ZkRExSeWx1yiWBeS+oYaPbHVOfbmcOzN4dgby1VPZmZWyInCzMwKOVEMNLfZAawDx94cjr05HHsDuY3CzMwK+Y7CzMwKOVGYmVkhJ4pM0v6SHpa0SNJpzY6nRNLjku6XdK+kvlw2SdKNkhbm94m5XJLOzZ9hgaRdys4zK++/UNKsOsV6gaTlkh4oK6tZrJLek7+LRfnYms0KPETscyT9Ln/390o6sGzbZ3IcD0var6y84s9RHib/rvyZfpiHzK9F3FtLukXSQ5IelPSpXN7y33tB7KPhe99Y0i8l3Zdj/2LR9SRtlNcX5e2da/uZmiIi2v5FGr78EWBbYEPgPmDHZseVY3scmDyo7F+B0/LyacBX8vKBwLWkWQF3A+7K5ZOAR/P7xLw8sQ6x7gnsAjxQj1iBXwK752OuBQ6oc+xzgH+osO+O+WdkI2Cb/LMzrujnCLgMOCIvfxM4vkZxTwF2yctvBn6b42v5770g9tHwvQvYNC9vANyVv8+K1wNOAL6Zl48Afri2n6kZL99RJLsCiyLi0Yh
4FbgUmNnkmIrMBObl5XnAIWXlF0VyJzBB0hRgP+DGiHguIn4P3AjsX+ugIuI24Ll6xJq3bRYRd0T6H3ZR2bnqFftQZgKXRsQrEfEYsIj0M1Tx5yj/Bb43cHk+vvx7WNe4l0bE3Xn5j8BDpDnlW/57L4h9KK30vUdE/CmvbpBfUXC98n+Py4F9cnwj+ky1iH1tOFEkU4Eny9aXUPwD20gB3CBpvqSeXLZlRCyF9J8N2CKXD/U5mvn5ahXr1Lw8uLzePpGraC4oVd8ME2Ol8s2B5yNi5aDymsrVGe8m/XU7qr73QbHDKPjeJY2TdC+wnJRYHym43hsx5u0v5Pha8f/sGpwokkp1rq3Sb3iPiNgFOAA4UdKeBfsO9Tla8fONNNZmfIbzge2AdwFLgX/L5S0Xu6RNgSuAkyLiD0W7DhFLK8U+Kr73iHg9It4FTCPdAbyt4HotFftIOVEkS4Cty9anAU81KZYBIuKp/L4c+AnpB3JZrhIgvy/Puw/1OZr5+WoV65K8PLi8biJiWf5lsAr4Num7Z5gYK5U/Q6riWX9QeU1I2oD0i7Y3In6ci0fF914p9tHyvZdExPPAraQ2iqGu90aMefv/IFV1tuL/2TU1q3GklV6kKWEfJTUmlRqO3t4Ccb0JeHPZ8n+T2hbOYmBD5b/m5Q8ysKHyl7l8EvAYqZFyYl6eVKeYOxnYIFyzWIFf5X1LjaoH1jn2KWXLJ5PqkgHezsAGyEdJjY9D/hwBP2JgI+cJNYpZpHaDrw8qb/nvvSD20fC9dwAT8vImwH8BHxrqesCJDGzMvmxtP1MzXk25aCu+SL1BfkuqZ5zd7HhyTNvmH5D7gAdLcZHqNm8CFub30n9oAeflz3A/0FV2rmNJDWWLgGPqFO8lpKqC10h/ER1Xy1iBLuCBfMy/k0cWqGPs38+xLQCuGvQLbHaO42HKegEN9XOU/y1/mT/Tj4CNahT3+0hVEguAe/PrwNHwvRfEPhq+952Be3KMDwCfL7oesHFeX5S3b7u2n6kZLw/hYWZmhdxGYWZmhZwozMyskBOFmZkVcqIwM7NCThRmZlbIicLaRh6VNPLyhLy+y3DH1TGed+UYJlXYFpLmNCEsszU4UVg7+Q5pFFSACcAXSCPGNsu7cgxrJApSnN9pbDhmla0//C5mY0NELGHgAHc1lUcD3SDSaJ/rJNLIrmYtwXcU1jZKVU95pNLHcvG3c1lIOrps30Ml3SlphaTnJf1I0vRB53tc0sWSjpX0G+BV0hAZSPqipLslvSDpGUk3S9qt7NijgQvz6sKyGDrz9jWqnvJENndIeimf9z8k7TBon1sl/ULSX+Xrr5D0gKSaDclu7ceJwtrRUuDQvPwlUjXP7sBPASR9nDRQ3a+Bw4C/B3YCfi7pzYPO9QHgFOCLpHG4FuTyqcDZpPkIjiYNynebpJ3z9p8Cp+flw8tiWFopYEn752P+BPwNcHyO6ReSBg8/vR1wDvC1/DmXApdLekvht2I2BFc9WduJiFck3ZNXHy2v5slDXn8FuDAiji0rv4s07s5xwNfLTjcReE9EPD3oGn9Xduw44DrSeF3HAZ+KiH5Jj+Rd7o2IRcOEfTppkLgDIs93IOmOHNOnScmqZDKwZ0QszPvdTUoWfw2cOcx1zNbgOwqzgXYHNgN6Ja1fepHaNn5DmjK13J2DkwRArvq5RdKzwErSYIN/DuwweN/hSHoTqdH9h7F6UhwizYh2O/D+QYcsLCWJvN9y0h3NdMzWgu8ozAYqzQT3syG2/37Q+hpVRbnL7TXA9aQ7iKXA66ReTBuvRUwTSaO+VqqWehqYMais0pSur6zltc2cKMwGeTa/H02qKhrsj4PWKw2//BHSXcShEfFaqTBP6fn8WsT0+3ydrSps24rVMZvVhROFtatX8vsmg8r/m5QM3hIR89by3ONJdxBvJBFJe5Oqfh4r22+oGAaIiBclzQcOlzQnIl7P55wBvBf4f2sZp1lVnCisXS0j/SV+hKQFwIv
AYxHxrKT/C5wnqYM0o9sLpF5M7wdujYgfDHPu64CTgO9JupDUNvE54HeD9vt1fj9R0jxSO8aCIZ7D+Byp19PVkr4BbErqafUCq+eUNqsLN2ZbW4o0H/Pfker/f0aa7vOgvO1bwMGkhufvk5LFF0l/WN1bxbmvBz4J7AFcTZo57ijS7Gbl+90HzMnX/UWO4c+GOOd1pGc0JgCXkabZfAh4X+R51c3qxTPcmZlZId9RmJlZIScKMzMr5ERhZmaFnCjMzKyQE4WZmRVyojAzs0JOFGZmVsiJwszMCv1/dna7sjqINoYAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "model = models.model_initialize(**config)\n",
+    "model.train(train, val, test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.7"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}