a b/DEMO/DeepDTA_Reproduce_KIBA.ipynb
1
{
2
 "cells": [
3
  {
4
   "cell_type": "code",
5
   "execution_count": 1,
6
   "metadata": {},
7
   "outputs": [],
8
   "source": [
9
    "import os\n",
10
    "os.chdir('../')\n",
11
    "\n",
12
    "import DeepPurpose.DTI as models\n",
13
    "from DeepPurpose.utils import *\n",
14
    "from DeepPurpose.dataset import *"
15
   ]
16
  },
17
  {
18
   "cell_type": "code",
19
   "execution_count": 2,
20
   "metadata": {},
21
   "outputs": [
22
    {
23
     "name": "stdout",
24
     "output_type": "stream",
25
     "text": [
26
      "Beginning Processing...\n",
27
      "Beginning to extract zip file...\n",
28
      "Done!\n",
29
      "in total: 118254 drug-target pairs\n",
30
      "encoding drug...\n",
31
      "unique drugs: 2068\n",
32
      "drug encoding finished...\n",
33
      "encoding protein...\n",
34
      "unique target sequence: 229\n",
35
      "protein encoding finished...\n",
36
      "splitting dataset...\n",
37
      "Done.\n"
38
     ]
39
    }
40
   ],
41
   "source": [
42
    "X_drug, X_target, y = load_process_KIBA('./data/', binary=False)\n",
43
    "\n",
44
    "drug_encoding = 'CNN'\n",
45
    "target_encoding = 'CNN'\n",
46
    "train, val, test = data_process(X_drug, X_target, y, \n",
47
    "                                drug_encoding, target_encoding, \n",
48
    "                                split_method='random',frac=[0.7,0.1,0.2])\n",
49
    "\n",
50
    "# use the parameters setting provided in the paper: https://arxiv.org/abs/1801.10193\n",
51
    "config = generate_config(drug_encoding = drug_encoding, \n",
52
    "                         target_encoding = target_encoding, \n",
53
    "                         cls_hidden_dims = [1024,1024,512], \n",
54
    "                         train_epoch = 100, \n",
55
    "                         LR = 0.001, \n",
56
    "                         batch_size = 256,\n",
57
    "                         cnn_drug_filters = [32,64,96],\n",
58
    "                         cnn_target_filters = [32,64,96],\n",
59
    "                         cnn_drug_kernels = [4,6,8],\n",
60
    "                         cnn_target_kernels = [4,8,12]\n",
61
    "                        )"
62
   ]
63
  },
64
  {
65
   "cell_type": "code",
66
   "execution_count": 3,
67
   "metadata": {},
68
   "outputs": [
69
    {
70
     "name": "stdout",
71
     "output_type": "stream",
72
     "text": [
73
      "Let's use 1 GPU/s!\n",
74
      "--- Data Preparation ---\n",
75
      "--- Go for Training ---\n",
76
      "Training at Epoch 1 iteration 0 with loss 139.46227\n",
77
      "Training at Epoch 1 iteration 100 with loss 0.8524759\n",
78
      "Training at Epoch 1 iteration 200 with loss 0.7659789\n",
79
      "Training at Epoch 1 iteration 300 with loss 0.77633584\n",
80
      "Validation at Epoch 1 , MSE: 0.6252079559108886 , Pearson Correlation: 0.37120514588057757 with p-value: 0.0 , Concordance Index: 0.6476415418668936\n",
81
      "Training at Epoch 2 iteration 0 with loss 0.6057545\n",
82
      "Training at Epoch 2 iteration 100 with loss 0.7182188\n",
83
      "Training at Epoch 2 iteration 200 with loss 0.5523121\n",
84
      "Training at Epoch 2 iteration 300 with loss 0.7289833\n",
85
      "Validation at Epoch 2 , MSE: 0.5057249716094536 , Pearson Correlation: 0.5271302441704545 with p-value: 0.0 , Concordance Index: 0.7108944244194332\n",
86
      "Training at Epoch 3 iteration 0 with loss 0.3336088\n",
87
      "Training at Epoch 3 iteration 100 with loss 0.55122924\n",
88
      "Training at Epoch 3 iteration 200 with loss 0.9139983\n",
89
      "Training at Epoch 3 iteration 300 with loss 0.3553281\n",
90
      "Validation at Epoch 3 , MSE: 0.43898317224256905 , Pearson Correlation: 0.6078014781089629 with p-value: 0.0 , Concordance Index: 0.7420012120873392\n",
91
      "Training at Epoch 4 iteration 0 with loss 0.6549053\n",
92
      "Training at Epoch 4 iteration 100 with loss 0.28032207\n",
93
      "Training at Epoch 4 iteration 200 with loss 0.5554817\n",
94
      "Training at Epoch 4 iteration 300 with loss 0.4001999\n",
95
      "Validation at Epoch 4 , MSE: 0.517115460522088 , Pearson Correlation: 0.6388425484955205 with p-value: 0.0 , Concordance Index: 0.7548923258982349\n",
96
      "Training at Epoch 5 iteration 0 with loss 0.3249922\n",
97
      "Training at Epoch 5 iteration 100 with loss 0.49228954\n",
98
      "Training at Epoch 5 iteration 200 with loss 0.53248477\n",
99
      "Training at Epoch 5 iteration 300 with loss 0.35868204\n",
100
      "Validation at Epoch 5 , MSE: 0.43980806543009365 , Pearson Correlation: 0.6566629620181142 with p-value: 0.0 , Concordance Index: 0.7574936859223567\n",
101
      "Training at Epoch 6 iteration 0 with loss 0.33518466\n",
102
      "Training at Epoch 6 iteration 100 with loss 0.4694142\n",
103
      "Training at Epoch 6 iteration 200 with loss 0.43182057\n",
104
      "Training at Epoch 6 iteration 300 with loss 0.27362764\n",
105
      "Validation at Epoch 6 , MSE: 0.40906901873856194 , Pearson Correlation: 0.6642595221495057 with p-value: 0.0 , Concordance Index: 0.7633581128907642\n",
106
      "Training at Epoch 7 iteration 0 with loss 0.4240064\n",
107
      "Training at Epoch 7 iteration 100 with loss 0.579832\n",
108
      "Training at Epoch 7 iteration 200 with loss 0.29981527\n",
109
      "Training at Epoch 7 iteration 300 with loss 0.37393552\n",
110
      "Validation at Epoch 7 , MSE: 0.4669870538496263 , Pearson Correlation: 0.6709961444282361 with p-value: 0.0 , Concordance Index: 0.7629225786099979\n",
111
      "Training at Epoch 8 iteration 0 with loss 0.49253836\n",
112
      "Training at Epoch 8 iteration 100 with loss 0.43154156\n",
113
      "Training at Epoch 8 iteration 200 with loss 0.8942315\n",
114
      "Training at Epoch 8 iteration 300 with loss 0.36026978\n",
115
      "Validation at Epoch 8 , MSE: 0.40576572643883924 , Pearson Correlation: 0.6698314683974463 with p-value: 0.0 , Concordance Index: 0.7633775503707003\n",
116
      "Training at Epoch 9 iteration 0 with loss 0.54117924\n",
117
      "Training at Epoch 9 iteration 100 with loss 0.4534474\n",
118
      "Training at Epoch 9 iteration 200 with loss 0.2908664\n",
119
      "Training at Epoch 9 iteration 300 with loss 0.40684277\n",
120
      "Validation at Epoch 9 , MSE: 0.3866416384830573 , Pearson Correlation: 0.6740064110006907 with p-value: 0.0 , Concordance Index: 0.7670906313072465\n",
121
      "Training at Epoch 10 iteration 0 with loss 0.5149473\n",
122
      "Training at Epoch 10 iteration 100 with loss 0.3061192\n",
123
      "Training at Epoch 10 iteration 200 with loss 0.39455894\n",
124
      "Training at Epoch 10 iteration 300 with loss 0.41524413\n",
125
      "Validation at Epoch 10 , MSE: 0.5004661130110899 , Pearson Correlation: 0.6746381122340862 with p-value: 0.0 , Concordance Index: 0.7636888073994913\n",
126
      "Training at Epoch 11 iteration 0 with loss 0.50837636\n",
127
      "Training at Epoch 11 iteration 100 with loss 0.34029388\n",
128
      "Training at Epoch 11 iteration 200 with loss 0.4000504\n",
129
      "Training at Epoch 11 iteration 300 with loss 0.43241504\n",
130
      "Validation at Epoch 11 , MSE: 0.687117328399177 , Pearson Correlation: 0.6775114338921054 with p-value: 0.0 , Concordance Index: 0.768135395853816\n",
131
      "Training at Epoch 12 iteration 0 with loss 0.61384964\n",
132
      "Training at Epoch 12 iteration 100 with loss 0.4489369\n",
133
      "Training at Epoch 12 iteration 200 with loss 0.41723198\n",
134
      "Training at Epoch 12 iteration 300 with loss 0.52659756\n",
135
      "Validation at Epoch 12 , MSE: 0.44284007519234136 , Pearson Correlation: 0.6815251759494406 with p-value: 0.0 , Concordance Index: 0.7671332681158448\n",
136
      "Training at Epoch 13 iteration 0 with loss 0.4339065\n",
137
      "Training at Epoch 13 iteration 100 with loss 0.43391797\n",
138
      "Training at Epoch 13 iteration 200 with loss 0.44804138\n",
139
      "Training at Epoch 13 iteration 300 with loss 0.29588428\n",
140
      "Validation at Epoch 13 , MSE: 0.3794264554744155 , Pearson Correlation: 0.6824709263168197 with p-value: 0.0 , Concordance Index: 0.7648438539097174\n",
141
      "Training at Epoch 14 iteration 0 with loss 0.35877725\n",
142
      "Training at Epoch 14 iteration 100 with loss 0.40320885\n",
143
      "Training at Epoch 14 iteration 200 with loss 0.6108784\n",
144
      "Training at Epoch 14 iteration 300 with loss 0.4231795\n",
145
      "Validation at Epoch 14 , MSE: 0.37035736481551695 , Pearson Correlation: 0.6886761842378898 with p-value: 0.0 , Concordance Index: 0.769635147299312\n",
146
      "Training at Epoch 15 iteration 0 with loss 0.28133658\n",
147
      "Training at Epoch 15 iteration 100 with loss 0.2723529\n",
148
      "Training at Epoch 15 iteration 200 with loss 0.37842792\n",
149
      "Training at Epoch 15 iteration 300 with loss 0.42256156\n",
150
      "Validation at Epoch 15 , MSE: 0.39174580991934793 , Pearson Correlation: 0.6980164319885487 with p-value: 0.0 , Concordance Index: 0.7729698407516334\n",
151
      "Training at Epoch 16 iteration 0 with loss 0.442822\n",
152
      "Training at Epoch 16 iteration 100 with loss 0.29855317\n",
153
      "Training at Epoch 16 iteration 200 with loss 0.44767448\n",
154
      "Training at Epoch 16 iteration 300 with loss 0.3829686\n",
155
      "Validation at Epoch 16 , MSE: 0.3587687227351904 , Pearson Correlation: 0.7275656094433479 with p-value: 0.0 , Concordance Index: 0.7817976053933012\n",
156
      "Training at Epoch 17 iteration 0 with loss 0.39717722\n",
157
      "Training at Epoch 17 iteration 100 with loss 0.34678438\n",
158
      "Training at Epoch 17 iteration 200 with loss 0.30226183\n",
159
      "Training at Epoch 17 iteration 300 with loss 0.42329437\n",
160
      "Validation at Epoch 17 , MSE: 0.4687368140248108 , Pearson Correlation: 0.7304166804308427 with p-value: 0.0 , Concordance Index: 0.7828565923015529\n",
161
      "Training at Epoch 18 iteration 0 with loss 0.3633739\n",
162
      "Training at Epoch 18 iteration 100 with loss 0.28657123\n",
163
      "Training at Epoch 18 iteration 200 with loss 0.32053304\n",
164
      "Training at Epoch 18 iteration 300 with loss 0.32039723\n",
165
      "Validation at Epoch 18 , MSE: 0.3276044065524828 , Pearson Correlation: 0.7382404349298669 with p-value: 0.0 , Concordance Index: 0.7846119602326115\n",
166
      "Training at Epoch 19 iteration 0 with loss 0.17331107\n",
167
      "Training at Epoch 19 iteration 100 with loss 0.43098885\n",
168
      "Training at Epoch 19 iteration 200 with loss 0.34547406\n",
169
      "Training at Epoch 19 iteration 300 with loss 0.29107505\n",
170
      "Validation at Epoch 19 , MSE: 0.3122119465124941 , Pearson Correlation: 0.7406594366731748 with p-value: 0.0 , Concordance Index: 0.7859064449263984\n",
171
      "Training at Epoch 20 iteration 0 with loss 0.3300522\n",
172
      "Training at Epoch 20 iteration 100 with loss 0.23845857\n",
173
      "Training at Epoch 20 iteration 200 with loss 0.3112779\n",
174
      "Training at Epoch 20 iteration 300 with loss 0.2908824\n",
175
      "Validation at Epoch 20 , MSE: 0.332362568609342 , Pearson Correlation: 0.7433144190944236 with p-value: 0.0 , Concordance Index: 0.779341019776788\n",
176
      "Training at Epoch 21 iteration 0 with loss 0.21694058\n",
177
      "Training at Epoch 21 iteration 100 with loss 0.31264272\n",
178
      "Training at Epoch 21 iteration 200 with loss 0.291134\n",
179
      "Training at Epoch 21 iteration 300 with loss 0.2443328\n",
180
      "Validation at Epoch 21 , MSE: 0.32488440260458695 , Pearson Correlation: 0.7549241003756116 with p-value: 0.0 , Concordance Index: 0.7905469464642452\n",
181
      "Training at Epoch 22 iteration 0 with loss 0.296259\n",
182
      "Training at Epoch 22 iteration 100 with loss 0.32945767\n",
183
      "Training at Epoch 22 iteration 200 with loss 0.28351027\n",
184
      "Training at Epoch 22 iteration 300 with loss 0.27165496\n",
185
      "Validation at Epoch 22 , MSE: 0.3556623065139081 , Pearson Correlation: 0.7558565473223899 with p-value: 0.0 , Concordance Index: 0.7933693699770804\n",
186
      "Training at Epoch 23 iteration 0 with loss 0.3517964\n"
187
     ]
188
    },
189
    {
190
     "name": "stdout",
191
     "output_type": "stream",
192
     "text": [
193
      "Training at Epoch 23 iteration 100 with loss 0.21723273\n",
194
      "Training at Epoch 23 iteration 200 with loss 0.3215351\n",
195
      "Training at Epoch 23 iteration 300 with loss 0.37353116\n",
196
      "Validation at Epoch 23 , MSE: 0.2904944052104942 , Pearson Correlation: 0.7605299689697724 with p-value: 0.0 , Concordance Index: 0.7966217114894854\n",
197
      "Training at Epoch 24 iteration 0 with loss 0.2798943\n",
198
      "Training at Epoch 24 iteration 100 with loss 0.39643136\n",
199
      "Training at Epoch 24 iteration 200 with loss 0.4295775\n",
200
      "Training at Epoch 24 iteration 300 with loss 0.24249879\n",
201
      "Validation at Epoch 24 , MSE: 0.3018565196551627 , Pearson Correlation: 0.7695774972212105 with p-value: 0.0 , Concordance Index: 0.7993369790819714\n",
202
      "Training at Epoch 25 iteration 0 with loss 0.22099498\n",
203
      "Training at Epoch 25 iteration 100 with loss 0.26581356\n",
204
      "Training at Epoch 25 iteration 200 with loss 0.31408113\n",
205
      "Training at Epoch 25 iteration 300 with loss 0.31087956\n",
206
      "Validation at Epoch 25 , MSE: 0.28863161072602156 , Pearson Correlation: 0.762775379909726 with p-value: 0.0 , Concordance Index: 0.8012207040164403\n",
207
      "Training at Epoch 26 iteration 0 with loss 0.2655613\n",
208
      "Training at Epoch 26 iteration 100 with loss 0.25201494\n",
209
      "Training at Epoch 26 iteration 200 with loss 0.2319586\n",
210
      "Training at Epoch 26 iteration 300 with loss 0.2557767\n",
211
      "Validation at Epoch 26 , MSE: 0.4930393249311044 , Pearson Correlation: 0.7699562360525237 with p-value: 0.0 , Concordance Index: 0.7963000393624107\n",
212
      "Training at Epoch 27 iteration 0 with loss 0.44117013\n",
213
      "Training at Epoch 27 iteration 100 with loss 0.3273319\n",
214
      "Training at Epoch 27 iteration 200 with loss 0.26239604\n",
215
      "Training at Epoch 27 iteration 300 with loss 0.2871576\n",
216
      "Validation at Epoch 27 , MSE: 0.2763490526371625 , Pearson Correlation: 0.7732378079725977 with p-value: 0.0 , Concordance Index: 0.8040261651490042\n",
217
      "Training at Epoch 28 iteration 0 with loss 0.2402317\n",
218
      "Training at Epoch 28 iteration 100 with loss 0.2201614\n",
219
      "Training at Epoch 28 iteration 200 with loss 0.29813662\n",
220
      "Training at Epoch 28 iteration 300 with loss 0.36932343\n",
221
      "Validation at Epoch 28 , MSE: 0.3163570835376237 , Pearson Correlation: 0.7757515692799253 with p-value: 0.0 , Concordance Index: 0.7992013178853609\n",
222
      "Training at Epoch 29 iteration 0 with loss 0.30585524\n",
223
      "Training at Epoch 29 iteration 100 with loss 0.29580683\n",
224
      "Training at Epoch 29 iteration 200 with loss 0.26298445\n",
225
      "Training at Epoch 29 iteration 300 with loss 0.29019976\n",
226
      "Validation at Epoch 29 , MSE: 0.27718322610610213 , Pearson Correlation: 0.7781347032430801 with p-value: 0.0 , Concordance Index: 0.8021961809106205\n",
227
      "Training at Epoch 30 iteration 0 with loss 0.2829763\n",
228
      "Training at Epoch 30 iteration 100 with loss 0.28772902\n",
229
      "Training at Epoch 30 iteration 200 with loss 0.2438101\n",
230
      "Training at Epoch 30 iteration 300 with loss 0.2709101\n",
231
      "Validation at Epoch 30 , MSE: 0.3025488108277402 , Pearson Correlation: 0.7766137013109087 with p-value: 0.0 , Concordance Index: 0.7978011305407529\n",
232
      "Training at Epoch 31 iteration 0 with loss 0.2151288\n",
233
      "Training at Epoch 31 iteration 100 with loss 0.27977476\n",
234
      "Training at Epoch 31 iteration 200 with loss 0.32945055\n",
235
      "Training at Epoch 31 iteration 300 with loss 0.30322352\n",
236
      "Validation at Epoch 31 , MSE: 0.27950626900433845 , Pearson Correlation: 0.7768095876580197 with p-value: 0.0 , Concordance Index: 0.809581908586743\n",
237
      "Training at Epoch 32 iteration 0 with loss 0.30916905\n",
238
      "Training at Epoch 32 iteration 100 with loss 0.35691962\n",
239
      "Training at Epoch 32 iteration 200 with loss 0.30594778\n",
240
      "Training at Epoch 32 iteration 300 with loss 0.25673822\n",
241
      "Validation at Epoch 32 , MSE: 0.28392810687019493 , Pearson Correlation: 0.7884139492934661 with p-value: 0.0 , Concordance Index: 0.8080715906607208\n",
242
      "Training at Epoch 33 iteration 0 with loss 0.24682216\n",
243
      "Training at Epoch 33 iteration 100 with loss 0.32019353\n",
244
      "Training at Epoch 33 iteration 200 with loss 0.26248968\n",
245
      "Training at Epoch 33 iteration 300 with loss 0.23284283\n",
246
      "Validation at Epoch 33 , MSE: 0.28147583151983824 , Pearson Correlation: 0.7912324256450507 with p-value: 0.0 , Concordance Index: 0.806736921550664\n",
247
      "Training at Epoch 34 iteration 0 with loss 0.26583454\n",
248
      "Training at Epoch 34 iteration 100 with loss 0.27032578\n",
249
      "Training at Epoch 34 iteration 200 with loss 0.2510938\n",
250
      "Training at Epoch 34 iteration 300 with loss 0.18677746\n",
251
      "Validation at Epoch 34 , MSE: 0.254715168591093 , Pearson Correlation: 0.7944845435587876 with p-value: 0.0 , Concordance Index: 0.8092169411383151\n",
252
      "Training at Epoch 35 iteration 0 with loss 0.21087557\n",
253
      "Training at Epoch 35 iteration 100 with loss 0.22810724\n",
254
      "Training at Epoch 35 iteration 200 with loss 0.30516157\n",
255
      "Training at Epoch 35 iteration 300 with loss 0.21417427\n",
256
      "Validation at Epoch 35 , MSE: 0.2556761511172115 , Pearson Correlation: 0.7957341951817682 with p-value: 0.0 , Concordance Index: 0.8142363751364597\n",
257
      "Training at Epoch 36 iteration 0 with loss 0.17365673\n",
258
      "Training at Epoch 36 iteration 100 with loss 0.26811144\n",
259
      "Training at Epoch 36 iteration 200 with loss 0.23328358\n",
260
      "Training at Epoch 36 iteration 300 with loss 0.25731447\n",
261
      "Validation at Epoch 36 , MSE: 0.2559457642380893 , Pearson Correlation: 0.799422136545758 with p-value: 0.0 , Concordance Index: 0.8127337474283564\n",
262
      "Training at Epoch 37 iteration 0 with loss 0.17421441\n",
263
      "Training at Epoch 37 iteration 100 with loss 0.2805039\n",
264
      "Training at Epoch 37 iteration 200 with loss 0.16534641\n",
265
      "Training at Epoch 37 iteration 300 with loss 0.22006656\n",
266
      "Validation at Epoch 37 , MSE: 0.32952278645964184 , Pearson Correlation: 0.7934489740191146 with p-value: 0.0 , Concordance Index: 0.8010512391650809\n",
267
      "Training at Epoch 38 iteration 0 with loss 0.2512241\n",
268
      "Training at Epoch 38 iteration 100 with loss 0.18280888\n",
269
      "Training at Epoch 38 iteration 200 with loss 0.16358562\n",
270
      "Training at Epoch 38 iteration 300 with loss 0.2114552\n",
271
      "Validation at Epoch 38 , MSE: 0.2469675475963409 , Pearson Correlation: 0.8041560299798638 with p-value: 0.0 , Concordance Index: 0.8143626506340358\n",
272
      "Training at Epoch 39 iteration 0 with loss 0.17007281\n",
273
      "Training at Epoch 39 iteration 100 with loss 0.2576394\n",
274
      "Training at Epoch 39 iteration 200 with loss 0.25703746\n",
275
      "Training at Epoch 39 iteration 300 with loss 0.30679893\n",
276
      "Validation at Epoch 39 , MSE: 0.2814202224785629 , Pearson Correlation: 0.7986617531118729 with p-value: 0.0 , Concordance Index: 0.8123491078575165\n",
277
      "Training at Epoch 40 iteration 0 with loss 0.17776462\n",
278
      "Training at Epoch 40 iteration 100 with loss 0.23892398\n",
279
      "Training at Epoch 40 iteration 200 with loss 0.3002319\n",
280
      "Training at Epoch 40 iteration 300 with loss 0.21091235\n",
281
      "Validation at Epoch 40 , MSE: 0.24471777008217963 , Pearson Correlation: 0.8029439370476041 with p-value: 0.0 , Concordance Index: 0.8139744687187216\n",
282
      "Training at Epoch 41 iteration 0 with loss 0.18120411\n",
283
      "Training at Epoch 41 iteration 100 with loss 0.1467629\n",
284
      "Training at Epoch 41 iteration 200 with loss 0.18784216\n",
285
      "Training at Epoch 41 iteration 300 with loss 0.39111277\n",
286
      "Validation at Epoch 41 , MSE: 0.25024235414513113 , Pearson Correlation: 0.8065926609762567 with p-value: 0.0 , Concordance Index: 0.8126689861050176\n",
287
      "Training at Epoch 42 iteration 0 with loss 0.18558024\n",
288
      "Training at Epoch 42 iteration 100 with loss 0.22143383\n",
289
      "Training at Epoch 42 iteration 200 with loss 0.17659724\n",
290
      "Training at Epoch 42 iteration 300 with loss 0.21279082\n",
291
      "Validation at Epoch 42 , MSE: 0.24589991385365795 , Pearson Correlation: 0.8058687406335383 with p-value: 0.0 , Concordance Index: 0.8180839667645736\n",
292
      "Training at Epoch 43 iteration 0 with loss 0.18575062\n",
293
      "Training at Epoch 43 iteration 100 with loss 0.14547281\n",
294
      "Training at Epoch 43 iteration 200 with loss 0.21407829\n",
295
      "Training at Epoch 43 iteration 300 with loss 0.25702018\n",
296
      "Validation at Epoch 43 , MSE: 0.23597818335337695 , Pearson Correlation: 0.8131261358345395 with p-value: 0.0 , Concordance Index: 0.8214152843928875\n",
297
      "Training at Epoch 44 iteration 0 with loss 0.1430232\n",
298
      "Training at Epoch 44 iteration 100 with loss 0.23007198\n",
299
      "Training at Epoch 44 iteration 200 with loss 0.23313954\n",
300
      "Training at Epoch 44 iteration 300 with loss 0.21242057\n",
301
      "Validation at Epoch 44 , MSE: 0.2410594394283686 , Pearson Correlation: 0.8093517312778701 with p-value: 0.0 , Concordance Index: 0.8200404838503456\n",
302
      "Training at Epoch 45 iteration 0 with loss 0.19044279\n",
303
      "Training at Epoch 45 iteration 100 with loss 0.22278084\n"
304
     ]
305
    },
306
    {
307
     "name": "stdout",
308
     "output_type": "stream",
309
     "text": [
310
      "Training at Epoch 45 iteration 200 with loss 0.20137015\n",
311
      "Training at Epoch 45 iteration 300 with loss 0.20637983\n",
312
      "Validation at Epoch 45 , MSE: 0.2931538610613398 , Pearson Correlation: 0.8161000618379057 with p-value: 0.0 , Concordance Index: 0.8220322200148337\n",
313
      "Training at Epoch 46 iteration 0 with loss 0.19920787\n",
314
      "Training at Epoch 46 iteration 100 with loss 0.17406026\n",
315
      "Training at Epoch 46 iteration 200 with loss 0.19329071\n",
316
      "Training at Epoch 46 iteration 300 with loss 0.19324802\n",
317
      "Validation at Epoch 46 , MSE: 0.27236348972361674 , Pearson Correlation: 0.8138519945205422 with p-value: 0.0 , Concordance Index: 0.8187842761565732\n",
318
      "Training at Epoch 47 iteration 0 with loss 0.23279054\n",
319
      "Training at Epoch 47 iteration 100 with loss 0.17864995\n",
320
      "Training at Epoch 47 iteration 200 with loss 0.21056506\n",
321
      "Training at Epoch 47 iteration 300 with loss 0.1824354\n",
322
      "Validation at Epoch 47 , MSE: 0.27391643581608033 , Pearson Correlation: 0.8068993596091968 with p-value: 0.0 , Concordance Index: 0.8129289169844912\n",
323
      "Training at Epoch 48 iteration 0 with loss 0.26312447\n",
324
      "Training at Epoch 48 iteration 100 with loss 0.22185321\n",
325
      "Training at Epoch 48 iteration 200 with loss 0.23431794\n",
326
      "Training at Epoch 48 iteration 300 with loss 0.2589783\n",
327
      "Validation at Epoch 48 , MSE: 0.23025026174260313 , Pearson Correlation: 0.8170136575139418 with p-value: 0.0 , Concordance Index: 0.8225616567006185\n",
328
      "Training at Epoch 49 iteration 0 with loss 0.1223916\n",
329
      "Training at Epoch 49 iteration 100 with loss 0.18264055\n",
330
      "Training at Epoch 49 iteration 200 with loss 0.17989686\n",
331
      "Training at Epoch 49 iteration 300 with loss 0.16855526\n",
332
      "Validation at Epoch 49 , MSE: 0.2205644529408611 , Pearson Correlation: 0.8239693877895442 with p-value: 0.0 , Concordance Index: 0.8312742800086458\n",
333
      "Training at Epoch 50 iteration 0 with loss 0.30569243\n",
334
      "Training at Epoch 50 iteration 100 with loss 0.16925503\n",
335
      "Training at Epoch 50 iteration 200 with loss 0.20570143\n",
336
      "Training at Epoch 50 iteration 300 with loss 0.19284452\n",
337
      "Validation at Epoch 50 , MSE: 0.23454742649197957 , Pearson Correlation: 0.8259606548256397 with p-value: 0.0 , Concordance Index: 0.8328887640557276\n",
338
      "Training at Epoch 51 iteration 0 with loss 0.22376396\n",
339
      "Training at Epoch 51 iteration 100 with loss 0.23242217\n",
340
      "Training at Epoch 51 iteration 200 with loss 0.17831367\n",
341
      "Training at Epoch 51 iteration 300 with loss 0.17776337\n",
342
      "Validation at Epoch 51 , MSE: 0.22540333524221137 , Pearson Correlation: 0.8237001583277702 with p-value: 0.0 , Concordance Index: 0.8321182360105475\n",
343
      "Training at Epoch 52 iteration 0 with loss 0.18440701\n",
344
      "Training at Epoch 52 iteration 100 with loss 0.15421636\n",
345
      "Training at Epoch 52 iteration 200 with loss 0.1854191\n",
346
      "Training at Epoch 52 iteration 300 with loss 0.2379067\n",
347
      "Validation at Epoch 52 , MSE: 0.20853173553592427 , Pearson Correlation: 0.8344220487922347 with p-value: 0.0 , Concordance Index: 0.8367795147611765\n",
348
      "Training at Epoch 53 iteration 0 with loss 0.20401023\n",
349
      "Training at Epoch 53 iteration 100 with loss 0.11404045\n",
350
      "Training at Epoch 53 iteration 200 with loss 0.15747331\n",
351
      "Training at Epoch 53 iteration 300 with loss 0.22042069\n",
352
      "Validation at Epoch 53 , MSE: 0.21615491029018147 , Pearson Correlation: 0.8306163494334134 with p-value: 0.0 , Concordance Index: 0.83251870259484\n",
353
      "Training at Epoch 54 iteration 0 with loss 0.14299648\n",
354
      "Training at Epoch 54 iteration 100 with loss 0.23001932\n",
355
      "Training at Epoch 54 iteration 200 with loss 0.1770986\n",
356
      "Training at Epoch 54 iteration 300 with loss 0.2942966\n",
357
      "Validation at Epoch 54 , MSE: 0.2501904187767297 , Pearson Correlation: 0.8292196336328985 with p-value: 0.0 , Concordance Index: 0.8316417604022989\n",
358
      "Training at Epoch 55 iteration 0 with loss 0.19672471\n",
359
      "Training at Epoch 55 iteration 100 with loss 0.142104\n",
360
      "Training at Epoch 55 iteration 200 with loss 0.1882125\n",
361
      "Training at Epoch 55 iteration 300 with loss 0.18341456\n",
362
      "Validation at Epoch 55 , MSE: 0.22685709705865717 , Pearson Correlation: 0.8298601891084482 with p-value: 0.0 , Concordance Index: 0.8332577279953572\n",
363
      "Training at Epoch 56 iteration 0 with loss 0.16769859\n",
364
      "Training at Epoch 56 iteration 100 with loss 0.15354457\n",
365
      "Training at Epoch 56 iteration 200 with loss 0.18837331\n",
366
      "Training at Epoch 56 iteration 300 with loss 0.18643981\n",
367
      "Validation at Epoch 56 , MSE: 0.23465381325878834 , Pearson Correlation: 0.8226322913072514 with p-value: 0.0 , Concordance Index: 0.8305899944270141\n",
368
      "Training at Epoch 57 iteration 0 with loss 0.15828678\n",
369
      "Training at Epoch 57 iteration 100 with loss 0.23880166\n",
370
      "Training at Epoch 57 iteration 200 with loss 0.19497156\n",
371
      "Training at Epoch 57 iteration 300 with loss 0.20533583\n",
372
      "Validation at Epoch 57 , MSE: 0.23490776397150226 , Pearson Correlation: 0.8261580940950286 with p-value: 0.0 , Concordance Index: 0.828603049510501\n",
373
      "Training at Epoch 58 iteration 0 with loss 0.14188379\n",
374
      "Training at Epoch 58 iteration 100 with loss 0.16264115\n",
375
      "Training at Epoch 58 iteration 200 with loss 0.27134517\n",
376
      "Training at Epoch 58 iteration 300 with loss 0.16497058\n",
377
      "Validation at Epoch 58 , MSE: 0.20634967969974802 , Pearson Correlation: 0.8372939570839266 with p-value: 0.0 , Concordance Index: 0.8338584354409082\n",
378
      "Training at Epoch 59 iteration 0 with loss 0.13785982\n",
379
      "Training at Epoch 59 iteration 100 with loss 0.16934662\n",
380
      "Training at Epoch 59 iteration 200 with loss 0.14488962\n",
381
      "Training at Epoch 59 iteration 300 with loss 0.24447058\n",
382
      "Validation at Epoch 59 , MSE: 0.24024889528142337 , Pearson Correlation: 0.8281995247440275 with p-value: 0.0 , Concordance Index: 0.8348693811945046\n",
383
      "Training at Epoch 60 iteration 0 with loss 0.20227404\n",
384
      "Training at Epoch 60 iteration 100 with loss 0.15199661\n",
385
      "Training at Epoch 60 iteration 200 with loss 0.21587655\n",
386
      "Training at Epoch 60 iteration 300 with loss 0.19945133\n",
387
      "Validation at Epoch 60 , MSE: 0.21013252771272414 , Pearson Correlation: 0.8365628781877509 with p-value: 0.0 , Concordance Index: 0.8368555691998053\n",
388
      "Training at Epoch 61 iteration 0 with loss 0.19128245\n",
389
      "Training at Epoch 61 iteration 100 with loss 0.11310287\n",
390
      "Training at Epoch 61 iteration 200 with loss 0.19415411\n",
391
      "Training at Epoch 61 iteration 300 with loss 0.15344377\n",
392
      "Validation at Epoch 61 , MSE: 0.21382856500150502 , Pearson Correlation: 0.8418828074732749 with p-value: 0.0 , Concordance Index: 0.8403055856444582\n",
393
      "Training at Epoch 62 iteration 0 with loss 0.16562407\n",
394
      "Training at Epoch 62 iteration 100 with loss 0.15640946\n",
395
      "Training at Epoch 62 iteration 200 with loss 0.11651558\n",
396
      "Training at Epoch 62 iteration 300 with loss 0.1702704\n",
397
      "Validation at Epoch 62 , MSE: 0.2162447095154239 , Pearson Correlation: 0.8374758847636822 with p-value: 0.0 , Concordance Index: 0.8369025658169874\n",
398
      "Training at Epoch 63 iteration 0 with loss 0.15939891\n",
399
      "Training at Epoch 63 iteration 100 with loss 0.15985921\n",
400
      "Training at Epoch 63 iteration 200 with loss 0.16121107\n",
401
      "Training at Epoch 63 iteration 300 with loss 0.16741844\n",
402
      "Validation at Epoch 63 , MSE: 0.2756391213687168 , Pearson Correlation: 0.8423224386672058 with p-value: 0.0 , Concordance Index: 0.8402666501316889\n",
403
      "Training at Epoch 64 iteration 0 with loss 0.17065248\n",
404
      "Training at Epoch 64 iteration 100 with loss 0.16979195\n",
405
      "Training at Epoch 64 iteration 200 with loss 0.1779838\n",
406
      "Training at Epoch 64 iteration 300 with loss 0.1243289\n",
407
      "Validation at Epoch 64 , MSE: 0.21367358112252594 , Pearson Correlation: 0.8412688018890631 with p-value: 0.0 , Concordance Index: 0.8431657033356744\n",
408
      "Training at Epoch 65 iteration 0 with loss 0.10653531\n",
409
      "Training at Epoch 65 iteration 100 with loss 0.123516455\n",
410
      "Training at Epoch 65 iteration 200 with loss 0.14685814\n",
411
      "Training at Epoch 65 iteration 300 with loss 0.16070805\n",
412
      "Validation at Epoch 65 , MSE: 0.20601349676311673 , Pearson Correlation: 0.8427587170010579 with p-value: 0.0 , Concordance Index: 0.8434652358097844\n",
413
      "Training at Epoch 66 iteration 0 with loss 0.13670954\n",
414
      "Training at Epoch 66 iteration 100 with loss 0.1612285\n",
415
      "Training at Epoch 66 iteration 200 with loss 0.13986418\n",
416
      "Training at Epoch 66 iteration 300 with loss 0.20413469\n",
417
      "Validation at Epoch 66 , MSE: 0.21948135905803182 , Pearson Correlation: 0.8405495493810491 with p-value: 0.0 , Concordance Index: 0.8415147058917031\n",
418
      "Training at Epoch 67 iteration 0 with loss 0.12630597\n",
419
      "Training at Epoch 67 iteration 100 with loss 0.15750438\n",
420
      "Training at Epoch 67 iteration 200 with loss 0.10876533\n"
421
     ]
422
    },
423
    {
424
     "name": "stdout",
425
     "output_type": "stream",
426
     "text": [
427
      "Training at Epoch 67 iteration 300 with loss 0.14193657\n",
428
      "Validation at Epoch 67 , MSE: 0.20856750871228383 , Pearson Correlation: 0.8429001782451714 with p-value: 0.0 , Concordance Index: 0.8417360115920044\n",
429
      "Training at Epoch 68 iteration 0 with loss 0.107186705\n",
430
      "Training at Epoch 68 iteration 100 with loss 0.12097235\n",
431
      "Training at Epoch 68 iteration 200 with loss 0.11111714\n",
432
      "Training at Epoch 68 iteration 300 with loss 0.13749462\n",
433
      "Validation at Epoch 68 , MSE: 0.2023187404043579 , Pearson Correlation: 0.8437123674403135 with p-value: 0.0 , Concordance Index: 0.8441180641614258\n",
434
      "Training at Epoch 69 iteration 0 with loss 0.13140449\n",
435
      "Training at Epoch 69 iteration 100 with loss 0.14743677\n",
436
      "Training at Epoch 69 iteration 200 with loss 0.12306302\n",
437
      "Training at Epoch 69 iteration 300 with loss 0.16115318\n",
438
      "Validation at Epoch 69 , MSE: 0.20788437705473423 , Pearson Correlation: 0.8422315875620378 with p-value: 0.0 , Concordance Index: 0.8416793794950875\n",
439
      "Training at Epoch 70 iteration 0 with loss 0.12666376\n",
440
      "Training at Epoch 70 iteration 100 with loss 0.12379304\n",
441
      "Training at Epoch 70 iteration 200 with loss 0.09857385\n",
442
      "Training at Epoch 70 iteration 300 with loss 0.14105771\n",
443
      "Validation at Epoch 70 , MSE: 0.2032973529984822 , Pearson Correlation: 0.8453484882542686 with p-value: 0.0 , Concordance Index: 0.8457166227965974\n",
444
      "Training at Epoch 71 iteration 0 with loss 0.10225665\n",
445
      "Training at Epoch 71 iteration 100 with loss 0.19021757\n",
446
      "Training at Epoch 71 iteration 200 with loss 0.13823912\n",
447
      "Training at Epoch 71 iteration 300 with loss 0.16305545\n",
448
      "Validation at Epoch 71 , MSE: 0.21069843178497505 , Pearson Correlation: 0.8436724261637155 with p-value: 0.0 , Concordance Index: 0.8451032900720226\n",
449
      "Training at Epoch 72 iteration 0 with loss 0.101149544\n",
450
      "Training at Epoch 72 iteration 100 with loss 0.14553498\n",
451
      "Training at Epoch 72 iteration 200 with loss 0.14325085\n",
452
      "Training at Epoch 72 iteration 300 with loss 0.13576855\n",
453
      "Validation at Epoch 72 , MSE: 0.20832088312351932 , Pearson Correlation: 0.8469731326052242 with p-value: 0.0 , Concordance Index: 0.8455185164249581\n",
454
      "Training at Epoch 73 iteration 0 with loss 0.13514586\n",
455
      "Training at Epoch 73 iteration 100 with loss 0.118497826\n",
456
      "Training at Epoch 73 iteration 200 with loss 0.1443201\n",
457
      "Training at Epoch 73 iteration 300 with loss 0.1113624\n",
458
      "Validation at Epoch 73 , MSE: 0.20253388316067503 , Pearson Correlation: 0.8441805028660897 with p-value: 0.0 , Concordance Index: 0.8470269042273885\n",
459
      "Training at Epoch 74 iteration 0 with loss 0.09950824\n",
460
      "Training at Epoch 74 iteration 100 with loss 0.10273786\n",
461
      "Training at Epoch 74 iteration 200 with loss 0.104656264\n",
462
      "Training at Epoch 74 iteration 300 with loss 0.10339452\n",
463
      "Validation at Epoch 74 , MSE: 0.21705914965444434 , Pearson Correlation: 0.8452427278756023 with p-value: 0.0 , Concordance Index: 0.8499502769886306\n",
464
      "Training at Epoch 75 iteration 0 with loss 0.14215946\n",
465
      "Training at Epoch 75 iteration 100 with loss 0.0961847\n",
466
      "Training at Epoch 75 iteration 200 with loss 0.12331951\n",
467
      "Training at Epoch 75 iteration 300 with loss 0.15130317\n",
468
      "Validation at Epoch 75 , MSE: 0.19871417532173935 , Pearson Correlation: 0.8446179648633848 with p-value: 0.0 , Concordance Index: 0.8417817138910132\n",
469
      "Training at Epoch 76 iteration 0 with loss 0.15954836\n",
470
      "Training at Epoch 76 iteration 100 with loss 0.13232154\n",
471
      "Training at Epoch 76 iteration 200 with loss 0.12701261\n",
472
      "Training at Epoch 76 iteration 300 with loss 0.13210264\n",
473
      "Validation at Epoch 76 , MSE: 0.2240879919971544 , Pearson Correlation: 0.8445331028002763 with p-value: 0.0 , Concordance Index: 0.8421498149518606\n",
474
      "Training at Epoch 77 iteration 0 with loss 0.15750398\n",
475
      "Training at Epoch 77 iteration 100 with loss 0.11341824\n",
476
      "Training at Epoch 77 iteration 200 with loss 0.114693\n",
477
      "Training at Epoch 77 iteration 300 with loss 0.13361749\n",
478
      "Validation at Epoch 77 , MSE: 0.2152338539087079 , Pearson Correlation: 0.8478969625574723 with p-value: 0.0 , Concordance Index: 0.8455267289116134\n",
479
      "Training at Epoch 78 iteration 0 with loss 0.10080556\n",
480
      "Training at Epoch 78 iteration 100 with loss 0.12171855\n",
481
      "Training at Epoch 78 iteration 200 with loss 0.11807407\n",
482
      "Training at Epoch 78 iteration 300 with loss 0.11452898\n",
483
      "Validation at Epoch 78 , MSE: 0.18960463618759138 , Pearson Correlation: 0.8518679693765236 with p-value: 0.0 , Concordance Index: 0.8496819519638107\n",
484
      "Training at Epoch 79 iteration 0 with loss 0.11710593\n",
485
      "Training at Epoch 79 iteration 100 with loss 0.1476192\n",
486
      "Training at Epoch 79 iteration 200 with loss 0.13384058\n",
487
      "Training at Epoch 79 iteration 300 with loss 0.12313308\n",
488
      "Validation at Epoch 79 , MSE: 0.19822040337060512 , Pearson Correlation: 0.8503488534686926 with p-value: 0.0 , Concordance Index: 0.8491997843832444\n",
489
      "Training at Epoch 80 iteration 0 with loss 0.087387584\n",
490
      "Training at Epoch 80 iteration 100 with loss 0.10811463\n",
491
      "Training at Epoch 80 iteration 200 with loss 0.18600343\n",
492
      "Training at Epoch 80 iteration 300 with loss 0.10030829\n",
493
      "Validation at Epoch 80 , MSE: 0.1932626936966475 , Pearson Correlation: 0.8513422745569593 with p-value: 0.0 , Concordance Index: 0.8515337428142256\n",
494
      "Training at Epoch 81 iteration 0 with loss 0.098010935\n",
495
      "Training at Epoch 81 iteration 100 with loss 0.14005096\n",
496
      "Training at Epoch 81 iteration 200 with loss 0.08274831\n",
497
      "Training at Epoch 81 iteration 300 with loss 0.11399409\n",
498
      "Validation at Epoch 81 , MSE: 0.19758804520753495 , Pearson Correlation: 0.8527537063190721 with p-value: 0.0 , Concordance Index: 0.8509730986791506\n",
499
      "Training at Epoch 82 iteration 0 with loss 0.10215923\n",
500
      "Training at Epoch 82 iteration 100 with loss 0.088796765\n",
501
      "Training at Epoch 82 iteration 200 with loss 0.1028263\n",
502
      "Training at Epoch 82 iteration 300 with loss 0.11578086\n",
503
      "Validation at Epoch 82 , MSE: 0.21772542272704945 , Pearson Correlation: 0.8529054553962128 with p-value: 0.0 , Concordance Index: 0.8475180563440468\n",
504
      "Training at Epoch 83 iteration 0 with loss 0.1321531\n",
505
      "Training at Epoch 83 iteration 100 with loss 0.09774816\n",
506
      "Training at Epoch 83 iteration 200 with loss 0.10050143\n",
507
      "Training at Epoch 83 iteration 300 with loss 0.09495833\n",
508
      "Validation at Epoch 83 , MSE: 0.2136586002959394 , Pearson Correlation: 0.8508724011744425 with p-value: 0.0 , Concordance Index: 0.8516967663511669\n",
509
      "Training at Epoch 84 iteration 0 with loss 0.12597932\n",
510
      "Training at Epoch 84 iteration 100 with loss 0.124858126\n",
511
      "Training at Epoch 84 iteration 200 with loss 0.12111996\n",
512
      "Training at Epoch 84 iteration 300 with loss 0.08302106\n",
513
      "Validation at Epoch 84 , MSE: 0.1990206659886085 , Pearson Correlation: 0.8477119170050185 with p-value: 0.0 , Concordance Index: 0.8468090878877765\n",
514
      "Training at Epoch 85 iteration 0 with loss 0.07115703\n",
515
      "Training at Epoch 85 iteration 100 with loss 0.1317057\n",
516
      "Training at Epoch 85 iteration 200 with loss 0.09400135\n",
517
      "Training at Epoch 85 iteration 300 with loss 0.08817591\n",
518
      "Validation at Epoch 85 , MSE: 0.1880923648098855 , Pearson Correlation: 0.8524468842300589 with p-value: 0.0 , Concordance Index: 0.8530510470307388\n",
519
      "Training at Epoch 86 iteration 0 with loss 0.090310104\n",
520
      "Training at Epoch 86 iteration 100 with loss 0.123432584\n",
521
      "Training at Epoch 86 iteration 200 with loss 0.0826193\n",
522
      "Training at Epoch 86 iteration 300 with loss 0.15323539\n",
523
      "Validation at Epoch 86 , MSE: 0.19529560739508625 , Pearson Correlation: 0.8476772162914558 with p-value: 0.0 , Concordance Index: 0.8502067033691905\n",
524
      "Training at Epoch 87 iteration 0 with loss 0.080911815\n",
525
      "Training at Epoch 87 iteration 100 with loss 0.13101102\n",
526
      "Training at Epoch 87 iteration 200 with loss 0.08883078\n",
527
      "Training at Epoch 87 iteration 300 with loss 0.12456667\n",
528
      "Validation at Epoch 87 , MSE: 0.1981152364558615 , Pearson Correlation: 0.8516459469462152 with p-value: 0.0 , Concordance Index: 0.851878531009729\n",
529
      "Training at Epoch 88 iteration 0 with loss 0.07875684\n",
530
      "Training at Epoch 88 iteration 100 with loss 0.113229\n",
531
      "Training at Epoch 88 iteration 200 with loss 0.09287766\n",
532
      "Training at Epoch 88 iteration 300 with loss 0.09923973\n",
533
      "Validation at Epoch 88 , MSE: 0.2791987337318223 , Pearson Correlation: 0.8473231716104132 with p-value: 0.0 , Concordance Index: 0.8484163358636674\n",
534
      "Training at Epoch 89 iteration 0 with loss 0.1758956\n",
535
      "Training at Epoch 89 iteration 100 with loss 0.15466869\n",
536
      "Training at Epoch 89 iteration 200 with loss 0.06840886\n",
537
      "Training at Epoch 89 iteration 300 with loss 0.09052813\n"
538
     ]
539
    },
540
    {
541
     "name": "stdout",
542
     "output_type": "stream",
543
     "text": [
544
      "Validation at Epoch 89 , MSE: 0.18879584363341734 , Pearson Correlation: 0.8552581698425819 with p-value: 0.0 , Concordance Index: 0.8493616044313579\n",
545
      "Training at Epoch 90 iteration 0 with loss 0.10148429\n",
546
      "Training at Epoch 90 iteration 100 with loss 0.07839349\n",
547
      "Training at Epoch 90 iteration 200 with loss 0.07834745\n",
548
      "Training at Epoch 90 iteration 300 with loss 0.0998727\n",
549
      "Validation at Epoch 90 , MSE: 0.19582125600456016 , Pearson Correlation: 0.8511291809184155 with p-value: 0.0 , Concordance Index: 0.8505655474059094\n",
550
      "Training at Epoch 91 iteration 0 with loss 0.081786595\n",
551
      "Training at Epoch 91 iteration 100 with loss 0.09030631\n",
552
      "Training at Epoch 91 iteration 200 with loss 0.10174892\n",
553
      "Training at Epoch 91 iteration 300 with loss 0.11394987\n",
554
      "Validation at Epoch 91 , MSE: 0.20908388322739013 , Pearson Correlation: 0.8478519940615383 with p-value: 0.0 , Concordance Index: 0.8477589281991904\n",
555
      "Training at Epoch 92 iteration 0 with loss 0.07235121\n",
556
      "Training at Epoch 92 iteration 100 with loss 0.08341678\n",
557
      "Training at Epoch 92 iteration 200 with loss 0.11056797\n",
558
      "Training at Epoch 92 iteration 300 with loss 0.110210285\n",
559
      "Validation at Epoch 92 , MSE: 0.19718609031082673 , Pearson Correlation: 0.8528774001655518 with p-value: 0.0 , Concordance Index: 0.851195925770989\n",
560
      "Training at Epoch 93 iteration 0 with loss 0.08601681\n",
561
      "Training at Epoch 93 iteration 100 with loss 0.08848184\n",
562
      "Training at Epoch 93 iteration 200 with loss 0.14170763\n",
563
      "Training at Epoch 93 iteration 300 with loss 0.14256622\n",
564
      "Validation at Epoch 93 , MSE: 0.18961202081054754 , Pearson Correlation: 0.8550672175716182 with p-value: 0.0 , Concordance Index: 0.8546280564036914\n",
565
      "Training at Epoch 94 iteration 0 with loss 0.08315914\n",
566
      "Training at Epoch 94 iteration 100 with loss 0.07144792\n",
567
      "Training at Epoch 94 iteration 200 with loss 0.109951824\n",
568
      "Training at Epoch 94 iteration 300 with loss 0.07713781\n",
569
      "Validation at Epoch 94 , MSE: 0.2025433794457135 , Pearson Correlation: 0.8565218271749488 with p-value: 0.0 , Concordance Index: 0.8569312236865494\n",
570
      "Training at Epoch 95 iteration 0 with loss 0.085088074\n",
571
      "Training at Epoch 95 iteration 100 with loss 0.09484902\n",
572
      "Training at Epoch 95 iteration 200 with loss 0.07999084\n",
573
      "Training at Epoch 95 iteration 300 with loss 0.11516863\n",
574
      "Validation at Epoch 95 , MSE: 0.195291974748004 , Pearson Correlation: 0.8487014918161339 with p-value: 0.0 , Concordance Index: 0.8531190857796275\n",
575
      "Training at Epoch 96 iteration 0 with loss 0.081413455\n",
576
      "Training at Epoch 96 iteration 100 with loss 0.06010697\n",
577
      "Training at Epoch 96 iteration 200 with loss 0.1060314\n",
578
      "Training at Epoch 96 iteration 300 with loss 0.08848443\n",
579
      "Validation at Epoch 96 , MSE: 0.1903152710427138 , Pearson Correlation: 0.8529740974876179 with p-value: 0.0 , Concordance Index: 0.8525666919767684\n",
580
      "Training at Epoch 97 iteration 0 with loss 0.08045774\n",
581
      "Training at Epoch 97 iteration 100 with loss 0.07858796\n",
582
      "Training at Epoch 97 iteration 200 with loss 0.09438899\n",
583
      "Training at Epoch 97 iteration 300 with loss 0.1323199\n",
584
      "Validation at Epoch 97 , MSE: 0.1888422605033678 , Pearson Correlation: 0.8521231818633057 with p-value: 0.0 , Concordance Index: 0.8529498177251833\n",
585
      "Training at Epoch 98 iteration 0 with loss 0.07730898\n",
586
      "Training at Epoch 98 iteration 100 with loss 0.09733562\n",
587
      "Training at Epoch 98 iteration 200 with loss 0.06377132\n",
588
      "Training at Epoch 98 iteration 300 with loss 0.113953054\n",
589
      "Validation at Epoch 98 , MSE: 0.21204893970889843 , Pearson Correlation: 0.8565689045993777 with p-value: 0.0 , Concordance Index: 0.8556206621986097\n",
590
      "Training at Epoch 99 iteration 0 with loss 0.09585332\n",
591
      "Training at Epoch 99 iteration 100 with loss 0.08413041\n",
592
      "Training at Epoch 99 iteration 200 with loss 0.0944461\n",
593
      "Training at Epoch 99 iteration 300 with loss 0.065783754\n",
594
      "Validation at Epoch 99 , MSE: 0.20532750896585933 , Pearson Correlation: 0.8597496055395153 with p-value: 0.0 , Concordance Index: 0.8568210174140141\n",
595
      "Training at Epoch 100 iteration 0 with loss 0.11175461\n",
596
      "Training at Epoch 100 iteration 100 with loss 0.08406766\n",
597
      "Training at Epoch 100 iteration 200 with loss 0.098233305\n",
598
      "Training at Epoch 100 iteration 300 with loss 0.1574293\n",
599
      "Validation at Epoch 100 , MSE: 0.1918805189450315 , Pearson Correlation: 0.8567659539114503 with p-value: 0.0 , Concordance Index: 0.8551461394212893\n",
600
      "--- Go for Testing ---\n",
601
      "Testing MSE: 0.19945655726244765 , Pearson Correlation: 0.8506343800999702 with p-value: 0.0 , Concordance Index: 0.8536301623899886\n",
602
      "--- Training Finished ---\n"
603
     ]
604
    },
605
    {
606
     "data": {
607
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYoAAAELCAYAAADHksFtAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAdiklEQVR4nO3de5ydVX3v8c+XcI3ISUIGSBOSAaQoIipOKYgHEXq4KQQRWnQK4dIzFfAoYE9BUzW2gFqsCKeIRgWDjCCCFopc5SKVAjrhEkDEhFuIhGQAQSXcQn7nj7U22TPZ88yeZN9m9vf9eu3Xfp713H57ZzK/edZaz1qKCMzMzIayXrMDMDOz1uZEYWZmhZwozMyskBOFmZkVcqIwM7NC6zc7gFqbPHlydHZ2NjsMM7NRZf78+c9EREelbWMuUXR2dtLX19fsMMzMRhVJTwy1zVVPZmZWyInCzMwKOVGYmVkhJwozMyvU0EQh6QJJyyU9UGHbP0gKSZPzuiSdK2mRpAWSdmlkrGZmljT6juJ7wP6DCyVtDfwvYHFZ8QHA9vnVA5xfr6B6e6GzE9ZbL7339tbrSmZmo09DE0VE3AY8V2HT2cA/AuVD2c4ELorkTmCCpCm1jqm3F3p64IknICK99/Q4WZiZlTS9jULSwcDvIuK+QZumAk+WrS/JZZXO0SOpT1Jff3//iK4/ezasWDGwbMWKVG5mZk1OFJLGA7OBz1faXKGs4uQZETE3Iroioqujo+KDhUNavHhk5WZm7abZdxTbAdsA90l6HJgG3C1pK9IdxNZl+04Dnqp1ANOnj6zczKzdNDVRRMT9EbFFRHRGRCcpOewSEU8DVwFH5d5PuwEvRMTSWsdwxhkwfvzAsvHjU7mZmTW+e+wlwB3ADpKWSDquYPdrgEeBRcC3gRPqEVN3N8yalXo8QXqfNSuVm5lZgwcFjIiPDrO9s2w5gBPrHVNvL8ybB6tWpfVVq9L6Hns4WZiZQfPbKJrOvZ7MzIq1faJwryczs2Jtnyjc68nMrFjbJwr3ejIzK9b2iaK7G+bOhcmT0/qUKWndDdlmZsmYmwp1bXR3w0YbweGHww03wE47NTsiM7PW0fZ3FGZmVsyJwszMCjlRmJlZIScK0tPZxx+flvfd13NRmJmVa/vG7NLERaWns5cuTevgnk9mZuA7Cg/hYWY2jLZPFB7Cw8ysWNsnCg/hYWZWrO0ThYfwMDMr1vaJwkN4mJkVa/teT5CSwsYbw2GHwfXXwzve0eyIzMxaR9vfUZiZWTEnCjMzK+REYWZmhRqaKCRdIGm5pAfKys6S9BtJCyT9RNKEsm2fkbRI0sOS9mtEjBGNuIqZ2ejR6DuK7wH7Dyq7EdgpInYGfgt8BkDSjsARwNvzMd+QNK5egUn1OrOZ2ejW0EQREbcBzw0quyEiVubVO4FpeXkmcGlEvBIRjwGLgF0bFqyZmQGt10ZxLHBtXp4KPFm2bUkuW4OkHkl9kvr6+/vrHKKZWXtpmUQhaTawEigN8l2pMqhiC0JEzI2Irojo6ujoqFeIZmZtqSUeuJM0C/gQsE/EG83JS4Cty3abBjzV6NjMzNpd0+8oJO0PnAocHBHlA35fBRwhaSNJ2wDbA79sRoxmZu2soXcUki4B9gImS1oCfIHUy2kj4Ealrkd3RsTHI+JBSZcBvyZVSZ0YEa83Ml4zMwPFGHtwoKurK/r6+kZ0TG8vnHQSPPNMGhTwrLM8KKCZtRdJ8yOiq9K2lmijaCZPhWpmVqzpbRTN5qlQzcyKtX2i8FSoZmbF2j5ReCpUM7NibZ8oPBWqmVmxtk8UpalQSw90eypUM7OB2r7XE6SkMH48HHooXHstvPOdzY7IzKx1tP0dhZmZFXOiGGSMPX9oZrbOnCgyT1xkZlaZE4WZmRVyojAzs0JOFGZmVsiJwszMCjlRmJlZIScKMzMr5ERhZmaFnCgG8QN3ZmYD
OVFkfuDOzKyyhiYKSRdIWi7pgbKySZJulLQwv0/M5ZJ0rqRFkhZI2qWRsZqZWdLoO4rvAfsPKjsNuCkitgduyusABwDb51cPcH6DYjQzszINTRQRcRvw3KDimcC8vDwPOKSs/KJI7gQmSJrSmEjNzKykFdootoyIpQD5fYtcPhV4smy/JblsDZJ6JPVJ6uvv769rsGZm7aYVEsVQKjUvV+yTFBFzI6IrIro6SlPVmZlZTbRColhWqlLK78tz+RJg67L9pgFPNTg2M7O21wqJ4ipgVl6eBVxZVn5U7v20G/BCqYqqnvwchZnZQA2dM1vSJcBewGRJS4AvAF8GLpN0HLAYODzvfg1wILAIWAEcU9/Y6nl2M7PRq6GJIiI+OsSmfSrsG8CJ9Y3IzMyGU3XVk6R3S/qxpGckrSw9ACfpTEmDn40wM7MxoqpEIel9wB3AW4EfDDpuFfDx2odmZmatoNo7ii8D1wNvB04ZtO1uwMNrmJmNUdW2UewCHBoRIWlwv6BnAD+8YGY2RlV7R/EyMH6IbVOAF2oTjpmZtZpqE8UvgJMkjSsrK91ZHAfcXNOozMysZVRb9fQ54HbgPuByUpKYJelrwHuAv6hPeI3nB+7MzAaq6o4iIu4D9gSWAbNJ4zB9Im9+f0Q8XJ/wGscP3JmZVVb1A3cRcTewj6SNgUnA8xGxom6RmZlZSxjxk9kR8TIenM/MrG1UlSgkfX6YXSIi/qUG8ZiZWYup9o5iTsG2UvOvE4WZ2RhUbWP2eoNfwObA0cADwFvqGKOZmTXRWo8eGxG/By6StDlwHmlIcDMzG2NqMXFRqeusmZmNQbVIFB8C+mtwnpbgB+7MzAaqttfTBRWKNwR2At5BmqluVPMDd2ZmlVXbRrE3q3s3lbwMPAF8HZhXy6DMzKx1VJUoIqKzznGYmVmLqkUbRU1IOlnSg5IekHSJpI0lbSPpLkkLJf1Q0obNjtPMrN0MeUchaUQ9mSLitrUNQtJU4JPAjhHxkqTLgCNIXW7PjohLJX2TNKT5+Wt7HTMzG7miqqdbWbNdohLl/cYNt2MVsWwi6TXSJElLSW0jH8vb55GeEK95oujthZNPTssHHQRf/Sp0d9f6KmZmo1NRovhAo4KIiN9J+iqwGHgJuAGYTxqhdmXebQkwtdbX7u2Fnh5YkcfBffrptA5OFmZmUJAoIuLnjQpC0kRgJrAN8DzwI+CASmENcXwP0AMwffr0EV179uzVSaJkxYpU7kRhZtY6jdl/BTwWEf0R8RrwY+C9wARJpWQ2jSGGN4+IuRHRFRFdHR0dI7rw4sUjKzczazdVj/UkaSdSY/IOwMaDNkdE7LMOcSwGdpM0nlT1tA/QB9wCHAZcCswCrlyHa1Q0fTo88UTlcjMzq/KOQtJfkn5xHwDsB0wEtgX2Io0cu07PNUfEXaS5uO8G7s9xzQVOBU6RtIg0Wu131+U6lZxxBowfP7Bs/PhUbmZmoKhicCNJN5Hmyz4SeA3oioi7Je0NfB84MiJurmukVerq6oq+vr4RHVPq9dTfD1tt5V5PZtZ+JM2PiK5K26qtetqZVPVTyirjACLiZkmnA18C/nJdA22W7m6YOBE++EG48krYdddmR2Rm1jqqbczeAHgxIlYBzwFTyrY9TBoc0MzMxqBqE8UjrH6GYQFwrKT1JK0HHAM8XY/gzMys+aqtevpPUsP1D4AzgZ8CfwBeBzYlDb8xJng+CjOzgaodPXZO2fLPJO0GfIQ01MZ1EXFDfcJrHM9HYWZW2VrNmR0R9wD31DgWMzNrQdU+R/FjSYdI2qDeATWbq57MzAaqtjH7raRhNZZKOi9XPY0prnoyM6usqkQRETsCfwFcDBwK3J4nE/qcpG3rGaCZmTVX1YMCRsT8iDiJNDjfQcCvSENsLJT0X3WKz8zMmmzEo8dGxOsRcU1EfIx0d/EUaaTXMcFtFGZmA404UUjaTtIXJP0WuJY0IOC/1TyyBurthVmz0vKHP5zWzcwsqap7
bJ5Y6G9IgwLuBqwAfgKcCPwsqhlZsEUNnuFu2TLPcGdmVq7a0WNfIQ0EeDNptNgrImJF8VHNMdLRYzs7K89HMWMGPP54zcIyM2tptRg99p+AiyNiae3Cag2e4c7MrFi13WPPGotJAoaeyc4z3JmZJa0yZ3bTeIY7M7NibZ8ourth7lzYYou0vuWWad0N2WZmyVoNCjjWdHenRLHvvnDFFbDHHs2OyMysdbT9HYWZmRVrmUQhaYKkyyX9RtJDknaXNEnSjXlcqRvz8xx1NXqfCDEzq49qhxmfKemYsvUZku6Q9Mf8y33TGsRyDmkSpLcC7wQeAk4DboqI7YGb8npdePRYM7PKqr2j+Cego2z9a6TBAecCewJz1iUISZvl83wXICJejYjngZnAvLzbPOCQdbmOmZmNXLWJYjtgAYCkTYADgVMi4tPAZ4EPr2Mc2wL9wIWS7pH0HUlvArYsPb+R37eodLCkHkl9kvr6+/vXMRQzMytXbaLYGHgpL7+X1FuqNE/2w8CfrWMc6wO7AOdHxLuBFxlBNVNEzI2Irojo6ujoGP6AwnOt0+FmZmNOtYniceB9eXkmMD8iXsjrWwAvVDpoBJYASyLirrx+OSlxLJM0BSC/L1/H6wzJbRRmZpVVmyi+BcyR1AecQG5LyHYHfr0uQUTE08CTknbIRfvkc14F5AHAmQVcuS7XMTOzkavqgbuIOEfSM6Qhxs+NiIvKNr8ZuLAGsfwfoFfShsCjwDGkRHaZpOOAxcDhNbhOIVc9mZkNVPWT2RHRC6wxpU9E/H0tAomIe4FKQ9zuU4vzD8dVT2ZmlVX7HMWfS9q1bH0TSV+S9J+SPlG/8MzMrNmqbaP4d+CwsvUzgE+TejudLenEWgdmZmatodpEsTNwO4Ck9YCjgFMj4j3A6UBPfcJrPLdRmJkNVG2imAA8m5ffDUwkdWEFuJX0wNyo5jYKM7PKqk0Uy4C35OV9gUci4sm8vimwstaBmZlZa6i219NVwJck7QQcTXquouQdpO6sZmY2BlWbKE4jDeOxHylpnFm27WBWD+cx6rmNwsxsoGofuHsR+N9DbHtvTSNqErdRmJlVNqKpUCVNIg3ZMYnUuH1nRDxXj8DMzKw1VJ0oJJ1OenZio7LiVyR9NSI+V/PImsRVT2ZmA1X7ZPZJpHknLgY+ALwtv18MfFbSJ+sWYYO46snMrLJq7yg+DpwTESeXlT0M/FzSn0gjyp5b6+DMzKz5qn2OohP46RDbfpq3m5nZGFRtongW2GmIbW9n9VPbo57bKMzMBqo2UfwE+BdJR0raAEDS+pI+CvwzcEW9AmwUt1GYmVVWbaL4DHAvMA9YIWkZaQ7tXuA+UkO3mZmNQdU+cPdHSXsCHwT+J+k5iueAnwPXRoydCpux80nMzGpjJDPcBXB1fo05rnoyM6us2qonMzNrU0MmCkmrJL1e5asmw4xLGifpHklX5/VtJN0laaGkH0rasBbXMTOz6hVVPf0z0Oga+08BDwGb5fWvAGdHxKWSvgkcB5xfzwDcRmFmNtCQiSIi5jQwDiRNIzWWnwGcIknA3sDH8i7zgDnUKVG4jcLMrLJWaqP4OvCPwKq8vjnwfESUqrWWAFMrHSipR1KfpL7+/v76R2pm1kZaIlFI+hCwPCLmlxdX2LVixVBEzI2Irojo6ujoWKdYXPVkZjbQiOajqKM9gIMlHUiaSW8z0h3GBEnr57uKacBT9QrAVU9mZpW1xB1FRHwmIqZFRCdwBHBzRHQDtwCH5d1mAVc2KUQzs7bVEomiwKmkhu1FpDaL7zY5HjOzttMqVU9viIhbgVvz8qPAro29fiOvZmbW+lr9jqJh3EZhZlaZE4WZmRVyojAzs0JOFEBvLxx6aFo+8si0bmZmScs1Zjdaby/09MCKFWl9+fK0DtDd3by4zMxaRdvfUcyevTpJlKxYkcrNzMyJgsWLR1ZuZtZu2j5RTJ8+snIzs3bT9onijDNg/PiBZePHp3IzM3OioLsb
5s6FzTZbXbbJJs2Lx8ys1bR9oih59dXVy88+m3o+uZusmZkTBZB6OL388sAy93wyM0ucKHDPJzOzIk4UrNmYPVy5mVk7caIAXnppZOVmZu3EiQJYtWpk5WZm7cSJAhg3bmTlZmbtxImC1YMAVltuZtZOnCiAb3wDPvKR1evjxsHxx6dyM7N21xKJQtLWkm6R9JCkByV9KpdPknSjpIX5fWK9Yjj11PR+9dWwcqWThJlZSUskCmAl8OmIeBuwG3CipB2B04CbImJ74Ka8XhfXXZfeDzoIOjv9VLaZWUlLJIqIWBoRd+flPwIPAVOBmcC8vNs84JB6XL+3F848sxQLPPGEh/AwMytpiURRTlIn8G7gLmDLiFgKKZkAW9Tjmh7Cw8xsaC2VKCRtClwBnBQRfxjBcT2S+iT19ff3j/i6HsLDzGxoLZMoJG1AShK9EfHjXLxM0pS8fQqwvNKxETE3Iroioqujo2PE1/bkRWZmQ2uJRCFJwHeBhyLia2WbrgJm5eVZwJX1uP4ZZ8AGGwws23BDT15kZgawfrMDyPYAjgTul3RvLvss8GXgMknHAYuBw+sVQETxuplZu1KMsd+IXV1d0dfXN6JjOjtTT6fBZsyAxx+vSVhmZi1N0vyI6Kq0rSWqnprNjdlmZkNzosCN2WZmRZwogAMPHFm5mVk7caIArrlmZOVmZu3EiYLKDdlF5WZm7aTtE0XReE7rtf23Y2bmRFE4npOnQjUzc6JwF1gzs2G0faIo6gK7+eaNi8PMrFW1faKoNM4TpOlQzzmn8fGYmbWaVhnrqWm6u9P7UUcNbJPYa6/V28zM2lnb31EA3H77mg3XN90EJ5zQnHjMzFqJEwXwrW+NrNzMrJ04UTB0N1h3jzUzc6IwM7NhOFGYmVmhtk8URUN4mJmZE0XhEB5mZuZEMewQHhJsskkaILCz03cgZtZ+2j5RVDOL3csvQ0Qadvxv/zYlD7/a47X++ul5mt7e9IeClP5oKG0fNy69T54Mm266unzy5DX/qCidY/AfHSeckK5T6XpD/YEy3HazWlJENDuGYUnaHzgHGAd8JyK+PNS+XV1d0dfXV/W5e3vTL38zs7FkpL/aJc2PiK5K21r+jkLSOOA84ABgR+Cjknas1fk9TIeZjUVS7c7V8okC2BVYFBGPRsSrwKXAzCbHZGbWNkZDopgKPFm2viSXvUFSj6Q+SX39/f0NDc7MbKwbDYmi0g3UgNq3iJgbEV0R0dXR0dGgsMzM2sNoSBRLgK3L1qcBT9XyAqOgPd/MrGlGQ6L4FbC9pG0kbQgcAVxV64tEVH5dfDHMmFHrq5mZ1Vct/wBu+YmLImKlpE8A15O6x14QEQ826vrd3e4ZZWbtreUTBUBEXANc0+w4zMza0WioejIzsyZyojAzs0JOFGZmVsiJwszMCo2KQQFHQlI/8MRaHj4ZeKaG4TSSY28Ox94cjr32ZkRExSeWx1yiWBeS+oYaPbHVOfbmcOzN4dgby1VPZmZWyInCzMwKOVEMNLfZAawDx94cjr05HHsDuY3CzMwK+Y7CzMwKOVGYmVkhJ4pM0v6SHpa0SNJpzY6nRNLjku6XdK+kvlw2SdKNkhbm94m5XJLOzZ9hgaRdys4zK++/UNKsOsV6gaTlkh4oK6tZrJLek7+LRfnYms0KPETscyT9Ln/390o6sGzbZ3IcD0var6y84s9RHib/rvyZfpiHzK9F3FtLukXSQ5IelPSpXN7y33tB7KPhe99Y0i8l3Zdj/2LR9SRtlNcX5e2da/uZmiIi2v5FGr78EWBbYEPgPmDHZseVY3scmDyo7F+B0/LyacBX8vKBwLWkWQF3A+7K5ZOAR/P7xLw8sQ6x7gnsAjxQj1iBXwK752OuBQ6oc+xzgH+osO+O+WdkI2Cb/LMzrujnCLgMOCIvfxM4vkZxTwF2yctvBn6b42v5770g9tHwvQvYNC9vANyVv8+K1wNOAL6Zl48Afri2n6kZL99RJLsCiyLi0Yh4
FbgUmNnkmIrMBObl5XnAIWXlF0VyJzBB0hRgP+DGiHguIn4P3AjsX+ugIuI24Ll6xJq3bRYRd0T6H3ZR2bnqFftQZgKXRsQrEfEYsIj0M1Tx5yj/Bb43cHk+vvx7WNe4l0bE3Xn5j8BDpDnlW/57L4h9KK30vUdE/CmvbpBfUXC98n+Py4F9cnwj+ky1iH1tOFEkU4Eny9aXUPwD20gB3CBpvqSeXLZlRCyF9J8N2CKXD/U5mvn5ahXr1Lw8uLzePpGraC4oVd8ME2Ol8s2B5yNi5aDymsrVGe8m/XU7qr73QbHDKPjeJY2TdC+wnJRYHym43hsx5u0v5Pha8f/sGpwokkp1rq3Sb3iPiNgFOAA4UdKeBfsO9Tla8fONNNZmfIbzge2AdwFLgX/L5S0Xu6RNgSuAkyLiD0W7DhFLK8U+Kr73iHg9It4FTCPdAbyt4HotFftIOVEkS4Cty9anAU81KZYBIuKp/L4c+AnpB3JZrhIgvy/Puw/1OZr5+WoV65K8PLi8biJiWf5lsAr4Num7Z5gYK5U/Q6riWX9QeU1I2oD0i7Y3In6ci0fF914p9tHyvZdExPPAraQ2iqGu90aMefv/IFV1tuL/2TU1q3GklV6kKWEfJTUmlRqO3t4Ccb0JeHPZ8n+T2hbOYmBD5b/m5Q8ysKHyl7l8EvAYqZFyYl6eVKeYOxnYIFyzWIFf5X1LjaoH1jn2KWXLJ5PqkgHezsAGyEdJjY9D/hwBP2JgI+cJNYpZpHaDrw8qb/nvvSD20fC9dwAT8vImwH8BHxrqesCJDGzMvmxtP1MzXk25aCu+SL1BfkuqZ5zd7HhyTNvmH5D7gAdLcZHqNm8CFub30n9oAeflz3A/0FV2rmNJDWWLgGPqFO8lpKqC10h/ER1Xy1iBLuCBfMy/k0cWqGPs38+xLQCuGvQLbHaO42HKegEN9XOU/y1/mT/Tj4CNahT3+0hVEguAe/PrwNHwvRfEPhq+952Be3KMDwCfL7oesHFeX5S3b7u2n6kZLw/hYWZmhdxGYWZmhZwozMyskBOFmZkVcqIwM7NCThRmZlbIicLaRh6VNPLyhLy+y3DH1TGed+UYJlXYFpLmNCEsszU4UVg7+Q5pFFSACcAXSCPGNsu7cgxrJApSnN9pbDhmla0//C5mY0NELGHgAHc1lUcD3SDSaJ/rJNLIrmYtwXcU1jZKVU95pNLHcvG3c1lIOrps30Ml3SlphaTnJf1I0vRB53tc0sWSjpX0G+BV0hAZSPqipLslvSDpGUk3S9qt7NijgQvz6sKyGDrz9jWqnvJENndIeimf9z8k7TBon1sl/ULSX+Xrr5D0gKSaDclu7ceJwtrRUuDQvPwlUjXP7sBPASR9nDRQ3a+Bw4C/B3YCfi7pzYPO9QHgFOCLpHG4FuTyqcDZpPkIjiYNynebpJ3z9p8Cp+flw8tiWFopYEn752P+BPwNcHyO6ReSBg8/vR1wDvC1/DmXApdLekvht2I2BFc9WduJiFck3ZNXHy2v5slDXn8FuDAiji0rv4s07s5xwNfLTjcReE9EPD3oGn9Xduw44DrSeF3HAZ+KiH5Jj+Rd7o2IRcOEfTppkLgDIs93IOmOHNOnScmqZDKwZ0QszPvdTUoWfw2cOcx1zNbgOwqzgXYHNgN6Ja1fepHaNn5DmjK13J2DkwRArvq5RdKzwErSYIN/DuwweN/hSHoTqdH9h7F6UhwizYh2O/D+QYcsLCWJvN9y0h3NdMzWgu8ozAYqzQT3syG2/37Q+hpVRbnL7TXA9aQ7iKXA66ReTBuvRUwTSaO+VqqWehqYMais0pSur6zltc2cKMwGeTa/H02qKhrsj4PWKw2//BHSXcShEfFaqTBP6fn8WsT0+3ydrSps24rVMZvVhROFtatX8vsmg8r/m5QM3hIR89by3ONJdxBvJBFJe5Oqfh4r22+oGAaIiBclzQcOlzQnIl7P55wBvBf4f2sZp1lVnCisXS0j/SV+hKQFwIvA
YxHxrKT/C5wnqYM0o9sLpF5M7wdujYgfDHPu64CTgO9JupDUNvE54HeD9vt1fj9R0jxSO8aCIZ7D+Byp19PVkr4BbErqafUCq+eUNqsLN2ZbW4o0H/Pfker/f0aa7vOgvO1bwMGkhufvk5LFF0l/WN1bxbmvBz4J7AFcTZo57ijS7Gbl+90HzMnX/UWO4c+GOOd1pGc0JgCXkabZfAh4X+R51c3qxTPcmZlZId9RmJlZIScKMzMr5ERhZmaFnCjMzKyQE4WZmRVyojAzs0JOFGZmVsiJwszMCv1/dna7sjqINoYAAAAASUVORK5CYII=\n",
608
      "text/plain": [
609
       "<Figure size 432x288 with 1 Axes>"
610
      ]
611
     },
612
     "metadata": {
613
      "needs_background": "light"
614
     },
615
     "output_type": "display_data"
616
    }
617
   ],
618
   "source": [
619
    "# Build the DTI model from the DeepDTA-style config defined above\n",
    "# (CNN drug + CNN target encoders), then train for the configured 100 epochs.\n",
    "# train() prints per-iteration loss and per-epoch validation MSE / Pearson r /\n",
    "# Concordance Index, evaluates on the held-out test split when finished,\n",
    "# and displays the training-loss curve below.\n",
    "model = models.model_initialize(**config)\n",
    "model.train(train, val, test)"
621
   ]
622
  },
623
  {
624
   "cell_type": "code",
625
   "execution_count": null,
626
   "metadata": {},
627
   "outputs": [],
628
   "source": []
629
  }
630
 ],
631
 "metadata": {
632
  "kernelspec": {
633
   "display_name": "Python 3",
634
   "language": "python",
635
   "name": "python3"
636
  },
637
  "language_info": {
638
   "codemirror_mode": {
639
    "name": "ipython",
640
    "version": 3
641
   },
642
   "file_extension": ".py",
643
   "mimetype": "text/x-python",
644
   "name": "python",
645
   "nbconvert_exporter": "python",
646
   "pygments_lexer": "ipython3",
647
   "version": "3.7.7"
648
  }
649
 },
650
 "nbformat": 4,
651
 "nbformat_minor": 4
652
}