
notebooks/Ensembling.ipynb
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "VERSION = 33\n",
    "\n",
    "FOCAL_LOSS = 0\n",
    "CLOUD_SINGLE = True\n",
    "MIXUP = False\n",
    "NO_BLACK_LOSS = True\n",
    "DATA_SMALL = False"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "if VERSION in [31,32]:\n",
    "    TRAIN_ON_STAGE_1 = False\n",
    "else:\n",
    "    TRAIN_ON_STAGE_1 = True\n",
    "\n",
    "if VERSION in [32,34,36]:\n",
    "    WEIGHTED = True\n",
    "else:\n",
    "    WEIGHTED = False"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "%run ./Code.ipynb"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "if VERSION in [31,32]:\n",
    "    # old features, no stage2 training\n",
    "    train_md, test_md = loadMetadata()\n",
    "elif VERSION in [33,34]:\n",
    "    # old features, with stage2 training\n",
    "    train_md, test_md = loadMetadata3()\n",
    "elif VERSION in [35,36]:\n",
    "    # new features\n",
    "    train_md, test_md = loadMetadata2()"
   ]
  },
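  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A hedged, at-a-glance restatement of the `VERSION` switches from the configuration and metadata cells above. `VERSION_CONFIG` is only for reference and is not used anywhere else in this notebook."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hedged summary of the VERSION switches above; not referenced by the rest of the notebook.\n",
    "VERSION_CONFIG = {\n",
    "    31: dict(loader='loadMetadata',  TRAIN_ON_STAGE_1=False, WEIGHTED=False),  # old features, no stage2 training\n",
    "    32: dict(loader='loadMetadata',  TRAIN_ON_STAGE_1=False, WEIGHTED=True),\n",
    "    33: dict(loader='loadMetadata3', TRAIN_ON_STAGE_1=True,  WEIGHTED=False),  # old features, with stage2 training\n",
    "    34: dict(loader='loadMetadata3', TRAIN_ON_STAGE_1=True,  WEIGHTED=True),\n",
    "    35: dict(loader='loadMetadata2', TRAIN_ON_STAGE_1=True,  WEIGHTED=False),  # new features\n",
    "    36: dict(loader='loadMetadata2', TRAIN_ON_STAGE_1=True,  WEIGHTED=True)\n",
    "}"
   ]
  },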
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# OOF"
   ]
  },
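  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The long output cell that follows loads each fold's saved model and scores its validation split 32 times, printing `val ll`, `cor` and `auc` per pass. The short cell directly below is a hedged sketch of how such repeated out-of-fold passes are typically averaged into one OOF prediction per training row; the helper `predict_fold` and the array shapes are assumptions, not this notebook's actual API."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hedged sketch (assumed helper names, not this notebook's API): average repeated\n",
    "# validation passes per fold and stack the folds into one OOF prediction array.\n",
    "import numpy as np\n",
    "\n",
    "def build_oof(predict_fold, n_folds=3, n_iters=32):\n",
    "    # predict_fold(fold, it) is assumed to return (row_indices, class_probs) for one pass\n",
    "    parts = []\n",
    "    for fold in range(n_folds):\n",
    "        acc = None\n",
    "        for it in range(n_iters):\n",
    "            idx, probs = predict_fold(fold, it)   # one augmented pass over the validation fold\n",
    "            acc = probs if acc is None else acc + probs\n",
    "        parts.append((idx, acc / n_iters))        # average the repeated passes\n",
    "    idx = np.concatenate([p[0] for p in parts])\n",
    "    probs = np.concatenate([p[1] for p in parts])\n",
    "    return probs[np.argsort(idx)]                 # one averaged prediction per training row"
   ]
  },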
  {
69
   "cell_type": "code",
70
   "execution_count": 21,
71
   "metadata": {
72
    "scrolled": true
73
   },
74
   "outputs": [
75
    {
76
     "name": "stdout",
77
     "output_type": "stream",
78
     "text": [
79
      "completed epochs: 3 iters starting now: 32\n",
80
      "adding dummy serieses 14\n",
81
      "DataSet 7 valid size 7232 fold 0\n",
82
      "dataset valid: 7232 loader valid: 226\n",
83
      "loading model model.b3.f0.d7.v34\n",
84
      "setFeats, augmentation -1\n",
85
      "Batch 50 device: cuda time passed: 11.039 time per batch: 0.221\n",
86
      "Batch 100 device: cuda time passed: 19.674 time per batch: 0.197\n",
87
      "Batch 150 device: cuda time passed: 28.256 time per batch: 0.188\n",
88
      "Batch 200 device: cuda time passed: 36.735 time per batch: 0.184\n",
89
      "ver 34, iter 0, fold 0, val ll: 0.0629, cor: 0.8425, auc: 0.9882\n",
90
      "setFeats, augmentation -1\n",
91
      "Batch 50 device: cuda time passed: 11.369 time per batch: 0.227\n",
92
      "Batch 100 device: cuda time passed: 19.874 time per batch: 0.199\n",
93
      "Batch 150 device: cuda time passed: 28.322 time per batch: 0.189\n",
94
      "Batch 200 device: cuda time passed: 36.820 time per batch: 0.184\n",
95
      "ver 34, iter 1, fold 0, val ll: 0.0633, cor: 0.8416, auc: 0.9880\n",
96
      "setFeats, augmentation -1\n",
97
      "Batch 50 device: cuda time passed: 11.085 time per batch: 0.222\n",
98
      "Batch 100 device: cuda time passed: 19.790 time per batch: 0.198\n",
99
      "Batch 150 device: cuda time passed: 28.354 time per batch: 0.189\n",
100
      "Batch 200 device: cuda time passed: 36.403 time per batch: 0.182\n",
101
      "ver 34, iter 2, fold 0, val ll: 0.0630, cor: 0.8423, auc: 0.9881\n",
102
      "setFeats, augmentation -1\n",
103
      "Batch 50 device: cuda time passed: 11.630 time per batch: 0.233\n",
104
      "Batch 100 device: cuda time passed: 20.129 time per batch: 0.201\n",
105
      "Batch 150 device: cuda time passed: 28.720 time per batch: 0.191\n",
106
      "Batch 200 device: cuda time passed: 36.561 time per batch: 0.183\n",
107
      "ver 34, iter 3, fold 0, val ll: 0.0631, cor: 0.8421, auc: 0.9881\n",
108
      "setFeats, augmentation -1\n",
109
      "Batch 50 device: cuda time passed: 10.798 time per batch: 0.216\n",
110
      "Batch 100 device: cuda time passed: 20.275 time per batch: 0.203\n",
111
      "Batch 150 device: cuda time passed: 28.541 time per batch: 0.190\n",
112
      "Batch 200 device: cuda time passed: 36.693 time per batch: 0.183\n",
113
      "ver 34, iter 4, fold 0, val ll: 0.0630, cor: 0.8427, auc: 0.9880\n",
114
      "setFeats, augmentation -1\n",
115
      "Batch 50 device: cuda time passed: 11.537 time per batch: 0.231\n",
116
      "Batch 100 device: cuda time passed: 20.034 time per batch: 0.200\n",
117
      "Batch 150 device: cuda time passed: 29.120 time per batch: 0.194\n",
118
      "Batch 200 device: cuda time passed: 37.470 time per batch: 0.187\n",
119
      "ver 34, iter 5, fold 0, val ll: 0.0631, cor: 0.8419, auc: 0.9881\n",
120
      "setFeats, augmentation -1\n",
121
      "Batch 50 device: cuda time passed: 11.616 time per batch: 0.232\n",
122
      "Batch 100 device: cuda time passed: 20.549 time per batch: 0.205\n",
123
      "Batch 150 device: cuda time passed: 28.847 time per batch: 0.192\n",
124
      "Batch 200 device: cuda time passed: 37.005 time per batch: 0.185\n",
125
      "ver 34, iter 6, fold 0, val ll: 0.0632, cor: 0.8420, auc: 0.9881\n",
126
      "setFeats, augmentation -1\n",
127
      "Batch 50 device: cuda time passed: 11.390 time per batch: 0.228\n",
128
      "Batch 100 device: cuda time passed: 19.692 time per batch: 0.197\n",
129
      "Batch 150 device: cuda time passed: 28.482 time per batch: 0.190\n",
130
      "Batch 200 device: cuda time passed: 37.211 time per batch: 0.186\n",
131
      "ver 34, iter 7, fold 0, val ll: 0.0632, cor: 0.8418, auc: 0.9880\n",
132
      "setFeats, augmentation -1\n",
133
      "Batch 50 device: cuda time passed: 11.109 time per batch: 0.222\n",
134
      "Batch 100 device: cuda time passed: 19.665 time per batch: 0.197\n",
135
      "Batch 150 device: cuda time passed: 27.973 time per batch: 0.186\n",
136
      "Batch 200 device: cuda time passed: 36.279 time per batch: 0.181\n",
137
      "ver 34, iter 8, fold 0, val ll: 0.0631, cor: 0.8422, auc: 0.9881\n",
138
      "setFeats, augmentation -1\n",
139
      "Batch 50 device: cuda time passed: 11.182 time per batch: 0.224\n",
140
      "Batch 100 device: cuda time passed: 20.280 time per batch: 0.203\n",
141
      "Batch 150 device: cuda time passed: 28.800 time per batch: 0.192\n",
142
      "Batch 200 device: cuda time passed: 37.683 time per batch: 0.188\n",
143
      "ver 34, iter 9, fold 0, val ll: 0.0632, cor: 0.8418, auc: 0.9880\n",
144
      "setFeats, augmentation -1\n",
145
      "Batch 50 device: cuda time passed: 11.546 time per batch: 0.231\n",
146
      "Batch 100 device: cuda time passed: 19.935 time per batch: 0.199\n",
147
      "Batch 150 device: cuda time passed: 28.393 time per batch: 0.189\n",
148
      "Batch 200 device: cuda time passed: 37.353 time per batch: 0.187\n",
149
      "ver 34, iter 10, fold 0, val ll: 0.0631, cor: 0.8419, auc: 0.9881\n",
150
      "setFeats, augmentation -1\n",
151
      "Batch 50 device: cuda time passed: 11.706 time per batch: 0.234\n",
152
      "Batch 100 device: cuda time passed: 20.097 time per batch: 0.201\n",
153
      "Batch 150 device: cuda time passed: 28.629 time per batch: 0.191\n",
154
      "Batch 200 device: cuda time passed: 36.971 time per batch: 0.185\n",
155
      "ver 34, iter 11, fold 0, val ll: 0.0632, cor: 0.8416, auc: 0.9880\n",
156
      "setFeats, augmentation -1\n",
157
      "Batch 50 device: cuda time passed: 11.344 time per batch: 0.227\n",
158
      "Batch 100 device: cuda time passed: 20.333 time per batch: 0.203\n",
159
      "Batch 150 device: cuda time passed: 28.633 time per batch: 0.191\n",
160
      "Batch 200 device: cuda time passed: 36.853 time per batch: 0.184\n",
161
      "ver 34, iter 12, fold 0, val ll: 0.0632, cor: 0.8418, auc: 0.9880\n",
162
      "setFeats, augmentation -1\n",
163
      "Batch 50 device: cuda time passed: 10.884 time per batch: 0.218\n",
164
      "Batch 100 device: cuda time passed: 19.948 time per batch: 0.199\n",
165
      "Batch 150 device: cuda time passed: 28.653 time per batch: 0.191\n",
166
      "Batch 200 device: cuda time passed: 37.139 time per batch: 0.186\n",
167
      "ver 34, iter 13, fold 0, val ll: 0.0632, cor: 0.8422, auc: 0.9880\n",
168
      "setFeats, augmentation -1\n",
169
      "Batch 50 device: cuda time passed: 11.232 time per batch: 0.225\n",
170
      "Batch 100 device: cuda time passed: 19.680 time per batch: 0.197\n",
171
      "Batch 150 device: cuda time passed: 28.180 time per batch: 0.188\n",
172
      "Batch 200 device: cuda time passed: 36.829 time per batch: 0.184\n",
173
      "ver 34, iter 14, fold 0, val ll: 0.0631, cor: 0.8422, auc: 0.9881\n",
174
      "setFeats, augmentation -1\n",
175
      "Batch 50 device: cuda time passed: 11.134 time per batch: 0.223\n",
176
      "Batch 100 device: cuda time passed: 19.750 time per batch: 0.197\n",
177
      "Batch 150 device: cuda time passed: 28.254 time per batch: 0.188\n",
178
      "Batch 200 device: cuda time passed: 37.052 time per batch: 0.185\n",
179
      "ver 34, iter 15, fold 0, val ll: 0.0632, cor: 0.8419, auc: 0.9880\n",
180
      "setFeats, augmentation -1\n",
181
      "Batch 50 device: cuda time passed: 11.076 time per batch: 0.222\n",
182
      "Batch 100 device: cuda time passed: 19.921 time per batch: 0.199\n",
183
      "Batch 150 device: cuda time passed: 28.521 time per batch: 0.190\n",
184
      "Batch 200 device: cuda time passed: 36.744 time per batch: 0.184\n",
185
      "ver 34, iter 16, fold 0, val ll: 0.0633, cor: 0.8417, auc: 0.9880\n",
186
      "setFeats, augmentation -1\n",
187
      "Batch 50 device: cuda time passed: 10.846 time per batch: 0.217\n",
188
      "Batch 100 device: cuda time passed: 20.198 time per batch: 0.202\n",
189
      "Batch 150 device: cuda time passed: 28.638 time per batch: 0.191\n",
190
      "Batch 200 device: cuda time passed: 36.762 time per batch: 0.184\n",
191
      "ver 34, iter 17, fold 0, val ll: 0.0631, cor: 0.8423, auc: 0.9880\n",
192
      "setFeats, augmentation -1\n",
193
      "Batch 50 device: cuda time passed: 11.145 time per batch: 0.223\n",
194
      "Batch 100 device: cuda time passed: 19.770 time per batch: 0.198\n",
195
      "Batch 150 device: cuda time passed: 28.000 time per batch: 0.187\n",
196
      "Batch 200 device: cuda time passed: 36.639 time per batch: 0.183\n",
197
      "ver 34, iter 18, fold 0, val ll: 0.0631, cor: 0.8418, auc: 0.9881\n",
198
      "setFeats, augmentation -1\n",
199
      "Batch 50 device: cuda time passed: 10.940 time per batch: 0.219\n",
200
      "Batch 100 device: cuda time passed: 19.199 time per batch: 0.192\n",
201
      "Batch 150 device: cuda time passed: 27.382 time per batch: 0.183\n",
202
      "Batch 200 device: cuda time passed: 35.991 time per batch: 0.180\n",
203
      "ver 34, iter 19, fold 0, val ll: 0.0633, cor: 0.8418, auc: 0.9880\n",
204
      "setFeats, augmentation -1\n",
205
      "Batch 50 device: cuda time passed: 11.303 time per batch: 0.226\n",
206
      "Batch 100 device: cuda time passed: 20.052 time per batch: 0.201\n",
207
      "Batch 150 device: cuda time passed: 28.800 time per batch: 0.192\n",
208
      "Batch 200 device: cuda time passed: 37.209 time per batch: 0.186\n",
209
      "ver 34, iter 20, fold 0, val ll: 0.0631, cor: 0.8419, auc: 0.9881\n",
210
      "setFeats, augmentation -1\n",
211
      "Batch 50 device: cuda time passed: 11.012 time per batch: 0.220\n",
212
      "Batch 100 device: cuda time passed: 19.801 time per batch: 0.198\n",
213
      "Batch 150 device: cuda time passed: 28.285 time per batch: 0.189\n",
214
      "Batch 200 device: cuda time passed: 36.655 time per batch: 0.183\n",
215
      "ver 34, iter 21, fold 0, val ll: 0.0631, cor: 0.8419, auc: 0.9881\n",
216
      "setFeats, augmentation -1\n",
217
      "Batch 50 device: cuda time passed: 11.588 time per batch: 0.232\n",
218
      "Batch 100 device: cuda time passed: 19.972 time per batch: 0.200\n",
219
      "Batch 150 device: cuda time passed: 28.349 time per batch: 0.189\n",
220
      "Batch 200 device: cuda time passed: 36.708 time per batch: 0.184\n",
221
      "ver 34, iter 22, fold 0, val ll: 0.0632, cor: 0.8421, auc: 0.9880\n",
222
      "setFeats, augmentation -1\n"
223
     ]
224
    },
225
    {
226
     "name": "stdout",
227
     "output_type": "stream",
228
     "text": [
229
      "Batch 50 device: cuda time passed: 11.373 time per batch: 0.227\n",
230
      "Batch 100 device: cuda time passed: 19.848 time per batch: 0.198\n",
231
      "Batch 150 device: cuda time passed: 28.645 time per batch: 0.191\n",
232
      "Batch 200 device: cuda time passed: 37.166 time per batch: 0.186\n",
233
      "ver 34, iter 23, fold 0, val ll: 0.0631, cor: 0.8424, auc: 0.9881\n",
234
      "setFeats, augmentation -1\n",
235
      "Batch 50 device: cuda time passed: 11.732 time per batch: 0.235\n",
236
      "Batch 100 device: cuda time passed: 20.379 time per batch: 0.204\n",
237
      "Batch 150 device: cuda time passed: 28.964 time per batch: 0.193\n",
238
      "Batch 200 device: cuda time passed: 37.185 time per batch: 0.186\n",
239
      "ver 34, iter 24, fold 0, val ll: 0.0632, cor: 0.8421, auc: 0.9880\n",
240
      "setFeats, augmentation -1\n",
241
      "Batch 50 device: cuda time passed: 10.942 time per batch: 0.219\n",
242
      "Batch 100 device: cuda time passed: 19.584 time per batch: 0.196\n",
243
      "Batch 150 device: cuda time passed: 27.962 time per batch: 0.186\n",
244
      "Batch 200 device: cuda time passed: 36.295 time per batch: 0.181\n",
245
      "ver 34, iter 25, fold 0, val ll: 0.0631, cor: 0.8421, auc: 0.9880\n",
246
      "setFeats, augmentation -1\n",
247
      "Batch 50 device: cuda time passed: 11.547 time per batch: 0.231\n",
248
      "Batch 100 device: cuda time passed: 19.793 time per batch: 0.198\n",
249
      "Batch 150 device: cuda time passed: 28.204 time per batch: 0.188\n",
250
      "Batch 200 device: cuda time passed: 36.624 time per batch: 0.183\n",
251
      "ver 34, iter 26, fold 0, val ll: 0.0630, cor: 0.8426, auc: 0.9881\n",
252
      "setFeats, augmentation -1\n",
253
      "Batch 50 device: cuda time passed: 10.957 time per batch: 0.219\n",
254
      "Batch 100 device: cuda time passed: 18.892 time per batch: 0.189\n",
255
      "Batch 150 device: cuda time passed: 27.679 time per batch: 0.185\n",
256
      "Batch 200 device: cuda time passed: 36.440 time per batch: 0.182\n",
257
      "ver 34, iter 27, fold 0, val ll: 0.0630, cor: 0.8422, auc: 0.9881\n",
258
      "setFeats, augmentation -1\n",
259
      "Batch 50 device: cuda time passed: 10.715 time per batch: 0.214\n",
260
      "Batch 100 device: cuda time passed: 19.980 time per batch: 0.200\n",
261
      "Batch 150 device: cuda time passed: 28.835 time per batch: 0.192\n",
262
      "Batch 200 device: cuda time passed: 37.150 time per batch: 0.186\n",
263
      "ver 34, iter 28, fold 0, val ll: 0.0633, cor: 0.8417, auc: 0.9880\n",
264
      "setFeats, augmentation -1\n",
265
      "Batch 50 device: cuda time passed: 11.384 time per batch: 0.228\n",
266
      "Batch 100 device: cuda time passed: 19.805 time per batch: 0.198\n",
267
      "Batch 150 device: cuda time passed: 28.613 time per batch: 0.191\n",
268
      "Batch 200 device: cuda time passed: 37.061 time per batch: 0.185\n",
269
      "ver 34, iter 29, fold 0, val ll: 0.0631, cor: 0.8421, auc: 0.9881\n",
270
      "setFeats, augmentation -1\n",
271
      "Batch 50 device: cuda time passed: 11.388 time per batch: 0.228\n",
272
      "Batch 100 device: cuda time passed: 19.790 time per batch: 0.198\n",
273
      "Batch 150 device: cuda time passed: 28.816 time per batch: 0.192\n",
274
      "Batch 200 device: cuda time passed: 36.974 time per batch: 0.185\n",
275
      "ver 34, iter 30, fold 0, val ll: 0.0633, cor: 0.8417, auc: 0.9880\n",
276
      "setFeats, augmentation -1\n",
277
      "Batch 50 device: cuda time passed: 10.891 time per batch: 0.218\n",
278
      "Batch 100 device: cuda time passed: 19.773 time per batch: 0.198\n",
279
      "Batch 150 device: cuda time passed: 28.208 time per batch: 0.188\n",
280
      "Batch 200 device: cuda time passed: 36.513 time per batch: 0.183\n",
281
      "ver 34, iter 31, fold 0, val ll: 0.0632, cor: 0.8419, auc: 0.9880\n",
282
      "total running time 1743.487956047058\n",
283
      "total time 1744.0221991539001\n",
284
      "completed epochs: 3 iters starting now: 32\n",
285
      "adding dummy serieses 30\n",
286
      "DataSet 7 valid size 7328 fold 1\n",
287
      "dataset valid: 7328 loader valid: 229\n",
288
      "loading model model.b3.f1.d7.v34\n",
289
      "setFeats, augmentation -1\n",
290
      "Batch 50 device: cuda time passed: 11.726 time per batch: 0.235\n",
291
      "Batch 100 device: cuda time passed: 20.041 time per batch: 0.200\n",
292
      "Batch 150 device: cuda time passed: 28.446 time per batch: 0.190\n",
293
      "Batch 200 device: cuda time passed: 37.606 time per batch: 0.188\n",
294
      "ver 34, iter 0, fold 1, val ll: 0.0641, cor: 0.8352, auc: 0.9876\n",
295
      "setFeats, augmentation -1\n",
296
      "Batch 50 device: cuda time passed: 11.401 time per batch: 0.228\n",
297
      "Batch 100 device: cuda time passed: 19.701 time per batch: 0.197\n",
298
      "Batch 150 device: cuda time passed: 28.377 time per batch: 0.189\n",
299
      "Batch 200 device: cuda time passed: 37.516 time per batch: 0.188\n",
300
      "ver 34, iter 1, fold 1, val ll: 0.0644, cor: 0.8346, auc: 0.9875\n",
301
      "setFeats, augmentation -1\n",
302
      "Batch 50 device: cuda time passed: 11.045 time per batch: 0.221\n",
303
      "Batch 100 device: cuda time passed: 19.526 time per batch: 0.195\n",
304
      "Batch 150 device: cuda time passed: 28.070 time per batch: 0.187\n",
305
      "Batch 200 device: cuda time passed: 36.553 time per batch: 0.183\n",
306
      "ver 34, iter 2, fold 1, val ll: 0.0645, cor: 0.8347, auc: 0.9875\n",
307
      "setFeats, augmentation -1\n",
308
      "Batch 50 device: cuda time passed: 11.522 time per batch: 0.230\n",
309
      "Batch 100 device: cuda time passed: 20.039 time per batch: 0.200\n",
310
      "Batch 150 device: cuda time passed: 28.182 time per batch: 0.188\n",
311
      "Batch 200 device: cuda time passed: 36.515 time per batch: 0.183\n",
312
      "ver 34, iter 3, fold 1, val ll: 0.0643, cor: 0.8352, auc: 0.9875\n",
313
      "setFeats, augmentation -1\n",
314
      "Batch 50 device: cuda time passed: 11.412 time per batch: 0.228\n",
315
      "Batch 100 device: cuda time passed: 19.794 time per batch: 0.198\n",
316
      "Batch 150 device: cuda time passed: 27.859 time per batch: 0.186\n",
317
      "Batch 200 device: cuda time passed: 35.826 time per batch: 0.179\n",
318
      "ver 34, iter 4, fold 1, val ll: 0.0644, cor: 0.8346, auc: 0.9875\n",
319
      "setFeats, augmentation -1\n",
320
      "Batch 50 device: cuda time passed: 11.184 time per batch: 0.224\n",
321
      "Batch 100 device: cuda time passed: 19.415 time per batch: 0.194\n",
322
      "Batch 150 device: cuda time passed: 27.791 time per batch: 0.185\n",
323
      "Batch 200 device: cuda time passed: 36.132 time per batch: 0.181\n",
324
      "ver 34, iter 5, fold 1, val ll: 0.0642, cor: 0.8355, auc: 0.9876\n",
325
      "setFeats, augmentation -1\n",
326
      "Batch 50 device: cuda time passed: 10.913 time per batch: 0.218\n",
327
      "Batch 100 device: cuda time passed: 20.135 time per batch: 0.201\n",
328
      "Batch 150 device: cuda time passed: 28.498 time per batch: 0.190\n",
329
      "Batch 200 device: cuda time passed: 36.584 time per batch: 0.183\n",
330
      "ver 34, iter 6, fold 1, val ll: 0.0644, cor: 0.8350, auc: 0.9875\n",
331
      "setFeats, augmentation -1\n",
332
      "Batch 50 device: cuda time passed: 11.169 time per batch: 0.223\n",
333
      "Batch 100 device: cuda time passed: 19.419 time per batch: 0.194\n",
334
      "Batch 150 device: cuda time passed: 27.835 time per batch: 0.186\n",
335
      "Batch 200 device: cuda time passed: 36.749 time per batch: 0.184\n",
336
      "ver 34, iter 7, fold 1, val ll: 0.0644, cor: 0.8346, auc: 0.9876\n",
337
      "setFeats, augmentation -1\n",
338
      "Batch 50 device: cuda time passed: 11.093 time per batch: 0.222\n",
339
      "Batch 100 device: cuda time passed: 19.735 time per batch: 0.197\n",
340
      "Batch 150 device: cuda time passed: 27.976 time per batch: 0.187\n",
341
      "Batch 200 device: cuda time passed: 36.522 time per batch: 0.183\n",
342
      "ver 34, iter 8, fold 1, val ll: 0.0643, cor: 0.8351, auc: 0.9875\n",
343
      "setFeats, augmentation -1\n",
344
      "Batch 50 device: cuda time passed: 10.886 time per batch: 0.218\n",
345
      "Batch 100 device: cuda time passed: 19.310 time per batch: 0.193\n",
346
      "Batch 150 device: cuda time passed: 27.555 time per batch: 0.184\n",
347
      "Batch 200 device: cuda time passed: 35.897 time per batch: 0.179\n",
348
      "ver 34, iter 9, fold 1, val ll: 0.0640, cor: 0.8360, auc: 0.9876\n",
349
      "setFeats, augmentation -1\n",
350
      "Batch 50 device: cuda time passed: 11.190 time per batch: 0.224\n",
351
      "Batch 100 device: cuda time passed: 19.659 time per batch: 0.197\n",
352
      "Batch 150 device: cuda time passed: 28.059 time per batch: 0.187\n",
353
      "Batch 200 device: cuda time passed: 36.597 time per batch: 0.183\n",
354
      "ver 34, iter 10, fold 1, val ll: 0.0645, cor: 0.8345, auc: 0.9875\n",
355
      "setFeats, augmentation -1\n",
356
      "Batch 50 device: cuda time passed: 11.462 time per batch: 0.229\n",
357
      "Batch 100 device: cuda time passed: 19.851 time per batch: 0.199\n",
358
      "Batch 150 device: cuda time passed: 28.426 time per batch: 0.190\n",
359
      "Batch 200 device: cuda time passed: 37.003 time per batch: 0.185\n",
360
      "ver 34, iter 11, fold 1, val ll: 0.0643, cor: 0.8353, auc: 0.9876\n",
361
      "setFeats, augmentation -1\n",
362
      "Batch 50 device: cuda time passed: 11.183 time per batch: 0.224\n",
363
      "Batch 100 device: cuda time passed: 19.891 time per batch: 0.199\n",
364
      "Batch 150 device: cuda time passed: 28.179 time per batch: 0.188\n",
365
      "Batch 200 device: cuda time passed: 36.768 time per batch: 0.184\n",
366
      "ver 34, iter 12, fold 1, val ll: 0.0644, cor: 0.8347, auc: 0.9875\n",
367
      "setFeats, augmentation -1\n",
368
      "Batch 50 device: cuda time passed: 11.197 time per batch: 0.224\n",
369
      "Batch 100 device: cuda time passed: 19.529 time per batch: 0.195\n",
370
      "Batch 150 device: cuda time passed: 27.817 time per batch: 0.185\n",
371
      "Batch 200 device: cuda time passed: 36.686 time per batch: 0.183\n"
372
     ]
373
    },
374
    {
375
     "name": "stdout",
376
     "output_type": "stream",
377
     "text": [
378
      "ver 34, iter 13, fold 1, val ll: 0.0642, cor: 0.8353, auc: 0.9876\n",
379
      "setFeats, augmentation -1\n",
380
      "Batch 50 device: cuda time passed: 10.934 time per batch: 0.219\n",
381
      "Batch 100 device: cuda time passed: 19.565 time per batch: 0.196\n",
382
      "Batch 150 device: cuda time passed: 28.726 time per batch: 0.192\n",
383
      "Batch 200 device: cuda time passed: 37.472 time per batch: 0.187\n",
384
      "ver 34, iter 14, fold 1, val ll: 0.0644, cor: 0.8351, auc: 0.9875\n",
385
      "setFeats, augmentation -1\n",
386
      "Batch 50 device: cuda time passed: 11.664 time per batch: 0.233\n",
387
      "Batch 100 device: cuda time passed: 20.181 time per batch: 0.202\n",
388
      "Batch 150 device: cuda time passed: 28.592 time per batch: 0.191\n",
389
      "Batch 200 device: cuda time passed: 37.140 time per batch: 0.186\n",
390
      "ver 34, iter 15, fold 1, val ll: 0.0642, cor: 0.8355, auc: 0.9876\n",
391
      "setFeats, augmentation -1\n",
392
      "Batch 50 device: cuda time passed: 11.248 time per batch: 0.225\n",
393
      "Batch 100 device: cuda time passed: 19.746 time per batch: 0.197\n",
394
      "Batch 150 device: cuda time passed: 28.334 time per batch: 0.189\n",
395
      "Batch 200 device: cuda time passed: 36.783 time per batch: 0.184\n",
396
      "ver 34, iter 16, fold 1, val ll: 0.0643, cor: 0.8352, auc: 0.9875\n",
397
      "setFeats, augmentation -1\n",
398
      "Batch 50 device: cuda time passed: 10.833 time per batch: 0.217\n",
399
      "Batch 100 device: cuda time passed: 19.927 time per batch: 0.199\n",
400
      "Batch 150 device: cuda time passed: 28.267 time per batch: 0.188\n",
401
      "Batch 200 device: cuda time passed: 37.130 time per batch: 0.186\n",
402
      "ver 34, iter 17, fold 1, val ll: 0.0644, cor: 0.8349, auc: 0.9876\n",
403
      "setFeats, augmentation -1\n",
404
      "Batch 50 device: cuda time passed: 10.842 time per batch: 0.217\n",
405
      "Batch 100 device: cuda time passed: 19.668 time per batch: 0.197\n",
406
      "Batch 150 device: cuda time passed: 28.168 time per batch: 0.188\n",
407
      "Batch 200 device: cuda time passed: 36.418 time per batch: 0.182\n",
408
      "ver 34, iter 18, fold 1, val ll: 0.0642, cor: 0.8350, auc: 0.9877\n",
409
      "setFeats, augmentation -1\n",
410
      "Batch 50 device: cuda time passed: 11.130 time per batch: 0.223\n",
411
      "Batch 100 device: cuda time passed: 19.641 time per batch: 0.196\n",
412
      "Batch 150 device: cuda time passed: 28.136 time per batch: 0.188\n",
413
      "Batch 200 device: cuda time passed: 36.331 time per batch: 0.182\n",
414
      "ver 34, iter 19, fold 1, val ll: 0.0643, cor: 0.8350, auc: 0.9875\n",
415
      "setFeats, augmentation -1\n",
416
      "Batch 50 device: cuda time passed: 11.647 time per batch: 0.233\n",
417
      "Batch 100 device: cuda time passed: 20.391 time per batch: 0.204\n",
418
      "Batch 150 device: cuda time passed: 29.397 time per batch: 0.196\n",
419
      "Batch 200 device: cuda time passed: 37.997 time per batch: 0.190\n",
420
      "ver 34, iter 20, fold 1, val ll: 0.0643, cor: 0.8348, auc: 0.9876\n",
421
      "setFeats, augmentation -1\n",
422
      "Batch 50 device: cuda time passed: 11.382 time per batch: 0.228\n",
423
      "Batch 100 device: cuda time passed: 19.777 time per batch: 0.198\n",
424
      "Batch 150 device: cuda time passed: 27.840 time per batch: 0.186\n",
425
      "Batch 200 device: cuda time passed: 36.301 time per batch: 0.182\n",
426
      "ver 34, iter 21, fold 1, val ll: 0.0644, cor: 0.8347, auc: 0.9876\n",
427
      "setFeats, augmentation -1\n",
428
      "Batch 50 device: cuda time passed: 11.454 time per batch: 0.229\n",
429
      "Batch 100 device: cuda time passed: 19.967 time per batch: 0.200\n",
430
      "Batch 150 device: cuda time passed: 28.448 time per batch: 0.190\n",
431
      "Batch 200 device: cuda time passed: 36.952 time per batch: 0.185\n",
432
      "ver 34, iter 22, fold 1, val ll: 0.0644, cor: 0.8347, auc: 0.9875\n",
433
      "setFeats, augmentation -1\n",
434
      "Batch 50 device: cuda time passed: 10.610 time per batch: 0.212\n",
435
      "Batch 100 device: cuda time passed: 19.496 time per batch: 0.195\n",
436
      "Batch 150 device: cuda time passed: 27.773 time per batch: 0.185\n",
437
      "Batch 200 device: cuda time passed: 36.058 time per batch: 0.180\n",
438
      "ver 34, iter 23, fold 1, val ll: 0.0644, cor: 0.8350, auc: 0.9875\n",
439
      "setFeats, augmentation -1\n",
440
      "Batch 50 device: cuda time passed: 11.206 time per batch: 0.224\n",
441
      "Batch 100 device: cuda time passed: 20.066 time per batch: 0.201\n",
442
      "Batch 150 device: cuda time passed: 28.779 time per batch: 0.192\n",
443
      "Batch 200 device: cuda time passed: 36.978 time per batch: 0.185\n",
444
      "ver 34, iter 24, fold 1, val ll: 0.0645, cor: 0.8345, auc: 0.9875\n",
445
      "setFeats, augmentation -1\n",
446
      "Batch 50 device: cuda time passed: 11.366 time per batch: 0.227\n",
447
      "Batch 100 device: cuda time passed: 20.158 time per batch: 0.202\n",
448
      "Batch 150 device: cuda time passed: 28.539 time per batch: 0.190\n",
449
      "Batch 200 device: cuda time passed: 37.137 time per batch: 0.186\n",
450
      "ver 34, iter 25, fold 1, val ll: 0.0643, cor: 0.8351, auc: 0.9876\n",
451
      "setFeats, augmentation -1\n",
452
      "Batch 50 device: cuda time passed: 10.986 time per batch: 0.220\n",
453
      "Batch 100 device: cuda time passed: 19.900 time per batch: 0.199\n",
454
      "Batch 150 device: cuda time passed: 28.512 time per batch: 0.190\n",
455
      "Batch 200 device: cuda time passed: 37.649 time per batch: 0.188\n",
456
      "ver 34, iter 26, fold 1, val ll: 0.0644, cor: 0.8349, auc: 0.9875\n",
457
      "setFeats, augmentation -1\n",
458
      "Batch 50 device: cuda time passed: 11.681 time per batch: 0.234\n",
459
      "Batch 100 device: cuda time passed: 20.007 time per batch: 0.200\n",
460
      "Batch 150 device: cuda time passed: 28.371 time per batch: 0.189\n",
461
      "Batch 200 device: cuda time passed: 36.712 time per batch: 0.184\n",
462
      "ver 34, iter 27, fold 1, val ll: 0.0644, cor: 0.8346, auc: 0.9875\n",
463
      "setFeats, augmentation -1\n",
464
      "Batch 50 device: cuda time passed: 11.369 time per batch: 0.227\n",
465
      "Batch 100 device: cuda time passed: 20.216 time per batch: 0.202\n",
466
      "Batch 150 device: cuda time passed: 28.675 time per batch: 0.191\n",
467
      "Batch 200 device: cuda time passed: 36.999 time per batch: 0.185\n",
468
      "ver 34, iter 28, fold 1, val ll: 0.0645, cor: 0.8348, auc: 0.9875\n",
469
      "setFeats, augmentation -1\n",
470
      "Batch 50 device: cuda time passed: 11.430 time per batch: 0.229\n",
471
      "Batch 100 device: cuda time passed: 20.356 time per batch: 0.204\n",
472
      "Batch 150 device: cuda time passed: 28.426 time per batch: 0.190\n",
473
      "Batch 200 device: cuda time passed: 36.924 time per batch: 0.185\n",
474
      "ver 34, iter 29, fold 1, val ll: 0.0643, cor: 0.8351, auc: 0.9876\n",
475
      "setFeats, augmentation -1\n",
476
      "Batch 50 device: cuda time passed: 11.285 time per batch: 0.226\n",
477
      "Batch 100 device: cuda time passed: 19.941 time per batch: 0.199\n",
478
      "Batch 150 device: cuda time passed: 28.387 time per batch: 0.189\n",
479
      "Batch 200 device: cuda time passed: 36.743 time per batch: 0.184\n",
480
      "ver 34, iter 30, fold 1, val ll: 0.0643, cor: 0.8351, auc: 0.9876\n",
481
      "setFeats, augmentation -1\n",
482
      "Batch 50 device: cuda time passed: 10.907 time per batch: 0.218\n",
483
      "Batch 100 device: cuda time passed: 19.032 time per batch: 0.190\n",
484
      "Batch 150 device: cuda time passed: 27.424 time per batch: 0.183\n",
485
      "Batch 200 device: cuda time passed: 35.915 time per batch: 0.180\n",
486
      "ver 34, iter 31, fold 1, val ll: 0.0645, cor: 0.8347, auc: 0.9875\n",
487
      "total running time 1752.2469551563263\n",
488
      "total time 3496.7637753486633\n",
489
      "completed epochs: 3 iters starting now: 32\n",
490
      "adding dummy serieses 4\n",
491
      "DataSet 7 valid size 7232 fold 2\n",
492
      "dataset valid: 7232 loader valid: 226\n",
493
      "loading model model.b3.f2.d7.v34\n",
494
      "setFeats, augmentation -1\n",
495
      "Batch 50 device: cuda time passed: 11.025 time per batch: 0.221\n",
496
      "Batch 100 device: cuda time passed: 20.043 time per batch: 0.200\n",
497
      "Batch 150 device: cuda time passed: 28.567 time per batch: 0.190\n",
498
      "Batch 200 device: cuda time passed: 36.567 time per batch: 0.183\n",
499
      "ver 34, iter 0, fold 2, val ll: 0.0603, cor: 0.8423, auc: 0.9893\n",
500
      "setFeats, augmentation -1\n",
501
      "Batch 50 device: cuda time passed: 11.200 time per batch: 0.224\n",
502
      "Batch 100 device: cuda time passed: 19.679 time per batch: 0.197\n",
503
      "Batch 150 device: cuda time passed: 28.501 time per batch: 0.190\n",
504
      "Batch 200 device: cuda time passed: 37.321 time per batch: 0.187\n",
505
      "ver 34, iter 1, fold 2, val ll: 0.0601, cor: 0.8425, auc: 0.9894\n",
506
      "setFeats, augmentation -1\n",
507
      "Batch 50 device: cuda time passed: 11.278 time per batch: 0.226\n",
508
      "Batch 100 device: cuda time passed: 19.971 time per batch: 0.200\n",
509
      "Batch 150 device: cuda time passed: 28.258 time per batch: 0.188\n",
510
      "Batch 200 device: cuda time passed: 36.470 time per batch: 0.182\n",
511
      "ver 34, iter 2, fold 2, val ll: 0.0603, cor: 0.8422, auc: 0.9893\n",
512
      "setFeats, augmentation -1\n",
513
      "Batch 50 device: cuda time passed: 10.465 time per batch: 0.209\n",
514
      "Batch 100 device: cuda time passed: 19.259 time per batch: 0.193\n",
515
      "Batch 150 device: cuda time passed: 27.605 time per batch: 0.184\n",
516
      "Batch 200 device: cuda time passed: 35.631 time per batch: 0.178\n",
517
      "ver 34, iter 3, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9893\n",
518
      "setFeats, augmentation -1\n",
519
      "Batch 50 device: cuda time passed: 11.061 time per batch: 0.221\n",
520
      "Batch 100 device: cuda time passed: 19.044 time per batch: 0.190\n",
521
      "Batch 150 device: cuda time passed: 27.745 time per batch: 0.185\n"
522
     ]
523
    },
524
    {
525
     "name": "stdout",
526
     "output_type": "stream",
527
     "text": [
528
      "Batch 200 device: cuda time passed: 35.943 time per batch: 0.180\n",
529
      "ver 34, iter 4, fold 2, val ll: 0.0602, cor: 0.8425, auc: 0.9893\n",
530
      "setFeats, augmentation -1\n",
531
      "Batch 50 device: cuda time passed: 11.082 time per batch: 0.222\n",
532
      "Batch 100 device: cuda time passed: 19.942 time per batch: 0.199\n",
533
      "Batch 150 device: cuda time passed: 28.129 time per batch: 0.188\n",
534
      "Batch 200 device: cuda time passed: 36.673 time per batch: 0.183\n",
535
      "ver 34, iter 5, fold 2, val ll: 0.0602, cor: 0.8423, auc: 0.9894\n",
536
      "setFeats, augmentation -1\n",
537
      "Batch 50 device: cuda time passed: 11.475 time per batch: 0.229\n",
538
      "Batch 100 device: cuda time passed: 19.975 time per batch: 0.200\n",
539
      "Batch 150 device: cuda time passed: 28.403 time per batch: 0.189\n",
540
      "Batch 200 device: cuda time passed: 36.868 time per batch: 0.184\n",
541
      "ver 34, iter 6, fold 2, val ll: 0.0603, cor: 0.8420, auc: 0.9893\n",
542
      "setFeats, augmentation -1\n",
543
      "Batch 50 device: cuda time passed: 11.274 time per batch: 0.225\n",
544
      "Batch 100 device: cuda time passed: 19.953 time per batch: 0.200\n",
545
      "Batch 150 device: cuda time passed: 28.659 time per batch: 0.191\n",
546
      "Batch 200 device: cuda time passed: 37.063 time per batch: 0.185\n",
547
      "ver 34, iter 7, fold 2, val ll: 0.0601, cor: 0.8425, auc: 0.9894\n",
548
      "setFeats, augmentation -1\n",
549
      "Batch 50 device: cuda time passed: 11.211 time per batch: 0.224\n",
550
      "Batch 100 device: cuda time passed: 19.499 time per batch: 0.195\n",
551
      "Batch 150 device: cuda time passed: 28.098 time per batch: 0.187\n",
552
      "Batch 200 device: cuda time passed: 36.855 time per batch: 0.184\n",
553
      "ver 34, iter 8, fold 2, val ll: 0.0601, cor: 0.8427, auc: 0.9894\n",
554
      "setFeats, augmentation -1\n",
555
      "Batch 50 device: cuda time passed: 10.738 time per batch: 0.215\n",
556
      "Batch 100 device: cuda time passed: 19.813 time per batch: 0.198\n",
557
      "Batch 150 device: cuda time passed: 28.172 time per batch: 0.188\n",
558
      "Batch 200 device: cuda time passed: 36.748 time per batch: 0.184\n",
559
      "ver 34, iter 9, fold 2, val ll: 0.0603, cor: 0.8423, auc: 0.9893\n",
560
      "setFeats, augmentation -1\n",
561
      "Batch 50 device: cuda time passed: 11.731 time per batch: 0.235\n",
562
      "Batch 100 device: cuda time passed: 19.783 time per batch: 0.198\n",
563
      "Batch 150 device: cuda time passed: 28.264 time per batch: 0.188\n",
564
      "Batch 200 device: cuda time passed: 36.257 time per batch: 0.181\n",
565
      "ver 34, iter 10, fold 2, val ll: 0.0602, cor: 0.8427, auc: 0.9893\n",
566
      "setFeats, augmentation -1\n",
567
      "Batch 50 device: cuda time passed: 11.122 time per batch: 0.222\n",
568
      "Batch 100 device: cuda time passed: 19.324 time per batch: 0.193\n",
569
      "Batch 150 device: cuda time passed: 28.046 time per batch: 0.187\n",
570
      "Batch 200 device: cuda time passed: 36.710 time per batch: 0.184\n",
571
      "ver 34, iter 11, fold 2, val ll: 0.0601, cor: 0.8426, auc: 0.9894\n",
572
      "setFeats, augmentation -1\n",
573
      "Batch 50 device: cuda time passed: 10.910 time per batch: 0.218\n",
574
      "Batch 100 device: cuda time passed: 19.134 time per batch: 0.191\n",
575
      "Batch 150 device: cuda time passed: 27.419 time per batch: 0.183\n",
576
      "Batch 200 device: cuda time passed: 36.119 time per batch: 0.181\n",
577
      "ver 34, iter 12, fold 2, val ll: 0.0599, cor: 0.8431, auc: 0.9894\n",
578
      "setFeats, augmentation -1\n",
579
      "Batch 50 device: cuda time passed: 10.974 time per batch: 0.219\n",
580
      "Batch 100 device: cuda time passed: 19.467 time per batch: 0.195\n",
581
      "Batch 150 device: cuda time passed: 28.024 time per batch: 0.187\n",
582
      "Batch 200 device: cuda time passed: 36.578 time per batch: 0.183\n",
583
      "ver 34, iter 13, fold 2, val ll: 0.0600, cor: 0.8427, auc: 0.9894\n",
584
      "setFeats, augmentation -1\n",
585
      "Batch 50 device: cuda time passed: 10.671 time per batch: 0.213\n",
586
      "Batch 100 device: cuda time passed: 19.080 time per batch: 0.191\n",
587
      "Batch 150 device: cuda time passed: 27.685 time per batch: 0.185\n",
588
      "Batch 200 device: cuda time passed: 37.040 time per batch: 0.185\n",
589
      "ver 34, iter 14, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9894\n",
590
      "setFeats, augmentation -1\n",
591
      "Batch 50 device: cuda time passed: 11.339 time per batch: 0.227\n",
592
      "Batch 100 device: cuda time passed: 19.898 time per batch: 0.199\n",
593
      "Batch 150 device: cuda time passed: 28.227 time per batch: 0.188\n",
594
      "Batch 200 device: cuda time passed: 36.515 time per batch: 0.183\n",
595
      "ver 34, iter 15, fold 2, val ll: 0.0603, cor: 0.8422, auc: 0.9893\n",
596
      "setFeats, augmentation -1\n",
597
      "Batch 50 device: cuda time passed: 10.809 time per batch: 0.216\n",
598
      "Batch 100 device: cuda time passed: 19.102 time per batch: 0.191\n",
599
      "Batch 150 device: cuda time passed: 27.781 time per batch: 0.185\n",
600
      "Batch 200 device: cuda time passed: 36.265 time per batch: 0.181\n",
601
      "ver 34, iter 16, fold 2, val ll: 0.0602, cor: 0.8423, auc: 0.9893\n",
602
      "setFeats, augmentation -1\n",
603
      "Batch 50 device: cuda time passed: 11.536 time per batch: 0.231\n",
604
      "Batch 100 device: cuda time passed: 20.097 time per batch: 0.201\n",
605
      "Batch 150 device: cuda time passed: 28.499 time per batch: 0.190\n",
606
      "Batch 200 device: cuda time passed: 37.109 time per batch: 0.186\n",
607
      "ver 34, iter 17, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9893\n",
608
      "setFeats, augmentation -1\n",
609
      "Batch 50 device: cuda time passed: 10.864 time per batch: 0.217\n",
610
      "Batch 100 device: cuda time passed: 20.060 time per batch: 0.201\n",
611
      "Batch 150 device: cuda time passed: 28.733 time per batch: 0.192\n",
612
      "Batch 200 device: cuda time passed: 36.855 time per batch: 0.184\n",
613
      "ver 34, iter 18, fold 2, val ll: 0.0601, cor: 0.8426, auc: 0.9894\n",
614
      "setFeats, augmentation -1\n",
615
      "Batch 50 device: cuda time passed: 11.085 time per batch: 0.222\n",
616
      "Batch 100 device: cuda time passed: 19.551 time per batch: 0.196\n",
617
      "Batch 150 device: cuda time passed: 28.034 time per batch: 0.187\n",
618
      "Batch 200 device: cuda time passed: 36.621 time per batch: 0.183\n",
619
      "ver 34, iter 19, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9894\n",
620
      "setFeats, augmentation -1\n",
621
      "Batch 50 device: cuda time passed: 10.940 time per batch: 0.219\n",
622
      "Batch 100 device: cuda time passed: 19.591 time per batch: 0.196\n",
623
      "Batch 150 device: cuda time passed: 28.376 time per batch: 0.189\n",
624
      "Batch 200 device: cuda time passed: 36.995 time per batch: 0.185\n",
625
      "ver 34, iter 20, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9894\n",
626
      "setFeats, augmentation -1\n",
627
      "Batch 50 device: cuda time passed: 11.425 time per batch: 0.229\n",
628
      "Batch 100 device: cuda time passed: 20.048 time per batch: 0.200\n",
629
      "Batch 150 device: cuda time passed: 28.301 time per batch: 0.189\n",
630
      "Batch 200 device: cuda time passed: 36.405 time per batch: 0.182\n",
631
      "ver 34, iter 21, fold 2, val ll: 0.0602, cor: 0.8420, auc: 0.9894\n",
632
      "setFeats, augmentation -1\n",
633
      "Batch 50 device: cuda time passed: 10.892 time per batch: 0.218\n",
634
      "Batch 100 device: cuda time passed: 19.780 time per batch: 0.198\n",
635
      "Batch 150 device: cuda time passed: 28.309 time per batch: 0.189\n",
636
      "Batch 200 device: cuda time passed: 37.377 time per batch: 0.187\n",
637
      "ver 34, iter 22, fold 2, val ll: 0.0601, cor: 0.8427, auc: 0.9894\n",
638
      "setFeats, augmentation -1\n",
639
      "Batch 50 device: cuda time passed: 11.127 time per batch: 0.223\n",
640
      "Batch 100 device: cuda time passed: 19.509 time per batch: 0.195\n",
641
      "Batch 150 device: cuda time passed: 27.842 time per batch: 0.186\n",
642
      "Batch 200 device: cuda time passed: 36.450 time per batch: 0.182\n",
643
      "ver 34, iter 23, fold 2, val ll: 0.0600, cor: 0.8429, auc: 0.9894\n",
644
      "setFeats, augmentation -1\n",
645
      "Batch 50 device: cuda time passed: 11.696 time per batch: 0.234\n",
646
      "Batch 100 device: cuda time passed: 20.296 time per batch: 0.203\n",
647
      "Batch 150 device: cuda time passed: 28.493 time per batch: 0.190\n",
648
      "Batch 200 device: cuda time passed: 37.014 time per batch: 0.185\n",
649
      "ver 34, iter 24, fold 2, val ll: 0.0600, cor: 0.8429, auc: 0.9894\n",
650
      "setFeats, augmentation -1\n",
651
      "Batch 50 device: cuda time passed: 11.370 time per batch: 0.227\n",
652
      "Batch 100 device: cuda time passed: 19.776 time per batch: 0.198\n",
653
      "Batch 150 device: cuda time passed: 28.208 time per batch: 0.188\n",
654
      "Batch 200 device: cuda time passed: 36.551 time per batch: 0.183\n",
655
      "ver 34, iter 25, fold 2, val ll: 0.0603, cor: 0.8421, auc: 0.9893\n",
656
      "setFeats, augmentation -1\n",
657
      "Batch 50 device: cuda time passed: 11.212 time per batch: 0.224\n",
658
      "Batch 100 device: cuda time passed: 19.791 time per batch: 0.198\n",
659
      "Batch 150 device: cuda time passed: 28.275 time per batch: 0.188\n",
660
      "Batch 200 device: cuda time passed: 36.567 time per batch: 0.183\n",
661
      "ver 34, iter 26, fold 2, val ll: 0.0601, cor: 0.8425, auc: 0.9894\n",
662
      "setFeats, augmentation -1\n",
663
      "Batch 50 device: cuda time passed: 11.755 time per batch: 0.235\n",
664
      "Batch 100 device: cuda time passed: 20.286 time per batch: 0.203\n",
665
      "Batch 150 device: cuda time passed: 29.146 time per batch: 0.194\n",
666
      "Batch 200 device: cuda time passed: 37.236 time per batch: 0.186\n",
667
      "ver 34, iter 27, fold 2, val ll: 0.0602, cor: 0.8426, auc: 0.9893\n",
668
      "setFeats, augmentation -1\n"
669
     ]
670
    },
671
    {
672
     "name": "stdout",
673
     "output_type": "stream",
674
     "text": [
675
      "Batch 50 device: cuda time passed: 10.998 time per batch: 0.220\n",
676
      "Batch 100 device: cuda time passed: 19.483 time per batch: 0.195\n",
677
      "Batch 150 device: cuda time passed: 27.915 time per batch: 0.186\n",
678
      "Batch 200 device: cuda time passed: 36.732 time per batch: 0.184\n",
679
      "ver 34, iter 28, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9894\n",
680
      "setFeats, augmentation -1\n",
681
      "Batch 50 device: cuda time passed: 11.019 time per batch: 0.220\n",
682
      "Batch 100 device: cuda time passed: 19.728 time per batch: 0.197\n",
683
      "Batch 150 device: cuda time passed: 28.105 time per batch: 0.187\n",
684
      "Batch 200 device: cuda time passed: 36.333 time per batch: 0.182\n",
685
      "ver 34, iter 29, fold 2, val ll: 0.0603, cor: 0.8420, auc: 0.9893\n",
686
      "setFeats, augmentation -1\n",
687
      "Batch 50 device: cuda time passed: 11.334 time per batch: 0.227\n",
688
      "Batch 100 device: cuda time passed: 19.885 time per batch: 0.199\n",
689
      "Batch 150 device: cuda time passed: 28.419 time per batch: 0.189\n",
690
      "Batch 200 device: cuda time passed: 36.882 time per batch: 0.184\n",
691
      "ver 34, iter 30, fold 2, val ll: 0.0602, cor: 0.8424, auc: 0.9894\n",
692
      "setFeats, augmentation -1\n",
693
      "Batch 50 device: cuda time passed: 11.443 time per batch: 0.229\n",
694
      "Batch 100 device: cuda time passed: 19.957 time per batch: 0.200\n",
695
      "Batch 150 device: cuda time passed: 28.389 time per batch: 0.189\n",
696
      "Batch 200 device: cuda time passed: 36.956 time per batch: 0.185\n",
697
      "ver 34, iter 31, fold 2, val ll: 0.0600, cor: 0.8430, auc: 0.9894\n",
698
      "total running time 1731.3292746543884\n",
699
      "total time 5228.603754520416\n",
700
      "completed epochs: 3 iters starting now: 32\n",
701
      "adding dummy serieses 14\n",
702
      "DataSet 9 valid size 7232 fold 0\n",
703
      "dataset valid: 7232 loader valid: 226\n",
704
      "loading model model.b3.f0.d9.v34\n",
705
      "setFeats, augmentation -1\n",
706
      "Batch 50 device: cuda time passed: 11.536 time per batch: 0.231\n",
707
      "Batch 100 device: cuda time passed: 19.442 time per batch: 0.194\n",
708
      "Batch 150 device: cuda time passed: 28.012 time per batch: 0.187\n",
709
      "Batch 200 device: cuda time passed: 36.039 time per batch: 0.180\n",
710
      "ver 34, iter 0, fold 0, val ll: 0.0632, cor: 0.8416, auc: 0.9881\n",
711
      "setFeats, augmentation -1\n",
712
      "Batch 50 device: cuda time passed: 10.857 time per batch: 0.217\n",
713
      "Batch 100 device: cuda time passed: 19.517 time per batch: 0.195\n",
714
      "Batch 150 device: cuda time passed: 27.749 time per batch: 0.185\n",
715
      "Batch 200 device: cuda time passed: 36.496 time per batch: 0.182\n",
716
      "ver 34, iter 1, fold 0, val ll: 0.0629, cor: 0.8419, auc: 0.9883\n",
717
      "setFeats, augmentation -1\n",
718
      "Batch 50 device: cuda time passed: 11.265 time per batch: 0.225\n",
719
      "Batch 100 device: cuda time passed: 19.730 time per batch: 0.197\n",
720
      "Batch 150 device: cuda time passed: 28.009 time per batch: 0.187\n",
721
      "Batch 200 device: cuda time passed: 36.751 time per batch: 0.184\n",
722
      "ver 34, iter 2, fold 0, val ll: 0.0633, cor: 0.8414, auc: 0.9880\n",
723
      "setFeats, augmentation -1\n",
724
      "Batch 50 device: cuda time passed: 11.156 time per batch: 0.223\n",
725
      "Batch 100 device: cuda time passed: 19.573 time per batch: 0.196\n",
726
      "Batch 150 device: cuda time passed: 27.689 time per batch: 0.185\n",
727
      "Batch 200 device: cuda time passed: 36.021 time per batch: 0.180\n",
728
      "ver 34, iter 3, fold 0, val ll: 0.0631, cor: 0.8417, auc: 0.9881\n",
729
      "setFeats, augmentation -1\n",
730
      "Batch 50 device: cuda time passed: 11.289 time per batch: 0.226\n",
731
      "Batch 100 device: cuda time passed: 19.345 time per batch: 0.193\n",
732
      "Batch 150 device: cuda time passed: 27.990 time per batch: 0.187\n",
733
      "Batch 200 device: cuda time passed: 36.361 time per batch: 0.182\n",
734
      "ver 34, iter 4, fold 0, val ll: 0.0634, cor: 0.8412, auc: 0.9880\n",
735
      "setFeats, augmentation -1\n",
736
      "Batch 50 device: cuda time passed: 10.799 time per batch: 0.216\n",
737
      "Batch 100 device: cuda time passed: 19.307 time per batch: 0.193\n",
738
      "Batch 150 device: cuda time passed: 27.358 time per batch: 0.182\n",
739
      "Batch 200 device: cuda time passed: 36.510 time per batch: 0.183\n",
740
      "ver 34, iter 5, fold 0, val ll: 0.0632, cor: 0.8414, auc: 0.9881\n",
741
      "setFeats, augmentation -1\n",
742
      "Batch 50 device: cuda time passed: 11.144 time per batch: 0.223\n",
743
      "Batch 100 device: cuda time passed: 19.781 time per batch: 0.198\n",
744
      "Batch 150 device: cuda time passed: 28.281 time per batch: 0.189\n",
745
      "Batch 200 device: cuda time passed: 36.541 time per batch: 0.183\n",
746
      "ver 34, iter 6, fold 0, val ll: 0.0634, cor: 0.8411, auc: 0.9881\n",
747
      "setFeats, augmentation -1\n",
748
      "Batch 50 device: cuda time passed: 10.809 time per batch: 0.216\n",
749
      "Batch 100 device: cuda time passed: 19.659 time per batch: 0.197\n",
750
      "Batch 150 device: cuda time passed: 27.493 time per batch: 0.183\n",
751
      "Batch 200 device: cuda time passed: 36.342 time per batch: 0.182\n",
752
      "ver 34, iter 7, fold 0, val ll: 0.0631, cor: 0.8418, auc: 0.9882\n",
753
      "setFeats, augmentation -1\n",
754
      "Batch 50 device: cuda time passed: 10.565 time per batch: 0.211\n",
755
      "Batch 100 device: cuda time passed: 19.150 time per batch: 0.191\n",
756
      "Batch 150 device: cuda time passed: 27.338 time per batch: 0.182\n",
757
      "Batch 200 device: cuda time passed: 35.715 time per batch: 0.179\n",
758
      "ver 34, iter 8, fold 0, val ll: 0.0631, cor: 0.8417, auc: 0.9881\n",
759
      "setFeats, augmentation -1\n",
760
      "Batch 50 device: cuda time passed: 11.067 time per batch: 0.221\n",
761
      "Batch 100 device: cuda time passed: 19.201 time per batch: 0.192\n",
762
      "Batch 150 device: cuda time passed: 27.660 time per batch: 0.184\n",
763
      "Batch 200 device: cuda time passed: 35.980 time per batch: 0.180\n",
764
      "ver 34, iter 9, fold 0, val ll: 0.0632, cor: 0.8414, auc: 0.9880\n",
765
      "setFeats, augmentation -1\n",
766
      "Batch 50 device: cuda time passed: 10.775 time per batch: 0.216\n",
767
      "Batch 100 device: cuda time passed: 19.030 time per batch: 0.190\n",
768
      "Batch 150 device: cuda time passed: 27.434 time per batch: 0.183\n",
769
      "Batch 200 device: cuda time passed: 35.997 time per batch: 0.180\n",
770
      "ver 34, iter 10, fold 0, val ll: 0.0632, cor: 0.8418, auc: 0.9881\n",
771
      "setFeats, augmentation -1\n",
772
      "Batch 50 device: cuda time passed: 11.414 time per batch: 0.228\n",
773
      "Batch 100 device: cuda time passed: 19.585 time per batch: 0.196\n",
774
      "Batch 150 device: cuda time passed: 28.034 time per batch: 0.187\n",
775
      "Batch 200 device: cuda time passed: 36.464 time per batch: 0.182\n",
776
      "ver 34, iter 11, fold 0, val ll: 0.0631, cor: 0.8417, auc: 0.9882\n",
777
      "setFeats, augmentation -1\n",
778
      "Batch 50 device: cuda time passed: 11.071 time per batch: 0.221\n",
779
      "Batch 100 device: cuda time passed: 19.928 time per batch: 0.199\n",
780
      "Batch 150 device: cuda time passed: 28.159 time per batch: 0.188\n",
781
      "Batch 200 device: cuda time passed: 37.234 time per batch: 0.186\n",
782
      "ver 34, iter 12, fold 0, val ll: 0.0634, cor: 0.8411, auc: 0.9881\n",
783
      "setFeats, augmentation -1\n",
784
      "Batch 50 device: cuda time passed: 10.507 time per batch: 0.210\n",
785
      "Batch 100 device: cuda time passed: 19.527 time per batch: 0.195\n",
786
      "Batch 150 device: cuda time passed: 27.786 time per batch: 0.185\n",
787
      "Batch 200 device: cuda time passed: 36.570 time per batch: 0.183\n",
788
      "ver 34, iter 13, fold 0, val ll: 0.0633, cor: 0.8414, auc: 0.9880\n",
789
      "setFeats, augmentation -1\n",
790
      "Batch 50 device: cuda time passed: 10.787 time per batch: 0.216\n",
791
      "Batch 100 device: cuda time passed: 19.426 time per batch: 0.194\n",
792
      "Batch 150 device: cuda time passed: 28.053 time per batch: 0.187\n",
793
      "Batch 200 device: cuda time passed: 36.304 time per batch: 0.182\n",
794
      "ver 34, iter 14, fold 0, val ll: 0.0633, cor: 0.8416, auc: 0.9880\n",
795
      "setFeats, augmentation -1\n",
796
      "Batch 50 device: cuda time passed: 10.934 time per batch: 0.219\n",
797
      "Batch 100 device: cuda time passed: 19.786 time per batch: 0.198\n",
798
      "Batch 150 device: cuda time passed: 28.130 time per batch: 0.188\n",
799
      "Batch 200 device: cuda time passed: 36.253 time per batch: 0.181\n",
800
      "ver 34, iter 15, fold 0, val ll: 0.0632, cor: 0.8415, auc: 0.9881\n",
801
      "setFeats, augmentation -1\n",
802
      "Batch 50 device: cuda time passed: 11.365 time per batch: 0.227\n",
803
      "Batch 100 device: cuda time passed: 19.577 time per batch: 0.196\n",
804
      "Batch 150 device: cuda time passed: 27.828 time per batch: 0.186\n",
805
      "Batch 200 device: cuda time passed: 36.337 time per batch: 0.182\n",
806
      "ver 34, iter 16, fold 0, val ll: 0.0631, cor: 0.8418, auc: 0.9882\n",
807
      "setFeats, augmentation -1\n",
808
      "Batch 50 device: cuda time passed: 11.483 time per batch: 0.230\n",
809
      "Batch 100 device: cuda time passed: 19.916 time per batch: 0.199\n",
810
      "Batch 150 device: cuda time passed: 28.381 time per batch: 0.189\n",
811
      "Batch 200 device: cuda time passed: 36.431 time per batch: 0.182\n",
812
      "ver 34, iter 17, fold 0, val ll: 0.0631, cor: 0.8416, auc: 0.9882\n",
813
      "setFeats, augmentation -1\n",
814
      "Batch 50 device: cuda time passed: 11.026 time per batch: 0.221\n",
815
      "Batch 100 device: cuda time passed: 19.578 time per batch: 0.196\n",
816
      "Batch 150 device: cuda time passed: 28.720 time per batch: 0.191\n",
817
      "Batch 200 device: cuda time passed: 36.864 time per batch: 0.184\n"
818
     ]
819
    },
820
    {
821
     "name": "stdout",
822
     "output_type": "stream",
823
     "text": [
824
      "ver 34, iter 18, fold 0, val ll: 0.0632, cor: 0.8414, auc: 0.9882\n",
825
      "setFeats, augmentation -1\n",
826
      "Batch 50 device: cuda time passed: 10.382 time per batch: 0.208\n",
827
      "Batch 100 device: cuda time passed: 18.828 time per batch: 0.188\n",
828
      "Batch 150 device: cuda time passed: 28.774 time per batch: 0.192\n",
829
      "Batch 200 device: cuda time passed: 36.504 time per batch: 0.183\n",
830
      "ver 34, iter 19, fold 0, val ll: 0.0632, cor: 0.8415, auc: 0.9882\n",
831
      "setFeats, augmentation -1\n",
832
      "Batch 50 device: cuda time passed: 11.459 time per batch: 0.229\n",
833
      "Batch 100 device: cuda time passed: 19.702 time per batch: 0.197\n",
834
      "Batch 150 device: cuda time passed: 27.777 time per batch: 0.185\n",
835
      "Batch 200 device: cuda time passed: 36.381 time per batch: 0.182\n",
836
      "ver 34, iter 20, fold 0, val ll: 0.0634, cor: 0.8413, auc: 0.9880\n",
837
      "setFeats, augmentation -1\n",
838
      "Batch 50 device: cuda time passed: 11.382 time per batch: 0.228\n",
839
      "Batch 100 device: cuda time passed: 19.343 time per batch: 0.193\n",
840
      "Batch 150 device: cuda time passed: 27.549 time per batch: 0.184\n",
841
      "Batch 200 device: cuda time passed: 35.777 time per batch: 0.179\n",
842
      "ver 34, iter 21, fold 0, val ll: 0.0630, cor: 0.8421, auc: 0.9881\n",
843
      "setFeats, augmentation -1\n",
844
      "Batch 50 device: cuda time passed: 11.823 time per batch: 0.236\n",
845
      "Batch 100 device: cuda time passed: 19.865 time per batch: 0.199\n",
846
      "Batch 150 device: cuda time passed: 28.305 time per batch: 0.189\n",
847
      "Batch 200 device: cuda time passed: 36.592 time per batch: 0.183\n",
848
      "ver 34, iter 22, fold 0, val ll: 0.0632, cor: 0.8415, auc: 0.9882\n",
849
      "setFeats, augmentation -1\n",
850
      "Batch 50 device: cuda time passed: 11.087 time per batch: 0.222\n",
851
      "Batch 100 device: cuda time passed: 19.745 time per batch: 0.197\n",
852
      "Batch 150 device: cuda time passed: 28.129 time per batch: 0.188\n",
853
      "Batch 200 device: cuda time passed: 36.259 time per batch: 0.181\n",
854
      "ver 34, iter 23, fold 0, val ll: 0.0631, cor: 0.8415, auc: 0.9882\n",
855
      "setFeats, augmentation -1\n",
856
      "Batch 50 device: cuda time passed: 11.263 time per batch: 0.225\n",
857
      "Batch 100 device: cuda time passed: 19.197 time per batch: 0.192\n",
858
      "Batch 150 device: cuda time passed: 27.765 time per batch: 0.185\n",
859
      "Batch 200 device: cuda time passed: 36.010 time per batch: 0.180\n",
860
      "ver 34, iter 24, fold 0, val ll: 0.0634, cor: 0.8412, auc: 0.9880\n",
861
      "setFeats, augmentation -1\n",
862
      "Batch 50 device: cuda time passed: 11.063 time per batch: 0.221\n",
863
      "Batch 100 device: cuda time passed: 19.637 time per batch: 0.196\n",
864
      "Batch 150 device: cuda time passed: 27.441 time per batch: 0.183\n",
865
      "Batch 200 device: cuda time passed: 35.823 time per batch: 0.179\n",
866
      "ver 34, iter 25, fold 0, val ll: 0.0633, cor: 0.8415, auc: 0.9881\n",
867
      "setFeats, augmentation -1\n",
868
      "Batch 50 device: cuda time passed: 10.825 time per batch: 0.216\n",
869
      "Batch 100 device: cuda time passed: 19.368 time per batch: 0.194\n",
870
      "Batch 150 device: cuda time passed: 27.689 time per batch: 0.185\n",
871
      "Batch 200 device: cuda time passed: 37.222 time per batch: 0.186\n",
872
      "ver 34, iter 26, fold 0, val ll: 0.0634, cor: 0.8414, auc: 0.9880\n",
873
      "setFeats, augmentation -1\n",
874
      "Batch 50 device: cuda time passed: 10.720 time per batch: 0.214\n",
875
      "Batch 100 device: cuda time passed: 19.278 time per batch: 0.193\n",
876
      "Batch 150 device: cuda time passed: 27.638 time per batch: 0.184\n",
877
      "Batch 200 device: cuda time passed: 36.025 time per batch: 0.180\n",
878
      "ver 34, iter 27, fold 0, val ll: 0.0630, cor: 0.8420, auc: 0.9882\n",
879
      "setFeats, augmentation -1\n",
880
      "Batch 50 device: cuda time passed: 10.807 time per batch: 0.216\n",
881
      "Batch 100 device: cuda time passed: 19.534 time per batch: 0.195\n",
882
      "Batch 150 device: cuda time passed: 27.797 time per batch: 0.185\n",
883
      "Batch 200 device: cuda time passed: 36.143 time per batch: 0.181\n",
884
      "ver 34, iter 28, fold 0, val ll: 0.0631, cor: 0.8417, auc: 0.9882\n",
885
      "setFeats, augmentation -1\n",
886
      "Batch 50 device: cuda time passed: 11.228 time per batch: 0.225\n",
887
      "Batch 100 device: cuda time passed: 19.919 time per batch: 0.199\n",
888
      "Batch 150 device: cuda time passed: 28.348 time per batch: 0.189\n",
889
      "Batch 200 device: cuda time passed: 37.116 time per batch: 0.186\n",
890
      "ver 34, iter 29, fold 0, val ll: 0.0630, cor: 0.8420, auc: 0.9881\n",
891
      "setFeats, augmentation -1\n",
892
      "Batch 50 device: cuda time passed: 11.326 time per batch: 0.227\n",
893
      "Batch 100 device: cuda time passed: 19.509 time per batch: 0.195\n",
894
      "Batch 150 device: cuda time passed: 27.800 time per batch: 0.185\n",
895
      "Batch 200 device: cuda time passed: 36.166 time per batch: 0.181\n",
896
      "ver 34, iter 30, fold 0, val ll: 0.0632, cor: 0.8416, auc: 0.9881\n",
897
      "setFeats, augmentation -1\n",
898
      "Batch 50 device: cuda time passed: 10.800 time per batch: 0.216\n",
899
      "Batch 100 device: cuda time passed: 19.610 time per batch: 0.196\n",
900
      "Batch 150 device: cuda time passed: 28.093 time per batch: 0.187\n",
901
      "Batch 200 device: cuda time passed: 36.552 time per batch: 0.183\n",
902
      "ver 34, iter 31, fold 0, val ll: 0.0631, cor: 0.8417, auc: 0.9881\n",
903
      "total running time 1487.4923713207245\n",
904
      "total time 6716.580273866653\n",
905
      "completed epochs: 3 iters starting now: 32\n",
906
      "adding dummy serieses 30\n",
907
      "DataSet 9 valid size 7328 fold 1\n",
908
      "dataset valid: 7328 loader valid: 229\n",
909
      "loading model model.b3.f1.d9.v34\n",
910
      "setFeats, augmentation -1\n",
911
      "Batch 50 device: cuda time passed: 11.601 time per batch: 0.232\n",
912
      "Batch 100 device: cuda time passed: 18.900 time per batch: 0.189\n",
913
      "Batch 150 device: cuda time passed: 26.936 time per batch: 0.180\n",
914
      "Batch 200 device: cuda time passed: 36.698 time per batch: 0.183\n",
915
      "ver 34, iter 0, fold 1, val ll: 0.0633, cor: 0.8388, auc: 0.9879\n",
916
      "setFeats, augmentation -1\n",
917
      "Batch 50 device: cuda time passed: 11.225 time per batch: 0.224\n",
918
      "Batch 100 device: cuda time passed: 19.727 time per batch: 0.197\n",
919
      "Batch 150 device: cuda time passed: 27.782 time per batch: 0.185\n",
920
      "Batch 200 device: cuda time passed: 36.099 time per batch: 0.180\n",
921
      "ver 34, iter 1, fold 1, val ll: 0.0634, cor: 0.8387, auc: 0.9879\n",
922
      "setFeats, augmentation -1\n",
923
      "Batch 50 device: cuda time passed: 11.501 time per batch: 0.230\n",
924
      "Batch 100 device: cuda time passed: 19.581 time per batch: 0.196\n",
925
      "Batch 150 device: cuda time passed: 27.804 time per batch: 0.185\n",
926
      "Batch 200 device: cuda time passed: 36.041 time per batch: 0.180\n",
927
      "ver 34, iter 2, fold 1, val ll: 0.0634, cor: 0.8388, auc: 0.9879\n",
928
      "setFeats, augmentation -1\n",
929
      "Batch 50 device: cuda time passed: 10.828 time per batch: 0.217\n",
930
      "Batch 100 device: cuda time passed: 19.901 time per batch: 0.199\n",
931
      "Batch 150 device: cuda time passed: 28.421 time per batch: 0.189\n",
932
      "Batch 200 device: cuda time passed: 36.320 time per batch: 0.182\n",
933
      "ver 34, iter 3, fold 1, val ll: 0.0634, cor: 0.8386, auc: 0.9879\n",
934
      "setFeats, augmentation -1\n",
935
      "Batch 50 device: cuda time passed: 10.654 time per batch: 0.213\n",
936
      "Batch 100 device: cuda time passed: 19.474 time per batch: 0.195\n",
937
      "Batch 150 device: cuda time passed: 27.773 time per batch: 0.185\n",
938
      "Batch 200 device: cuda time passed: 35.790 time per batch: 0.179\n",
939
      "ver 34, iter 4, fold 1, val ll: 0.0632, cor: 0.8390, auc: 0.9879\n",
940
      "setFeats, augmentation -1\n",
941
      "Batch 50 device: cuda time passed: 11.220 time per batch: 0.224\n",
942
      "Batch 100 device: cuda time passed: 19.963 time per batch: 0.200\n",
943
      "Batch 150 device: cuda time passed: 27.894 time per batch: 0.186\n",
944
      "Batch 200 device: cuda time passed: 36.716 time per batch: 0.184\n",
945
      "ver 34, iter 5, fold 1, val ll: 0.0632, cor: 0.8389, auc: 0.9879\n",
946
      "setFeats, augmentation -1\n",
947
      "Batch 50 device: cuda time passed: 10.849 time per batch: 0.217\n",
948
      "Batch 100 device: cuda time passed: 20.349 time per batch: 0.203\n",
949
      "Batch 150 device: cuda time passed: 27.875 time per batch: 0.186\n",
950
      "Batch 200 device: cuda time passed: 36.373 time per batch: 0.182\n",
951
      "ver 34, iter 6, fold 1, val ll: 0.0631, cor: 0.8393, auc: 0.9880\n",
952
      "setFeats, augmentation -1\n",
953
      "Batch 50 device: cuda time passed: 11.577 time per batch: 0.232\n",
954
      "Batch 100 device: cuda time passed: 19.637 time per batch: 0.196\n",
955
      "Batch 150 device: cuda time passed: 28.061 time per batch: 0.187\n",
956
      "Batch 200 device: cuda time passed: 36.134 time per batch: 0.181\n",
957
      "ver 34, iter 7, fold 1, val ll: 0.0634, cor: 0.8385, auc: 0.9879\n",
958
      "setFeats, augmentation -1\n",
959
      "Batch 50 device: cuda time passed: 11.124 time per batch: 0.222\n",
960
      "Batch 100 device: cuda time passed: 19.560 time per batch: 0.196\n",
961
      "Batch 150 device: cuda time passed: 28.370 time per batch: 0.189\n",
962
      "Batch 200 device: cuda time passed: 36.781 time per batch: 0.184\n",
963
      "ver 34, iter 8, fold 1, val ll: 0.0633, cor: 0.8388, auc: 0.9879\n",
964
      "setFeats, augmentation -1\n",
965
      "Batch 50 device: cuda time passed: 11.238 time per batch: 0.225\n",
966
      "Batch 100 device: cuda time passed: 19.463 time per batch: 0.195\n",
967
      "Batch 150 device: cuda time passed: 28.507 time per batch: 0.190\n"
968
     ]
969
    },
970
    {
971
     "name": "stdout",
972
     "output_type": "stream",
973
     "text": [
974
      "Batch 200 device: cuda time passed: 36.950 time per batch: 0.185\n",
975
      "ver 34, iter 9, fold 1, val ll: 0.0630, cor: 0.8392, auc: 0.9880\n",
976
      "setFeats, augmentation -1\n",
977
      "Batch 50 device: cuda time passed: 10.309 time per batch: 0.206\n",
978
      "Batch 100 device: cuda time passed: 19.247 time per batch: 0.192\n",
979
      "Batch 150 device: cuda time passed: 27.463 time per batch: 0.183\n",
980
      "Batch 200 device: cuda time passed: 36.078 time per batch: 0.180\n",
981
      "ver 34, iter 10, fold 1, val ll: 0.0634, cor: 0.8387, auc: 0.9878\n",
982
      "setFeats, augmentation -1\n",
983
      "Batch 50 device: cuda time passed: 10.580 time per batch: 0.212\n",
984
      "Batch 100 device: cuda time passed: 18.971 time per batch: 0.190\n",
985
      "Batch 150 device: cuda time passed: 27.962 time per batch: 0.186\n",
986
      "Batch 200 device: cuda time passed: 35.981 time per batch: 0.180\n",
987
      "ver 34, iter 11, fold 1, val ll: 0.0632, cor: 0.8391, auc: 0.9880\n",
988
      "setFeats, augmentation -1\n",
989
      "Batch 50 device: cuda time passed: 11.003 time per batch: 0.220\n",
990
      "Batch 100 device: cuda time passed: 19.562 time per batch: 0.196\n",
991
      "Batch 150 device: cuda time passed: 27.887 time per batch: 0.186\n",
992
      "Batch 200 device: cuda time passed: 36.433 time per batch: 0.182\n",
993
      "ver 34, iter 12, fold 1, val ll: 0.0632, cor: 0.8392, auc: 0.9879\n",
994
      "setFeats, augmentation -1\n",
995
      "Batch 50 device: cuda time passed: 11.487 time per batch: 0.230\n",
996
      "Batch 100 device: cuda time passed: 19.255 time per batch: 0.193\n",
997
      "Batch 150 device: cuda time passed: 28.115 time per batch: 0.187\n",
998
      "Batch 200 device: cuda time passed: 36.623 time per batch: 0.183\n",
999
      "ver 34, iter 13, fold 1, val ll: 0.0633, cor: 0.8387, auc: 0.9879\n",
1000
      "setFeats, augmentation -1\n",
1001
      "Batch 50 device: cuda time passed: 10.321 time per batch: 0.206\n",
1002
      "Batch 100 device: cuda time passed: 19.255 time per batch: 0.193\n",
1003
      "Batch 150 device: cuda time passed: 28.486 time per batch: 0.190\n",
1004
      "Batch 200 device: cuda time passed: 36.377 time per batch: 0.182\n",
1005
      "ver 34, iter 14, fold 1, val ll: 0.0633, cor: 0.8388, auc: 0.9880\n",
1006
      "setFeats, augmentation -1\n",
1007
      "Batch 50 device: cuda time passed: 10.919 time per batch: 0.218\n",
1008
      "Batch 100 device: cuda time passed: 19.488 time per batch: 0.195\n",
1009
      "Batch 150 device: cuda time passed: 27.954 time per batch: 0.186\n",
1010
      "Batch 200 device: cuda time passed: 36.277 time per batch: 0.181\n",
1011
      "ver 34, iter 15, fold 1, val ll: 0.0634, cor: 0.8385, auc: 0.9879\n",
1012
      "setFeats, augmentation -1\n",
1013
      "Batch 50 device: cuda time passed: 11.244 time per batch: 0.225\n",
1014
      "Batch 100 device: cuda time passed: 19.801 time per batch: 0.198\n",
1015
      "Batch 150 device: cuda time passed: 28.208 time per batch: 0.188\n",
1016
      "Batch 200 device: cuda time passed: 36.477 time per batch: 0.182\n",
1017
      "ver 34, iter 16, fold 1, val ll: 0.0632, cor: 0.8391, auc: 0.9879\n",
1018
      "setFeats, augmentation -1\n",
1019
      "Batch 50 device: cuda time passed: 11.143 time per batch: 0.223\n",
1020
      "Batch 100 device: cuda time passed: 19.614 time per batch: 0.196\n",
1021
      "Batch 150 device: cuda time passed: 28.227 time per batch: 0.188\n",
1022
      "Batch 200 device: cuda time passed: 36.469 time per batch: 0.182\n",
1023
      "ver 34, iter 17, fold 1, val ll: 0.0632, cor: 0.8392, auc: 0.9879\n",
1024
      "setFeats, augmentation -1\n",
1025
      "Batch 50 device: cuda time passed: 11.002 time per batch: 0.220\n",
1026
      "Batch 100 device: cuda time passed: 19.268 time per batch: 0.193\n",
1027
      "Batch 150 device: cuda time passed: 27.492 time per batch: 0.183\n",
1028
      "Batch 200 device: cuda time passed: 36.499 time per batch: 0.182\n",
1029
      "ver 34, iter 18, fold 1, val ll: 0.0632, cor: 0.8389, auc: 0.9880\n",
1030
      "setFeats, augmentation -1\n",
1031
      "Batch 50 device: cuda time passed: 10.935 time per batch: 0.219\n",
1032
      "Batch 100 device: cuda time passed: 19.428 time per batch: 0.194\n",
1033
      "Batch 150 device: cuda time passed: 27.915 time per batch: 0.186\n",
1034
      "Batch 200 device: cuda time passed: 36.213 time per batch: 0.181\n",
1035
      "ver 34, iter 19, fold 1, val ll: 0.0633, cor: 0.8390, auc: 0.9879\n",
1036
      "setFeats, augmentation -1\n",
1037
      "Batch 50 device: cuda time passed: 11.107 time per batch: 0.222\n",
1038
      "Batch 100 device: cuda time passed: 19.694 time per batch: 0.197\n",
1039
      "Batch 150 device: cuda time passed: 27.799 time per batch: 0.185\n",
1040
      "Batch 200 device: cuda time passed: 35.827 time per batch: 0.179\n",
1041
      "ver 34, iter 20, fold 1, val ll: 0.0630, cor: 0.8396, auc: 0.9881\n",
1042
      "setFeats, augmentation -1\n",
1043
      "Batch 50 device: cuda time passed: 11.432 time per batch: 0.229\n",
1044
      "Batch 100 device: cuda time passed: 20.032 time per batch: 0.200\n",
1045
      "Batch 150 device: cuda time passed: 28.095 time per batch: 0.187\n",
1046
      "Batch 200 device: cuda time passed: 36.735 time per batch: 0.184\n",
1047
      "ver 34, iter 21, fold 1, val ll: 0.0632, cor: 0.8390, auc: 0.9879\n",
1048
      "setFeats, augmentation -1\n",
1049
      "Batch 50 device: cuda time passed: 10.436 time per batch: 0.209\n",
1050
      "Batch 100 device: cuda time passed: 19.425 time per batch: 0.194\n",
1051
      "Batch 150 device: cuda time passed: 27.705 time per batch: 0.185\n",
1052
      "Batch 200 device: cuda time passed: 36.119 time per batch: 0.181\n",
1053
      "ver 34, iter 22, fold 1, val ll: 0.0633, cor: 0.8390, auc: 0.9879\n",
1054
      "setFeats, augmentation -1\n",
1055
      "Batch 50 device: cuda time passed: 11.294 time per batch: 0.226\n",
1056
      "Batch 100 device: cuda time passed: 19.698 time per batch: 0.197\n",
1057
      "Batch 150 device: cuda time passed: 28.636 time per batch: 0.191\n",
1058
      "Batch 200 device: cuda time passed: 36.945 time per batch: 0.185\n",
1059
      "ver 34, iter 23, fold 1, val ll: 0.0633, cor: 0.8387, auc: 0.9879\n",
1060
      "setFeats, augmentation -1\n",
1061
      "Batch 50 device: cuda time passed: 10.781 time per batch: 0.216\n",
1062
      "Batch 100 device: cuda time passed: 19.957 time per batch: 0.200\n",
1063
      "Batch 150 device: cuda time passed: 28.300 time per batch: 0.189\n",
1064
      "Batch 200 device: cuda time passed: 36.615 time per batch: 0.183\n",
1065
      "ver 34, iter 24, fold 1, val ll: 0.0635, cor: 0.8386, auc: 0.9878\n",
1066
      "setFeats, augmentation -1\n",
1067
      "Batch 50 device: cuda time passed: 11.301 time per batch: 0.226\n",
1068
      "Batch 100 device: cuda time passed: 19.520 time per batch: 0.195\n",
1069
      "Batch 150 device: cuda time passed: 27.442 time per batch: 0.183\n",
1070
      "Batch 200 device: cuda time passed: 36.188 time per batch: 0.181\n",
1071
      "ver 34, iter 25, fold 1, val ll: 0.0632, cor: 0.8391, auc: 0.9879\n",
1072
      "setFeats, augmentation -1\n",
1073
      "Batch 50 device: cuda time passed: 10.992 time per batch: 0.220\n",
1074
      "Batch 100 device: cuda time passed: 19.148 time per batch: 0.191\n",
1075
      "Batch 150 device: cuda time passed: 27.658 time per batch: 0.184\n",
1076
      "Batch 200 device: cuda time passed: 36.310 time per batch: 0.182\n",
1077
      "ver 34, iter 26, fold 1, val ll: 0.0632, cor: 0.8391, auc: 0.9880\n",
1078
      "setFeats, augmentation -1\n",
1079
      "Batch 50 device: cuda time passed: 10.614 time per batch: 0.212\n",
1080
      "Batch 100 device: cuda time passed: 19.507 time per batch: 0.195\n",
1081
      "Batch 150 device: cuda time passed: 28.187 time per batch: 0.188\n",
1082
      "Batch 200 device: cuda time passed: 36.693 time per batch: 0.183\n",
1083
      "ver 34, iter 27, fold 1, val ll: 0.0633, cor: 0.8389, auc: 0.9879\n",
1084
      "setFeats, augmentation -1\n",
1085
      "Batch 50 device: cuda time passed: 10.404 time per batch: 0.208\n",
1086
      "Batch 100 device: cuda time passed: 18.785 time per batch: 0.188\n",
1087
      "Batch 150 device: cuda time passed: 27.808 time per batch: 0.185\n",
1088
      "Batch 200 device: cuda time passed: 36.485 time per batch: 0.182\n",
1089
      "ver 34, iter 28, fold 1, val ll: 0.0632, cor: 0.8392, auc: 0.9879\n",
1090
      "setFeats, augmentation -1\n",
1091
      "Batch 50 device: cuda time passed: 11.517 time per batch: 0.230\n",
1092
      "Batch 100 device: cuda time passed: 19.947 time per batch: 0.199\n",
1093
      "Batch 150 device: cuda time passed: 28.191 time per batch: 0.188\n",
1094
      "Batch 200 device: cuda time passed: 36.732 time per batch: 0.184\n",
1095
      "ver 34, iter 29, fold 1, val ll: 0.0632, cor: 0.8390, auc: 0.9879\n",
1096
      "setFeats, augmentation -1\n",
1097
      "Batch 50 device: cuda time passed: 11.320 time per batch: 0.226\n",
1098
      "Batch 100 device: cuda time passed: 19.965 time per batch: 0.200\n",
1099
      "Batch 150 device: cuda time passed: 28.118 time per batch: 0.187\n",
1100
      "Batch 200 device: cuda time passed: 36.174 time per batch: 0.181\n",
1101
      "ver 34, iter 30, fold 1, val ll: 0.0633, cor: 0.8389, auc: 0.9879\n",
1102
      "setFeats, augmentation -1\n",
1103
      "Batch 50 device: cuda time passed: 11.090 time per batch: 0.222\n",
1104
      "Batch 100 device: cuda time passed: 19.595 time per batch: 0.196\n",
1105
      "Batch 150 device: cuda time passed: 27.997 time per batch: 0.187\n",
1106
      "Batch 200 device: cuda time passed: 36.218 time per batch: 0.181\n",
1107
      "ver 34, iter 31, fold 1, val ll: 0.0633, cor: 0.8388, auc: 0.9879\n",
1108
      "total running time 1502.9731421470642\n",
1109
      "total time 8220.030895471573\n",
1110
      "completed epochs: 3 iters starting now: 32\n",
1111
      "adding dummy serieses 4\n",
1112
      "DataSet 9 valid size 7232 fold 2\n",
1113
      "dataset valid: 7232 loader valid: 226\n",
1114
      "loading model model.b3.f2.d9.v34\n",
1115
      "setFeats, augmentation -1\n",
1116
      "Batch 50 device: cuda time passed: 11.523 time per batch: 0.230\n",
1117
      "Batch 100 device: cuda time passed: 19.846 time per batch: 0.198\n"
1118
     ]
1119
    },
1120
    {
1121
     "name": "stdout",
1122
     "output_type": "stream",
1123
     "text": [
1124
      "Batch 150 device: cuda time passed: 28.448 time per batch: 0.190\n",
1125
      "Batch 200 device: cuda time passed: 36.151 time per batch: 0.181\n",
1126
      "ver 34, iter 0, fold 2, val ll: 0.0604, cor: 0.8411, auc: 0.9892\n",
1127
      "setFeats, augmentation -1\n",
1128
      "Batch 50 device: cuda time passed: 11.055 time per batch: 0.221\n",
1129
      "Batch 100 device: cuda time passed: 19.589 time per batch: 0.196\n",
1130
      "Batch 150 device: cuda time passed: 27.452 time per batch: 0.183\n",
1131
      "Batch 200 device: cuda time passed: 35.844 time per batch: 0.179\n",
1132
      "ver 34, iter 1, fold 2, val ll: 0.0603, cor: 0.8412, auc: 0.9893\n",
1133
      "setFeats, augmentation -1\n",
1134
      "Batch 50 device: cuda time passed: 10.997 time per batch: 0.220\n",
1135
      "Batch 100 device: cuda time passed: 19.403 time per batch: 0.194\n",
1136
      "Batch 150 device: cuda time passed: 27.832 time per batch: 0.186\n",
1137
      "Batch 200 device: cuda time passed: 36.258 time per batch: 0.181\n",
1138
      "ver 34, iter 2, fold 2, val ll: 0.0603, cor: 0.8413, auc: 0.9892\n",
1139
      "setFeats, augmentation -1\n",
1140
      "Batch 50 device: cuda time passed: 11.043 time per batch: 0.221\n",
1141
      "Batch 100 device: cuda time passed: 19.303 time per batch: 0.193\n",
1142
      "Batch 150 device: cuda time passed: 27.651 time per batch: 0.184\n",
1143
      "Batch 200 device: cuda time passed: 36.037 time per batch: 0.180\n",
1144
      "ver 34, iter 3, fold 2, val ll: 0.0605, cor: 0.8407, auc: 0.9892\n",
1145
      "setFeats, augmentation -1\n",
1146
      "Batch 50 device: cuda time passed: 11.119 time per batch: 0.222\n",
1147
      "Batch 100 device: cuda time passed: 19.489 time per batch: 0.195\n",
1148
      "Batch 150 device: cuda time passed: 28.105 time per batch: 0.187\n",
1149
      "Batch 200 device: cuda time passed: 36.335 time per batch: 0.182\n",
1150
      "ver 34, iter 4, fold 2, val ll: 0.0604, cor: 0.8411, auc: 0.9891\n",
1151
      "setFeats, augmentation -1\n",
1152
      "Batch 50 device: cuda time passed: 11.144 time per batch: 0.223\n",
1153
      "Batch 100 device: cuda time passed: 19.627 time per batch: 0.196\n",
1154
      "Batch 150 device: cuda time passed: 27.430 time per batch: 0.183\n",
1155
      "Batch 200 device: cuda time passed: 37.224 time per batch: 0.186\n",
1156
      "ver 34, iter 5, fold 2, val ll: 0.0606, cor: 0.8406, auc: 0.9891\n",
1157
      "setFeats, augmentation -1\n",
1158
      "Batch 50 device: cuda time passed: 10.761 time per batch: 0.215\n",
1159
      "Batch 100 device: cuda time passed: 18.991 time per batch: 0.190\n",
1160
      "Batch 150 device: cuda time passed: 27.854 time per batch: 0.186\n",
1161
      "Batch 200 device: cuda time passed: 36.054 time per batch: 0.180\n",
1162
      "ver 34, iter 6, fold 2, val ll: 0.0603, cor: 0.8413, auc: 0.9893\n",
1163
      "setFeats, augmentation -1\n",
1164
      "Batch 50 device: cuda time passed: 10.947 time per batch: 0.219\n",
1165
      "Batch 100 device: cuda time passed: 19.358 time per batch: 0.194\n",
1166
      "Batch 150 device: cuda time passed: 27.685 time per batch: 0.185\n",
1167
      "Batch 200 device: cuda time passed: 36.458 time per batch: 0.182\n",
1168
      "ver 34, iter 7, fold 2, val ll: 0.0604, cor: 0.8411, auc: 0.9892\n",
1169
      "setFeats, augmentation -1\n",
1170
      "Batch 50 device: cuda time passed: 11.975 time per batch: 0.240\n",
1171
      "Batch 100 device: cuda time passed: 20.457 time per batch: 0.205\n",
1172
      "Batch 150 device: cuda time passed: 28.962 time per batch: 0.193\n",
1173
      "Batch 200 device: cuda time passed: 37.452 time per batch: 0.187\n",
1174
      "ver 34, iter 8, fold 2, val ll: 0.0602, cor: 0.8415, auc: 0.9892\n",
1175
      "setFeats, augmentation -1\n",
1176
      "Batch 50 device: cuda time passed: 11.492 time per batch: 0.230\n",
1177
      "Batch 100 device: cuda time passed: 19.598 time per batch: 0.196\n",
1178
      "Batch 150 device: cuda time passed: 27.863 time per batch: 0.186\n",
1179
      "Batch 200 device: cuda time passed: 36.893 time per batch: 0.184\n",
1180
      "ver 34, iter 9, fold 2, val ll: 0.0604, cor: 0.8412, auc: 0.9892\n",
1181
      "setFeats, augmentation -1\n",
1182
      "Batch 50 device: cuda time passed: 10.704 time per batch: 0.214\n",
1183
      "Batch 100 device: cuda time passed: 19.476 time per batch: 0.195\n",
1184
      "Batch 150 device: cuda time passed: 27.911 time per batch: 0.186\n",
1185
      "Batch 200 device: cuda time passed: 36.416 time per batch: 0.182\n",
1186
      "ver 34, iter 10, fold 2, val ll: 0.0603, cor: 0.8414, auc: 0.9892\n",
1187
      "setFeats, augmentation -1\n",
1188
      "Batch 50 device: cuda time passed: 10.454 time per batch: 0.209\n",
1189
      "Batch 100 device: cuda time passed: 19.591 time per batch: 0.196\n",
1190
      "Batch 150 device: cuda time passed: 27.687 time per batch: 0.185\n",
1191
      "Batch 200 device: cuda time passed: 36.437 time per batch: 0.182\n",
1192
      "ver 34, iter 11, fold 2, val ll: 0.0602, cor: 0.8416, auc: 0.9892\n",
1193
      "setFeats, augmentation -1\n",
1194
      "Batch 50 device: cuda time passed: 11.138 time per batch: 0.223\n",
1195
      "Batch 100 device: cuda time passed: 19.714 time per batch: 0.197\n",
1196
      "Batch 150 device: cuda time passed: 28.012 time per batch: 0.187\n",
1197
      "Batch 200 device: cuda time passed: 36.721 time per batch: 0.184\n",
1198
      "ver 34, iter 12, fold 2, val ll: 0.0604, cor: 0.8413, auc: 0.9892\n",
1199
      "setFeats, augmentation -1\n",
1200
      "Batch 50 device: cuda time passed: 11.125 time per batch: 0.223\n",
1201
      "Batch 100 device: cuda time passed: 19.883 time per batch: 0.199\n",
1202
      "Batch 150 device: cuda time passed: 27.985 time per batch: 0.187\n",
1203
      "Batch 200 device: cuda time passed: 36.442 time per batch: 0.182\n",
1204
      "ver 34, iter 13, fold 2, val ll: 0.0603, cor: 0.8411, auc: 0.9893\n",
1205
      "setFeats, augmentation -1\n",
1206
      "Batch 50 device: cuda time passed: 11.080 time per batch: 0.222\n",
1207
      "Batch 100 device: cuda time passed: 19.109 time per batch: 0.191\n",
1208
      "Batch 150 device: cuda time passed: 27.470 time per batch: 0.183\n",
1209
      "Batch 200 device: cuda time passed: 35.903 time per batch: 0.180\n",
1210
      "ver 34, iter 14, fold 2, val ll: 0.0604, cor: 0.8411, auc: 0.9892\n",
1211
      "setFeats, augmentation -1\n",
1212
      "Batch 50 device: cuda time passed: 11.237 time per batch: 0.225\n",
1213
      "Batch 100 device: cuda time passed: 19.461 time per batch: 0.195\n",
1214
      "Batch 150 device: cuda time passed: 27.905 time per batch: 0.186\n",
1215
      "Batch 200 device: cuda time passed: 36.584 time per batch: 0.183\n",
1216
      "ver 34, iter 15, fold 2, val ll: 0.0603, cor: 0.8413, auc: 0.9893\n",
1217
      "setFeats, augmentation -1\n",
1218
      "Batch 50 device: cuda time passed: 10.728 time per batch: 0.215\n",
1219
      "Batch 100 device: cuda time passed: 19.503 time per batch: 0.195\n",
1220
      "Batch 150 device: cuda time passed: 27.680 time per batch: 0.185\n",
1221
      "Batch 200 device: cuda time passed: 36.592 time per batch: 0.183\n",
1222
      "ver 34, iter 16, fold 2, val ll: 0.0604, cor: 0.8410, auc: 0.9892\n",
1223
      "setFeats, augmentation -1\n",
1224
      "Batch 50 device: cuda time passed: 11.169 time per batch: 0.223\n",
1225
      "Batch 100 device: cuda time passed: 19.308 time per batch: 0.193\n",
1226
      "Batch 150 device: cuda time passed: 27.280 time per batch: 0.182\n",
1227
      "Batch 200 device: cuda time passed: 36.004 time per batch: 0.180\n",
1228
      "ver 34, iter 17, fold 2, val ll: 0.0604, cor: 0.8410, auc: 0.9892\n",
1229
      "setFeats, augmentation -1\n",
1230
      "Batch 50 device: cuda time passed: 10.856 time per batch: 0.217\n",
1231
      "Batch 100 device: cuda time passed: 19.630 time per batch: 0.196\n",
1232
      "Batch 150 device: cuda time passed: 28.026 time per batch: 0.187\n",
1233
      "Batch 200 device: cuda time passed: 36.941 time per batch: 0.185\n",
1234
      "ver 34, iter 18, fold 2, val ll: 0.0602, cor: 0.8414, auc: 0.9893\n",
1235
      "setFeats, augmentation -1\n",
1236
      "Batch 50 device: cuda time passed: 11.190 time per batch: 0.224\n",
1237
      "Batch 100 device: cuda time passed: 19.526 time per batch: 0.195\n",
1238
      "Batch 150 device: cuda time passed: 27.649 time per batch: 0.184\n",
1239
      "Batch 200 device: cuda time passed: 36.064 time per batch: 0.180\n",
1240
      "ver 34, iter 19, fold 2, val ll: 0.0603, cor: 0.8415, auc: 0.9892\n",
1241
      "setFeats, augmentation -1\n",
1242
      "Batch 50 device: cuda time passed: 11.066 time per batch: 0.221\n",
1243
      "Batch 100 device: cuda time passed: 19.706 time per batch: 0.197\n",
1244
      "Batch 150 device: cuda time passed: 27.965 time per batch: 0.186\n",
1245
      "Batch 200 device: cuda time passed: 36.090 time per batch: 0.180\n",
1246
      "ver 34, iter 20, fold 2, val ll: 0.0603, cor: 0.8413, auc: 0.9893\n",
1247
      "setFeats, augmentation -1\n",
1248
      "Batch 50 device: cuda time passed: 11.246 time per batch: 0.225\n",
1249
      "Batch 100 device: cuda time passed: 19.657 time per batch: 0.197\n",
1250
      "Batch 150 device: cuda time passed: 28.063 time per batch: 0.187\n",
1251
      "Batch 200 device: cuda time passed: 37.087 time per batch: 0.185\n",
1252
      "ver 34, iter 21, fold 2, val ll: 0.0603, cor: 0.8412, auc: 0.9892\n",
1253
      "setFeats, augmentation -1\n",
1254
      "Batch 50 device: cuda time passed: 11.059 time per batch: 0.221\n",
1255
      "Batch 100 device: cuda time passed: 19.357 time per batch: 0.194\n",
1256
      "Batch 150 device: cuda time passed: 27.762 time per batch: 0.185\n",
1257
      "Batch 200 device: cuda time passed: 36.632 time per batch: 0.183\n",
1258
      "ver 34, iter 22, fold 2, val ll: 0.0603, cor: 0.8413, auc: 0.9892\n",
1259
      "setFeats, augmentation -1\n",
1260
      "Batch 50 device: cuda time passed: 11.570 time per batch: 0.231\n",
1261
      "Batch 100 device: cuda time passed: 20.007 time per batch: 0.200\n",
1262
      "Batch 150 device: cuda time passed: 28.008 time per batch: 0.187\n",
1263
      "Batch 200 device: cuda time passed: 36.203 time per batch: 0.181\n"
1264
     ]
1265
    },
1266
    {
1267
     "name": "stdout",
1268
     "output_type": "stream",
1269
     "text": [
1270
      "ver 34, iter 23, fold 2, val ll: 0.0603, cor: 0.8414, auc: 0.9892\n",
1271
      "setFeats, augmentation -1\n",
1272
      "Batch 50 device: cuda time passed: 10.914 time per batch: 0.218\n",
1273
      "Batch 100 device: cuda time passed: 19.576 time per batch: 0.196\n",
1274
      "Batch 150 device: cuda time passed: 27.771 time per batch: 0.185\n",
1275
      "Batch 200 device: cuda time passed: 35.991 time per batch: 0.180\n",
1276
      "ver 34, iter 24, fold 2, val ll: 0.0605, cor: 0.8410, auc: 0.9891\n",
1277
      "setFeats, augmentation -1\n",
1278
      "Batch 50 device: cuda time passed: 11.077 time per batch: 0.222\n",
1279
      "Batch 100 device: cuda time passed: 19.416 time per batch: 0.194\n",
1280
      "Batch 150 device: cuda time passed: 27.528 time per batch: 0.184\n",
1281
      "Batch 200 device: cuda time passed: 35.811 time per batch: 0.179\n",
1282
      "ver 34, iter 25, fold 2, val ll: 0.0603, cor: 0.8415, auc: 0.9893\n",
1283
      "setFeats, augmentation -1\n",
1284
      "Batch 50 device: cuda time passed: 10.948 time per batch: 0.219\n",
1285
      "Batch 100 device: cuda time passed: 19.157 time per batch: 0.192\n",
1286
      "Batch 150 device: cuda time passed: 27.678 time per batch: 0.185\n",
1287
      "Batch 200 device: cuda time passed: 35.593 time per batch: 0.178\n",
1288
      "ver 34, iter 26, fold 2, val ll: 0.0605, cor: 0.8408, auc: 0.9891\n",
1289
      "setFeats, augmentation -1\n",
1290
      "Batch 50 device: cuda time passed: 10.699 time per batch: 0.214\n",
1291
      "Batch 100 device: cuda time passed: 18.743 time per batch: 0.187\n",
1292
      "Batch 150 device: cuda time passed: 27.111 time per batch: 0.181\n",
1293
      "Batch 200 device: cuda time passed: 35.546 time per batch: 0.178\n",
1294
      "ver 34, iter 27, fold 2, val ll: 0.0604, cor: 0.8410, auc: 0.9892\n",
1295
      "setFeats, augmentation -1\n",
1296
      "Batch 50 device: cuda time passed: 10.825 time per batch: 0.216\n",
1297
      "Batch 100 device: cuda time passed: 18.607 time per batch: 0.186\n",
1298
      "Batch 150 device: cuda time passed: 26.901 time per batch: 0.179\n",
1299
      "Batch 200 device: cuda time passed: 35.283 time per batch: 0.176\n",
1300
      "ver 34, iter 28, fold 2, val ll: 0.0604, cor: 0.8409, auc: 0.9892\n",
1301
      "setFeats, augmentation -1\n",
1302
      "Batch 50 device: cuda time passed: 10.969 time per batch: 0.219\n",
1303
      "Batch 100 device: cuda time passed: 19.424 time per batch: 0.194\n",
1304
      "Batch 150 device: cuda time passed: 27.878 time per batch: 0.186\n",
1305
      "Batch 200 device: cuda time passed: 36.051 time per batch: 0.180\n",
1306
      "ver 34, iter 29, fold 2, val ll: 0.0602, cor: 0.8415, auc: 0.9893\n",
1307
      "setFeats, augmentation -1\n",
1308
      "Batch 50 device: cuda time passed: 11.252 time per batch: 0.225\n",
1309
      "Batch 100 device: cuda time passed: 19.632 time per batch: 0.196\n",
1310
      "Batch 150 device: cuda time passed: 28.146 time per batch: 0.188\n",
1311
      "Batch 200 device: cuda time passed: 36.296 time per batch: 0.181\n",
1312
      "ver 34, iter 30, fold 2, val ll: 0.0603, cor: 0.8411, auc: 0.9893\n",
1313
      "setFeats, augmentation -1\n",
1314
      "Batch 50 device: cuda time passed: 11.159 time per batch: 0.223\n",
1315
      "Batch 100 device: cuda time passed: 19.845 time per batch: 0.198\n",
1316
      "Batch 150 device: cuda time passed: 27.672 time per batch: 0.184\n",
1317
      "Batch 200 device: cuda time passed: 36.092 time per batch: 0.180\n",
1318
      "ver 34, iter 31, fold 2, val ll: 0.0603, cor: 0.8414, auc: 0.9893\n",
1319
      "total running time 1486.592945575714\n",
1320
      "total time 9707.101170063019\n",
1321
      "completed epochs: 3 iters starting now: 32\n",
1322
      "adding dummy serieses 9\n",
1323
      "DataSet 11 valid size 4384 fold 0\n",
1324
      "dataset valid: 4384 loader valid: 137\n",
1325
      "loading model model.b3.f0.d11.v34\n",
1326
      "setFeats, augmentation -1\n",
1327
      "Batch 50 device: cuda time passed: 7.940 time per batch: 0.159\n",
1328
      "Batch 100 device: cuda time passed: 14.756 time per batch: 0.148\n",
1329
      "ver 34, iter 0, fold 0, val ll: 0.0609, cor: 0.8452, auc: 0.9888\n",
1330
      "setFeats, augmentation -1\n",
1331
      "Batch 50 device: cuda time passed: 8.554 time per batch: 0.171\n",
1332
      "Batch 100 device: cuda time passed: 14.739 time per batch: 0.147\n",
1333
      "ver 34, iter 1, fold 0, val ll: 0.0607, cor: 0.8453, auc: 0.9889\n",
1334
      "setFeats, augmentation -1\n",
1335
      "Batch 50 device: cuda time passed: 8.235 time per batch: 0.165\n",
1336
      "Batch 100 device: cuda time passed: 14.917 time per batch: 0.149\n",
1337
      "ver 34, iter 2, fold 0, val ll: 0.0608, cor: 0.8452, auc: 0.9889\n",
1338
      "setFeats, augmentation -1\n",
1339
      "Batch 50 device: cuda time passed: 7.603 time per batch: 0.152\n",
1340
      "Batch 100 device: cuda time passed: 14.007 time per batch: 0.140\n",
1341
      "ver 34, iter 3, fold 0, val ll: 0.0609, cor: 0.8449, auc: 0.9889\n",
1342
      "setFeats, augmentation -1\n",
1343
      "Batch 50 device: cuda time passed: 8.003 time per batch: 0.160\n",
1344
      "Batch 100 device: cuda time passed: 14.445 time per batch: 0.144\n",
1345
      "ver 34, iter 4, fold 0, val ll: 0.0608, cor: 0.8455, auc: 0.9889\n",
1346
      "setFeats, augmentation -1\n",
1347
      "Batch 50 device: cuda time passed: 8.444 time per batch: 0.169\n",
1348
      "Batch 100 device: cuda time passed: 14.775 time per batch: 0.148\n",
1349
      "ver 34, iter 5, fold 0, val ll: 0.0608, cor: 0.8449, auc: 0.9889\n",
1350
      "setFeats, augmentation -1\n",
1351
      "Batch 50 device: cuda time passed: 8.487 time per batch: 0.170\n",
1352
      "Batch 100 device: cuda time passed: 14.919 time per batch: 0.149\n",
1353
      "ver 34, iter 6, fold 0, val ll: 0.0609, cor: 0.8451, auc: 0.9887\n",
1354
      "setFeats, augmentation -1\n",
1355
      "Batch 50 device: cuda time passed: 8.645 time per batch: 0.173\n",
1356
      "Batch 100 device: cuda time passed: 14.933 time per batch: 0.149\n",
1357
      "ver 34, iter 7, fold 0, val ll: 0.0607, cor: 0.8455, auc: 0.9889\n",
1358
      "setFeats, augmentation -1\n",
1359
      "Batch 50 device: cuda time passed: 8.537 time per batch: 0.171\n",
1360
      "Batch 100 device: cuda time passed: 14.497 time per batch: 0.145\n",
1361
      "ver 34, iter 8, fold 0, val ll: 0.0608, cor: 0.8456, auc: 0.9888\n",
1362
      "setFeats, augmentation -1\n",
1363
      "Batch 50 device: cuda time passed: 8.195 time per batch: 0.164\n",
1364
      "Batch 100 device: cuda time passed: 14.452 time per batch: 0.145\n",
1365
      "ver 34, iter 9, fold 0, val ll: 0.0608, cor: 0.8453, auc: 0.9889\n",
1366
      "setFeats, augmentation -1\n",
1367
      "Batch 50 device: cuda time passed: 8.251 time per batch: 0.165\n",
1368
      "Batch 100 device: cuda time passed: 14.323 time per batch: 0.143\n",
1369
      "ver 34, iter 10, fold 0, val ll: 0.0610, cor: 0.8450, auc: 0.9887\n",
1370
      "setFeats, augmentation -1\n",
1371
      "Batch 50 device: cuda time passed: 7.191 time per batch: 0.144\n",
1372
      "Batch 100 device: cuda time passed: 13.764 time per batch: 0.138\n",
1373
      "ver 34, iter 11, fold 0, val ll: 0.0608, cor: 0.8451, auc: 0.9889\n",
1374
      "setFeats, augmentation -1\n",
1375
      "Batch 50 device: cuda time passed: 8.054 time per batch: 0.161\n",
1376
      "Batch 100 device: cuda time passed: 14.529 time per batch: 0.145\n",
1377
      "ver 34, iter 12, fold 0, val ll: 0.0609, cor: 0.8449, auc: 0.9888\n",
1378
      "setFeats, augmentation -1\n",
1379
      "Batch 50 device: cuda time passed: 7.674 time per batch: 0.153\n",
1380
      "Batch 100 device: cuda time passed: 14.194 time per batch: 0.142\n",
1381
      "ver 34, iter 13, fold 0, val ll: 0.0610, cor: 0.8450, auc: 0.9888\n",
1382
      "setFeats, augmentation -1\n",
1383
      "Batch 50 device: cuda time passed: 8.067 time per batch: 0.161\n",
1384
      "Batch 100 device: cuda time passed: 14.660 time per batch: 0.147\n",
1385
      "ver 34, iter 14, fold 0, val ll: 0.0609, cor: 0.8451, auc: 0.9888\n",
1386
      "setFeats, augmentation -1\n",
1387
      "Batch 50 device: cuda time passed: 7.751 time per batch: 0.155\n",
1388
      "Batch 100 device: cuda time passed: 14.356 time per batch: 0.144\n",
1389
      "ver 34, iter 15, fold 0, val ll: 0.0610, cor: 0.8448, auc: 0.9888\n",
1390
      "setFeats, augmentation -1\n",
1391
      "Batch 50 device: cuda time passed: 8.262 time per batch: 0.165\n",
1392
      "Batch 100 device: cuda time passed: 14.690 time per batch: 0.147\n",
1393
      "ver 34, iter 16, fold 0, val ll: 0.0608, cor: 0.8450, auc: 0.9888\n",
1394
      "setFeats, augmentation -1\n",
1395
      "Batch 50 device: cuda time passed: 7.771 time per batch: 0.155\n",
1396
      "Batch 100 device: cuda time passed: 14.489 time per batch: 0.145\n",
1397
      "ver 34, iter 17, fold 0, val ll: 0.0608, cor: 0.8455, auc: 0.9887\n",
1398
      "setFeats, augmentation -1\n",
1399
      "Batch 50 device: cuda time passed: 8.139 time per batch: 0.163\n",
1400
      "Batch 100 device: cuda time passed: 14.767 time per batch: 0.148\n",
1401
      "ver 34, iter 18, fold 0, val ll: 0.0609, cor: 0.8449, auc: 0.9889\n",
1402
      "setFeats, augmentation -1\n",
1403
      "Batch 50 device: cuda time passed: 7.670 time per batch: 0.153\n",
1404
      "Batch 100 device: cuda time passed: 14.955 time per batch: 0.150\n",
1405
      "ver 34, iter 19, fold 0, val ll: 0.0606, cor: 0.8454, auc: 0.9890\n",
1406
      "setFeats, augmentation -1\n",
1407
      "Batch 50 device: cuda time passed: 8.039 time per batch: 0.161\n",
1408
      "Batch 100 device: cuda time passed: 14.878 time per batch: 0.149\n",
1409
      "ver 34, iter 20, fold 0, val ll: 0.0608, cor: 0.8449, auc: 0.9889\n",
1410
      "setFeats, augmentation -1\n",
1411
      "Batch 50 device: cuda time passed: 7.962 time per batch: 0.159\n",
1412
      "Batch 100 device: cuda time passed: 14.352 time per batch: 0.144\n",
1413
      "ver 34, iter 21, fold 0, val ll: 0.0608, cor: 0.8451, auc: 0.9888\n",
1414
      "setFeats, augmentation -1\n",
1415
      "Batch 50 device: cuda time passed: 7.997 time per batch: 0.160\n",
1416
      "Batch 100 device: cuda time passed: 14.132 time per batch: 0.141\n",
1417
      "ver 34, iter 22, fold 0, val ll: 0.0606, cor: 0.8459, auc: 0.9889\n",
1418
      "setFeats, augmentation -1\n",
1419
      "Batch 50 device: cuda time passed: 7.943 time per batch: 0.159\n"
1420
     ]
1421
    },
1422
    {
1423
     "name": "stdout",
1424
     "output_type": "stream",
1425
     "text": [
1426
      "Batch 100 device: cuda time passed: 14.021 time per batch: 0.140\n",
1427
      "ver 34, iter 23, fold 0, val ll: 0.0608, cor: 0.8453, auc: 0.9889\n",
1428
      "setFeats, augmentation -1\n",
1429
      "Batch 50 device: cuda time passed: 7.490 time per batch: 0.150\n",
1430
      "Batch 100 device: cuda time passed: 13.991 time per batch: 0.140\n",
1431
      "ver 34, iter 24, fold 0, val ll: 0.0609, cor: 0.8450, auc: 0.9888\n",
1432
      "setFeats, augmentation -1\n",
1433
      "Batch 50 device: cuda time passed: 8.355 time per batch: 0.167\n",
1434
      "Batch 100 device: cuda time passed: 14.456 time per batch: 0.145\n",
1435
      "ver 34, iter 25, fold 0, val ll: 0.0608, cor: 0.8452, auc: 0.9889\n",
1436
      "setFeats, augmentation -1\n",
1437
      "Batch 50 device: cuda time passed: 8.871 time per batch: 0.177\n",
1438
      "Batch 100 device: cuda time passed: 15.260 time per batch: 0.153\n",
1439
      "ver 34, iter 26, fold 0, val ll: 0.0608, cor: 0.8451, auc: 0.9889\n",
1440
      "setFeats, augmentation -1\n",
1441
      "Batch 50 device: cuda time passed: 7.760 time per batch: 0.155\n",
1442
      "Batch 100 device: cuda time passed: 14.599 time per batch: 0.146\n",
1443
      "ver 34, iter 27, fold 0, val ll: 0.0609, cor: 0.8450, auc: 0.9888\n",
1444
      "setFeats, augmentation -1\n",
1445
      "Batch 50 device: cuda time passed: 8.498 time per batch: 0.170\n",
1446
      "Batch 100 device: cuda time passed: 14.639 time per batch: 0.146\n",
1447
      "ver 34, iter 28, fold 0, val ll: 0.0610, cor: 0.8450, auc: 0.9887\n",
1448
      "setFeats, augmentation -1\n",
1449
      "Batch 50 device: cuda time passed: 7.980 time per batch: 0.160\n",
1450
      "Batch 100 device: cuda time passed: 14.528 time per batch: 0.145\n",
1451
      "ver 34, iter 29, fold 0, val ll: 0.0609, cor: 0.8450, auc: 0.9889\n",
1452
      "setFeats, augmentation -1\n",
1453
      "Batch 50 device: cuda time passed: 8.133 time per batch: 0.163\n",
1454
      "Batch 100 device: cuda time passed: 14.503 time per batch: 0.145\n",
1455
      "ver 34, iter 30, fold 0, val ll: 0.0607, cor: 0.8453, auc: 0.9889\n",
1456
      "setFeats, augmentation -1\n",
1457
      "Batch 50 device: cuda time passed: 8.069 time per batch: 0.161\n",
1458
      "Batch 100 device: cuda time passed: 14.106 time per batch: 0.141\n",
1459
      "ver 34, iter 31, fold 0, val ll: 0.0609, cor: 0.8450, auc: 0.9888\n",
1460
      "total running time 741.6375591754913\n",
1461
      "total time 10448.977521657944\n",
1462
      "completed epochs: 3 iters starting now: 32\n",
1463
      "adding dummy serieses 12\n",
1464
      "DataSet 11 valid size 4288 fold 1\n",
1465
      "dataset valid: 4288 loader valid: 134\n",
1466
      "loading model model.b3.f1.d11.v34\n",
1467
      "setFeats, augmentation -1\n",
1468
      "Batch 50 device: cuda time passed: 7.855 time per batch: 0.157\n",
1469
      "Batch 100 device: cuda time passed: 13.890 time per batch: 0.139\n",
1470
      "ver 34, iter 0, fold 1, val ll: 0.0597, cor: 0.8468, auc: 0.9897\n",
1471
      "setFeats, augmentation -1\n",
1472
      "Batch 50 device: cuda time passed: 7.194 time per batch: 0.144\n",
1473
      "Batch 100 device: cuda time passed: 14.411 time per batch: 0.144\n",
1474
      "ver 34, iter 1, fold 1, val ll: 0.0598, cor: 0.8465, auc: 0.9896\n",
1475
      "setFeats, augmentation -1\n",
1476
      "Batch 50 device: cuda time passed: 8.180 time per batch: 0.164\n",
1477
      "Batch 100 device: cuda time passed: 14.126 time per batch: 0.141\n",
1478
      "ver 34, iter 2, fold 1, val ll: 0.0597, cor: 0.8467, auc: 0.9898\n",
1479
      "setFeats, augmentation -1\n",
1480
      "Batch 50 device: cuda time passed: 7.618 time per batch: 0.152\n",
1481
      "Batch 100 device: cuda time passed: 14.407 time per batch: 0.144\n",
1482
      "ver 34, iter 3, fold 1, val ll: 0.0596, cor: 0.8468, auc: 0.9899\n",
1483
      "setFeats, augmentation -1\n",
1484
      "Batch 50 device: cuda time passed: 8.547 time per batch: 0.171\n",
1485
      "Batch 100 device: cuda time passed: 14.485 time per batch: 0.145\n",
1486
      "ver 34, iter 4, fold 1, val ll: 0.0597, cor: 0.8469, auc: 0.9898\n",
1487
      "setFeats, augmentation -1\n",
1488
      "Batch 50 device: cuda time passed: 8.303 time per batch: 0.166\n",
1489
      "Batch 100 device: cuda time passed: 14.678 time per batch: 0.147\n",
1490
      "ver 34, iter 5, fold 1, val ll: 0.0597, cor: 0.8468, auc: 0.9897\n",
1491
      "setFeats, augmentation -1\n",
1492
      "Batch 50 device: cuda time passed: 8.365 time per batch: 0.167\n",
1493
      "Batch 100 device: cuda time passed: 13.899 time per batch: 0.139\n",
1494
      "ver 34, iter 6, fold 1, val ll: 0.0598, cor: 0.8463, auc: 0.9896\n",
1495
      "setFeats, augmentation -1\n",
1496
      "Batch 50 device: cuda time passed: 8.115 time per batch: 0.162\n",
1497
      "Batch 100 device: cuda time passed: 14.203 time per batch: 0.142\n",
1498
      "ver 34, iter 7, fold 1, val ll: 0.0596, cor: 0.8469, auc: 0.9898\n",
1499
      "setFeats, augmentation -1\n",
1500
      "Batch 50 device: cuda time passed: 7.933 time per batch: 0.159\n",
1501
      "Batch 100 device: cuda time passed: 14.748 time per batch: 0.147\n",
1502
      "ver 34, iter 8, fold 1, val ll: 0.0599, cor: 0.8462, auc: 0.9897\n",
1503
      "setFeats, augmentation -1\n",
1504
      "Batch 50 device: cuda time passed: 7.972 time per batch: 0.159\n",
1505
      "Batch 100 device: cuda time passed: 14.605 time per batch: 0.146\n",
1506
      "ver 34, iter 9, fold 1, val ll: 0.0594, cor: 0.8476, auc: 0.9898\n",
1507
      "setFeats, augmentation -1\n",
1508
      "Batch 50 device: cuda time passed: 7.555 time per batch: 0.151\n",
1509
      "Batch 100 device: cuda time passed: 14.077 time per batch: 0.141\n",
1510
      "ver 34, iter 10, fold 1, val ll: 0.0597, cor: 0.8466, auc: 0.9898\n",
1511
      "setFeats, augmentation -1\n",
1512
      "Batch 50 device: cuda time passed: 8.297 time per batch: 0.166\n",
1513
      "Batch 100 device: cuda time passed: 14.186 time per batch: 0.142\n",
1514
      "ver 34, iter 11, fold 1, val ll: 0.0594, cor: 0.8472, auc: 0.9898\n",
1515
      "setFeats, augmentation -1\n",
1516
      "Batch 50 device: cuda time passed: 7.592 time per batch: 0.152\n",
1517
      "Batch 100 device: cuda time passed: 13.891 time per batch: 0.139\n",
1518
      "ver 34, iter 12, fold 1, val ll: 0.0598, cor: 0.8465, auc: 0.9897\n",
1519
      "setFeats, augmentation -1\n",
1520
      "Batch 50 device: cuda time passed: 7.432 time per batch: 0.149\n",
1521
      "Batch 100 device: cuda time passed: 13.820 time per batch: 0.138\n",
1522
      "ver 34, iter 13, fold 1, val ll: 0.0597, cor: 0.8471, auc: 0.9896\n",
1523
      "setFeats, augmentation -1\n",
1524
      "Batch 50 device: cuda time passed: 8.365 time per batch: 0.167\n",
1525
      "Batch 100 device: cuda time passed: 14.299 time per batch: 0.143\n",
1526
      "ver 34, iter 14, fold 1, val ll: 0.0598, cor: 0.8466, auc: 0.9896\n",
1527
      "setFeats, augmentation -1\n",
1528
      "Batch 50 device: cuda time passed: 8.044 time per batch: 0.161\n",
1529
      "Batch 100 device: cuda time passed: 14.697 time per batch: 0.147\n",
1530
      "ver 34, iter 15, fold 1, val ll: 0.0596, cor: 0.8473, auc: 0.9898\n",
1531
      "setFeats, augmentation -1\n",
1532
      "Batch 50 device: cuda time passed: 8.451 time per batch: 0.169\n",
1533
      "Batch 100 device: cuda time passed: 14.529 time per batch: 0.145\n",
1534
      "ver 34, iter 16, fold 1, val ll: 0.0599, cor: 0.8465, auc: 0.9897\n",
1535
      "setFeats, augmentation -1\n",
1536
      "Batch 50 device: cuda time passed: 7.849 time per batch: 0.157\n",
1537
      "Batch 100 device: cuda time passed: 13.789 time per batch: 0.138\n",
1538
      "ver 34, iter 17, fold 1, val ll: 0.0599, cor: 0.8463, auc: 0.9897\n",
1539
      "setFeats, augmentation -1\n",
1540
      "Batch 50 device: cuda time passed: 7.653 time per batch: 0.153\n",
1541
      "Batch 100 device: cuda time passed: 14.070 time per batch: 0.141\n",
1542
      "ver 34, iter 18, fold 1, val ll: 0.0597, cor: 0.8469, auc: 0.9897\n",
1543
      "setFeats, augmentation -1\n",
1544
      "Batch 50 device: cuda time passed: 8.706 time per batch: 0.174\n",
1545
      "Batch 100 device: cuda time passed: 14.003 time per batch: 0.140\n",
1546
      "ver 34, iter 19, fold 1, val ll: 0.0596, cor: 0.8471, auc: 0.9897\n",
1547
      "setFeats, augmentation -1\n",
1548
      "Batch 50 device: cuda time passed: 8.402 time per batch: 0.168\n",
1549
      "Batch 100 device: cuda time passed: 14.393 time per batch: 0.144\n",
1550
      "ver 34, iter 20, fold 1, val ll: 0.0598, cor: 0.8462, auc: 0.9897\n",
1551
      "setFeats, augmentation -1\n",
1552
      "Batch 50 device: cuda time passed: 7.810 time per batch: 0.156\n",
1553
      "Batch 100 device: cuda time passed: 14.209 time per batch: 0.142\n",
1554
      "ver 34, iter 21, fold 1, val ll: 0.0598, cor: 0.8464, auc: 0.9897\n",
1555
      "setFeats, augmentation -1\n",
1556
      "Batch 50 device: cuda time passed: 8.650 time per batch: 0.173\n",
1557
      "Batch 100 device: cuda time passed: 14.641 time per batch: 0.146\n",
1558
      "ver 34, iter 22, fold 1, val ll: 0.0597, cor: 0.8468, auc: 0.9898\n",
1559
      "setFeats, augmentation -1\n",
1560
      "Batch 50 device: cuda time passed: 7.703 time per batch: 0.154\n",
1561
      "Batch 100 device: cuda time passed: 14.439 time per batch: 0.144\n",
1562
      "ver 34, iter 23, fold 1, val ll: 0.0597, cor: 0.8470, auc: 0.9897\n",
1563
      "setFeats, augmentation -1\n",
1564
      "Batch 50 device: cuda time passed: 8.274 time per batch: 0.165\n",
1565
      "Batch 100 device: cuda time passed: 14.597 time per batch: 0.146\n",
1566
      "ver 34, iter 24, fold 1, val ll: 0.0595, cor: 0.8471, auc: 0.9899\n",
1567
      "setFeats, augmentation -1\n",
1568
      "Batch 50 device: cuda time passed: 7.443 time per batch: 0.149\n",
1569
      "Batch 100 device: cuda time passed: 13.518 time per batch: 0.135\n",
1570
      "ver 34, iter 25, fold 1, val ll: 0.0597, cor: 0.8466, auc: 0.9898\n",
1571
      "setFeats, augmentation -1\n",
1572
      "Batch 50 device: cuda time passed: 7.435 time per batch: 0.149\n",
1573
      "Batch 100 device: cuda time passed: 13.988 time per batch: 0.140\n",
1574
      "ver 34, iter 26, fold 1, val ll: 0.0596, cor: 0.8471, auc: 0.9898\n",
1575
      "setFeats, augmentation -1\n",
1576
      "Batch 50 device: cuda time passed: 7.661 time per batch: 0.153\n",
1577
      "Batch 100 device: cuda time passed: 14.249 time per batch: 0.142\n"
1578
     ]
1579
    },
1580
    {
1581
     "name": "stdout",
1582
     "output_type": "stream",
1583
     "text": [
1584
      "ver 34, iter 27, fold 1, val ll: 0.0599, cor: 0.8462, auc: 0.9897\n",
1585
      "setFeats, augmentation -1\n",
1586
      "Batch 50 device: cuda time passed: 7.806 time per batch: 0.156\n",
1587
      "Batch 100 device: cuda time passed: 14.135 time per batch: 0.141\n",
1588
      "ver 34, iter 28, fold 1, val ll: 0.0598, cor: 0.8464, auc: 0.9897\n",
1589
      "setFeats, augmentation -1\n",
1590
      "Batch 50 device: cuda time passed: 8.158 time per batch: 0.163\n",
1591
      "Batch 100 device: cuda time passed: 14.689 time per batch: 0.147\n",
1592
      "ver 34, iter 29, fold 1, val ll: 0.0598, cor: 0.8465, auc: 0.9897\n",
1593
      "setFeats, augmentation -1\n",
1594
      "Batch 50 device: cuda time passed: 8.615 time per batch: 0.172\n",
1595
      "Batch 100 device: cuda time passed: 14.820 time per batch: 0.148\n",
1596
      "ver 34, iter 30, fold 1, val ll: 0.0594, cor: 0.8474, auc: 0.9898\n",
1597
      "setFeats, augmentation -1\n",
1598
      "Batch 50 device: cuda time passed: 8.738 time per batch: 0.175\n",
1599
      "Batch 100 device: cuda time passed: 15.174 time per batch: 0.152\n",
1600
      "ver 34, iter 31, fold 1, val ll: 0.0595, cor: 0.8472, auc: 0.9898\n",
1601
      "total running time 721.0183691978455\n",
1602
      "total time 11170.231662034988\n",
1603
      "completed epochs: 3 iters starting now: 32\n",
1604
      "adding dummy serieses 27\n",
1605
      "DataSet 11 valid size 4416 fold 2\n",
1606
      "dataset valid: 4416 loader valid: 138\n",
1607
      "loading model model.b3.f2.d11.v34\n",
1608
      "setFeats, augmentation -1\n",
1609
      "Batch 50 device: cuda time passed: 8.321 time per batch: 0.166\n",
1610
      "Batch 100 device: cuda time passed: 14.535 time per batch: 0.145\n",
1611
      "ver 34, iter 0, fold 2, val ll: 0.0598, cor: 0.8435, auc: 0.9894\n",
1612
      "setFeats, augmentation -1\n",
1613
      "Batch 50 device: cuda time passed: 8.470 time per batch: 0.169\n",
1614
      "Batch 100 device: cuda time passed: 14.441 time per batch: 0.144\n",
1615
      "ver 34, iter 1, fold 2, val ll: 0.0597, cor: 0.8432, auc: 0.9895\n",
1616
      "setFeats, augmentation -1\n",
1617
      "Batch 50 device: cuda time passed: 7.828 time per batch: 0.157\n",
1618
      "Batch 100 device: cuda time passed: 14.318 time per batch: 0.143\n",
1619
      "ver 34, iter 2, fold 2, val ll: 0.0596, cor: 0.8440, auc: 0.9895\n",
1620
      "setFeats, augmentation -1\n",
1621
      "Batch 50 device: cuda time passed: 7.636 time per batch: 0.153\n",
1622
      "Batch 100 device: cuda time passed: 14.246 time per batch: 0.142\n",
1623
      "ver 34, iter 3, fold 2, val ll: 0.0597, cor: 0.8434, auc: 0.9895\n",
1624
      "setFeats, augmentation -1\n",
1625
      "Batch 50 device: cuda time passed: 7.601 time per batch: 0.152\n",
1626
      "Batch 100 device: cuda time passed: 14.000 time per batch: 0.140\n",
1627
      "ver 34, iter 4, fold 2, val ll: 0.0595, cor: 0.8441, auc: 0.9895\n",
1628
      "setFeats, augmentation -1\n",
1629
      "Batch 50 device: cuda time passed: 7.245 time per batch: 0.145\n",
1630
      "Batch 100 device: cuda time passed: 14.561 time per batch: 0.146\n",
1631
      "ver 34, iter 5, fold 2, val ll: 0.0596, cor: 0.8439, auc: 0.9895\n",
1632
      "setFeats, augmentation -1\n",
1633
      "Batch 50 device: cuda time passed: 8.083 time per batch: 0.162\n",
1634
      "Batch 100 device: cuda time passed: 14.816 time per batch: 0.148\n",
1635
      "ver 34, iter 6, fold 2, val ll: 0.0597, cor: 0.8438, auc: 0.9895\n",
1636
      "setFeats, augmentation -1\n",
1637
      "Batch 50 device: cuda time passed: 8.189 time per batch: 0.164\n",
1638
      "Batch 100 device: cuda time passed: 14.019 time per batch: 0.140\n",
1639
      "ver 34, iter 7, fold 2, val ll: 0.0595, cor: 0.8439, auc: 0.9896\n",
1640
      "setFeats, augmentation -1\n",
1641
      "Batch 50 device: cuda time passed: 7.470 time per batch: 0.149\n",
1642
      "Batch 100 device: cuda time passed: 14.477 time per batch: 0.145\n",
1643
      "ver 34, iter 8, fold 2, val ll: 0.0599, cor: 0.8433, auc: 0.9894\n",
1644
      "setFeats, augmentation -1\n",
1645
      "Batch 50 device: cuda time passed: 8.395 time per batch: 0.168\n",
1646
      "Batch 100 device: cuda time passed: 15.214 time per batch: 0.152\n",
1647
      "ver 34, iter 9, fold 2, val ll: 0.0596, cor: 0.8438, auc: 0.9895\n",
1648
      "setFeats, augmentation -1\n",
1649
      "Batch 50 device: cuda time passed: 8.550 time per batch: 0.171\n",
1650
      "Batch 100 device: cuda time passed: 14.843 time per batch: 0.148\n",
1651
      "ver 34, iter 10, fold 2, val ll: 0.0598, cor: 0.8433, auc: 0.9895\n",
1652
      "setFeats, augmentation -1\n",
1653
      "Batch 50 device: cuda time passed: 8.069 time per batch: 0.161\n",
1654
      "Batch 100 device: cuda time passed: 15.071 time per batch: 0.151\n",
1655
      "ver 34, iter 11, fold 2, val ll: 0.0598, cor: 0.8435, auc: 0.9894\n",
1656
      "setFeats, augmentation -1\n",
1657
      "Batch 50 device: cuda time passed: 7.822 time per batch: 0.156\n",
1658
      "Batch 100 device: cuda time passed: 14.030 time per batch: 0.140\n",
1659
      "ver 34, iter 12, fold 2, val ll: 0.0599, cor: 0.8429, auc: 0.9895\n",
1660
      "setFeats, augmentation -1\n",
1661
      "Batch 50 device: cuda time passed: 7.770 time per batch: 0.155\n",
1662
      "Batch 100 device: cuda time passed: 14.464 time per batch: 0.145\n",
1663
      "ver 34, iter 13, fold 2, val ll: 0.0599, cor: 0.8434, auc: 0.9893\n",
1664
      "setFeats, augmentation -1\n",
1665
      "Batch 50 device: cuda time passed: 7.993 time per batch: 0.160\n",
1666
      "Batch 100 device: cuda time passed: 14.393 time per batch: 0.144\n",
1667
      "ver 34, iter 14, fold 2, val ll: 0.0598, cor: 0.8434, auc: 0.9895\n",
1668
      "setFeats, augmentation -1\n",
1669
      "Batch 50 device: cuda time passed: 7.532 time per batch: 0.151\n",
1670
      "Batch 100 device: cuda time passed: 14.834 time per batch: 0.148\n",
1671
      "ver 34, iter 15, fold 2, val ll: 0.0599, cor: 0.8434, auc: 0.9894\n",
1672
      "setFeats, augmentation -1\n",
1673
      "Batch 50 device: cuda time passed: 7.617 time per batch: 0.152\n",
1674
      "Batch 100 device: cuda time passed: 14.796 time per batch: 0.148\n",
1675
      "ver 34, iter 16, fold 2, val ll: 0.0599, cor: 0.8430, auc: 0.9895\n",
1676
      "setFeats, augmentation -1\n",
1677
      "Batch 50 device: cuda time passed: 8.682 time per batch: 0.174\n",
1678
      "Batch 100 device: cuda time passed: 14.858 time per batch: 0.149\n",
1679
      "ver 34, iter 17, fold 2, val ll: 0.0600, cor: 0.8431, auc: 0.9894\n",
1680
      "setFeats, augmentation -1\n",
1681
      "Batch 50 device: cuda time passed: 7.982 time per batch: 0.160\n",
1682
      "Batch 100 device: cuda time passed: 14.452 time per batch: 0.145\n",
1683
      "ver 34, iter 18, fold 2, val ll: 0.0596, cor: 0.8440, auc: 0.9895\n",
1684
      "setFeats, augmentation -1\n",
1685
      "Batch 50 device: cuda time passed: 8.125 time per batch: 0.162\n",
1686
      "Batch 100 device: cuda time passed: 14.476 time per batch: 0.145\n",
1687
      "ver 34, iter 19, fold 2, val ll: 0.0600, cor: 0.8430, auc: 0.9894\n",
1688
      "setFeats, augmentation -1\n",
1689
      "Batch 50 device: cuda time passed: 8.754 time per batch: 0.175\n",
1690
      "Batch 100 device: cuda time passed: 15.201 time per batch: 0.152\n",
1691
      "ver 34, iter 20, fold 2, val ll: 0.0600, cor: 0.8430, auc: 0.9894\n",
1692
      "setFeats, augmentation -1\n",
1693
      "Batch 50 device: cuda time passed: 8.643 time per batch: 0.173\n",
1694
      "Batch 100 device: cuda time passed: 15.414 time per batch: 0.154\n",
1695
      "ver 34, iter 21, fold 2, val ll: 0.0598, cor: 0.8435, auc: 0.9894\n",
1696
      "setFeats, augmentation -1\n",
1697
      "Batch 50 device: cuda time passed: 7.911 time per batch: 0.158\n",
1698
      "Batch 100 device: cuda time passed: 14.572 time per batch: 0.146\n",
1699
      "ver 34, iter 22, fold 2, val ll: 0.0597, cor: 0.8437, auc: 0.9894\n",
1700
      "setFeats, augmentation -1\n",
1701
      "Batch 50 device: cuda time passed: 8.263 time per batch: 0.165\n",
1702
      "Batch 100 device: cuda time passed: 14.524 time per batch: 0.145\n",
1703
      "ver 34, iter 23, fold 2, val ll: 0.0597, cor: 0.8435, auc: 0.9895\n",
1704
      "setFeats, augmentation -1\n",
1705
      "Batch 50 device: cuda time passed: 8.294 time per batch: 0.166\n",
1706
      "Batch 100 device: cuda time passed: 14.922 time per batch: 0.149\n",
1707
      "ver 34, iter 24, fold 2, val ll: 0.0599, cor: 0.8435, auc: 0.9894\n",
1708
      "setFeats, augmentation -1\n",
1709
      "Batch 50 device: cuda time passed: 8.698 time per batch: 0.174\n",
1710
      "Batch 100 device: cuda time passed: 15.055 time per batch: 0.151\n",
1711
      "ver 34, iter 25, fold 2, val ll: 0.0599, cor: 0.8433, auc: 0.9894\n",
1712
      "setFeats, augmentation -1\n",
1713
      "Batch 50 device: cuda time passed: 7.893 time per batch: 0.158\n",
1714
      "Batch 100 device: cuda time passed: 14.455 time per batch: 0.145\n",
1715
      "ver 34, iter 26, fold 2, val ll: 0.0595, cor: 0.8439, auc: 0.9896\n",
1716
      "setFeats, augmentation -1\n",
1717
      "Batch 50 device: cuda time passed: 8.170 time per batch: 0.163\n",
1718
      "Batch 100 device: cuda time passed: 14.426 time per batch: 0.144\n",
1719
      "ver 34, iter 27, fold 2, val ll: 0.0599, cor: 0.8433, auc: 0.9893\n",
1720
      "setFeats, augmentation -1\n",
1721
      "Batch 50 device: cuda time passed: 8.228 time per batch: 0.165\n",
1722
      "Batch 100 device: cuda time passed: 14.495 time per batch: 0.145\n",
1723
      "ver 34, iter 28, fold 2, val ll: 0.0597, cor: 0.8437, auc: 0.9894\n",
1724
      "setFeats, augmentation -1\n",
1725
      "Batch 50 device: cuda time passed: 8.403 time per batch: 0.168\n",
1726
      "Batch 100 device: cuda time passed: 14.746 time per batch: 0.147\n",
1727
      "ver 34, iter 29, fold 2, val ll: 0.0598, cor: 0.8434, auc: 0.9895\n",
1728
      "setFeats, augmentation -1\n",
1729
      "Batch 50 device: cuda time passed: 8.353 time per batch: 0.167\n",
1730
      "Batch 100 device: cuda time passed: 14.725 time per batch: 0.147\n",
1731
      "ver 34, iter 30, fold 2, val ll: 0.0599, cor: 0.8429, auc: 0.9894\n",
1732
      "setFeats, augmentation -1\n",
1733
      "Batch 50 device: cuda time passed: 7.921 time per batch: 0.158\n",
1734
      "Batch 100 device: cuda time passed: 14.335 time per batch: 0.143\n",
1735
      "ver 34, iter 31, fold 2, val ll: 0.0599, cor: 0.8432, auc: 0.9894\n",
1736
      "total running time 752.1568894386292\n"
1737
     ]
1738
    },
1739
    {
1740
     "name": "stdout",
1741
     "output_type": "stream",
1742
     "text": [
1743
      "total time 11922.615085601807\n",
1744
      "completed epochs: 3 iters starting now: 32\n",
1745
      "adding dummy serieses 16\n",
1746
      "DataSet 11 valid size 4352 fold 3\n",
1747
      "dataset valid: 4352 loader valid: 136\n",
1748
      "loading model model.b3.f3.d11.v34\n",
1749
      "setFeats, augmentation -1\n",
1750
      "Batch 50 device: cuda time passed: 8.425 time per batch: 0.168\n",
1751
      "Batch 100 device: cuda time passed: 14.529 time per batch: 0.145\n",
1752
      "ver 34, iter 0, fold 3, val ll: 0.0630, cor: 0.8408, auc: 0.9887\n",
1753
      "setFeats, augmentation -1\n",
1754
      "Batch 50 device: cuda time passed: 8.520 time per batch: 0.170\n",
1755
      "Batch 100 device: cuda time passed: 14.945 time per batch: 0.149\n",
1756
      "ver 34, iter 1, fold 3, val ll: 0.0631, cor: 0.8410, auc: 0.9886\n",
1757
      "setFeats, augmentation -1\n",
1758
      "Batch 50 device: cuda time passed: 8.122 time per batch: 0.162\n",
1759
      "Batch 100 device: cuda time passed: 14.718 time per batch: 0.147\n",
1760
      "ver 34, iter 2, fold 3, val ll: 0.0632, cor: 0.8404, auc: 0.9887\n",
1761
      "setFeats, augmentation -1\n",
1762
      "Batch 50 device: cuda time passed: 8.313 time per batch: 0.166\n",
1763
      "Batch 100 device: cuda time passed: 14.583 time per batch: 0.146\n",
1764
      "ver 34, iter 3, fold 3, val ll: 0.0629, cor: 0.8410, auc: 0.9887\n",
1765
      "setFeats, augmentation -1\n",
1766
      "Batch 50 device: cuda time passed: 7.467 time per batch: 0.149\n",
1767
      "Batch 100 device: cuda time passed: 14.490 time per batch: 0.145\n",
1768
      "ver 34, iter 4, fold 3, val ll: 0.0629, cor: 0.8410, auc: 0.9888\n",
1769
      "setFeats, augmentation -1\n",
1770
      "Batch 50 device: cuda time passed: 7.773 time per batch: 0.155\n",
1771
      "Batch 100 device: cuda time passed: 14.122 time per batch: 0.141\n",
1772
      "ver 34, iter 5, fold 3, val ll: 0.0627, cor: 0.8414, auc: 0.9888\n",
1773
      "setFeats, augmentation -1\n",
1774
      "Batch 50 device: cuda time passed: 8.125 time per batch: 0.163\n",
1775
      "Batch 100 device: cuda time passed: 14.389 time per batch: 0.144\n",
1776
      "ver 34, iter 6, fold 3, val ll: 0.0631, cor: 0.8411, auc: 0.9887\n",
1777
      "setFeats, augmentation -1\n",
1778
      "Batch 50 device: cuda time passed: 8.044 time per batch: 0.161\n",
1779
      "Batch 100 device: cuda time passed: 13.814 time per batch: 0.138\n",
1780
      "ver 34, iter 7, fold 3, val ll: 0.0627, cor: 0.8417, auc: 0.9888\n",
1781
      "setFeats, augmentation -1\n",
1782
      "Batch 50 device: cuda time passed: 8.442 time per batch: 0.169\n",
1783
      "Batch 100 device: cuda time passed: 14.809 time per batch: 0.148\n",
1784
      "ver 34, iter 8, fold 3, val ll: 0.0631, cor: 0.8408, auc: 0.9886\n",
1785
      "setFeats, augmentation -1\n",
1786
      "Batch 50 device: cuda time passed: 7.894 time per batch: 0.158\n",
1787
      "Batch 100 device: cuda time passed: 14.364 time per batch: 0.144\n",
1788
      "ver 34, iter 9, fold 3, val ll: 0.0630, cor: 0.8409, auc: 0.9886\n",
1789
      "setFeats, augmentation -1\n",
1790
      "Batch 50 device: cuda time passed: 7.779 time per batch: 0.156\n",
1791
      "Batch 100 device: cuda time passed: 14.097 time per batch: 0.141\n",
1792
      "ver 34, iter 10, fold 3, val ll: 0.0631, cor: 0.8406, auc: 0.9886\n",
1793
      "setFeats, augmentation -1\n",
1794
      "Batch 50 device: cuda time passed: 8.392 time per batch: 0.168\n",
1795
      "Batch 100 device: cuda time passed: 14.809 time per batch: 0.148\n",
1796
      "ver 34, iter 11, fold 3, val ll: 0.0628, cor: 0.8411, auc: 0.9888\n",
1797
      "setFeats, augmentation -1\n",
1798
      "Batch 50 device: cuda time passed: 7.458 time per batch: 0.149\n",
1799
      "Batch 100 device: cuda time passed: 14.714 time per batch: 0.147\n",
1800
      "ver 34, iter 12, fold 3, val ll: 0.0630, cor: 0.8406, auc: 0.9886\n",
1801
      "setFeats, augmentation -1\n",
1802
      "Batch 50 device: cuda time passed: 8.365 time per batch: 0.167\n",
1803
      "Batch 100 device: cuda time passed: 14.288 time per batch: 0.143\n",
1804
      "ver 34, iter 13, fold 3, val ll: 0.0630, cor: 0.8407, auc: 0.9887\n",
1805
      "setFeats, augmentation -1\n",
1806
      "Batch 50 device: cuda time passed: 8.378 time per batch: 0.168\n",
1807
      "Batch 100 device: cuda time passed: 14.711 time per batch: 0.147\n",
1808
      "ver 34, iter 14, fold 3, val ll: 0.0629, cor: 0.8410, auc: 0.9888\n",
1809
      "setFeats, augmentation -1\n",
1810
      "Batch 50 device: cuda time passed: 8.608 time per batch: 0.172\n",
1811
      "Batch 100 device: cuda time passed: 14.700 time per batch: 0.147\n",
1812
      "ver 34, iter 15, fold 3, val ll: 0.0632, cor: 0.8404, auc: 0.9886\n",
1813
      "setFeats, augmentation -1\n",
1814
      "Batch 50 device: cuda time passed: 7.907 time per batch: 0.158\n",
1815
      "Batch 100 device: cuda time passed: 14.268 time per batch: 0.143\n",
1816
      "ver 34, iter 16, fold 3, val ll: 0.0630, cor: 0.8410, auc: 0.9886\n",
1817
      "setFeats, augmentation -1\n",
1818
      "Batch 50 device: cuda time passed: 8.337 time per batch: 0.167\n",
1819
      "Batch 100 device: cuda time passed: 14.435 time per batch: 0.144\n",
1820
      "ver 34, iter 17, fold 3, val ll: 0.0631, cor: 0.8408, auc: 0.9886\n",
1821
      "setFeats, augmentation -1\n",
1822
      "Batch 50 device: cuda time passed: 7.253 time per batch: 0.145\n",
1823
      "Batch 100 device: cuda time passed: 13.893 time per batch: 0.139\n",
1824
      "ver 34, iter 18, fold 3, val ll: 0.0630, cor: 0.8410, auc: 0.9886\n",
1825
      "setFeats, augmentation -1\n",
1826
      "Batch 50 device: cuda time passed: 8.474 time per batch: 0.169\n",
1827
      "Batch 100 device: cuda time passed: 15.130 time per batch: 0.151\n",
1828
      "ver 34, iter 19, fold 3, val ll: 0.0632, cor: 0.8405, auc: 0.9886\n",
1829
      "setFeats, augmentation -1\n",
1830
      "Batch 50 device: cuda time passed: 8.107 time per batch: 0.162\n",
1831
      "Batch 100 device: cuda time passed: 14.488 time per batch: 0.145\n",
1832
      "ver 34, iter 20, fold 3, val ll: 0.0630, cor: 0.8410, auc: 0.9887\n",
1833
      "setFeats, augmentation -1\n",
1834
      "Batch 50 device: cuda time passed: 8.328 time per batch: 0.167\n",
1835
      "Batch 100 device: cuda time passed: 14.122 time per batch: 0.141\n",
1836
      "ver 34, iter 21, fold 3, val ll: 0.0630, cor: 0.8411, auc: 0.9887\n",
1837
      "setFeats, augmentation -1\n",
1838
      "Batch 50 device: cuda time passed: 7.747 time per batch: 0.155\n",
1839
      "Batch 100 device: cuda time passed: 14.564 time per batch: 0.146\n",
1840
      "ver 34, iter 22, fold 3, val ll: 0.0629, cor: 0.8414, auc: 0.9888\n",
1841
      "setFeats, augmentation -1\n",
1842
      "Batch 50 device: cuda time passed: 7.608 time per batch: 0.152\n",
1843
      "Batch 100 device: cuda time passed: 14.575 time per batch: 0.146\n",
1844
      "ver 34, iter 23, fold 3, val ll: 0.0631, cor: 0.8409, auc: 0.9885\n",
1845
      "setFeats, augmentation -1\n",
1846
      "Batch 50 device: cuda time passed: 8.066 time per batch: 0.161\n",
1847
      "Batch 100 device: cuda time passed: 14.204 time per batch: 0.142\n",
1848
      "ver 34, iter 24, fold 3, val ll: 0.0630, cor: 0.8413, auc: 0.9886\n",
1849
      "setFeats, augmentation -1\n",
1850
      "Batch 50 device: cuda time passed: 7.599 time per batch: 0.152\n",
1851
      "Batch 100 device: cuda time passed: 14.531 time per batch: 0.145\n",
1852
      "ver 34, iter 25, fold 3, val ll: 0.0630, cor: 0.8410, auc: 0.9887\n",
1853
      "setFeats, augmentation -1\n",
1854
      "Batch 50 device: cuda time passed: 8.369 time per batch: 0.167\n",
1855
      "Batch 100 device: cuda time passed: 14.646 time per batch: 0.146\n",
1856
      "ver 34, iter 26, fold 3, val ll: 0.0629, cor: 0.8409, auc: 0.9887\n",
1857
      "setFeats, augmentation -1\n",
1858
      "Batch 50 device: cuda time passed: 7.989 time per batch: 0.160\n",
1859
      "Batch 100 device: cuda time passed: 14.261 time per batch: 0.143\n",
1860
      "ver 34, iter 27, fold 3, val ll: 0.0631, cor: 0.8409, auc: 0.9886\n",
1861
      "setFeats, augmentation -1\n",
1862
      "Batch 50 device: cuda time passed: 7.613 time per batch: 0.152\n",
1863
      "Batch 100 device: cuda time passed: 13.975 time per batch: 0.140\n",
1864
      "ver 34, iter 28, fold 3, val ll: 0.0629, cor: 0.8409, auc: 0.9888\n",
1865
      "setFeats, augmentation -1\n",
1866
      "Batch 50 device: cuda time passed: 7.974 time per batch: 0.159\n",
1867
      "Batch 100 device: cuda time passed: 14.795 time per batch: 0.148\n",
1868
      "ver 34, iter 29, fold 3, val ll: 0.0630, cor: 0.8409, auc: 0.9887\n",
1869
      "setFeats, augmentation -1\n",
1870
      "Batch 50 device: cuda time passed: 7.833 time per batch: 0.157\n",
1871
      "Batch 100 device: cuda time passed: 13.972 time per batch: 0.140\n",
1872
      "ver 34, iter 30, fold 3, val ll: 0.0632, cor: 0.8405, auc: 0.9886\n",
1873
      "setFeats, augmentation -1\n",
1874
      "Batch 50 device: cuda time passed: 8.679 time per batch: 0.174\n",
1875
      "Batch 100 device: cuda time passed: 14.658 time per batch: 0.147\n",
1876
      "ver 34, iter 31, fold 3, val ll: 0.0629, cor: 0.8412, auc: 0.9887\n",
1877
      "total running time 734.1475803852081\n",
1878
      "total time 12656.995476007462\n",
1879
      "completed epochs: 3 iters starting now: 32\n",
1880
      "adding dummy serieses 16\n",
1881
      "DataSet 11 valid size 4384 fold 4\n",
1882
      "dataset valid: 4384 loader valid: 137\n",
1883
      "loading model model.b3.f4.d11.v34\n",
1884
      "setFeats, augmentation -1\n",
1885
      "Batch 50 device: cuda time passed: 7.537 time per batch: 0.151\n",
1886
      "Batch 100 device: cuda time passed: 14.791 time per batch: 0.148\n",
1887
      "ver 34, iter 0, fold 4, val ll: 0.0621, cor: 0.8422, auc: 0.9883\n",
1888
      "setFeats, augmentation -1\n",
1889
      "Batch 50 device: cuda time passed: 7.658 time per batch: 0.153\n",
1890
      "Batch 100 device: cuda time passed: 14.894 time per batch: 0.149\n",
1891
      "ver 34, iter 1, fold 4, val ll: 0.0621, cor: 0.8425, auc: 0.9881\n",
1892
      "setFeats, augmentation -1\n",
1893
      "Batch 50 device: cuda time passed: 8.153 time per batch: 0.163\n",
1894
      "Batch 100 device: cuda time passed: 15.152 time per batch: 0.152\n",
1895
      "ver 34, iter 2, fold 4, val ll: 0.0621, cor: 0.8424, auc: 0.9882\n",
1896
      "setFeats, augmentation -1\n",
1897
      "Batch 50 device: cuda time passed: 7.949 time per batch: 0.159\n"
1898
     ]
1899
    },
1900
    {
1901
     "name": "stdout",
1902
     "output_type": "stream",
1903
     "text": [
1904
      "Batch 100 device: cuda time passed: 14.614 time per batch: 0.146\n",
1905
      "ver 34, iter 3, fold 4, val ll: 0.0621, cor: 0.8423, auc: 0.9882\n",
1906
      "setFeats, augmentation -1\n",
1907
      "Batch 50 device: cuda time passed: 7.994 time per batch: 0.160\n",
1908
      "Batch 100 device: cuda time passed: 14.679 time per batch: 0.147\n",
1909
      "ver 34, iter 4, fold 4, val ll: 0.0621, cor: 0.8424, auc: 0.9883\n",
1910
      "setFeats, augmentation -1\n",
1911
      "Batch 50 device: cuda time passed: 7.531 time per batch: 0.151\n",
1912
      "Batch 100 device: cuda time passed: 14.144 time per batch: 0.141\n",
1913
      "ver 34, iter 5, fold 4, val ll: 0.0622, cor: 0.8423, auc: 0.9881\n",
1914
      "setFeats, augmentation -1\n",
1915
      "Batch 50 device: cuda time passed: 7.704 time per batch: 0.154\n",
1916
      "Batch 100 device: cuda time passed: 14.333 time per batch: 0.143\n",
1917
      "ver 34, iter 6, fold 4, val ll: 0.0621, cor: 0.8422, auc: 0.9882\n",
1918
      "setFeats, augmentation -1\n",
1919
      "Batch 50 device: cuda time passed: 8.156 time per batch: 0.163\n",
1920
      "Batch 100 device: cuda time passed: 14.257 time per batch: 0.143\n",
1921
      "ver 34, iter 7, fold 4, val ll: 0.0620, cor: 0.8423, auc: 0.9883\n",
1922
      "setFeats, augmentation -1\n",
1923
      "Batch 50 device: cuda time passed: 8.861 time per batch: 0.177\n",
1924
      "Batch 100 device: cuda time passed: 15.148 time per batch: 0.151\n",
1925
      "ver 34, iter 8, fold 4, val ll: 0.0622, cor: 0.8420, auc: 0.9882\n",
1926
      "setFeats, augmentation -1\n",
1927
      "Batch 50 device: cuda time passed: 7.667 time per batch: 0.153\n",
1928
      "Batch 100 device: cuda time passed: 14.239 time per batch: 0.142\n",
1929
      "ver 34, iter 9, fold 4, val ll: 0.0623, cor: 0.8420, auc: 0.9881\n",
1930
      "setFeats, augmentation -1\n",
1931
      "Batch 50 device: cuda time passed: 7.540 time per batch: 0.151\n",
1932
      "Batch 100 device: cuda time passed: 14.616 time per batch: 0.146\n",
1933
      "ver 34, iter 10, fold 4, val ll: 0.0621, cor: 0.8422, auc: 0.9883\n",
1934
      "setFeats, augmentation -1\n",
1935
      "Batch 50 device: cuda time passed: 8.561 time per batch: 0.171\n",
1936
      "Batch 100 device: cuda time passed: 14.683 time per batch: 0.147\n",
1937
      "ver 34, iter 11, fold 4, val ll: 0.0621, cor: 0.8424, auc: 0.9882\n",
1938
      "setFeats, augmentation -1\n",
1939
      "Batch 50 device: cuda time passed: 7.547 time per batch: 0.151\n",
1940
      "Batch 100 device: cuda time passed: 14.696 time per batch: 0.147\n",
1941
      "ver 34, iter 12, fold 4, val ll: 0.0621, cor: 0.8427, auc: 0.9882\n",
1942
      "setFeats, augmentation -1\n",
1943
      "Batch 50 device: cuda time passed: 8.117 time per batch: 0.162\n",
1944
      "Batch 100 device: cuda time passed: 14.203 time per batch: 0.142\n",
1945
      "ver 34, iter 13, fold 4, val ll: 0.0621, cor: 0.8421, auc: 0.9882\n",
1946
      "setFeats, augmentation -1\n",
1947
      "Batch 50 device: cuda time passed: 7.766 time per batch: 0.155\n",
1948
      "Batch 100 device: cuda time passed: 14.236 time per batch: 0.142\n",
1949
      "ver 34, iter 14, fold 4, val ll: 0.0620, cor: 0.8421, auc: 0.9884\n",
1950
      "setFeats, augmentation -1\n",
1951
      "Batch 50 device: cuda time passed: 8.059 time per batch: 0.161\n",
1952
      "Batch 100 device: cuda time passed: 14.706 time per batch: 0.147\n",
1953
      "ver 34, iter 15, fold 4, val ll: 0.0620, cor: 0.8429, auc: 0.9882\n",
1954
      "setFeats, augmentation -1\n",
1955
      "Batch 50 device: cuda time passed: 7.840 time per batch: 0.157\n",
1956
      "Batch 100 device: cuda time passed: 14.664 time per batch: 0.147\n",
1957
      "ver 34, iter 16, fold 4, val ll: 0.0624, cor: 0.8417, auc: 0.9882\n",
1958
      "setFeats, augmentation -1\n",
1959
      "Batch 50 device: cuda time passed: 7.848 time per batch: 0.157\n",
1960
      "Batch 100 device: cuda time passed: 14.174 time per batch: 0.142\n",
1961
      "ver 34, iter 17, fold 4, val ll: 0.0619, cor: 0.8430, auc: 0.9882\n",
1962
      "setFeats, augmentation -1\n",
1963
      "Batch 50 device: cuda time passed: 8.310 time per batch: 0.166\n",
1964
      "Batch 100 device: cuda time passed: 14.376 time per batch: 0.144\n",
1965
      "ver 34, iter 18, fold 4, val ll: 0.0619, cor: 0.8422, auc: 0.9883\n",
1966
      "setFeats, augmentation -1\n",
1967
      "Batch 50 device: cuda time passed: 7.770 time per batch: 0.155\n",
1968
      "Batch 100 device: cuda time passed: 14.641 time per batch: 0.146\n",
1969
      "ver 34, iter 19, fold 4, val ll: 0.0619, cor: 0.8430, auc: 0.9882\n",
1970
      "setFeats, augmentation -1\n",
1971
      "Batch 50 device: cuda time passed: 8.277 time per batch: 0.166\n",
1972
      "Batch 100 device: cuda time passed: 14.477 time per batch: 0.145\n",
1973
      "ver 34, iter 20, fold 4, val ll: 0.0621, cor: 0.8423, auc: 0.9882\n",
1974
      "setFeats, augmentation -1\n",
1975
      "Batch 50 device: cuda time passed: 7.529 time per batch: 0.151\n",
1976
      "Batch 100 device: cuda time passed: 14.157 time per batch: 0.142\n",
1977
      "ver 34, iter 21, fold 4, val ll: 0.0622, cor: 0.8422, auc: 0.9882\n",
1978
      "setFeats, augmentation -1\n",
1979
      "Batch 50 device: cuda time passed: 8.420 time per batch: 0.168\n",
1980
      "Batch 100 device: cuda time passed: 14.098 time per batch: 0.141\n",
1981
      "ver 34, iter 22, fold 4, val ll: 0.0620, cor: 0.8427, auc: 0.9882\n",
1982
      "setFeats, augmentation -1\n",
1983
      "Batch 50 device: cuda time passed: 7.731 time per batch: 0.155\n",
1984
      "Batch 100 device: cuda time passed: 14.621 time per batch: 0.146\n",
1985
      "ver 34, iter 23, fold 4, val ll: 0.0622, cor: 0.8424, auc: 0.9881\n",
1986
      "setFeats, augmentation -1\n",
1987
      "Batch 50 device: cuda time passed: 7.574 time per batch: 0.151\n",
1988
      "Batch 100 device: cuda time passed: 14.133 time per batch: 0.141\n",
1989
      "ver 34, iter 24, fold 4, val ll: 0.0621, cor: 0.8421, auc: 0.9883\n",
1990
      "setFeats, augmentation -1\n",
1991
      "Batch 50 device: cuda time passed: 7.682 time per batch: 0.154\n",
1992
      "Batch 100 device: cuda time passed: 14.033 time per batch: 0.140\n",
1993
      "ver 34, iter 25, fold 4, val ll: 0.0621, cor: 0.8424, auc: 0.9882\n",
1994
      "setFeats, augmentation -1\n",
1995
      "Batch 50 device: cuda time passed: 8.161 time per batch: 0.163\n",
1996
      "Batch 100 device: cuda time passed: 14.308 time per batch: 0.143\n",
1997
      "ver 34, iter 26, fold 4, val ll: 0.0621, cor: 0.8422, auc: 0.9882\n",
1998
      "setFeats, augmentation -1\n",
1999
      "Batch 50 device: cuda time passed: 7.618 time per batch: 0.152\n",
2000
      "Batch 100 device: cuda time passed: 13.931 time per batch: 0.139\n",
2001
      "ver 34, iter 27, fold 4, val ll: 0.0620, cor: 0.8426, auc: 0.9883\n",
2002
      "setFeats, augmentation -1\n",
2003
      "Batch 50 device: cuda time passed: 8.213 time per batch: 0.164\n",
2004
      "Batch 100 device: cuda time passed: 14.428 time per batch: 0.144\n",
2005
      "ver 34, iter 28, fold 4, val ll: 0.0621, cor: 0.8424, auc: 0.9881\n",
2006
      "setFeats, augmentation -1\n",
2007
      "Batch 50 device: cuda time passed: 8.041 time per batch: 0.161\n",
2008
      "Batch 100 device: cuda time passed: 14.991 time per batch: 0.150\n",
2009
      "ver 34, iter 29, fold 4, val ll: 0.0620, cor: 0.8427, auc: 0.9882\n",
2010
      "setFeats, augmentation -1\n",
2011
      "Batch 50 device: cuda time passed: 8.222 time per batch: 0.164\n",
2012
      "Batch 100 device: cuda time passed: 14.845 time per batch: 0.148\n",
2013
      "ver 34, iter 30, fold 4, val ll: 0.0620, cor: 0.8426, auc: 0.9881\n",
2014
      "setFeats, augmentation -1\n",
2015
      "Batch 50 device: cuda time passed: 8.285 time per batch: 0.166\n",
2016
      "Batch 100 device: cuda time passed: 14.113 time per batch: 0.141\n",
2017
      "ver 34, iter 31, fold 4, val ll: 0.0621, cor: 0.8423, auc: 0.9882\n",
2018
      "total running time 742.8772552013397\n",
2019
      "total time 13400.111248254776\n",
2020
      "completed epochs: 3 iters starting now: 32\n",
2021
      "adding dummy serieses 9\n",
2022
      "DataSet 12 valid size 4384 fold 0\n",
2023
      "dataset valid: 4384 loader valid: 137\n",
2024
      "loading model model.b3.f0.d12.v34\n",
2025
      "setFeats, augmentation -1\n",
2026
      "Batch 50 device: cuda time passed: 8.200 time per batch: 0.164\n",
2027
      "Batch 100 device: cuda time passed: 14.845 time per batch: 0.148\n",
2028
      "ver 34, iter 0, fold 0, val ll: 0.0608, cor: 0.8451, auc: 0.9887\n",
2029
      "setFeats, augmentation -1\n",
2030
      "Batch 50 device: cuda time passed: 7.747 time per batch: 0.155\n",
2031
      "Batch 100 device: cuda time passed: 15.452 time per batch: 0.155\n",
2032
      "ver 34, iter 1, fold 0, val ll: 0.0610, cor: 0.8443, auc: 0.9888\n",
2033
      "setFeats, augmentation -1\n",
2034
      "Batch 50 device: cuda time passed: 7.907 time per batch: 0.158\n",
2035
      "Batch 100 device: cuda time passed: 14.721 time per batch: 0.147\n",
2036
      "ver 34, iter 2, fold 0, val ll: 0.0609, cor: 0.8442, auc: 0.9888\n",
2037
      "setFeats, augmentation -1\n",
2038
      "Batch 50 device: cuda time passed: 8.093 time per batch: 0.162\n",
2039
      "Batch 100 device: cuda time passed: 14.886 time per batch: 0.149\n",
2040
      "ver 34, iter 3, fold 0, val ll: 0.0608, cor: 0.8449, auc: 0.9888\n",
2041
      "setFeats, augmentation -1\n",
2042
      "Batch 50 device: cuda time passed: 7.963 time per batch: 0.159\n",
2043
      "Batch 100 device: cuda time passed: 14.266 time per batch: 0.143\n",
2044
      "ver 34, iter 4, fold 0, val ll: 0.0608, cor: 0.8444, auc: 0.9888\n",
2045
      "setFeats, augmentation -1\n",
2046
      "Batch 50 device: cuda time passed: 8.064 time per batch: 0.161\n",
2047
      "Batch 100 device: cuda time passed: 14.130 time per batch: 0.141\n",
2048
      "ver 34, iter 5, fold 0, val ll: 0.0608, cor: 0.8446, auc: 0.9888\n",
2049
      "setFeats, augmentation -1\n",
2050
      "Batch 50 device: cuda time passed: 8.710 time per batch: 0.174\n",
2051
      "Batch 100 device: cuda time passed: 14.791 time per batch: 0.148\n",
2052
      "ver 34, iter 6, fold 0, val ll: 0.0611, cor: 0.8444, auc: 0.9887\n",
2053
      "setFeats, augmentation -1\n",
2054
      "Batch 50 device: cuda time passed: 8.206 time per batch: 0.164\n",
2055
      "Batch 100 device: cuda time passed: 14.944 time per batch: 0.149\n"
2056
     ]
2057
    },
2058
    {
2059
     "name": "stdout",
2060
     "output_type": "stream",
2061
     "text": [
2062
      "ver 34, iter 7, fold 0, val ll: 0.0609, cor: 0.8444, auc: 0.9889\n",
2063
      "setFeats, augmentation -1\n",
2064
      "Batch 50 device: cuda time passed: 7.895 time per batch: 0.158\n",
2065
      "Batch 100 device: cuda time passed: 14.094 time per batch: 0.141\n",
2066
      "ver 34, iter 8, fold 0, val ll: 0.0611, cor: 0.8444, auc: 0.9886\n",
2067
      "setFeats, augmentation -1\n",
2068
      "Batch 50 device: cuda time passed: 8.393 time per batch: 0.168\n",
2069
      "Batch 100 device: cuda time passed: 14.507 time per batch: 0.145\n",
2070
      "ver 34, iter 9, fold 0, val ll: 0.0610, cor: 0.8446, auc: 0.9888\n",
2071
      "setFeats, augmentation -1\n",
2072
      "Batch 50 device: cuda time passed: 8.617 time per batch: 0.172\n",
2073
      "Batch 100 device: cuda time passed: 14.751 time per batch: 0.148\n",
2074
      "ver 34, iter 10, fold 0, val ll: 0.0610, cor: 0.8447, auc: 0.9887\n",
2075
      "setFeats, augmentation -1\n",
2076
      "Batch 50 device: cuda time passed: 7.839 time per batch: 0.157\n",
2077
      "Batch 100 device: cuda time passed: 14.319 time per batch: 0.143\n",
2078
      "ver 34, iter 11, fold 0, val ll: 0.0611, cor: 0.8444, auc: 0.9887\n",
2079
      "setFeats, augmentation -1\n",
2080
      "Batch 50 device: cuda time passed: 7.572 time per batch: 0.151\n",
2081
      "Batch 100 device: cuda time passed: 14.793 time per batch: 0.148\n",
2082
      "ver 34, iter 12, fold 0, val ll: 0.0609, cor: 0.8446, auc: 0.9888\n",
2083
      "setFeats, augmentation -1\n",
2084
      "Batch 50 device: cuda time passed: 8.295 time per batch: 0.166\n",
2085
      "Batch 100 device: cuda time passed: 14.345 time per batch: 0.143\n",
2086
      "ver 34, iter 13, fold 0, val ll: 0.0608, cor: 0.8447, auc: 0.9888\n",
2087
      "setFeats, augmentation -1\n",
2088
      "Batch 50 device: cuda time passed: 7.880 time per batch: 0.158\n",
2089
      "Batch 100 device: cuda time passed: 14.614 time per batch: 0.146\n",
2090
      "ver 34, iter 14, fold 0, val ll: 0.0609, cor: 0.8445, auc: 0.9888\n",
2091
      "setFeats, augmentation -1\n",
2092
      "Batch 50 device: cuda time passed: 8.390 time per batch: 0.168\n",
2093
      "Batch 100 device: cuda time passed: 14.339 time per batch: 0.143\n",
2094
      "ver 34, iter 15, fold 0, val ll: 0.0610, cor: 0.8446, auc: 0.9887\n",
2095
      "setFeats, augmentation -1\n",
2096
      "Batch 50 device: cuda time passed: 8.170 time per batch: 0.163\n",
2097
      "Batch 100 device: cuda time passed: 14.692 time per batch: 0.147\n",
2098
      "ver 34, iter 16, fold 0, val ll: 0.0609, cor: 0.8448, auc: 0.9887\n",
2099
      "setFeats, augmentation -1\n",
2100
      "Batch 50 device: cuda time passed: 8.665 time per batch: 0.173\n",
2101
      "Batch 100 device: cuda time passed: 14.827 time per batch: 0.148\n",
2102
      "ver 34, iter 17, fold 0, val ll: 0.0610, cor: 0.8442, auc: 0.9887\n",
2103
      "setFeats, augmentation -1\n",
2104
      "Batch 50 device: cuda time passed: 8.021 time per batch: 0.160\n",
2105
      "Batch 100 device: cuda time passed: 14.415 time per batch: 0.144\n",
2106
      "ver 34, iter 18, fold 0, val ll: 0.0608, cor: 0.8448, auc: 0.9888\n",
2107
      "setFeats, augmentation -1\n",
2108
      "Batch 50 device: cuda time passed: 8.678 time per batch: 0.174\n",
2109
      "Batch 100 device: cuda time passed: 14.637 time per batch: 0.146\n",
2110
      "ver 34, iter 19, fold 0, val ll: 0.0609, cor: 0.8445, auc: 0.9888\n",
2111
      "setFeats, augmentation -1\n",
2112
      "Batch 50 device: cuda time passed: 8.078 time per batch: 0.162\n",
2113
      "Batch 100 device: cuda time passed: 14.884 time per batch: 0.149\n",
2114
      "ver 34, iter 20, fold 0, val ll: 0.0611, cor: 0.8443, auc: 0.9886\n",
2115
      "setFeats, augmentation -1\n",
2116
      "Batch 50 device: cuda time passed: 8.296 time per batch: 0.166\n",
2117
      "Batch 100 device: cuda time passed: 14.547 time per batch: 0.145\n",
2118
      "ver 34, iter 21, fold 0, val ll: 0.0609, cor: 0.8446, auc: 0.9888\n",
2119
      "setFeats, augmentation -1\n",
2120
      "Batch 50 device: cuda time passed: 7.769 time per batch: 0.155\n",
2121
      "Batch 100 device: cuda time passed: 14.252 time per batch: 0.143\n",
2122
      "ver 34, iter 22, fold 0, val ll: 0.0610, cor: 0.8447, auc: 0.9886\n",
2123
      "setFeats, augmentation -1\n",
2124
      "Batch 50 device: cuda time passed: 7.987 time per batch: 0.160\n",
2125
      "Batch 100 device: cuda time passed: 14.371 time per batch: 0.144\n",
2126
      "ver 34, iter 23, fold 0, val ll: 0.0610, cor: 0.8445, auc: 0.9888\n",
2127
      "setFeats, augmentation -1\n",
2128
      "Batch 50 device: cuda time passed: 8.220 time per batch: 0.164\n",
2129
      "Batch 100 device: cuda time passed: 14.524 time per batch: 0.145\n",
2130
      "ver 34, iter 24, fold 0, val ll: 0.0612, cor: 0.8440, auc: 0.9887\n",
2131
      "setFeats, augmentation -1\n",
2132
      "Batch 50 device: cuda time passed: 7.965 time per batch: 0.159\n",
2133
      "Batch 100 device: cuda time passed: 14.629 time per batch: 0.146\n",
2134
      "ver 34, iter 25, fold 0, val ll: 0.0608, cor: 0.8451, auc: 0.9888\n",
2135
      "setFeats, augmentation -1\n",
2136
      "Batch 50 device: cuda time passed: 8.405 time per batch: 0.168\n",
2137
      "Batch 100 device: cuda time passed: 14.582 time per batch: 0.146\n",
2138
      "ver 34, iter 26, fold 0, val ll: 0.0608, cor: 0.8449, auc: 0.9888\n",
2139
      "setFeats, augmentation -1\n",
2140
      "Batch 50 device: cuda time passed: 7.919 time per batch: 0.158\n",
2141
      "Batch 100 device: cuda time passed: 14.240 time per batch: 0.142\n",
2142
      "ver 34, iter 27, fold 0, val ll: 0.0607, cor: 0.8450, auc: 0.9889\n",
2143
      "setFeats, augmentation -1\n",
2144
      "Batch 50 device: cuda time passed: 7.292 time per batch: 0.146\n",
2145
      "Batch 100 device: cuda time passed: 14.701 time per batch: 0.147\n",
2146
      "ver 34, iter 28, fold 0, val ll: 0.0608, cor: 0.8449, auc: 0.9888\n",
2147
      "setFeats, augmentation -1\n",
2148
      "Batch 50 device: cuda time passed: 7.592 time per batch: 0.152\n",
2149
      "Batch 100 device: cuda time passed: 13.886 time per batch: 0.139\n",
2150
      "ver 34, iter 29, fold 0, val ll: 0.0609, cor: 0.8444, auc: 0.9888\n",
2151
      "setFeats, augmentation -1\n",
2152
      "Batch 50 device: cuda time passed: 7.837 time per batch: 0.157\n",
2153
      "Batch 100 device: cuda time passed: 14.618 time per batch: 0.146\n",
2154
      "ver 34, iter 30, fold 0, val ll: 0.0609, cor: 0.8448, auc: 0.9887\n",
2155
      "setFeats, augmentation -1\n",
2156
      "Batch 50 device: cuda time passed: 7.908 time per batch: 0.158\n",
2157
      "Batch 100 device: cuda time passed: 14.168 time per batch: 0.142\n",
2158
      "ver 34, iter 31, fold 0, val ll: 0.0609, cor: 0.8445, auc: 0.9888\n",
2159
      "total running time 743.3103971481323\n",
2160
      "total time 14143.665662765503\n",
2161
      "completed epochs: 3 iters starting now: 32\n",
2162
      "adding dummy serieses 12\n",
2163
      "DataSet 12 valid size 4288 fold 1\n",
2164
      "dataset valid: 4288 loader valid: 134\n",
2165
      "loading model model.b3.f1.d12.v34\n",
2166
      "setFeats, augmentation -1\n",
2167
      "Batch 50 device: cuda time passed: 8.167 time per batch: 0.163\n",
2168
      "Batch 100 device: cuda time passed: 14.378 time per batch: 0.144\n",
2169
      "ver 34, iter 0, fold 1, val ll: 0.0597, cor: 0.8453, auc: 0.9897\n",
2170
      "setFeats, augmentation -1\n",
2171
      "Batch 50 device: cuda time passed: 8.083 time per batch: 0.162\n",
2172
      "Batch 100 device: cuda time passed: 14.542 time per batch: 0.145\n",
2173
      "ver 34, iter 1, fold 1, val ll: 0.0594, cor: 0.8461, auc: 0.9898\n",
2174
      "setFeats, augmentation -1\n",
2175
      "Batch 50 device: cuda time passed: 7.404 time per batch: 0.148\n",
2176
      "Batch 100 device: cuda time passed: 13.462 time per batch: 0.135\n",
2177
      "ver 34, iter 2, fold 1, val ll: 0.0597, cor: 0.8455, auc: 0.9897\n",
2178
      "setFeats, augmentation -1\n",
2179
      "Batch 50 device: cuda time passed: 8.099 time per batch: 0.162\n",
2180
      "Batch 100 device: cuda time passed: 14.128 time per batch: 0.141\n",
2181
      "ver 34, iter 3, fold 1, val ll: 0.0595, cor: 0.8458, auc: 0.9897\n",
2182
      "setFeats, augmentation -1\n",
2183
      "Batch 50 device: cuda time passed: 8.284 time per batch: 0.166\n",
2184
      "Batch 100 device: cuda time passed: 14.410 time per batch: 0.144\n",
2185
      "ver 34, iter 4, fold 1, val ll: 0.0597, cor: 0.8456, auc: 0.9897\n",
2186
      "setFeats, augmentation -1\n",
2187
      "Batch 50 device: cuda time passed: 7.496 time per batch: 0.150\n",
2188
      "Batch 100 device: cuda time passed: 14.514 time per batch: 0.145\n",
2189
      "ver 34, iter 5, fold 1, val ll: 0.0597, cor: 0.8454, auc: 0.9898\n",
2190
      "setFeats, augmentation -1\n",
2191
      "Batch 50 device: cuda time passed: 8.689 time per batch: 0.174\n",
2192
      "Batch 100 device: cuda time passed: 14.776 time per batch: 0.148\n",
2193
      "ver 34, iter 6, fold 1, val ll: 0.0596, cor: 0.8457, auc: 0.9897\n",
2194
      "setFeats, augmentation -1\n",
2195
      "Batch 50 device: cuda time passed: 7.608 time per batch: 0.152\n",
2196
      "Batch 100 device: cuda time passed: 14.817 time per batch: 0.148\n",
2197
      "ver 34, iter 7, fold 1, val ll: 0.0597, cor: 0.8454, auc: 0.9897\n",
2198
      "setFeats, augmentation -1\n",
2199
      "Batch 50 device: cuda time passed: 7.919 time per batch: 0.158\n",
2200
      "Batch 100 device: cuda time passed: 13.820 time per batch: 0.138\n",
2201
      "ver 34, iter 8, fold 1, val ll: 0.0597, cor: 0.8454, auc: 0.9897\n",
2202
      "setFeats, augmentation -1\n",
2203
      "Batch 50 device: cuda time passed: 7.601 time per batch: 0.152\n",
2204
      "Batch 100 device: cuda time passed: 14.223 time per batch: 0.142\n",
2205
      "ver 34, iter 9, fold 1, val ll: 0.0598, cor: 0.8453, auc: 0.9897\n",
2206
      "setFeats, augmentation -1\n",
2207
      "Batch 50 device: cuda time passed: 8.007 time per batch: 0.160\n",
2208
      "Batch 100 device: cuda time passed: 14.392 time per batch: 0.144\n",
2209
      "ver 34, iter 10, fold 1, val ll: 0.0596, cor: 0.8456, auc: 0.9897\n",
2210
      "setFeats, augmentation -1\n",
2211
      "Batch 50 device: cuda time passed: 7.733 time per batch: 0.155\n",
2212
      "Batch 100 device: cuda time passed: 14.334 time per batch: 0.143\n",
2213
      "ver 34, iter 11, fold 1, val ll: 0.0595, cor: 0.8461, auc: 0.9898\n",
2214
      "setFeats, augmentation -1\n"
2215
     ]
2216
    },
2217
    {
2218
     "name": "stdout",
2219
     "output_type": "stream",
2220
     "text": [
2221
      "Batch 50 device: cuda time passed: 8.393 time per batch: 0.168\n",
2222
      "Batch 100 device: cuda time passed: 14.742 time per batch: 0.147\n",
2223
      "ver 34, iter 12, fold 1, val ll: 0.0595, cor: 0.8458, auc: 0.9898\n",
2224
      "setFeats, augmentation -1\n",
2225
      "Batch 50 device: cuda time passed: 7.863 time per batch: 0.157\n",
2226
      "Batch 100 device: cuda time passed: 13.937 time per batch: 0.139\n",
2227
      "ver 34, iter 13, fold 1, val ll: 0.0597, cor: 0.8454, auc: 0.9898\n",
2228
      "setFeats, augmentation -1\n",
2229
      "Batch 50 device: cuda time passed: 7.618 time per batch: 0.152\n",
2230
      "Batch 100 device: cuda time passed: 14.623 time per batch: 0.146\n",
2231
      "ver 34, iter 14, fold 1, val ll: 0.0594, cor: 0.8458, auc: 0.9898\n",
2232
      "setFeats, augmentation -1\n",
2233
      "Batch 50 device: cuda time passed: 8.221 time per batch: 0.164\n",
2234
      "Batch 100 device: cuda time passed: 14.541 time per batch: 0.145\n",
2235
      "ver 34, iter 15, fold 1, val ll: 0.0596, cor: 0.8456, auc: 0.9897\n",
2236
      "setFeats, augmentation -1\n",
2237
      "Batch 50 device: cuda time passed: 8.317 time per batch: 0.166\n",
2238
      "Batch 100 device: cuda time passed: 14.128 time per batch: 0.141\n",
2239
      "ver 34, iter 16, fold 1, val ll: 0.0596, cor: 0.8457, auc: 0.9898\n",
2240
      "setFeats, augmentation -1\n",
2241
      "Batch 50 device: cuda time passed: 7.753 time per batch: 0.155\n",
2242
      "Batch 100 device: cuda time passed: 13.980 time per batch: 0.140\n",
2243
      "ver 34, iter 17, fold 1, val ll: 0.0595, cor: 0.8459, auc: 0.9897\n",
2244
      "setFeats, augmentation -1\n",
2245
      "Batch 50 device: cuda time passed: 8.082 time per batch: 0.162\n",
2246
      "Batch 100 device: cuda time passed: 14.130 time per batch: 0.141\n",
2247
      "ver 34, iter 18, fold 1, val ll: 0.0594, cor: 0.8461, auc: 0.9898\n",
2248
      "setFeats, augmentation -1\n",
2249
      "Batch 50 device: cuda time passed: 7.979 time per batch: 0.160\n",
2250
      "Batch 100 device: cuda time passed: 14.019 time per batch: 0.140\n",
2251
      "ver 34, iter 19, fold 1, val ll: 0.0593, cor: 0.8464, auc: 0.9899\n",
2252
      "setFeats, augmentation -1\n",
2253
      "Batch 50 device: cuda time passed: 7.634 time per batch: 0.153\n",
2254
      "Batch 100 device: cuda time passed: 14.590 time per batch: 0.146\n",
2255
      "ver 34, iter 20, fold 1, val ll: 0.0597, cor: 0.8455, auc: 0.9897\n",
2256
      "setFeats, augmentation -1\n",
2257
      "Batch 50 device: cuda time passed: 8.381 time per batch: 0.168\n",
2258
      "Batch 100 device: cuda time passed: 14.405 time per batch: 0.144\n",
2259
      "ver 34, iter 21, fold 1, val ll: 0.0595, cor: 0.8463, auc: 0.9897\n",
2260
      "setFeats, augmentation -1\n",
2261
      "Batch 50 device: cuda time passed: 8.240 time per batch: 0.165\n",
2262
      "Batch 100 device: cuda time passed: 14.160 time per batch: 0.142\n",
2263
      "ver 34, iter 22, fold 1, val ll: 0.0596, cor: 0.8458, auc: 0.9897\n",
2264
      "setFeats, augmentation -1\n",
2265
      "Batch 50 device: cuda time passed: 7.938 time per batch: 0.159\n",
2266
      "Batch 100 device: cuda time passed: 14.408 time per batch: 0.144\n",
2267
      "ver 34, iter 23, fold 1, val ll: 0.0596, cor: 0.8455, auc: 0.9898\n",
2268
      "setFeats, augmentation -1\n",
2269
      "Batch 50 device: cuda time passed: 7.677 time per batch: 0.154\n",
2270
      "Batch 100 device: cuda time passed: 14.507 time per batch: 0.145\n",
2271
      "ver 34, iter 24, fold 1, val ll: 0.0596, cor: 0.8460, auc: 0.9897\n",
2272
      "setFeats, augmentation -1\n",
2273
      "Batch 50 device: cuda time passed: 8.103 time per batch: 0.162\n",
2274
      "Batch 100 device: cuda time passed: 14.345 time per batch: 0.143\n",
2275
      "ver 34, iter 25, fold 1, val ll: 0.0593, cor: 0.8464, auc: 0.9898\n",
2276
      "setFeats, augmentation -1\n",
2277
      "Batch 50 device: cuda time passed: 8.097 time per batch: 0.162\n",
2278
      "Batch 100 device: cuda time passed: 14.301 time per batch: 0.143\n",
2279
      "ver 34, iter 26, fold 1, val ll: 0.0597, cor: 0.8456, auc: 0.9897\n",
2280
      "setFeats, augmentation -1\n",
2281
      "Batch 50 device: cuda time passed: 8.920 time per batch: 0.178\n",
2282
      "Batch 100 device: cuda time passed: 14.919 time per batch: 0.149\n",
2283
      "ver 34, iter 27, fold 1, val ll: 0.0592, cor: 0.8466, auc: 0.9899\n",
2284
      "setFeats, augmentation -1\n",
2285
      "Batch 50 device: cuda time passed: 8.225 time per batch: 0.165\n",
2286
      "Batch 100 device: cuda time passed: 14.347 time per batch: 0.143\n",
2287
      "ver 34, iter 28, fold 1, val ll: 0.0595, cor: 0.8456, auc: 0.9898\n",
2288
      "setFeats, augmentation -1\n",
2289
      "Batch 50 device: cuda time passed: 8.348 time per batch: 0.167\n",
2290
      "Batch 100 device: cuda time passed: 14.347 time per batch: 0.143\n",
2291
      "ver 34, iter 29, fold 1, val ll: 0.0595, cor: 0.8462, auc: 0.9898\n",
2292
      "setFeats, augmentation -1\n",
2293
      "Batch 50 device: cuda time passed: 7.762 time per batch: 0.155\n",
2294
      "Batch 100 device: cuda time passed: 14.563 time per batch: 0.146\n",
2295
      "ver 34, iter 30, fold 1, val ll: 0.0597, cor: 0.8455, auc: 0.9897\n",
2296
      "setFeats, augmentation -1\n",
2297
      "Batch 50 device: cuda time passed: 8.218 time per batch: 0.164\n",
2298
      "Batch 100 device: cuda time passed: 14.506 time per batch: 0.145\n",
2299
      "ver 34, iter 31, fold 1, val ll: 0.0595, cor: 0.8458, auc: 0.9898\n",
2300
      "total running time 723.6225900650024\n",
2301
      "total time 14867.529915571213\n",
2302
      "completed epochs: 3 iters starting now: 32\n",
2303
      "adding dummy serieses 27\n",
2304
      "DataSet 12 valid size 4416 fold 2\n",
2305
      "dataset valid: 4416 loader valid: 138\n",
2306
      "loading model model.b3.f2.d12.v34\n",
2307
      "setFeats, augmentation -1\n",
2308
      "Batch 50 device: cuda time passed: 8.349 time per batch: 0.167\n",
2309
      "Batch 100 device: cuda time passed: 14.921 time per batch: 0.149\n",
2310
      "ver 34, iter 0, fold 2, val ll: 0.0605, cor: 0.8428, auc: 0.9889\n",
2311
      "setFeats, augmentation -1\n",
2312
      "Batch 50 device: cuda time passed: 7.820 time per batch: 0.156\n",
2313
      "Batch 100 device: cuda time passed: 14.591 time per batch: 0.146\n",
2314
      "ver 34, iter 1, fold 2, val ll: 0.0600, cor: 0.8438, auc: 0.9891\n",
2315
      "setFeats, augmentation -1\n",
2316
      "Batch 50 device: cuda time passed: 8.419 time per batch: 0.168\n",
2317
      "Batch 100 device: cuda time passed: 14.394 time per batch: 0.144\n",
2318
      "ver 34, iter 2, fold 2, val ll: 0.0603, cor: 0.8434, auc: 0.9890\n",
2319
      "setFeats, augmentation -1\n",
2320
      "Batch 50 device: cuda time passed: 7.641 time per batch: 0.153\n",
2321
      "Batch 100 device: cuda time passed: 14.569 time per batch: 0.146\n",
2322
      "ver 34, iter 3, fold 2, val ll: 0.0601, cor: 0.8436, auc: 0.9891\n",
2323
      "setFeats, augmentation -1\n",
2324
      "Batch 50 device: cuda time passed: 8.368 time per batch: 0.167\n",
2325
      "Batch 100 device: cuda time passed: 14.677 time per batch: 0.147\n",
2326
      "ver 34, iter 4, fold 2, val ll: 0.0604, cor: 0.8432, auc: 0.9888\n",
2327
      "setFeats, augmentation -1\n",
2328
      "Batch 50 device: cuda time passed: 7.538 time per batch: 0.151\n",
2329
      "Batch 100 device: cuda time passed: 14.921 time per batch: 0.149\n",
2330
      "ver 34, iter 5, fold 2, val ll: 0.0603, cor: 0.8434, auc: 0.9890\n",
2331
      "setFeats, augmentation -1\n",
2332
      "Batch 50 device: cuda time passed: 7.965 time per batch: 0.159\n",
2333
      "Batch 100 device: cuda time passed: 14.174 time per batch: 0.142\n",
2334
      "ver 34, iter 6, fold 2, val ll: 0.0600, cor: 0.8438, auc: 0.9890\n",
2335
      "setFeats, augmentation -1\n",
2336
      "Batch 50 device: cuda time passed: 8.064 time per batch: 0.161\n",
2337
      "Batch 100 device: cuda time passed: 14.308 time per batch: 0.143\n",
2338
      "ver 34, iter 7, fold 2, val ll: 0.0603, cor: 0.8434, auc: 0.9889\n",
2339
      "setFeats, augmentation -1\n",
2340
      "Batch 50 device: cuda time passed: 8.181 time per batch: 0.164\n",
2341
      "Batch 100 device: cuda time passed: 14.395 time per batch: 0.144\n",
2342
      "ver 34, iter 8, fold 2, val ll: 0.0600, cor: 0.8439, auc: 0.9890\n",
2343
      "setFeats, augmentation -1\n",
2344
      "Batch 50 device: cuda time passed: 8.308 time per batch: 0.166\n",
2345
      "Batch 100 device: cuda time passed: 14.523 time per batch: 0.145\n",
2346
      "ver 34, iter 9, fold 2, val ll: 0.0602, cor: 0.8433, auc: 0.9890\n",
2347
      "setFeats, augmentation -1\n",
2348
      "Batch 50 device: cuda time passed: 8.045 time per batch: 0.161\n",
2349
      "Batch 100 device: cuda time passed: 14.723 time per batch: 0.147\n",
2350
      "ver 34, iter 10, fold 2, val ll: 0.0604, cor: 0.8427, auc: 0.9889\n",
2351
      "setFeats, augmentation -1\n",
2352
      "Batch 50 device: cuda time passed: 7.624 time per batch: 0.152\n",
2353
      "Batch 100 device: cuda time passed: 14.710 time per batch: 0.147\n",
2354
      "ver 34, iter 11, fold 2, val ll: 0.0603, cor: 0.8435, auc: 0.9889\n",
2355
      "setFeats, augmentation -1\n",
2356
      "Batch 50 device: cuda time passed: 8.438 time per batch: 0.169\n",
2357
      "Batch 100 device: cuda time passed: 14.611 time per batch: 0.146\n",
2358
      "ver 34, iter 12, fold 2, val ll: 0.0603, cor: 0.8434, auc: 0.9890\n",
2359
      "setFeats, augmentation -1\n",
2360
      "Batch 50 device: cuda time passed: 8.431 time per batch: 0.169\n",
2361
      "Batch 100 device: cuda time passed: 14.251 time per batch: 0.143\n",
2362
      "ver 34, iter 13, fold 2, val ll: 0.0602, cor: 0.8432, auc: 0.9891\n",
2363
      "setFeats, augmentation -1\n",
2364
      "Batch 50 device: cuda time passed: 7.725 time per batch: 0.154\n",
2365
      "Batch 100 device: cuda time passed: 14.859 time per batch: 0.149\n",
2366
      "ver 34, iter 14, fold 2, val ll: 0.0604, cor: 0.8429, auc: 0.9889\n",
2367
      "setFeats, augmentation -1\n",
2368
      "Batch 50 device: cuda time passed: 7.611 time per batch: 0.152\n",
2369
      "Batch 100 device: cuda time passed: 14.069 time per batch: 0.141\n",
2370
      "ver 34, iter 15, fold 2, val ll: 0.0603, cor: 0.8435, auc: 0.9889\n",
2371
      "setFeats, augmentation -1\n",
2372
      "Batch 50 device: cuda time passed: 8.264 time per batch: 0.165\n"
2373
     ]
2374
    },
2375
    {
2376
     "name": "stdout",
2377
     "output_type": "stream",
2378
     "text": [
2379
      "Batch 100 device: cuda time passed: 15.089 time per batch: 0.151\n",
2380
      "ver 34, iter 16, fold 2, val ll: 0.0605, cor: 0.8425, auc: 0.9890\n",
2381
      "setFeats, augmentation -1\n",
2382
      "Batch 50 device: cuda time passed: 7.920 time per batch: 0.158\n",
2383
      "Batch 100 device: cuda time passed: 14.389 time per batch: 0.144\n",
2384
      "ver 34, iter 17, fold 2, val ll: 0.0603, cor: 0.8430, auc: 0.9890\n",
2385
      "setFeats, augmentation -1\n",
2386
      "Batch 50 device: cuda time passed: 8.102 time per batch: 0.162\n",
2387
      "Batch 100 device: cuda time passed: 14.775 time per batch: 0.148\n",
2388
      "ver 34, iter 18, fold 2, val ll: 0.0604, cor: 0.8428, auc: 0.9889\n",
2389
      "setFeats, augmentation -1\n",
2390
      "Batch 50 device: cuda time passed: 8.398 time per batch: 0.168\n",
2391
      "Batch 100 device: cuda time passed: 14.497 time per batch: 0.145\n",
2392
      "ver 34, iter 19, fold 2, val ll: 0.0603, cor: 0.8435, auc: 0.9889\n",
2393
      "setFeats, augmentation -1\n",
2394
      "Batch 50 device: cuda time passed: 8.201 time per batch: 0.164\n",
2395
      "Batch 100 device: cuda time passed: 15.056 time per batch: 0.151\n",
2396
      "ver 34, iter 20, fold 2, val ll: 0.0602, cor: 0.8432, auc: 0.9891\n",
2397
      "setFeats, augmentation -1\n",
2398
      "Batch 50 device: cuda time passed: 7.929 time per batch: 0.159\n",
2399
      "Batch 100 device: cuda time passed: 14.420 time per batch: 0.144\n",
2400
      "ver 34, iter 21, fold 2, val ll: 0.0603, cor: 0.8429, auc: 0.9889\n",
2401
      "setFeats, augmentation -1\n",
2402
      "Batch 50 device: cuda time passed: 8.047 time per batch: 0.161\n",
2403
      "Batch 100 device: cuda time passed: 14.790 time per batch: 0.148\n",
2404
      "ver 34, iter 22, fold 2, val ll: 0.0601, cor: 0.8436, auc: 0.9891\n",
2405
      "setFeats, augmentation -1\n",
2406
      "Batch 50 device: cuda time passed: 8.442 time per batch: 0.169\n",
2407
      "Batch 100 device: cuda time passed: 14.588 time per batch: 0.146\n",
2408
      "ver 34, iter 23, fold 2, val ll: 0.0599, cor: 0.8439, auc: 0.9892\n",
2409
      "setFeats, augmentation -1\n",
2410
      "Batch 50 device: cuda time passed: 8.374 time per batch: 0.167\n",
2411
      "Batch 100 device: cuda time passed: 14.482 time per batch: 0.145\n",
2412
      "ver 34, iter 24, fold 2, val ll: 0.0604, cor: 0.8428, auc: 0.9890\n",
2413
      "setFeats, augmentation -1\n",
2414
      "Batch 50 device: cuda time passed: 8.083 time per batch: 0.162\n",
2415
      "Batch 100 device: cuda time passed: 14.383 time per batch: 0.144\n",
2416
      "ver 34, iter 25, fold 2, val ll: 0.0605, cor: 0.8427, auc: 0.9889\n",
2417
      "setFeats, augmentation -1\n",
2418
      "Batch 50 device: cuda time passed: 8.471 time per batch: 0.169\n",
2419
      "Batch 100 device: cuda time passed: 14.410 time per batch: 0.144\n",
2420
      "ver 34, iter 26, fold 2, val ll: 0.0600, cor: 0.8438, auc: 0.9891\n",
2421
      "setFeats, augmentation -1\n",
2422
      "Batch 50 device: cuda time passed: 8.600 time per batch: 0.172\n",
2423
      "Batch 100 device: cuda time passed: 14.631 time per batch: 0.146\n",
2424
      "ver 34, iter 27, fold 2, val ll: 0.0604, cor: 0.8430, auc: 0.9889\n",
2425
      "setFeats, augmentation -1\n",
2426
      "Batch 50 device: cuda time passed: 8.599 time per batch: 0.172\n",
2427
      "Batch 100 device: cuda time passed: 14.776 time per batch: 0.148\n",
2428
      "ver 34, iter 28, fold 2, val ll: 0.0604, cor: 0.8432, auc: 0.9890\n",
2429
      "setFeats, augmentation -1\n",
2430
      "Batch 50 device: cuda time passed: 7.776 time per batch: 0.156\n",
2431
      "Batch 100 device: cuda time passed: 14.609 time per batch: 0.146\n",
2432
      "ver 34, iter 29, fold 2, val ll: 0.0602, cor: 0.8436, auc: 0.9890\n",
2433
      "setFeats, augmentation -1\n",
2434
      "Batch 50 device: cuda time passed: 8.443 time per batch: 0.169\n",
2435
      "Batch 100 device: cuda time passed: 15.589 time per batch: 0.156\n",
2436
      "ver 34, iter 30, fold 2, val ll: 0.0600, cor: 0.8438, auc: 0.9891\n",
2437
      "setFeats, augmentation -1\n",
2438
      "Batch 50 device: cuda time passed: 7.807 time per batch: 0.156\n",
2439
      "Batch 100 device: cuda time passed: 14.568 time per batch: 0.146\n",
2440
      "ver 34, iter 31, fold 2, val ll: 0.0603, cor: 0.8431, auc: 0.9889\n",
2441
      "total running time 756.8606667518616\n",
2442
      "total time 15624.633174657822\n",
2443
      "completed epochs: 3 iters starting now: 32\n",
2444
      "adding dummy serieses 16\n",
2445
      "DataSet 12 valid size 4352 fold 3\n",
2446
      "dataset valid: 4352 loader valid: 136\n",
2447
      "loading model model.b3.f3.d12.v34\n",
2448
      "setFeats, augmentation -1\n",
2449
      "Batch 50 device: cuda time passed: 8.148 time per batch: 0.163\n",
2450
      "Batch 100 device: cuda time passed: 14.303 time per batch: 0.143\n",
2451
      "ver 34, iter 0, fold 3, val ll: 0.0624, cor: 0.8412, auc: 0.9887\n",
2452
      "setFeats, augmentation -1\n",
2453
      "Batch 50 device: cuda time passed: 8.338 time per batch: 0.167\n",
2454
      "Batch 100 device: cuda time passed: 14.757 time per batch: 0.148\n",
2455
      "ver 34, iter 1, fold 3, val ll: 0.0628, cor: 0.8402, auc: 0.9886\n",
2456
      "setFeats, augmentation -1\n",
2457
      "Batch 50 device: cuda time passed: 8.353 time per batch: 0.167\n",
2458
      "Batch 100 device: cuda time passed: 14.654 time per batch: 0.147\n",
2459
      "ver 34, iter 2, fold 3, val ll: 0.0627, cor: 0.8401, auc: 0.9887\n",
2460
      "setFeats, augmentation -1\n",
2461
      "Batch 50 device: cuda time passed: 7.905 time per batch: 0.158\n",
2462
      "Batch 100 device: cuda time passed: 13.991 time per batch: 0.140\n",
2463
      "ver 34, iter 3, fold 3, val ll: 0.0629, cor: 0.8401, auc: 0.9885\n",
2464
      "setFeats, augmentation -1\n",
2465
      "Batch 50 device: cuda time passed: 8.262 time per batch: 0.165\n",
2466
      "Batch 100 device: cuda time passed: 14.397 time per batch: 0.144\n",
2467
      "ver 34, iter 4, fold 3, val ll: 0.0626, cor: 0.8405, auc: 0.9887\n",
2468
      "setFeats, augmentation -1\n",
2469
      "Batch 50 device: cuda time passed: 8.196 time per batch: 0.164\n",
2470
      "Batch 100 device: cuda time passed: 14.549 time per batch: 0.145\n",
2471
      "ver 34, iter 5, fold 3, val ll: 0.0627, cor: 0.8406, auc: 0.9886\n",
2472
      "setFeats, augmentation -1\n",
2473
      "Batch 50 device: cuda time passed: 8.002 time per batch: 0.160\n",
2474
      "Batch 100 device: cuda time passed: 14.091 time per batch: 0.141\n",
2475
      "ver 34, iter 6, fold 3, val ll: 0.0627, cor: 0.8400, auc: 0.9886\n",
2476
      "setFeats, augmentation -1\n",
2477
      "Batch 50 device: cuda time passed: 8.450 time per batch: 0.169\n",
2478
      "Batch 100 device: cuda time passed: 15.029 time per batch: 0.150\n",
2479
      "ver 34, iter 7, fold 3, val ll: 0.0628, cor: 0.8399, auc: 0.9887\n",
2480
      "setFeats, augmentation -1\n",
2481
      "Batch 50 device: cuda time passed: 8.771 time per batch: 0.175\n",
2482
      "Batch 100 device: cuda time passed: 15.029 time per batch: 0.150\n",
2483
      "ver 34, iter 8, fold 3, val ll: 0.0627, cor: 0.8405, auc: 0.9887\n",
2484
      "setFeats, augmentation -1\n",
2485
      "Batch 50 device: cuda time passed: 8.014 time per batch: 0.160\n",
2486
      "Batch 100 device: cuda time passed: 14.310 time per batch: 0.143\n",
2487
      "ver 34, iter 9, fold 3, val ll: 0.0625, cor: 0.8410, auc: 0.9887\n",
2488
      "setFeats, augmentation -1\n",
2489
      "Batch 50 device: cuda time passed: 7.964 time per batch: 0.159\n",
2490
      "Batch 100 device: cuda time passed: 14.924 time per batch: 0.149\n",
2491
      "ver 34, iter 10, fold 3, val ll: 0.0626, cor: 0.8407, auc: 0.9887\n",
2492
      "setFeats, augmentation -1\n",
2493
      "Batch 50 device: cuda time passed: 8.033 time per batch: 0.161\n",
2494
      "Batch 100 device: cuda time passed: 14.188 time per batch: 0.142\n",
2495
      "ver 34, iter 11, fold 3, val ll: 0.0629, cor: 0.8402, auc: 0.9885\n",
2496
      "setFeats, augmentation -1\n",
2497
      "Batch 50 device: cuda time passed: 7.700 time per batch: 0.154\n",
2498
      "Batch 100 device: cuda time passed: 14.264 time per batch: 0.143\n",
2499
      "ver 34, iter 12, fold 3, val ll: 0.0628, cor: 0.8400, auc: 0.9887\n",
2500
      "setFeats, augmentation -1\n",
2501
      "Batch 50 device: cuda time passed: 8.258 time per batch: 0.165\n",
2502
      "Batch 100 device: cuda time passed: 14.647 time per batch: 0.146\n",
2503
      "ver 34, iter 13, fold 3, val ll: 0.0627, cor: 0.8399, auc: 0.9887\n",
2504
      "setFeats, augmentation -1\n",
2505
      "Batch 50 device: cuda time passed: 7.675 time per batch: 0.154\n",
2506
      "Batch 100 device: cuda time passed: 15.058 time per batch: 0.151\n",
2507
      "ver 34, iter 14, fold 3, val ll: 0.0626, cor: 0.8405, auc: 0.9887\n",
2508
      "setFeats, augmentation -1\n",
2509
      "Batch 50 device: cuda time passed: 7.952 time per batch: 0.159\n",
2510
      "Batch 100 device: cuda time passed: 14.168 time per batch: 0.142\n",
2511
      "ver 34, iter 15, fold 3, val ll: 0.0628, cor: 0.8400, auc: 0.9886\n",
2512
      "setFeats, augmentation -1\n",
2513
      "Batch 50 device: cuda time passed: 8.219 time per batch: 0.164\n",
2514
      "Batch 100 device: cuda time passed: 14.740 time per batch: 0.147\n",
2515
      "ver 34, iter 16, fold 3, val ll: 0.0625, cor: 0.8409, auc: 0.9887\n",
2516
      "setFeats, augmentation -1\n",
2517
      "Batch 50 device: cuda time passed: 7.800 time per batch: 0.156\n",
2518
      "Batch 100 device: cuda time passed: 14.480 time per batch: 0.145\n",
2519
      "ver 34, iter 17, fold 3, val ll: 0.0628, cor: 0.8407, auc: 0.9885\n",
2520
      "setFeats, augmentation -1\n",
2521
      "Batch 50 device: cuda time passed: 8.316 time per batch: 0.166\n",
2522
      "Batch 100 device: cuda time passed: 14.148 time per batch: 0.141\n",
2523
      "ver 34, iter 18, fold 3, val ll: 0.0625, cor: 0.8409, auc: 0.9887\n",
2524
      "setFeats, augmentation -1\n",
2525
      "Batch 50 device: cuda time passed: 7.750 time per batch: 0.155\n",
2526
      "Batch 100 device: cuda time passed: 14.642 time per batch: 0.146\n",
2527
      "ver 34, iter 19, fold 3, val ll: 0.0628, cor: 0.8401, auc: 0.9887\n",
2528
      "setFeats, augmentation -1\n",
2529
      "Batch 50 device: cuda time passed: 8.529 time per batch: 0.171\n",
2530
      "Batch 100 device: cuda time passed: 14.557 time per batch: 0.146\n"
2531
     ]
2532
    },
2533
    {
2534
     "name": "stdout",
2535
     "output_type": "stream",
2536
     "text": [
2537
      "ver 34, iter 20, fold 3, val ll: 0.0627, cor: 0.8404, auc: 0.9886\n",
2538
      "setFeats, augmentation -1\n",
2539
      "Batch 50 device: cuda time passed: 8.006 time per batch: 0.160\n",
2540
      "Batch 100 device: cuda time passed: 14.026 time per batch: 0.140\n",
2541
      "ver 34, iter 21, fold 3, val ll: 0.0628, cor: 0.8402, auc: 0.9887\n",
2542
      "setFeats, augmentation -1\n",
2543
      "Batch 50 device: cuda time passed: 8.349 time per batch: 0.167\n",
2544
      "Batch 100 device: cuda time passed: 14.435 time per batch: 0.144\n",
2545
      "ver 34, iter 22, fold 3, val ll: 0.0627, cor: 0.8403, auc: 0.9887\n",
2546
      "setFeats, augmentation -1\n",
2547
      "Batch 50 device: cuda time passed: 8.184 time per batch: 0.164\n",
2548
      "Batch 100 device: cuda time passed: 14.495 time per batch: 0.145\n",
2549
      "ver 34, iter 23, fold 3, val ll: 0.0627, cor: 0.8403, auc: 0.9888\n",
2550
      "setFeats, augmentation -1\n",
2551
      "Batch 50 device: cuda time passed: 7.872 time per batch: 0.157\n",
2552
      "Batch 100 device: cuda time passed: 14.538 time per batch: 0.145\n",
2553
      "ver 34, iter 24, fold 3, val ll: 0.0623, cor: 0.8408, auc: 0.9888\n",
2554
      "setFeats, augmentation -1\n",
2555
      "Batch 50 device: cuda time passed: 7.826 time per batch: 0.157\n",
2556
      "Batch 100 device: cuda time passed: 15.779 time per batch: 0.158\n",
2557
      "ver 34, iter 25, fold 3, val ll: 0.0627, cor: 0.8404, auc: 0.9886\n",
2558
      "setFeats, augmentation -1\n",
2559
      "Batch 50 device: cuda time passed: 7.871 time per batch: 0.157\n",
2560
      "Batch 100 device: cuda time passed: 14.512 time per batch: 0.145\n",
2561
      "ver 34, iter 26, fold 3, val ll: 0.0627, cor: 0.8404, auc: 0.9887\n",
2562
      "setFeats, augmentation -1\n",
2563
      "Batch 50 device: cuda time passed: 8.599 time per batch: 0.172\n",
2564
      "Batch 100 device: cuda time passed: 14.392 time per batch: 0.144\n",
2565
      "ver 34, iter 27, fold 3, val ll: 0.0627, cor: 0.8403, auc: 0.9886\n",
2566
      "setFeats, augmentation -1\n",
2567
      "Batch 50 device: cuda time passed: 8.072 time per batch: 0.161\n",
2568
      "Batch 100 device: cuda time passed: 14.356 time per batch: 0.144\n",
2569
      "ver 34, iter 28, fold 3, val ll: 0.0627, cor: 0.8403, auc: 0.9886\n",
2570
      "setFeats, augmentation -1\n",
2571
      "Batch 50 device: cuda time passed: 7.696 time per batch: 0.154\n",
2572
      "Batch 100 device: cuda time passed: 14.316 time per batch: 0.143\n",
2573
      "ver 34, iter 29, fold 3, val ll: 0.0627, cor: 0.8403, auc: 0.9886\n",
2574
      "setFeats, augmentation -1\n",
2575
      "Batch 50 device: cuda time passed: 7.732 time per batch: 0.155\n",
2576
      "Batch 100 device: cuda time passed: 14.896 time per batch: 0.149\n",
2577
      "ver 34, iter 30, fold 3, val ll: 0.0626, cor: 0.8407, auc: 0.9887\n",
2578
      "setFeats, augmentation -1\n",
2579
      "Batch 50 device: cuda time passed: 8.028 time per batch: 0.161\n",
2580
      "Batch 100 device: cuda time passed: 14.561 time per batch: 0.146\n",
2581
      "ver 34, iter 31, fold 3, val ll: 0.0625, cor: 0.8406, auc: 0.9888\n",
2582
      "total running time 748.939469575882\n",
2583
      "total time 16373.82175731659\n",
2584
      "completed epochs: 3 iters starting now: 32\n",
2585
      "adding dummy serieses 16\n",
2586
      "DataSet 12 valid size 4384 fold 4\n",
2587
      "dataset valid: 4384 loader valid: 137\n",
2588
      "loading model model.b3.f4.d12.v34\n",
2589
      "setFeats, augmentation -1\n",
2590
      "Batch 50 device: cuda time passed: 8.040 time per batch: 0.161\n",
2591
      "Batch 100 device: cuda time passed: 15.289 time per batch: 0.153\n",
2592
      "ver 34, iter 0, fold 4, val ll: 0.0611, cor: 0.8442, auc: 0.9884\n",
2593
      "setFeats, augmentation -1\n",
2594
      "Batch 50 device: cuda time passed: 7.957 time per batch: 0.159\n",
2595
      "Batch 100 device: cuda time passed: 14.320 time per batch: 0.143\n",
2596
      "ver 34, iter 1, fold 4, val ll: 0.0613, cor: 0.8441, auc: 0.9885\n",
2597
      "setFeats, augmentation -1\n",
2598
      "Batch 50 device: cuda time passed: 8.086 time per batch: 0.162\n",
2599
      "Batch 100 device: cuda time passed: 14.522 time per batch: 0.145\n",
2600
      "ver 34, iter 2, fold 4, val ll: 0.0614, cor: 0.8440, auc: 0.9883\n",
2601
      "setFeats, augmentation -1\n",
2602
      "Batch 50 device: cuda time passed: 9.210 time per batch: 0.184\n",
2603
      "Batch 100 device: cuda time passed: 15.521 time per batch: 0.155\n",
2604
      "ver 34, iter 3, fold 4, val ll: 0.0613, cor: 0.8439, auc: 0.9884\n",
2605
      "setFeats, augmentation -1\n",
2606
      "Batch 50 device: cuda time passed: 7.946 time per batch: 0.159\n",
2607
      "Batch 100 device: cuda time passed: 14.025 time per batch: 0.140\n",
2608
      "ver 34, iter 4, fold 4, val ll: 0.0613, cor: 0.8437, auc: 0.9883\n",
2609
      "setFeats, augmentation -1\n",
2610
      "Batch 50 device: cuda time passed: 7.868 time per batch: 0.157\n",
2611
      "Batch 100 device: cuda time passed: 14.371 time per batch: 0.144\n",
2612
      "ver 34, iter 5, fold 4, val ll: 0.0612, cor: 0.8440, auc: 0.9883\n",
2613
      "setFeats, augmentation -1\n",
2614
      "Batch 50 device: cuda time passed: 8.101 time per batch: 0.162\n",
2615
      "Batch 100 device: cuda time passed: 14.288 time per batch: 0.143\n",
2616
      "ver 34, iter 6, fold 4, val ll: 0.0611, cor: 0.8442, auc: 0.9885\n",
2617
      "setFeats, augmentation -1\n",
2618
      "Batch 50 device: cuda time passed: 7.979 time per batch: 0.160\n",
2619
      "Batch 100 device: cuda time passed: 15.010 time per batch: 0.150\n",
2620
      "ver 34, iter 7, fold 4, val ll: 0.0612, cor: 0.8443, auc: 0.9884\n",
2621
      "setFeats, augmentation -1\n",
2622
      "Batch 50 device: cuda time passed: 8.269 time per batch: 0.165\n",
2623
      "Batch 100 device: cuda time passed: 14.420 time per batch: 0.144\n",
2624
      "ver 34, iter 8, fold 4, val ll: 0.0612, cor: 0.8443, auc: 0.9884\n",
2625
      "setFeats, augmentation -1\n",
2626
      "Batch 50 device: cuda time passed: 8.391 time per batch: 0.168\n",
2627
      "Batch 100 device: cuda time passed: 14.809 time per batch: 0.148\n",
2628
      "ver 34, iter 9, fold 4, val ll: 0.0612, cor: 0.8440, auc: 0.9885\n",
2629
      "setFeats, augmentation -1\n",
2630
      "Batch 50 device: cuda time passed: 8.734 time per batch: 0.175\n",
2631
      "Batch 100 device: cuda time passed: 14.864 time per batch: 0.149\n",
2632
      "ver 34, iter 10, fold 4, val ll: 0.0613, cor: 0.8441, auc: 0.9883\n",
2633
      "setFeats, augmentation -1\n",
2634
      "Batch 50 device: cuda time passed: 7.859 time per batch: 0.157\n",
2635
      "Batch 100 device: cuda time passed: 15.081 time per batch: 0.151\n",
2636
      "ver 34, iter 11, fold 4, val ll: 0.0614, cor: 0.8436, auc: 0.9884\n",
2637
      "setFeats, augmentation -1\n",
2638
      "Batch 50 device: cuda time passed: 7.908 time per batch: 0.158\n",
2639
      "Batch 100 device: cuda time passed: 14.954 time per batch: 0.150\n",
2640
      "ver 34, iter 12, fold 4, val ll: 0.0612, cor: 0.8447, auc: 0.9883\n",
2641
      "setFeats, augmentation -1\n",
2642
      "Batch 50 device: cuda time passed: 7.701 time per batch: 0.154\n",
2643
      "Batch 100 device: cuda time passed: 14.594 time per batch: 0.146\n",
2644
      "ver 34, iter 13, fold 4, val ll: 0.0610, cor: 0.8446, auc: 0.9885\n",
2645
      "setFeats, augmentation -1\n",
2646
      "Batch 50 device: cuda time passed: 7.549 time per batch: 0.151\n",
2647
      "Batch 100 device: cuda time passed: 14.648 time per batch: 0.146\n",
2648
      "ver 34, iter 14, fold 4, val ll: 0.0613, cor: 0.8438, auc: 0.9883\n",
2649
      "setFeats, augmentation -1\n",
2650
      "Batch 50 device: cuda time passed: 7.929 time per batch: 0.159\n",
2651
      "Batch 100 device: cuda time passed: 14.785 time per batch: 0.148\n",
2652
      "ver 34, iter 15, fold 4, val ll: 0.0613, cor: 0.8442, auc: 0.9883\n",
2653
      "setFeats, augmentation -1\n",
2654
      "Batch 50 device: cuda time passed: 8.508 time per batch: 0.170\n",
2655
      "Batch 100 device: cuda time passed: 14.877 time per batch: 0.149\n",
2656
      "ver 34, iter 16, fold 4, val ll: 0.0614, cor: 0.8439, auc: 0.9883\n",
2657
      "setFeats, augmentation -1\n",
2658
      "Batch 50 device: cuda time passed: 8.542 time per batch: 0.171\n",
2659
      "Batch 100 device: cuda time passed: 14.509 time per batch: 0.145\n",
2660
      "ver 34, iter 17, fold 4, val ll: 0.0611, cor: 0.8443, auc: 0.9885\n",
2661
      "setFeats, augmentation -1\n",
2662
      "Batch 50 device: cuda time passed: 7.896 time per batch: 0.158\n",
2663
      "Batch 100 device: cuda time passed: 14.569 time per batch: 0.146\n",
2664
      "ver 34, iter 18, fold 4, val ll: 0.0613, cor: 0.8440, auc: 0.9883\n",
2665
      "setFeats, augmentation -1\n",
2666
      "Batch 50 device: cuda time passed: 7.913 time per batch: 0.158\n",
2667
      "Batch 100 device: cuda time passed: 14.154 time per batch: 0.142\n",
2668
      "ver 34, iter 19, fold 4, val ll: 0.0612, cor: 0.8442, auc: 0.9884\n",
2669
      "setFeats, augmentation -1\n",
2670
      "Batch 50 device: cuda time passed: 7.889 time per batch: 0.158\n",
2671
      "Batch 100 device: cuda time passed: 14.455 time per batch: 0.145\n",
2672
      "ver 34, iter 20, fold 4, val ll: 0.0610, cor: 0.8451, auc: 0.9884\n",
2673
      "setFeats, augmentation -1\n",
2674
      "Batch 50 device: cuda time passed: 7.945 time per batch: 0.159\n",
2675
      "Batch 100 device: cuda time passed: 14.827 time per batch: 0.148\n",
2676
      "ver 34, iter 21, fold 4, val ll: 0.0613, cor: 0.8443, auc: 0.9883\n",
2677
      "setFeats, augmentation -1\n",
2678
      "Batch 50 device: cuda time passed: 7.976 time per batch: 0.160\n",
2679
      "Batch 100 device: cuda time passed: 14.222 time per batch: 0.142\n",
2680
      "ver 34, iter 22, fold 4, val ll: 0.0613, cor: 0.8443, auc: 0.9883\n",
2681
      "setFeats, augmentation -1\n",
2682
      "Batch 50 device: cuda time passed: 8.256 time per batch: 0.165\n",
2683
      "Batch 100 device: cuda time passed: 14.979 time per batch: 0.150\n",
2684
      "ver 34, iter 23, fold 4, val ll: 0.0613, cor: 0.8440, auc: 0.9883\n",
2685
      "setFeats, augmentation -1\n",
2686
      "Batch 50 device: cuda time passed: 8.428 time per batch: 0.169\n",
2687
      "Batch 100 device: cuda time passed: 14.389 time per batch: 0.144\n",
2688
      "ver 34, iter 24, fold 4, val ll: 0.0612, cor: 0.8442, auc: 0.9884\n",
2689
      "setFeats, augmentation -1\n"
2690
     ]
2691
    },
2692
    {
2693
     "name": "stdout",
2694
     "output_type": "stream",
2695
     "text": [
2696
      "Batch 50 device: cuda time passed: 8.386 time per batch: 0.168\n",
2697
      "Batch 100 device: cuda time passed: 14.843 time per batch: 0.148\n",
2698
      "ver 34, iter 25, fold 4, val ll: 0.0610, cor: 0.8443, auc: 0.9885\n",
2699
      "setFeats, augmentation -1\n",
2700
      "Batch 50 device: cuda time passed: 8.176 time per batch: 0.164\n",
2701
      "Batch 100 device: cuda time passed: 14.860 time per batch: 0.149\n",
2702
      "ver 34, iter 26, fold 4, val ll: 0.0614, cor: 0.8437, auc: 0.9883\n",
2703
      "setFeats, augmentation -1\n",
2704
      "Batch 50 device: cuda time passed: 7.796 time per batch: 0.156\n",
2705
      "Batch 100 device: cuda time passed: 14.797 time per batch: 0.148\n",
2706
      "ver 34, iter 27, fold 4, val ll: 0.0611, cor: 0.8441, auc: 0.9885\n",
2707
      "setFeats, augmentation -1\n",
2708
      "Batch 50 device: cuda time passed: 8.082 time per batch: 0.162\n",
2709
      "Batch 100 device: cuda time passed: 14.366 time per batch: 0.144\n",
2710
      "ver 34, iter 28, fold 4, val ll: 0.0611, cor: 0.8444, auc: 0.9884\n",
2711
      "setFeats, augmentation -1\n",
2712
      "Batch 50 device: cuda time passed: 7.765 time per batch: 0.155\n",
2713
      "Batch 100 device: cuda time passed: 14.102 time per batch: 0.141\n",
2714
      "ver 34, iter 29, fold 4, val ll: 0.0613, cor: 0.8439, auc: 0.9884\n",
2715
      "setFeats, augmentation -1\n",
2716
      "Batch 50 device: cuda time passed: 7.825 time per batch: 0.157\n",
2717
      "Batch 100 device: cuda time passed: 14.505 time per batch: 0.145\n",
2718
      "ver 34, iter 30, fold 4, val ll: 0.0612, cor: 0.8438, auc: 0.9885\n",
2719
      "setFeats, augmentation -1\n",
2720
      "Batch 50 device: cuda time passed: 7.774 time per batch: 0.155\n",
2721
      "Batch 100 device: cuda time passed: 14.729 time per batch: 0.147\n",
2722
      "ver 34, iter 31, fold 4, val ll: 0.0612, cor: 0.8444, auc: 0.9884\n",
2723
      "total running time 753.5520458221436\n",
2724
      "total time 17127.620171546936\n",
2725
      "completed epochs: 3 iters starting now: 32\n",
2726
      "adding dummy serieses 9\n",
2727
      "DataSet 13 valid size 4384 fold 0\n",
2728
      "dataset valid: 4384 loader valid: 137\n",
2729
      "loading model model.b3.f0.d13.v34\n",
2730
      "setFeats, augmentation -1\n",
2731
      "Batch 50 device: cuda time passed: 8.140 time per batch: 0.163\n",
2732
      "Batch 100 device: cuda time passed: 14.948 time per batch: 0.149\n",
2733
      "ver 34, iter 0, fold 0, val ll: 0.0607, cor: 0.8444, auc: 0.9891\n",
2734
      "setFeats, augmentation -1\n",
2735
      "Batch 50 device: cuda time passed: 7.765 time per batch: 0.155\n",
2736
      "Batch 100 device: cuda time passed: 15.278 time per batch: 0.153\n",
2737
      "ver 34, iter 1, fold 0, val ll: 0.0609, cor: 0.8441, auc: 0.9889\n",
2738
      "setFeats, augmentation -1\n",
2739
      "Batch 50 device: cuda time passed: 7.926 time per batch: 0.159\n",
2740
      "Batch 100 device: cuda time passed: 14.573 time per batch: 0.146\n",
2741
      "ver 34, iter 2, fold 0, val ll: 0.0609, cor: 0.8436, auc: 0.9890\n",
2742
      "setFeats, augmentation -1\n",
2743
      "Batch 50 device: cuda time passed: 8.082 time per batch: 0.162\n",
2744
      "Batch 100 device: cuda time passed: 14.491 time per batch: 0.145\n",
2745
      "ver 34, iter 3, fold 0, val ll: 0.0607, cor: 0.8445, auc: 0.9890\n",
2746
      "setFeats, augmentation -1\n",
2747
      "Batch 50 device: cuda time passed: 8.787 time per batch: 0.176\n",
2748
      "Batch 100 device: cuda time passed: 15.077 time per batch: 0.151\n",
2749
      "ver 34, iter 4, fold 0, val ll: 0.0609, cor: 0.8440, auc: 0.9890\n",
2750
      "setFeats, augmentation -1\n",
2751
      "Batch 50 device: cuda time passed: 8.126 time per batch: 0.163\n",
2752
      "Batch 100 device: cuda time passed: 14.653 time per batch: 0.147\n",
2753
      "ver 34, iter 5, fold 0, val ll: 0.0609, cor: 0.8441, auc: 0.9889\n",
2754
      "setFeats, augmentation -1\n",
2755
      "Batch 50 device: cuda time passed: 7.745 time per batch: 0.155\n",
2756
      "Batch 100 device: cuda time passed: 14.989 time per batch: 0.150\n",
2757
      "ver 34, iter 6, fold 0, val ll: 0.0607, cor: 0.8443, auc: 0.9891\n",
2758
      "setFeats, augmentation -1\n",
2759
      "Batch 50 device: cuda time passed: 8.878 time per batch: 0.178\n",
2760
      "Batch 100 device: cuda time passed: 14.866 time per batch: 0.149\n",
2761
      "ver 34, iter 7, fold 0, val ll: 0.0606, cor: 0.8448, auc: 0.9890\n",
2762
      "setFeats, augmentation -1\n",
2763
      "Batch 50 device: cuda time passed: 8.488 time per batch: 0.170\n",
2764
      "Batch 100 device: cuda time passed: 14.818 time per batch: 0.148\n",
2765
      "ver 34, iter 8, fold 0, val ll: 0.0607, cor: 0.8443, auc: 0.9891\n",
2766
      "setFeats, augmentation -1\n",
2767
      "Batch 50 device: cuda time passed: 7.945 time per batch: 0.159\n",
2768
      "Batch 100 device: cuda time passed: 14.104 time per batch: 0.141\n",
2769
      "ver 34, iter 9, fold 0, val ll: 0.0609, cor: 0.8439, auc: 0.9889\n",
2770
      "setFeats, augmentation -1\n",
2771
      "Batch 50 device: cuda time passed: 8.374 time per batch: 0.167\n",
2772
      "Batch 100 device: cuda time passed: 15.081 time per batch: 0.151\n",
2773
      "ver 34, iter 10, fold 0, val ll: 0.0608, cor: 0.8439, auc: 0.9890\n",
2774
      "setFeats, augmentation -1\n",
2775
      "Batch 50 device: cuda time passed: 7.888 time per batch: 0.158\n",
2776
      "Batch 100 device: cuda time passed: 14.354 time per batch: 0.144\n",
2777
      "ver 34, iter 11, fold 0, val ll: 0.0610, cor: 0.8437, auc: 0.9889\n",
2778
      "setFeats, augmentation -1\n",
2779
      "Batch 50 device: cuda time passed: 8.434 time per batch: 0.169\n",
2780
      "Batch 100 device: cuda time passed: 14.665 time per batch: 0.147\n",
2781
      "ver 34, iter 12, fold 0, val ll: 0.0609, cor: 0.8436, auc: 0.9889\n",
2782
      "setFeats, augmentation -1\n",
2783
      "Batch 50 device: cuda time passed: 7.988 time per batch: 0.160\n",
2784
      "Batch 100 device: cuda time passed: 14.515 time per batch: 0.145\n",
2785
      "ver 34, iter 13, fold 0, val ll: 0.0607, cor: 0.8446, auc: 0.9890\n",
2786
      "setFeats, augmentation -1\n",
2787
      "Batch 50 device: cuda time passed: 8.329 time per batch: 0.167\n",
2788
      "Batch 100 device: cuda time passed: 14.576 time per batch: 0.146\n",
2789
      "ver 34, iter 14, fold 0, val ll: 0.0608, cor: 0.8444, auc: 0.9890\n",
2790
      "setFeats, augmentation -1\n",
2791
      "Batch 50 device: cuda time passed: 7.950 time per batch: 0.159\n",
2792
      "Batch 100 device: cuda time passed: 14.603 time per batch: 0.146\n",
2793
      "ver 34, iter 15, fold 0, val ll: 0.0609, cor: 0.8441, auc: 0.9889\n",
2794
      "setFeats, augmentation -1\n",
2795
      "Batch 50 device: cuda time passed: 8.156 time per batch: 0.163\n",
2796
      "Batch 100 device: cuda time passed: 14.438 time per batch: 0.144\n",
2797
      "ver 34, iter 16, fold 0, val ll: 0.0607, cor: 0.8443, auc: 0.9890\n",
2798
      "setFeats, augmentation -1\n",
2799
      "Batch 50 device: cuda time passed: 7.921 time per batch: 0.158\n",
2800
      "Batch 100 device: cuda time passed: 14.418 time per batch: 0.144\n",
2801
      "ver 34, iter 17, fold 0, val ll: 0.0608, cor: 0.8442, auc: 0.9890\n",
2802
      "setFeats, augmentation -1\n",
2803
      "Batch 50 device: cuda time passed: 8.887 time per batch: 0.178\n",
2804
      "Batch 100 device: cuda time passed: 15.283 time per batch: 0.153\n",
2805
      "ver 34, iter 18, fold 0, val ll: 0.0610, cor: 0.8434, auc: 0.9890\n",
2806
      "setFeats, augmentation -1\n",
2807
      "Batch 50 device: cuda time passed: 8.868 time per batch: 0.177\n",
2808
      "Batch 100 device: cuda time passed: 15.062 time per batch: 0.151\n",
2809
      "ver 34, iter 19, fold 0, val ll: 0.0608, cor: 0.8443, auc: 0.9890\n",
2810
      "setFeats, augmentation -1\n",
2811
      "Batch 50 device: cuda time passed: 8.072 time per batch: 0.161\n",
2812
      "Batch 100 device: cuda time passed: 14.485 time per batch: 0.145\n",
2813
      "ver 34, iter 20, fold 0, val ll: 0.0608, cor: 0.8441, auc: 0.9890\n",
2814
      "setFeats, augmentation -1\n",
2815
      "Batch 50 device: cuda time passed: 8.026 time per batch: 0.161\n",
2816
      "Batch 100 device: cuda time passed: 15.079 time per batch: 0.151\n",
2817
      "ver 34, iter 21, fold 0, val ll: 0.0609, cor: 0.8440, auc: 0.9890\n",
2818
      "setFeats, augmentation -1\n",
2819
      "Batch 50 device: cuda time passed: 7.831 time per batch: 0.157\n",
2820
      "Batch 100 device: cuda time passed: 14.061 time per batch: 0.141\n",
2821
      "ver 34, iter 22, fold 0, val ll: 0.0609, cor: 0.8440, auc: 0.9889\n",
2822
      "setFeats, augmentation -1\n",
2823
      "Batch 50 device: cuda time passed: 8.278 time per batch: 0.166\n",
2824
      "Batch 100 device: cuda time passed: 15.131 time per batch: 0.151\n",
2825
      "ver 34, iter 23, fold 0, val ll: 0.0608, cor: 0.8442, auc: 0.9890\n",
2826
      "setFeats, augmentation -1\n",
2827
      "Batch 50 device: cuda time passed: 8.023 time per batch: 0.160\n",
2828
      "Batch 100 device: cuda time passed: 15.025 time per batch: 0.150\n",
2829
      "ver 34, iter 24, fold 0, val ll: 0.0609, cor: 0.8443, auc: 0.9889\n",
2830
      "setFeats, augmentation -1\n",
2831
      "Batch 50 device: cuda time passed: 8.205 time per batch: 0.164\n",
2832
      "Batch 100 device: cuda time passed: 14.707 time per batch: 0.147\n",
2833
      "ver 34, iter 25, fold 0, val ll: 0.0606, cor: 0.8443, auc: 0.9891\n",
2834
      "setFeats, augmentation -1\n",
2835
      "Batch 50 device: cuda time passed: 8.591 time per batch: 0.172\n",
2836
      "Batch 100 device: cuda time passed: 14.854 time per batch: 0.149\n",
2837
      "ver 34, iter 26, fold 0, val ll: 0.0608, cor: 0.8440, auc: 0.9890\n",
2838
      "setFeats, augmentation -1\n",
2839
      "Batch 50 device: cuda time passed: 8.166 time per batch: 0.163\n",
2840
      "Batch 100 device: cuda time passed: 14.509 time per batch: 0.145\n",
2841
      "ver 34, iter 27, fold 0, val ll: 0.0606, cor: 0.8443, auc: 0.9891\n",
2842
      "setFeats, augmentation -1\n",
2843
      "Batch 50 device: cuda time passed: 8.502 time per batch: 0.170\n",
2844
      "Batch 100 device: cuda time passed: 14.728 time per batch: 0.147\n",
2845
      "ver 34, iter 28, fold 0, val ll: 0.0608, cor: 0.8443, auc: 0.9889\n",
2846
      "setFeats, augmentation -1\n",
2847
      "Batch 50 device: cuda time passed: 7.983 time per batch: 0.160\n"
2848
     ]
2849
    },
2850
    {
2851
     "name": "stdout",
2852
     "output_type": "stream",
2853
     "text": [
2854
      "Batch 100 device: cuda time passed: 14.007 time per batch: 0.140\n",
2855
      "ver 34, iter 29, fold 0, val ll: 0.0608, cor: 0.8444, auc: 0.9889\n",
2856
      "setFeats, augmentation -1\n",
2857
      "Batch 50 device: cuda time passed: 7.859 time per batch: 0.157\n",
2858
      "Batch 100 device: cuda time passed: 14.598 time per batch: 0.146\n",
2859
      "ver 34, iter 30, fold 0, val ll: 0.0610, cor: 0.8436, auc: 0.9889\n",
2860
      "setFeats, augmentation -1\n",
2861
      "Batch 50 device: cuda time passed: 8.346 time per batch: 0.167\n",
2862
      "Batch 100 device: cuda time passed: 14.582 time per batch: 0.146\n",
2863
      "ver 34, iter 31, fold 0, val ll: 0.0610, cor: 0.8437, auc: 0.9889\n",
2864
      "total running time 753.0968706607819\n",
2865
      "total time 17880.959055662155\n",
2866
      "completed epochs: 3 iters starting now: 32\n",
2867
      "adding dummy serieses 12\n",
2868
      "DataSet 13 valid size 4288 fold 1\n",
2869
      "dataset valid: 4288 loader valid: 134\n",
2870
      "loading model model.b3.f1.d13.v34\n",
2871
      "setFeats, augmentation -1\n",
2872
      "Batch 50 device: cuda time passed: 7.968 time per batch: 0.159\n",
2873
      "Batch 100 device: cuda time passed: 14.655 time per batch: 0.147\n",
2874
      "ver 34, iter 0, fold 1, val ll: 0.0600, cor: 0.8442, auc: 0.9897\n",
2875
      "setFeats, augmentation -1\n",
2876
      "Batch 50 device: cuda time passed: 8.093 time per batch: 0.162\n",
2877
      "Batch 100 device: cuda time passed: 14.374 time per batch: 0.144\n",
2878
      "ver 34, iter 1, fold 1, val ll: 0.0597, cor: 0.8452, auc: 0.9897\n",
2879
      "setFeats, augmentation -1\n",
2880
      "Batch 50 device: cuda time passed: 7.706 time per batch: 0.154\n",
2881
      "Batch 100 device: cuda time passed: 15.249 time per batch: 0.152\n",
2882
      "ver 34, iter 2, fold 1, val ll: 0.0599, cor: 0.8446, auc: 0.9896\n",
2883
      "setFeats, augmentation -1\n",
2884
      "Batch 50 device: cuda time passed: 8.218 time per batch: 0.164\n",
2885
      "Batch 100 device: cuda time passed: 14.650 time per batch: 0.146\n",
2886
      "ver 34, iter 3, fold 1, val ll: 0.0598, cor: 0.8447, auc: 0.9897\n",
2887
      "setFeats, augmentation -1\n",
2888
      "Batch 50 device: cuda time passed: 8.348 time per batch: 0.167\n",
2889
      "Batch 100 device: cuda time passed: 14.312 time per batch: 0.143\n",
2890
      "ver 34, iter 4, fold 1, val ll: 0.0599, cor: 0.8447, auc: 0.9896\n",
2891
      "setFeats, augmentation -1\n",
2892
      "Batch 50 device: cuda time passed: 8.674 time per batch: 0.173\n",
2893
      "Batch 100 device: cuda time passed: 14.201 time per batch: 0.142\n",
2894
      "ver 34, iter 5, fold 1, val ll: 0.0598, cor: 0.8449, auc: 0.9898\n",
2895
      "setFeats, augmentation -1\n",
2896
      "Batch 50 device: cuda time passed: 7.864 time per batch: 0.157\n",
2897
      "Batch 100 device: cuda time passed: 14.388 time per batch: 0.144\n",
2898
      "ver 34, iter 6, fold 1, val ll: 0.0600, cor: 0.8445, auc: 0.9897\n",
2899
      "setFeats, augmentation -1\n",
2900
      "Batch 50 device: cuda time passed: 7.661 time per batch: 0.153\n",
2901
      "Batch 100 device: cuda time passed: 14.390 time per batch: 0.144\n",
2902
      "ver 34, iter 7, fold 1, val ll: 0.0600, cor: 0.8443, auc: 0.9896\n",
2903
      "setFeats, augmentation -1\n",
2904
      "Batch 50 device: cuda time passed: 8.293 time per batch: 0.166\n",
2905
      "Batch 100 device: cuda time passed: 14.570 time per batch: 0.146\n",
2906
      "ver 34, iter 8, fold 1, val ll: 0.0599, cor: 0.8448, auc: 0.9896\n",
2907
      "setFeats, augmentation -1\n",
2908
      "Batch 50 device: cuda time passed: 7.485 time per batch: 0.150\n",
2909
      "Batch 100 device: cuda time passed: 14.665 time per batch: 0.147\n",
2910
      "ver 34, iter 9, fold 1, val ll: 0.0599, cor: 0.8450, auc: 0.9897\n",
2911
      "setFeats, augmentation -1\n",
2912
      "Batch 50 device: cuda time passed: 8.037 time per batch: 0.161\n",
2913
      "Batch 100 device: cuda time passed: 14.668 time per batch: 0.147\n",
2914
      "ver 34, iter 10, fold 1, val ll: 0.0600, cor: 0.8444, auc: 0.9896\n",
2915
      "setFeats, augmentation -1\n",
2916
      "Batch 50 device: cuda time passed: 7.838 time per batch: 0.157\n",
2917
      "Batch 100 device: cuda time passed: 14.042 time per batch: 0.140\n",
2918
      "ver 34, iter 11, fold 1, val ll: 0.0601, cor: 0.8445, auc: 0.9896\n",
2919
      "setFeats, augmentation -1\n",
2920
      "Batch 50 device: cuda time passed: 8.105 time per batch: 0.162\n",
2921
      "Batch 100 device: cuda time passed: 14.470 time per batch: 0.145\n",
2922
      "ver 34, iter 12, fold 1, val ll: 0.0598, cor: 0.8448, auc: 0.9897\n",
2923
      "setFeats, augmentation -1\n",
2924
      "Batch 50 device: cuda time passed: 7.542 time per batch: 0.151\n",
2925
      "Batch 100 device: cuda time passed: 14.744 time per batch: 0.147\n",
2926
      "ver 34, iter 13, fold 1, val ll: 0.0600, cor: 0.8439, auc: 0.9897\n",
2927
      "setFeats, augmentation -1\n",
2928
      "Batch 50 device: cuda time passed: 7.861 time per batch: 0.157\n",
2929
      "Batch 100 device: cuda time passed: 13.964 time per batch: 0.140\n",
2930
      "ver 34, iter 14, fold 1, val ll: 0.0599, cor: 0.8448, auc: 0.9897\n",
2931
      "setFeats, augmentation -1\n",
2932
      "Batch 50 device: cuda time passed: 8.315 time per batch: 0.166\n",
2933
      "Batch 100 device: cuda time passed: 14.649 time per batch: 0.146\n",
2934
      "ver 34, iter 15, fold 1, val ll: 0.0597, cor: 0.8452, auc: 0.9897\n",
2935
      "setFeats, augmentation -1\n",
2936
      "Batch 50 device: cuda time passed: 8.373 time per batch: 0.167\n",
2937
      "Batch 100 device: cuda time passed: 14.443 time per batch: 0.144\n",
2938
      "ver 34, iter 16, fold 1, val ll: 0.0598, cor: 0.8451, auc: 0.9896\n",
2939
      "setFeats, augmentation -1\n",
2940
      "Batch 50 device: cuda time passed: 8.520 time per batch: 0.170\n",
2941
      "Batch 100 device: cuda time passed: 14.838 time per batch: 0.148\n",
2942
      "ver 34, iter 17, fold 1, val ll: 0.0600, cor: 0.8447, auc: 0.9896\n",
2943
      "setFeats, augmentation -1\n",
2944
      "Batch 50 device: cuda time passed: 7.483 time per batch: 0.150\n",
2945
      "Batch 100 device: cuda time passed: 14.632 time per batch: 0.146\n",
2946
      "ver 34, iter 18, fold 1, val ll: 0.0597, cor: 0.8449, auc: 0.9898\n",
2947
      "setFeats, augmentation -1\n",
2948
      "Batch 50 device: cuda time passed: 8.596 time per batch: 0.172\n",
2949
      "Batch 100 device: cuda time passed: 14.762 time per batch: 0.148\n",
2950
      "ver 34, iter 19, fold 1, val ll: 0.0598, cor: 0.8447, auc: 0.9898\n",
2951
      "setFeats, augmentation -1\n",
2952
      "Batch 50 device: cuda time passed: 8.654 time per batch: 0.173\n",
2953
      "Batch 100 device: cuda time passed: 14.672 time per batch: 0.147\n",
2954
      "ver 34, iter 20, fold 1, val ll: 0.0599, cor: 0.8447, auc: 0.9896\n",
2955
      "setFeats, augmentation -1\n",
2956
      "Batch 50 device: cuda time passed: 7.916 time per batch: 0.158\n",
2957
      "Batch 100 device: cuda time passed: 14.640 time per batch: 0.146\n",
2958
      "ver 34, iter 21, fold 1, val ll: 0.0599, cor: 0.8447, auc: 0.9897\n",
2959
      "setFeats, augmentation -1\n",
2960
      "Batch 50 device: cuda time passed: 7.700 time per batch: 0.154\n",
2961
      "Batch 100 device: cuda time passed: 14.168 time per batch: 0.142\n",
2962
      "ver 34, iter 22, fold 1, val ll: 0.0598, cor: 0.8450, auc: 0.9897\n",
2963
      "setFeats, augmentation -1\n",
2964
      "Batch 50 device: cuda time passed: 7.812 time per batch: 0.156\n",
2965
      "Batch 100 device: cuda time passed: 14.426 time per batch: 0.144\n",
2966
      "ver 34, iter 23, fold 1, val ll: 0.0598, cor: 0.8451, auc: 0.9898\n",
2967
      "setFeats, augmentation -1\n",
2968
      "Batch 50 device: cuda time passed: 7.864 time per batch: 0.157\n",
2969
      "Batch 100 device: cuda time passed: 14.724 time per batch: 0.147\n",
2970
      "ver 34, iter 24, fold 1, val ll: 0.0598, cor: 0.8448, auc: 0.9897\n",
2971
      "setFeats, augmentation -1\n",
2972
      "Batch 50 device: cuda time passed: 7.879 time per batch: 0.158\n",
2973
      "Batch 100 device: cuda time passed: 14.961 time per batch: 0.150\n",
2974
      "ver 34, iter 25, fold 1, val ll: 0.0599, cor: 0.8446, auc: 0.9897\n",
2975
      "setFeats, augmentation -1\n",
2976
      "Batch 50 device: cuda time passed: 8.575 time per batch: 0.171\n",
2977
      "Batch 100 device: cuda time passed: 15.369 time per batch: 0.154\n",
2978
      "ver 34, iter 26, fold 1, val ll: 0.0600, cor: 0.8445, auc: 0.9896\n",
2979
      "setFeats, augmentation -1\n",
2980
      "Batch 50 device: cuda time passed: 7.869 time per batch: 0.157\n",
2981
      "Batch 100 device: cuda time passed: 14.553 time per batch: 0.146\n",
2982
      "ver 34, iter 27, fold 1, val ll: 0.0598, cor: 0.8447, auc: 0.9898\n",
2983
      "setFeats, augmentation -1\n",
2984
      "Batch 50 device: cuda time passed: 7.820 time per batch: 0.156\n",
2985
      "Batch 100 device: cuda time passed: 13.851 time per batch: 0.139\n",
2986
      "ver 34, iter 28, fold 1, val ll: 0.0599, cor: 0.8447, auc: 0.9897\n",
2987
      "setFeats, augmentation -1\n",
2988
      "Batch 50 device: cuda time passed: 7.923 time per batch: 0.158\n",
2989
      "Batch 100 device: cuda time passed: 14.560 time per batch: 0.146\n",
2990
      "ver 34, iter 29, fold 1, val ll: 0.0598, cor: 0.8449, auc: 0.9898\n",
2991
      "setFeats, augmentation -1\n",
2992
      "Batch 50 device: cuda time passed: 8.738 time per batch: 0.175\n",
2993
      "Batch 100 device: cuda time passed: 14.646 time per batch: 0.146\n",
2994
      "ver 34, iter 30, fold 1, val ll: 0.0600, cor: 0.8442, auc: 0.9897\n",
2995
      "setFeats, augmentation -1\n",
2996
      "Batch 50 device: cuda time passed: 8.081 time per batch: 0.162\n",
2997
      "Batch 100 device: cuda time passed: 14.355 time per batch: 0.144\n",
2998
      "ver 34, iter 31, fold 1, val ll: 0.0598, cor: 0.8446, auc: 0.9897\n",
2999
      "total running time 734.7177088260651\n",
3000
      "total time 18615.920258760452\n",
3001
      "completed epochs: 3 iters starting now: 32\n",
3002
      "adding dummy serieses 27\n",
3003
      "DataSet 13 valid size 4416 fold 2\n",
3004
      "dataset valid: 4416 loader valid: 138\n",
3005
      "loading model model.b3.f2.d13.v34\n",
3006
      "setFeats, augmentation -1\n",
3007
      "Batch 50 device: cuda time passed: 8.169 time per batch: 0.163\n",
3008
      "Batch 100 device: cuda time passed: 14.845 time per batch: 0.148\n"
3009
     ]
3010
    },
3011
    {
3012
     "name": "stdout",
3013
     "output_type": "stream",
3014
     "text": [
3015
      "ver 34, iter 0, fold 2, val ll: 0.0602, cor: 0.8434, auc: 0.9890\n",
3016
      "setFeats, augmentation -1\n",
3017
      "Batch 50 device: cuda time passed: 8.255 time per batch: 0.165\n",
3018
      "Batch 100 device: cuda time passed: 14.489 time per batch: 0.145\n",
3019
      "ver 34, iter 1, fold 2, val ll: 0.0601, cor: 0.8435, auc: 0.9891\n",
3020
      "setFeats, augmentation -1\n",
3021
      "Batch 50 device: cuda time passed: 8.208 time per batch: 0.164\n",
3022
      "Batch 100 device: cuda time passed: 14.743 time per batch: 0.147\n",
3023
      "ver 34, iter 2, fold 2, val ll: 0.0602, cor: 0.8432, auc: 0.9890\n",
3024
      "setFeats, augmentation -1\n",
3025
      "Batch 50 device: cuda time passed: 7.759 time per batch: 0.155\n",
3026
      "Batch 100 device: cuda time passed: 14.327 time per batch: 0.143\n",
3027
      "ver 34, iter 3, fold 2, val ll: 0.0601, cor: 0.8439, auc: 0.9890\n",
3028
      "setFeats, augmentation -1\n",
3029
      "Batch 50 device: cuda time passed: 7.998 time per batch: 0.160\n",
3030
      "Batch 100 device: cuda time passed: 14.599 time per batch: 0.146\n",
3031
      "ver 34, iter 4, fold 2, val ll: 0.0602, cor: 0.8431, auc: 0.9890\n",
3032
      "setFeats, augmentation -1\n",
3033
      "Batch 50 device: cuda time passed: 8.098 time per batch: 0.162\n",
3034
      "Batch 100 device: cuda time passed: 15.145 time per batch: 0.151\n",
3035
      "ver 34, iter 5, fold 2, val ll: 0.0603, cor: 0.8429, auc: 0.9890\n",
3036
      "setFeats, augmentation -1\n",
3037
      "Batch 50 device: cuda time passed: 8.067 time per batch: 0.161\n",
3038
      "Batch 100 device: cuda time passed: 15.346 time per batch: 0.153\n",
3039
      "ver 34, iter 6, fold 2, val ll: 0.0600, cor: 0.8437, auc: 0.9890\n",
3040
      "setFeats, augmentation -1\n",
3041
      "Batch 50 device: cuda time passed: 7.533 time per batch: 0.151\n",
3042
      "Batch 100 device: cuda time passed: 14.791 time per batch: 0.148\n",
3043
      "ver 34, iter 7, fold 2, val ll: 0.0601, cor: 0.8434, auc: 0.9891\n",
3044
      "setFeats, augmentation -1\n",
3045
      "Batch 50 device: cuda time passed: 8.288 time per batch: 0.166\n",
3046
      "Batch 100 device: cuda time passed: 14.702 time per batch: 0.147\n",
3047
      "ver 34, iter 8, fold 2, val ll: 0.0600, cor: 0.8437, auc: 0.9891\n",
3048
      "setFeats, augmentation -1\n",
3049
      "Batch 50 device: cuda time passed: 8.228 time per batch: 0.165\n",
3050
      "Batch 100 device: cuda time passed: 14.857 time per batch: 0.149\n",
3051
      "ver 34, iter 9, fold 2, val ll: 0.0601, cor: 0.8434, auc: 0.9891\n",
3052
      "setFeats, augmentation -1\n",
3053
      "Batch 50 device: cuda time passed: 7.901 time per batch: 0.158\n",
3054
      "Batch 100 device: cuda time passed: 14.469 time per batch: 0.145\n",
3055
      "ver 34, iter 10, fold 2, val ll: 0.0601, cor: 0.8440, auc: 0.9890\n",
3056
      "setFeats, augmentation -1\n",
3057
      "Batch 50 device: cuda time passed: 8.809 time per batch: 0.176\n",
3058
      "Batch 100 device: cuda time passed: 14.642 time per batch: 0.146\n",
3059
      "ver 34, iter 11, fold 2, val ll: 0.0602, cor: 0.8433, auc: 0.9890\n",
3060
      "setFeats, augmentation -1\n",
3061
      "Batch 50 device: cuda time passed: 8.417 time per batch: 0.168\n",
3062
      "Batch 100 device: cuda time passed: 15.000 time per batch: 0.150\n",
3063
      "ver 34, iter 12, fold 2, val ll: 0.0603, cor: 0.8430, auc: 0.9889\n",
3064
      "setFeats, augmentation -1\n",
3065
      "Batch 50 device: cuda time passed: 7.645 time per batch: 0.153\n",
3066
      "Batch 100 device: cuda time passed: 15.082 time per batch: 0.151\n",
3067
      "ver 34, iter 13, fold 2, val ll: 0.0600, cor: 0.8437, auc: 0.9891\n",
3068
      "setFeats, augmentation -1\n",
3069
      "Batch 50 device: cuda time passed: 8.678 time per batch: 0.174\n",
3070
      "Batch 100 device: cuda time passed: 15.222 time per batch: 0.152\n",
3071
      "ver 34, iter 14, fold 2, val ll: 0.0599, cor: 0.8439, auc: 0.9891\n",
3072
      "setFeats, augmentation -1\n",
3073
      "Batch 50 device: cuda time passed: 8.440 time per batch: 0.169\n",
3074
      "Batch 100 device: cuda time passed: 14.694 time per batch: 0.147\n",
3075
      "ver 34, iter 15, fold 2, val ll: 0.0601, cor: 0.8438, auc: 0.9890\n",
3076
      "setFeats, augmentation -1\n",
3077
      "Batch 50 device: cuda time passed: 7.452 time per batch: 0.149\n",
3078
      "Batch 100 device: cuda time passed: 14.976 time per batch: 0.150\n",
3079
      "ver 34, iter 16, fold 2, val ll: 0.0603, cor: 0.8432, auc: 0.9889\n",
3080
      "setFeats, augmentation -1\n",
3081
      "Batch 50 device: cuda time passed: 8.036 time per batch: 0.161\n",
3082
      "Batch 100 device: cuda time passed: 14.464 time per batch: 0.145\n",
3083
      "ver 34, iter 17, fold 2, val ll: 0.0600, cor: 0.8440, auc: 0.9890\n",
3084
      "setFeats, augmentation -1\n",
3085
      "Batch 50 device: cuda time passed: 8.432 time per batch: 0.169\n",
3086
      "Batch 100 device: cuda time passed: 14.917 time per batch: 0.149\n",
3087
      "ver 34, iter 18, fold 2, val ll: 0.0600, cor: 0.8437, auc: 0.9891\n",
3088
      "setFeats, augmentation -1\n",
3089
      "Batch 50 device: cuda time passed: 8.448 time per batch: 0.169\n",
3090
      "Batch 100 device: cuda time passed: 14.730 time per batch: 0.147\n",
3091
      "ver 34, iter 19, fold 2, val ll: 0.0601, cor: 0.8436, auc: 0.9890\n",
3092
      "setFeats, augmentation -1\n",
3093
      "Batch 50 device: cuda time passed: 7.992 time per batch: 0.160\n",
3094
      "Batch 100 device: cuda time passed: 14.586 time per batch: 0.146\n",
3095
      "ver 34, iter 20, fold 2, val ll: 0.0602, cor: 0.8431, auc: 0.9891\n",
3096
      "setFeats, augmentation -1\n",
3097
      "Batch 50 device: cuda time passed: 7.981 time per batch: 0.160\n",
3098
      "Batch 100 device: cuda time passed: 14.312 time per batch: 0.143\n",
3099
      "ver 34, iter 21, fold 2, val ll: 0.0602, cor: 0.8433, auc: 0.9890\n",
3100
      "setFeats, augmentation -1\n",
3101
      "Batch 50 device: cuda time passed: 7.863 time per batch: 0.157\n",
3102
      "Batch 100 device: cuda time passed: 14.493 time per batch: 0.145\n",
3103
      "ver 34, iter 22, fold 2, val ll: 0.0601, cor: 0.8436, auc: 0.9891\n",
3104
      "setFeats, augmentation -1\n",
3105
      "Batch 50 device: cuda time passed: 8.453 time per batch: 0.169\n",
3106
      "Batch 100 device: cuda time passed: 14.578 time per batch: 0.146\n",
3107
      "ver 34, iter 23, fold 2, val ll: 0.0602, cor: 0.8434, auc: 0.9890\n",
3108
      "setFeats, augmentation -1\n",
3109
      "Batch 50 device: cuda time passed: 8.230 time per batch: 0.165\n",
3110
      "Batch 100 device: cuda time passed: 14.510 time per batch: 0.145\n",
3111
      "ver 34, iter 24, fold 2, val ll: 0.0602, cor: 0.8431, auc: 0.9890\n",
3112
      "setFeats, augmentation -1\n",
3113
      "Batch 50 device: cuda time passed: 8.489 time per batch: 0.170\n",
3114
      "Batch 100 device: cuda time passed: 14.700 time per batch: 0.147\n",
3115
      "ver 34, iter 25, fold 2, val ll: 0.0601, cor: 0.8434, auc: 0.9890\n",
3116
      "setFeats, augmentation -1\n",
3117
      "Batch 50 device: cuda time passed: 7.810 time per batch: 0.156\n",
3118
      "Batch 100 device: cuda time passed: 14.881 time per batch: 0.149\n",
3119
      "ver 34, iter 26, fold 2, val ll: 0.0601, cor: 0.8438, auc: 0.9890\n",
3120
      "setFeats, augmentation -1\n",
3121
      "Batch 50 device: cuda time passed: 8.647 time per batch: 0.173\n",
3122
      "Batch 100 device: cuda time passed: 14.903 time per batch: 0.149\n",
3123
      "ver 34, iter 27, fold 2, val ll: 0.0602, cor: 0.8434, auc: 0.9890\n",
3124
      "setFeats, augmentation -1\n",
3125
      "Batch 50 device: cuda time passed: 8.024 time per batch: 0.160\n",
3126
      "Batch 100 device: cuda time passed: 15.108 time per batch: 0.151\n",
3127
      "ver 34, iter 28, fold 2, val ll: 0.0601, cor: 0.8436, auc: 0.9891\n",
3128
      "setFeats, augmentation -1\n",
3129
      "Batch 50 device: cuda time passed: 8.179 time per batch: 0.164\n",
3130
      "Batch 100 device: cuda time passed: 15.084 time per batch: 0.151\n",
3131
      "ver 34, iter 29, fold 2, val ll: 0.0603, cor: 0.8433, auc: 0.9889\n",
3132
      "setFeats, augmentation -1\n",
3133
      "Batch 50 device: cuda time passed: 8.489 time per batch: 0.170\n",
3134
      "Batch 100 device: cuda time passed: 15.248 time per batch: 0.152\n",
3135
      "ver 34, iter 30, fold 2, val ll: 0.0602, cor: 0.8435, auc: 0.9890\n",
3136
      "setFeats, augmentation -1\n",
3137
      "Batch 50 device: cuda time passed: 7.780 time per batch: 0.156\n",
3138
      "Batch 100 device: cuda time passed: 14.653 time per batch: 0.147\n",
3139
      "ver 34, iter 31, fold 2, val ll: 0.0602, cor: 0.8432, auc: 0.9890\n",
3140
      "total running time 759.7050180435181\n",
3141
      "total time 19375.86375927925\n",
3142
      "completed epochs: 3 iters starting now: 32\n",
3143
      "adding dummy serieses 16\n",
3144
      "DataSet 13 valid size 4352 fold 3\n",
3145
      "dataset valid: 4352 loader valid: 136\n",
3146
      "loading model model.b3.f3.d13.v34\n",
3147
      "setFeats, augmentation -1\n",
3148
      "Batch 50 device: cuda time passed: 8.209 time per batch: 0.164\n",
3149
      "Batch 100 device: cuda time passed: 14.432 time per batch: 0.144\n",
3150
      "ver 34, iter 0, fold 3, val ll: 0.0629, cor: 0.8398, auc: 0.9886\n",
3151
      "setFeats, augmentation -1\n",
3152
      "Batch 50 device: cuda time passed: 8.060 time per batch: 0.161\n",
3153
      "Batch 100 device: cuda time passed: 14.405 time per batch: 0.144\n",
3154
      "ver 34, iter 1, fold 3, val ll: 0.0631, cor: 0.8396, auc: 0.9885\n",
3155
      "setFeats, augmentation -1\n",
3156
      "Batch 50 device: cuda time passed: 7.998 time per batch: 0.160\n",
3157
      "Batch 100 device: cuda time passed: 14.574 time per batch: 0.146\n",
3158
      "ver 34, iter 2, fold 3, val ll: 0.0632, cor: 0.8388, auc: 0.9886\n",
3159
      "setFeats, augmentation -1\n",
3160
      "Batch 50 device: cuda time passed: 8.088 time per batch: 0.162\n",
3161
      "Batch 100 device: cuda time passed: 14.750 time per batch: 0.148\n",
3162
      "ver 34, iter 3, fold 3, val ll: 0.0630, cor: 0.8399, auc: 0.9885\n",
3163
      "setFeats, augmentation -1\n",
3164
      "Batch 50 device: cuda time passed: 7.872 time per batch: 0.157\n",
3165
      "Batch 100 device: cuda time passed: 15.131 time per batch: 0.151\n",
3166
      "ver 34, iter 4, fold 3, val ll: 0.0629, cor: 0.8396, auc: 0.9887\n",
3167
      "setFeats, augmentation -1\n"
3168
     ]
3169
    },
3170
    {
3171
     "name": "stdout",
3172
     "output_type": "stream",
3173
     "text": [
3174
      "Batch 50 device: cuda time passed: 7.764 time per batch: 0.155\n",
3175
      "Batch 100 device: cuda time passed: 14.443 time per batch: 0.144\n",
3176
      "ver 34, iter 5, fold 3, val ll: 0.0630, cor: 0.8392, auc: 0.9886\n",
3177
      "setFeats, augmentation -1\n",
3178
      "Batch 50 device: cuda time passed: 8.046 time per batch: 0.161\n",
3179
      "Batch 100 device: cuda time passed: 14.542 time per batch: 0.145\n",
3180
      "ver 34, iter 6, fold 3, val ll: 0.0630, cor: 0.8400, auc: 0.9886\n",
3181
      "setFeats, augmentation -1\n",
3182
      "Batch 50 device: cuda time passed: 8.369 time per batch: 0.167\n",
3183
      "Batch 100 device: cuda time passed: 14.797 time per batch: 0.148\n",
3184
      "ver 34, iter 7, fold 3, val ll: 0.0629, cor: 0.8397, auc: 0.9886\n",
3185
      "setFeats, augmentation -1\n",
3186
      "Batch 50 device: cuda time passed: 8.385 time per batch: 0.168\n",
3187
      "Batch 100 device: cuda time passed: 15.082 time per batch: 0.151\n",
3188
      "ver 34, iter 8, fold 3, val ll: 0.0630, cor: 0.8396, auc: 0.9887\n",
3189
      "setFeats, augmentation -1\n",
3190
      "Batch 50 device: cuda time passed: 8.233 time per batch: 0.165\n",
3191
      "Batch 100 device: cuda time passed: 14.812 time per batch: 0.148\n",
3192
      "ver 34, iter 9, fold 3, val ll: 0.0630, cor: 0.8392, auc: 0.9887\n",
3193
      "setFeats, augmentation -1\n",
3194
      "Batch 50 device: cuda time passed: 8.719 time per batch: 0.174\n",
3195
      "Batch 100 device: cuda time passed: 15.227 time per batch: 0.152\n",
3196
      "ver 34, iter 10, fold 3, val ll: 0.0631, cor: 0.8393, auc: 0.9886\n",
3197
      "setFeats, augmentation -1\n",
3198
      "Batch 50 device: cuda time passed: 8.403 time per batch: 0.168\n",
3199
      "Batch 100 device: cuda time passed: 14.953 time per batch: 0.150\n",
3200
      "ver 34, iter 11, fold 3, val ll: 0.0630, cor: 0.8396, auc: 0.9886\n",
3201
      "setFeats, augmentation -1\n",
3202
      "Batch 50 device: cuda time passed: 7.628 time per batch: 0.153\n",
3203
      "Batch 100 device: cuda time passed: 15.699 time per batch: 0.157\n",
3204
      "ver 34, iter 12, fold 3, val ll: 0.0632, cor: 0.8390, auc: 0.9886\n",
3205
      "setFeats, augmentation -1\n",
3206
      "Batch 50 device: cuda time passed: 7.949 time per batch: 0.159\n",
3207
      "Batch 100 device: cuda time passed: 14.760 time per batch: 0.148\n",
3208
      "ver 34, iter 13, fold 3, val ll: 0.0630, cor: 0.8398, auc: 0.9885\n",
3209
      "setFeats, augmentation -1\n",
3210
      "Batch 50 device: cuda time passed: 8.102 time per batch: 0.162\n",
3211
      "Batch 100 device: cuda time passed: 14.558 time per batch: 0.146\n",
3212
      "ver 34, iter 14, fold 3, val ll: 0.0631, cor: 0.8389, auc: 0.9886\n",
3213
      "setFeats, augmentation -1\n",
3214
      "Batch 50 device: cuda time passed: 8.056 time per batch: 0.161\n",
3215
      "Batch 100 device: cuda time passed: 14.553 time per batch: 0.146\n",
3216
      "ver 34, iter 15, fold 3, val ll: 0.0633, cor: 0.8389, auc: 0.9886\n",
3217
      "setFeats, augmentation -1\n",
3218
      "Batch 50 device: cuda time passed: 8.476 time per batch: 0.170\n",
3219
      "Batch 100 device: cuda time passed: 14.739 time per batch: 0.147\n",
3220
      "ver 34, iter 16, fold 3, val ll: 0.0630, cor: 0.8395, auc: 0.9887\n",
3221
      "setFeats, augmentation -1\n",
3222
      "Batch 50 device: cuda time passed: 8.207 time per batch: 0.164\n",
3223
      "Batch 100 device: cuda time passed: 14.548 time per batch: 0.145\n",
3224
      "ver 34, iter 17, fold 3, val ll: 0.0632, cor: 0.8392, auc: 0.9886\n",
3225
      "setFeats, augmentation -1\n",
3226
      "Batch 50 device: cuda time passed: 8.305 time per batch: 0.166\n",
3227
      "Batch 100 device: cuda time passed: 14.417 time per batch: 0.144\n",
3228
      "ver 34, iter 18, fold 3, val ll: 0.0630, cor: 0.8397, auc: 0.9887\n",
3229
      "setFeats, augmentation -1\n",
3230
      "Batch 50 device: cuda time passed: 7.726 time per batch: 0.155\n",
3231
      "Batch 100 device: cuda time passed: 15.397 time per batch: 0.154\n",
3232
      "ver 34, iter 19, fold 3, val ll: 0.0630, cor: 0.8398, auc: 0.9886\n",
3233
      "setFeats, augmentation -1\n",
3234
      "Batch 50 device: cuda time passed: 7.974 time per batch: 0.159\n",
3235
      "Batch 100 device: cuda time passed: 14.515 time per batch: 0.145\n",
3236
      "ver 34, iter 20, fold 3, val ll: 0.0634, cor: 0.8385, auc: 0.9885\n",
3237
      "setFeats, augmentation -1\n",
3238
      "Batch 50 device: cuda time passed: 7.898 time per batch: 0.158\n",
3239
      "Batch 100 device: cuda time passed: 14.685 time per batch: 0.147\n",
3240
      "ver 34, iter 21, fold 3, val ll: 0.0629, cor: 0.8399, auc: 0.9886\n",
3241
      "setFeats, augmentation -1\n",
3242
      "Batch 50 device: cuda time passed: 7.814 time per batch: 0.156\n",
3243
      "Batch 100 device: cuda time passed: 15.322 time per batch: 0.153\n",
3244
      "ver 34, iter 22, fold 3, val ll: 0.0631, cor: 0.8392, auc: 0.9885\n",
3245
      "setFeats, augmentation -1\n",
3246
      "Batch 50 device: cuda time passed: 8.499 time per batch: 0.170\n",
3247
      "Batch 100 device: cuda time passed: 15.189 time per batch: 0.152\n",
3248
      "ver 34, iter 23, fold 3, val ll: 0.0630, cor: 0.8397, auc: 0.9887\n",
3249
      "setFeats, augmentation -1\n",
3250
      "Batch 50 device: cuda time passed: 7.627 time per batch: 0.153\n",
3251
      "Batch 100 device: cuda time passed: 14.563 time per batch: 0.146\n",
3252
      "ver 34, iter 24, fold 3, val ll: 0.0630, cor: 0.8396, auc: 0.9886\n",
3253
      "setFeats, augmentation -1\n",
3254
      "Batch 50 device: cuda time passed: 8.223 time per batch: 0.164\n",
3255
      "Batch 100 device: cuda time passed: 14.639 time per batch: 0.146\n",
3256
      "ver 34, iter 25, fold 3, val ll: 0.0630, cor: 0.8394, auc: 0.9886\n",
3257
      "setFeats, augmentation -1\n",
3258
      "Batch 50 device: cuda time passed: 8.255 time per batch: 0.165\n",
3259
      "Batch 100 device: cuda time passed: 14.365 time per batch: 0.144\n",
3260
      "ver 34, iter 26, fold 3, val ll: 0.0629, cor: 0.8395, auc: 0.9887\n",
3261
      "setFeats, augmentation -1\n",
3262
      "Batch 50 device: cuda time passed: 8.287 time per batch: 0.166\n",
3263
      "Batch 100 device: cuda time passed: 14.326 time per batch: 0.143\n",
3264
      "ver 34, iter 27, fold 3, val ll: 0.0631, cor: 0.8390, auc: 0.9886\n",
3265
      "setFeats, augmentation -1\n",
3266
      "Batch 50 device: cuda time passed: 8.750 time per batch: 0.175\n",
3267
      "Batch 100 device: cuda time passed: 14.716 time per batch: 0.147\n",
3268
      "ver 34, iter 28, fold 3, val ll: 0.0630, cor: 0.8395, auc: 0.9886\n",
3269
      "setFeats, augmentation -1\n",
3270
      "Batch 50 device: cuda time passed: 7.832 time per batch: 0.157\n",
3271
      "Batch 100 device: cuda time passed: 14.454 time per batch: 0.145\n",
3272
      "ver 34, iter 29, fold 3, val ll: 0.0629, cor: 0.8397, auc: 0.9887\n",
3273
      "setFeats, augmentation -1\n",
3274
      "Batch 50 device: cuda time passed: 8.247 time per batch: 0.165\n",
3275
      "Batch 100 device: cuda time passed: 14.407 time per batch: 0.144\n",
3276
      "ver 34, iter 30, fold 3, val ll: 0.0630, cor: 0.8393, auc: 0.9886\n",
3277
      "setFeats, augmentation -1\n",
3278
      "Batch 50 device: cuda time passed: 8.728 time per batch: 0.175\n",
3279
      "Batch 100 device: cuda time passed: 14.980 time per batch: 0.150\n",
3280
      "ver 34, iter 31, fold 3, val ll: 0.0634, cor: 0.8388, auc: 0.9884\n",
3281
      "total running time 748.6304786205292\n",
3282
      "total time 20124.740348815918\n",
3283
      "completed epochs: 3 iters starting now: 32\n",
3284
      "adding dummy serieses 16\n",
3285
      "DataSet 13 valid size 4384 fold 4\n",
3286
      "dataset valid: 4384 loader valid: 137\n",
3287
      "loading model model.b3.f4.d13.v34\n",
3288
      "setFeats, augmentation -1\n",
3289
      "Batch 50 device: cuda time passed: 7.680 time per batch: 0.154\n",
3290
      "Batch 100 device: cuda time passed: 14.974 time per batch: 0.150\n",
3291
      "ver 34, iter 0, fold 4, val ll: 0.0616, cor: 0.8422, auc: 0.9882\n",
3292
      "setFeats, augmentation -1\n",
3293
      "Batch 50 device: cuda time passed: 8.849 time per batch: 0.177\n",
3294
      "Batch 100 device: cuda time passed: 15.070 time per batch: 0.151\n",
3295
      "ver 34, iter 1, fold 4, val ll: 0.0618, cor: 0.8420, auc: 0.9881\n",
3296
      "setFeats, augmentation -1\n",
3297
      "Batch 50 device: cuda time passed: 7.719 time per batch: 0.154\n",
3298
      "Batch 100 device: cuda time passed: 14.696 time per batch: 0.147\n",
3299
      "ver 34, iter 2, fold 4, val ll: 0.0618, cor: 0.8423, auc: 0.9880\n",
3300
      "setFeats, augmentation -1\n",
3301
      "Batch 50 device: cuda time passed: 8.469 time per batch: 0.169\n",
3302
      "Batch 100 device: cuda time passed: 14.909 time per batch: 0.149\n",
3303
      "ver 34, iter 3, fold 4, val ll: 0.0618, cor: 0.8418, auc: 0.9881\n",
3304
      "setFeats, augmentation -1\n",
3305
      "Batch 50 device: cuda time passed: 7.626 time per batch: 0.153\n",
3306
      "Batch 100 device: cuda time passed: 14.660 time per batch: 0.147\n",
3307
      "ver 34, iter 4, fold 4, val ll: 0.0618, cor: 0.8419, auc: 0.9881\n",
3308
      "setFeats, augmentation -1\n",
3309
      "Batch 50 device: cuda time passed: 8.404 time per batch: 0.168\n",
3310
      "Batch 100 device: cuda time passed: 14.750 time per batch: 0.147\n",
3311
      "ver 34, iter 5, fold 4, val ll: 0.0619, cor: 0.8417, auc: 0.9880\n",
3312
      "setFeats, augmentation -1\n",
3313
      "Batch 50 device: cuda time passed: 8.219 time per batch: 0.164\n",
3314
      "Batch 100 device: cuda time passed: 14.164 time per batch: 0.142\n",
3315
      "ver 34, iter 6, fold 4, val ll: 0.0616, cor: 0.8422, auc: 0.9881\n",
3316
      "setFeats, augmentation -1\n",
3317
      "Batch 50 device: cuda time passed: 8.061 time per batch: 0.161\n",
3318
      "Batch 100 device: cuda time passed: 14.689 time per batch: 0.147\n",
3319
      "ver 34, iter 7, fold 4, val ll: 0.0617, cor: 0.8423, auc: 0.9881\n",
3320
      "setFeats, augmentation -1\n",
3321
      "Batch 50 device: cuda time passed: 7.938 time per batch: 0.159\n",
3322
      "Batch 100 device: cuda time passed: 14.192 time per batch: 0.142\n",
3323
      "ver 34, iter 8, fold 4, val ll: 0.0617, cor: 0.8421, auc: 0.9881\n",
3324
      "setFeats, augmentation -1\n",
3325
      "Batch 50 device: cuda time passed: 8.663 time per batch: 0.173\n"
3326
     ]
3327
    },
3328
    {
3329
     "name": "stdout",
3330
     "output_type": "stream",
3331
     "text": [
3332
      "Batch 100 device: cuda time passed: 14.850 time per batch: 0.149\n",
3333
      "ver 34, iter 9, fold 4, val ll: 0.0618, cor: 0.8418, auc: 0.9881\n",
3334
      "setFeats, augmentation -1\n",
3335
      "Batch 50 device: cuda time passed: 8.651 time per batch: 0.173\n",
3336
      "Batch 100 device: cuda time passed: 14.415 time per batch: 0.144\n",
3337
      "ver 34, iter 10, fold 4, val ll: 0.0617, cor: 0.8423, auc: 0.9881\n",
3338
      "setFeats, augmentation -1\n",
3339
      "Batch 50 device: cuda time passed: 7.635 time per batch: 0.153\n",
3340
      "Batch 100 device: cuda time passed: 15.063 time per batch: 0.151\n",
3341
      "ver 34, iter 11, fold 4, val ll: 0.0617, cor: 0.8423, auc: 0.9880\n",
3342
      "setFeats, augmentation -1\n",
3343
      "Batch 50 device: cuda time passed: 8.388 time per batch: 0.168\n",
3344
      "Batch 100 device: cuda time passed: 14.779 time per batch: 0.148\n",
3345
      "ver 34, iter 12, fold 4, val ll: 0.0618, cor: 0.8418, auc: 0.9881\n",
3346
      "setFeats, augmentation -1\n",
3347
      "Batch 50 device: cuda time passed: 7.871 time per batch: 0.157\n",
3348
      "Batch 100 device: cuda time passed: 14.803 time per batch: 0.148\n",
3349
      "ver 34, iter 13, fold 4, val ll: 0.0617, cor: 0.8418, auc: 0.9882\n",
3350
      "setFeats, augmentation -1\n",
3351
      "Batch 50 device: cuda time passed: 8.072 time per batch: 0.161\n",
3352
      "Batch 100 device: cuda time passed: 14.433 time per batch: 0.144\n",
3353
      "ver 34, iter 14, fold 4, val ll: 0.0618, cor: 0.8419, auc: 0.9882\n",
3354
      "setFeats, augmentation -1\n",
3355
      "Batch 50 device: cuda time passed: 8.584 time per batch: 0.172\n",
3356
      "Batch 100 device: cuda time passed: 15.084 time per batch: 0.151\n",
3357
      "ver 34, iter 15, fold 4, val ll: 0.0618, cor: 0.8421, auc: 0.9882\n",
3358
      "setFeats, augmentation -1\n",
3359
      "Batch 50 device: cuda time passed: 7.715 time per batch: 0.154\n",
3360
      "Batch 100 device: cuda time passed: 15.169 time per batch: 0.152\n",
3361
      "ver 34, iter 16, fold 4, val ll: 0.0618, cor: 0.8420, auc: 0.9880\n",
3362
      "setFeats, augmentation -1\n",
3363
      "Batch 50 device: cuda time passed: 7.982 time per batch: 0.160\n",
3364
      "Batch 100 device: cuda time passed: 15.258 time per batch: 0.153\n",
3365
      "ver 34, iter 17, fold 4, val ll: 0.0614, cor: 0.8429, auc: 0.9882\n",
3366
      "setFeats, augmentation -1\n",
3367
      "Batch 50 device: cuda time passed: 7.944 time per batch: 0.159\n",
3368
      "Batch 100 device: cuda time passed: 14.644 time per batch: 0.146\n",
3369
      "ver 34, iter 18, fold 4, val ll: 0.0620, cor: 0.8415, auc: 0.9879\n",
3370
      "setFeats, augmentation -1\n",
3371
      "Batch 50 device: cuda time passed: 7.880 time per batch: 0.158\n",
3372
      "Batch 100 device: cuda time passed: 14.599 time per batch: 0.146\n",
3373
      "ver 34, iter 19, fold 4, val ll: 0.0618, cor: 0.8419, auc: 0.9881\n",
3374
      "setFeats, augmentation -1\n",
3375
      "Batch 50 device: cuda time passed: 8.238 time per batch: 0.165\n",
3376
      "Batch 100 device: cuda time passed: 14.637 time per batch: 0.146\n",
3377
      "ver 34, iter 20, fold 4, val ll: 0.0617, cor: 0.8419, auc: 0.9881\n",
3378
      "setFeats, augmentation -1\n",
3379
      "Batch 50 device: cuda time passed: 8.506 time per batch: 0.170\n",
3380
      "Batch 100 device: cuda time passed: 14.624 time per batch: 0.146\n",
3381
      "ver 34, iter 21, fold 4, val ll: 0.0617, cor: 0.8422, auc: 0.9881\n",
3382
      "setFeats, augmentation -1\n",
3383
      "Batch 50 device: cuda time passed: 7.764 time per batch: 0.155\n",
3384
      "Batch 100 device: cuda time passed: 14.605 time per batch: 0.146\n",
3385
      "ver 34, iter 22, fold 4, val ll: 0.0617, cor: 0.8422, auc: 0.9882\n",
3386
      "setFeats, augmentation -1\n",
3387
      "Batch 50 device: cuda time passed: 7.726 time per batch: 0.155\n",
3388
      "Batch 100 device: cuda time passed: 14.446 time per batch: 0.144\n",
3389
      "ver 34, iter 23, fold 4, val ll: 0.0617, cor: 0.8422, auc: 0.9881\n",
3390
      "setFeats, augmentation -1\n",
3391
      "Batch 50 device: cuda time passed: 8.077 time per batch: 0.162\n",
3392
      "Batch 100 device: cuda time passed: 14.366 time per batch: 0.144\n",
3393
      "ver 34, iter 24, fold 4, val ll: 0.0621, cor: 0.8415, auc: 0.9879\n",
3394
      "setFeats, augmentation -1\n",
3395
      "Batch 50 device: cuda time passed: 7.973 time per batch: 0.159\n",
3396
      "Batch 100 device: cuda time passed: 14.194 time per batch: 0.142\n",
3397
      "ver 34, iter 25, fold 4, val ll: 0.0617, cor: 0.8420, auc: 0.9883\n",
3398
      "setFeats, augmentation -1\n",
3399
      "Batch 50 device: cuda time passed: 7.393 time per batch: 0.148\n",
3400
      "Batch 100 device: cuda time passed: 15.218 time per batch: 0.152\n",
3401
      "ver 34, iter 26, fold 4, val ll: 0.0615, cor: 0.8427, auc: 0.9883\n",
3402
      "setFeats, augmentation -1\n",
3403
      "Batch 50 device: cuda time passed: 8.723 time per batch: 0.174\n",
3404
      "Batch 100 device: cuda time passed: 15.348 time per batch: 0.153\n",
3405
      "ver 34, iter 27, fold 4, val ll: 0.0617, cor: 0.8418, auc: 0.9883\n",
3406
      "setFeats, augmentation -1\n",
3407
      "Batch 50 device: cuda time passed: 7.751 time per batch: 0.155\n",
3408
      "Batch 100 device: cuda time passed: 14.291 time per batch: 0.143\n",
3409
      "ver 34, iter 28, fold 4, val ll: 0.0619, cor: 0.8419, auc: 0.9880\n",
3410
      "setFeats, augmentation -1\n",
3411
      "Batch 50 device: cuda time passed: 8.019 time per batch: 0.160\n",
3412
      "Batch 100 device: cuda time passed: 15.011 time per batch: 0.150\n",
3413
      "ver 34, iter 29, fold 4, val ll: 0.0620, cor: 0.8414, auc: 0.9879\n",
3414
      "setFeats, augmentation -1\n",
3415
      "Batch 50 device: cuda time passed: 8.106 time per batch: 0.162\n",
3416
      "Batch 100 device: cuda time passed: 15.439 time per batch: 0.154\n",
3417
      "ver 34, iter 30, fold 4, val ll: 0.0618, cor: 0.8419, auc: 0.9880\n",
3418
      "setFeats, augmentation -1\n",
3419
      "Batch 50 device: cuda time passed: 8.527 time per batch: 0.171\n",
3420
      "Batch 100 device: cuda time passed: 15.187 time per batch: 0.152\n",
3421
      "ver 34, iter 31, fold 4, val ll: 0.0619, cor: 0.8414, auc: 0.9881\n",
3422
      "total running time 749.8348104953766\n",
3423
      "total time 20874.81984090805\n"
3424
     ]
3425
    }
3426
   ],
3427
   "source": [
3428
    "stg = time.time()\n",
3429
    "for ds in (my_datasets3 + my_datasets5):\n",
3430
    "    folds = getNFolds(ds)\n",
3431
    "    for fold in range(folds):\n",
3432
    "        #pp = pickle.load(open(PATH_DISK/'ensemble/oof_d{}_f{}_v{}'.format(ds, fold, VERSION),'rb'))\n",
3433
    "        predictions = oof_one(num_iter=32, bs=32, fold=fold, dataset=ds)\n",
3434
    "        #predictions = np.concatenate([pp,predictions],axis=0)\n",
3435
    "        pickle.dump(predictions, open(PATH_DISK/'ensemble/oof_d{}_f{}_v{}'.format(ds, fold, VERSION),'wb'))\n",
3436
    "        print('total time', time.time() - stg)"
3437
   ]
3438
  },
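  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The loop above writes one OOF array per dataset and fold to `ensemble/oof_d{ds}_f{fold}_v{version}`. A minimal sketch of reading those pickles back (an assumption about the file layout; `load_oof_sketch` is a hypothetical helper -- `getPredsOOF` below presumably does the real assembly):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# hedged sketch only: reload the per-fold OOF pickles written above\n",
    "# assumes each file holds the array dumped by the loop (num_iter x fold_rows x 6)\n",
    "import pickle\n",
    "\n",
    "def load_oof_sketch(ds, version, n_folds):\n",
    "    parts = []\n",
    "    for fold in range(n_folds):\n",
    "        with open(PATH_DISK/'ensemble/oof_d{}_f{}_v{}'.format(ds, fold, version), 'rb') as f:\n",
    "            parts.append(pickle.load(f))\n",
    "    return parts  # one array per fold; mapping folds back to rows is left to getPredsOOF"
   ]
  },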
3439
  {
3440
   "cell_type": "code",
3441
   "execution_count": null,
3442
   "metadata": {},
3443
   "outputs": [],
3444
   "source": [
3445
    "#range(6,13) x8\n",
3446
    "#5113.189187049866\n",
3447
    "#20878.715314388275"
3448
   ]
3449
  },
3450
  {
3451
   "cell_type": "code",
3452
   "execution_count": 12,
3453
   "metadata": {},
3454
   "outputs": [
3455
    {
3456
     "data": {
3457
      "text/plain": [
3458
       "4.231111111111111"
3459
      ]
3460
     },
3461
     "execution_count": 12,
3462
     "metadata": {},
3463
     "output_type": "execute_result"
3464
    }
3465
   ],
3466
   "source": [
3467
    "#total running time 1201.68962931633\n",
3468
    "#total time 15020.348212480545"
3469
   ]
3470
  },
3471
3478
  {
3479
   "cell_type": "code",
3480
   "execution_count": 5,
3481
   "metadata": {},
3482
   "outputs": [],
3483
   "source": [
3484
    "preds_all = getPredsOOF(aug=32,datasets=my_datasets3,datasets5=my_datasets5,ver=33)"
3485
   ]
3486
  },
3487
  {
3488
   "cell_type": "code",
3489
   "execution_count": 6,
3490
   "metadata": {},
3491
   "outputs": [],
3492
   "source": [
3493
    "preds_all = getPredsOOF(aug=32,datasets=my_datasets3,datasets5=my_datasets5,ver=34)"
3494
   ]
3495
  },
3496
  {
3497
   "cell_type": "code",
3498
   "execution_count": 6,
3499
   "metadata": {},
3500
   "outputs": [
3501
    {
3502
     "data": {
3503
      "text/plain": [
3504
       "(5, 32, 752797, 6)"
3505
      ]
3506
     },
3507
     "execution_count": 6,
3508
     "metadata": {},
3509
     "output_type": "execute_result"
3510
    }
3511
   ],
3512
   "source": [
3513
    "preds_all.shape"
3514
   ]
3515
  },
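  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A hedged reading of the axes above (inferred from the call, not verified here): axis 0 indexes the 5 models/datasets, axis 1 the 32 TTA passes (`aug=32`), axis 2 the 752,797 training rows, and axis 3 the 6 targets in `all_ich` order. Averaging over the TTA axis gives one OOF matrix per model, which is what the concatenation with `preds_y` further down relies on."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# TTA-averaged view, one OOF matrix per model (assumed axis order: model, TTA, row, target)\n",
    "preds_all.mean(1).shape  # expected (5, 752797, 6)"
   ]
  },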
3516
  {
3517
   "cell_type": "code",
3518
   "execution_count": 7,
3519
   "metadata": {},
3520
   "outputs": [],
3521
   "source": [
3522
    "#preds_all2 = getPredsOOF(aug=32,datasets=[],datasets5=[14],ver=35)"
3523
   ]
3524
  },
3525
  {
3526
   "cell_type": "code",
3527
   "execution_count": 8,
3528
   "metadata": {
3529
    "scrolled": true
3530
   },
3531
   "outputs": [],
3532
   "source": [
3533
    "#preds_all2.shape"
3534
   ]
3535
  },
3536
  {
3537
   "cell_type": "code",
3538
   "execution_count": 9,
3539
   "metadata": {},
3540
   "outputs": [],
3541
   "source": [
3542
    "#preds_all = np.concatenate([preds_all, preds_all2], axis=0)"
3543
   ]
3544
  },
3545
  {
3546
   "cell_type": "code",
3547
   "execution_count": 7,
3548
   "metadata": {
3549
    "scrolled": true
3550
   },
3551
   "outputs": [
3552
    {
3553
     "data": {
3554
      "text/plain": [
3555
       "array([0.14302406, 0.00424933, 0.04813841, 0.03484004, 0.04746119,\n",
3556
       "       0.06259691])"
3557
      ]
3558
     },
3559
     "execution_count": 7,
3560
     "metadata": {},
3561
     "output_type": "execute_result"
3562
    }
3563
   ],
3564
   "source": [
3565
    "# weighted \n",
3566
    "# [0.15059251, 0.00462303, 0.05034504, 0.03602126, 0.04910235, 0.06661193]\n",
3567
    "\n",
3568
    "# non-weighted\n",
3569
    "# [0.14268919, 0.00409448, 0.04815497, 0.03553187, 0.04749233, 0.06196157]\n",
3570
    "\n",
3571
    "# non-weighted stage2\n",
3572
    "# [0.14302406, 0.00424933, 0.04813841, 0.03484004, 0.04746119, 0.06259691]\n",
3573
    "\n",
3574
    "# weighted stage2\n",
3575
    "# [0.14172827, 0.00397889, 0.04794982, 0.0350942 , 0.04717257, 0.06180147]\n",
3576
    "\n",
3577
    "preds_all.mean((0,1,2))"
3578
   ]
3579
  },
3580
  {
3581
   "cell_type": "code",
3582
   "execution_count": 8,
3583
   "metadata": {},
3584
   "outputs": [],
3585
   "source": [
3586
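    "# OOF prediction files from the teammate's (yuval) models, presumably consumed by getYuvalOOF below\n",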
    "names_y = [\n",
3587
    "    #'model_Densenet201_3_version_classifier_splits_fullhead_resmodel_pool2_3_type_OOF_pred_split_{}.pkl',\n",
3588
    "    #'model_Densenet161_3_version_classifier_splits_fullhead_resmodel_pool2_3_type_OOF_pred_split_{}.pkl',\n",
3589
    "    'model_Densenet169_3_version_classifier_splits_fullhead_resmodel_pool2_stage2_3_type_OOF_pred_split_{}.pkl',\n",
3590
    "    'model_se_resnext101_32x4d_version_classifier_splits_fullhead_resmodel_pool2_stage2_3_type_OOF_pred_split_{}.pkl',\n",
3591
    "    'model_se_resnet101_version_classifier_splits_fullhead_resmodel_pool2_stage2_3_type_OOF_pred_split_{}.pkl'\n",
3592
    "]"
3593
   ]
3594
  },
3595
  {
3596
   "cell_type": "code",
3597
   "execution_count": 9,
3598
   "metadata": {},
3599
   "outputs": [],
3600
   "source": [
3601
    "names_y5 = [\n",
3602
    "    'model_se_resnext101_32x4d_version_new_splits_fullhead_resmodel_pool2_stage2_3_type_OOF_pred_split_{}.pkl',\n",
3603
    "    'model_se_resnet101_version_new_splits_fullhead_resmodel_pool2_stage2_3_type_OOF_pred_split_{}.pkl',\n",
3604
    "    'model_se_resnet101_version_new_splits_focal_fullhead_resmodel_pool2_stage2_3_type_OOF_pred_split_{}.pkl',\n",
3605
    "]"
3606
   ]
3607
  },
3608
  {
3609
   "cell_type": "code",
3610
   "execution_count": 10,
3611
   "metadata": {
3612
    "scrolled": false
3613
   },
3614
   "outputs": [
3615
    {
3616
     "name": "stdout",
3617
     "output_type": "stream",
3618
     "text": [
3619
      "adding yuval_idx\n",
3620
      "adding yuval_idx\n"
3621
     ]
3622
    }
3623
   ],
3624
   "source": [
3625
    "preds_y = getYuvalOOF(train_md=train_md, names=names_y, names5=names_y5)"
3626
   ]
3627
  },
3628
3635
  {
3636
   "cell_type": "code",
3637
   "execution_count": 11,
3638
   "metadata": {
3639
    "scrolled": true
3640
   },
3641
   "outputs": [
3642
    {
3643
     "data": {
3644
      "text/plain": [
3645
       "array([0.14321291, 0.00391866, 0.04807696, 0.03472973, 0.04762993,\n",
3646
       "       0.06291145])"
3647
      ]
3648
     },
3649
     "execution_count": 11,
3650
     "metadata": {},
3651
     "output_type": "execute_result"
3652
    }
3653
   ],
3654
   "source": [
3655
    "preds_y.mean((0,1))"
3656
   ]
3657
  },
3658
  {
3659
   "cell_type": "code",
3660
   "execution_count": 12,
3661
   "metadata": {},
3662
   "outputs": [
3663
    {
3664
     "data": {
3665
      "text/plain": [
3666
       "(6, 752797, 6)"
3667
      ]
3668
     },
3669
     "execution_count": 12,
3670
     "metadata": {},
3671
     "output_type": "execute_result"
3672
    }
3673
   ],
3674
   "source": [
3675
    "preds_y.shape"
3676
   ]
3677
  },
3678
  {
3679
   "cell_type": "code",
3680
   "execution_count": 13,
3681
   "metadata": {},
3682
   "outputs": [],
3683
   "source": [
3684
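    "# average out the TTA axis of the local models and stack them with preds_y -> (11, 752797, 6)\n",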
    "preds_all = np.concatenate([preds_all.mean(1), preds_y], axis=0)"
3685
   ]
3686
  },
3687
  {
3688
   "cell_type": "code",
3689
   "execution_count": 14,
3690
   "metadata": {},
3691
   "outputs": [],
3692
   "source": [
3693
    "del preds_y"
3694
   ]
3695
  },
3696
  {
3697
   "cell_type": "code",
3698
   "execution_count": 15,
3699
   "metadata": {},
3700
   "outputs": [
3701
    {
3702
     "data": {
3703
      "text/plain": [
3704
       "(11, 752797, 6)"
3705
      ]
3706
     },
3707
     "execution_count": 15,
3708
     "metadata": {},
3709
     "output_type": "execute_result"
3710
    }
3711
   ],
3712
   "source": [
3713
    "preds_all.shape"
3714
   ]
3715
  },
3716
  {
3717
   "cell_type": "markdown",
3718
   "metadata": {},
3719
   "source": [
3720
    "## Elimination"
3721
   ]
3722
  },
3723
  {
3724
   "cell_type": "code",
3725
   "execution_count": 16,
3726
   "metadata": {},
3727
   "outputs": [],
3728
   "source": [
3729
    "def getMaskedLoss(preds_all, mask, weighted):\n",
3730
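    "    # class-weighted binary log loss of the ensemble mean over the models kept by mask;\n",
    "    # with weighted=True each row is additionally scaled by its sample weight\n",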
    "    \n",
3731
    "    loss = ((- train_md[all_ich].values * np.log(preds_all[mask].mean(0)) \\\n",
3732
    "            - (1 - train_md[all_ich].values) * np.log(1 - preds_all[mask].mean(0)))*class_weights)\n",
3733
    "    \n",
3734
    "    if weighted:\n",
3735
    "        loss = (loss * np.expand_dims(train_md['weights'].values,axis=1)).mean()\n",
3736
    "    else:\n",
3737
    "        loss = loss.mean()\n",
3738
    "    return loss\n",
3739
    "\n",
3740
    "def produceDSMask(weighted):\n",
3741
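    "    # greedy backward elimination: start from all models, repeatedly drop the single model\n",
    "    # whose removal lowers the OOF loss the most, and stop once no removal helps\n",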
    "    \n",
3742
    "    N = len(preds_all)\n",
3743
    "    ds_mask = np.ones(N, dtype=bool)\n",
3744
    "    best_loss = getMaskedLoss(preds_all, ds_mask, weighted)\n",
3745
    "\n",
3746
    "    for i in range(N):\n",
3747
    "        worst_k = -1\n",
3748
    "        worst_loss = best_loss\n",
3749
    "        print('starting iter',i,'loss',best_loss,'eliminated',(~ds_mask).sum())\n",
3750
    "        for k in range(N):\n",
3751
    "            mask2 = ds_mask.copy()\n",
3752
    "            mask2[k] = False\n",
3753
    "            loss = getMaskedLoss(preds_all, mask2, weighted)\n",
3754
    "            if loss < worst_loss:\n",
3755
    "                worst_loss = loss\n",
3756
    "                worst_k = k\n",
3757
    "        if worst_k >= 0:\n",
3758
    "            print('eliminating',worst_k,'new loss',worst_loss)\n",
3759
    "            ds_mask[worst_k] = False\n",
3760
    "            best_loss = worst_loss\n",
3761
    "        else:\n",
3762
    "            break\n",
3763
    "    \n",
3764
    "    print('removed', np.where(~ds_mask)[0])\n",
3765
    "    \n",
3766
    "    return ds_mask"
3767
   ]
3768
  },
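  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The elimination above greedily minimizes the OOF loss implemented by `getMaskedLoss`: with $\\bar p_{ic}(M) = \\frac{1}{|M|}\\sum_{m \\in M} p^{(m)}_{ic}$ the ensemble mean over the kept models $M$,\n",
    "\n",
    "$$L(M) = \\frac{1}{6N}\\sum_{i=1}^{N}\\sum_{c=1}^{6} w_c\\,\\Big[-y_{ic}\\log \\bar p_{ic}(M) - (1-y_{ic})\\log\\big(1-\\bar p_{ic}(M)\\big)\\Big],$$\n",
    "\n",
    "where $w_c$ are the `class_weights` broadcast across the six target columns; the `weighted=True` pass additionally multiplies each row by its sample weight before averaging."
   ]
  },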
3769
  {
3770
   "cell_type": "code",
3771
   "execution_count": 17,
3772
   "metadata": {},
3773
   "outputs": [
3774
    {
3775
     "name": "stdout",
3776
     "output_type": "stream",
3777
     "text": [
3778
      "starting iter 0 loss 0.057515051687738426 eliminated 0\n",
3779
      "eliminating 1 new loss 0.057490836031309125\n",
3780
      "starting iter 1 loss 0.057490836031309125 eliminated 1\n",
3781
      "removed [1]\n",
3782
      "\n",
3783
      "starting iter 0 loss 0.05434281856430104 eliminated 0\n",
3784
      "eliminating 1 new loss 0.05431221808734583\n",
3785
      "starting iter 1 loss 0.05431221808734583 eliminated 1\n",
3786
      "eliminating 4 new loss 0.054308387316921135\n",
3787
      "starting iter 2 loss 0.054308387316921135 eliminated 2\n",
3788
      "eliminating 5 new loss 0.054305186238067175\n",
3789
      "starting iter 3 loss 0.054305186238067175 eliminated 3\n",
3790
      "removed [1 4 5]\n"
3791
     ]
3792
    }
3793
   ],
3794
   "source": [
3795
    "ds_mask1 = produceDSMask(False)\n",
3796
    "print('')\n",
3797
    "ds_mask2 = produceDSMask(True)"
3798
   ]
3799
  },
3800
  {
3801
   "cell_type": "code",
3802
   "execution_count": 18,
3803
   "metadata": {},
3804
   "outputs": [],
3805
   "source": [
3806
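    "# keep every model that survives either the unweighted or the weighted elimination pass\n",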
    "ds_mask = ds_mask1 | ds_mask2"
3807
   ]
3808
  },
3809
  {
3810
   "cell_type": "code",
3811
   "execution_count": 19,
3812
   "metadata": {
3813
    "scrolled": true
3814
   },
3815
   "outputs": [
3816
    {
3817
     "data": {
3818
      "text/plain": [
3819
       "array([ True, False,  True,  True,  True,  True,  True,  True,  True,\n",
3820
       "        True,  True])"
3821
      ]
3822
     },
3823
     "execution_count": 19,
3824
     "metadata": {},
3825
     "output_type": "execute_result"
3826
    }
3827
   ],
3828
   "source": [
3829
    "ds_mask"
3830
   ]
3831
  },
3832
  {
3833
   "cell_type": "code",
3834
   "execution_count": 20,
3835
   "metadata": {},
3836
   "outputs": [],
3837
   "source": [
3838
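    "# drop the eliminated models; my_len (assumed to count this notebook's own models,\n",
    "# which occupy the first rows of preds_all) is updated to the number that survive\n",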
    "preds_all = preds_all[ds_mask]\n",
3839
    "my_len = ds_mask[:my_len].sum()"
3840
   ]
3841
  },
3842
  {
3843
   "cell_type": "code",
3844
   "execution_count": 21,
3845
   "metadata": {},
3846
   "outputs": [
3847
    {
3848
     "data": {
3849
      "text/plain": [
3850
       "4"
3851
      ]
3852
     },
3853
     "execution_count": 21,
3854
     "metadata": {},
3855
     "output_type": "execute_result"
3856
    }
3857
   ],
3858
   "source": [
3859
    "my_len"
3860
   ]
3861
  },
3862
  {
3863
   "cell_type": "markdown",
3864
   "metadata": {},
3865
   "source": [
3866
    "## OOF analysis"
3867
   ]
3868
  },
3869
  {
3870
   "cell_type": "code",
3871
   "execution_count": 23,
3872
   "metadata": {},
3873
   "outputs": [
3874
    {
3875
     "name": "stdout",
3876
     "output_type": "stream",
3877
     "text": [
3878
      "any                  [0.099 0.096 0.096 0.096 0.1   0.099 0.098 0.097 0.097 0.097]\n",
3879
      "epidural             [0.017 0.015 0.016 0.016 0.016 0.015 0.015 0.015 0.015 0.015]\n",
3880
      "intraparenchymal     [0.043 0.041 0.042 0.042 0.043 0.041 0.042 0.041 0.041 0.041]\n",
3881
      "intraventricular     [0.026 0.025 0.025 0.025 0.026 0.025 0.025 0.025 0.025 0.025]\n",
3882
      "subarachnoid         [0.066 0.064 0.064 0.064 0.066 0.064 0.064 0.064 0.063 0.063]\n",
3883
      "subdural             [0.081 0.079 0.079 0.079 0.08  0.079 0.079 0.079 0.078 0.078]\n"
3884
     ]
3885
    }
3886
   ],
3887
   "source": [
3888
    "np.set_printoptions(precision=3)\n",
3889
    "\n",
3890
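    "# unweighted OOF log loss per model and class: loss has shape (n_models, 6), so each\n",
    "# printed line shows, for one target, the loss of every surviving model\n",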
    "loss = (- train_md[all_ich].values * np.log(preds_all) \\\n",
3891
    "        - (1 - train_md[all_ich].values) * np.log(1 - preds_all)).mean(1)\n",
3892
    "for k in range(6):\n",
3893
    "    print('{:20s} {}'.format(all_ich[k],loss[:,k]))"
3894
   ]
3895
  },
3896
3903
  {
3904
   "cell_type": "code",
3905
   "execution_count": 24,
3906
   "metadata": {
3907
    "scrolled": true
3908
   },
3909
   "outputs": [
3910
    {
3911
     "name": "stdout",
3912
     "output_type": "stream",
3913
     "text": [
3914
      "[[0.142 0.143 0.143 0.143 0.143 0.145 0.144 0.142 0.142 0.143]\n",
3915
      " [0.004 0.005 0.004 0.004 0.003 0.004 0.004 0.004 0.004 0.004]\n",
3916
      " [0.048 0.048 0.048 0.048 0.048 0.048 0.049 0.048 0.048 0.048]\n",
3917
      " [0.035 0.035 0.035 0.035 0.035 0.034 0.035 0.035 0.035 0.035]\n",
3918
      " [0.047 0.047 0.047 0.048 0.048 0.049 0.047 0.047 0.047 0.048]\n",
3919
      " [0.062 0.062 0.063 0.063 0.063 0.065 0.064 0.062 0.062 0.063]]\n"
3920
     ]
3921
    }
3922
   ],
3923
   "source": [
3924
    "print(preds_all.mean(1).transpose())"
3925
   ]
3926
  },
3927
  {
3928
   "cell_type": "code",
3929
   "execution_count": 25,
3930
   "metadata": {},
3931
   "outputs": [
3932
    {
3933
     "data": {
3934
      "text/plain": [
3935
       "[<matplotlib.lines.Line2D at 0x7f82b89bfc10>]"
3936
      ]
3937
     },
3938
     "execution_count": 25,
3939
     "metadata": {},
3940
     "output_type": "execute_result"
3941
    },
3942
    {
3943
     "data": {
3944
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXyU5b3//9dnJitJSMgChIQlJGHfiewoKipoBbW41mqtSq31e7TL6dH2/LTF1nOqp7WbVal1r0VUrLhUDwqEfQm7BAhJICQs2RNIQraZ6/dHBk6MgUzWe5bP8/GYR2buZeZzQ3jPzXVf93WJMQallFK+y2Z1AUoppbqXBr1SSvk4DXqllPJxGvRKKeXjNOiVUsrHBVhdQEuxsbFmyJAhVpehlFJeZceOHSXGmLjW1nlc0A8ZMoSMjAyry1BKKa8iInkXWqdNN0op5eM06JVSysdp0CullI/ToFdKKR+nQa+UUj5Og14ppXycBr1SSvk4j+tHr3peg8PJsbIacoqqOFJSTVxEMNOTY4iPDLW6NKVUF9Cg91O1DQ6WfJTJtiNl5JVW0+D4+rwESbFhTE+OYUZyDJcNiyMiJNCCSpVSnaVB74fqGh088OYO0rOKuXJEX64a1Y/kuHBS+oaTFBNGQUUNm3NK2ZxTysrdJ3hr6zESokJ54c7JjE2MtLp8pVQ7iafNMJWWlmZ0CITu0+Bw8uDfd7Iqs5CnbhzLHVMHXXT7RoeTrUfK+Pd39lBSXc+SBaO5bcrF91FK9TwR2WGMSWttnV6M9SONDiePLNvNqsxCfnH9qDZDHiDAbmNmSiwf/dtspiZF8+iKffz03T3UNjh6oGKlVFfQoPcTDqfh39/dy8f7TvKza0fwnZlJ7do/OiyIV++ZwkOXp7A8o4BFL2wiv6ymm6pVSnUlDXo/YIzhP/+5j/d3HecnVw9j8aXJHXofu034yTXDeemuNPJKa7ht6RZKq+q6uFqlVFfToPcD6w+X8I9t+TxwWTIPXZHa6febO6ofb947leKqOh78+04aHM4uqFIp1V006H2cMYZnP89iQGQIP7yq8yF/zviBUTz9zXFsPVLGkg8zu+x9lVJdT4Pex6VnFbPrWAUPXp5CcIC9S9/7hokJfO/SobyxJY+3th7r0vdWSnUdDXof1nQ2f5iEqFBuSRvYLZ/x03kjuGxYHI9/8CXbjpR1y2copTpHg96HrT1UzJ78Ch66IoWggO75q7bbhD/ePpFB0b34/ps7KCjXnjhKeRoNeh91rm0+sU8oiyYndutnRYYG8te706hvdPL9N3dS36gXZ5XyJG4FvYjME5FDIpItIo+2sv5HIpIpIntF5AsRGdxsnUNEdrseK7uyeHVhqw8Wsbegkv93RQqB9u7/Pk+OC+d/bhnPvuOV/HbVoW7/PKWU+9pMABGxA88B84FRwO0iMqrFZruANGPMOOBd4Olm684aYya4Hgu6qG51EcYYfv/5YQZF9+KmSd17Nt/cNaP7c/uUQSxdl8um7JIe+1yl1MW5c6o3Bcg2xuQaY+qBZcDC5hsYY9YYY841zm4Bei5d1Nd8fqCIfccreaiHzuab+/++MZKk2DB+tHwP5dX1PfrZSqnWuZMCCUB+s9cFrmUXci/wr2avQ0QkQ0S2iMgNre0gIotd22QUFxe7UZK6kKaz+SwGx/TipokX+2vqHr2CAvjjbRMpra7jsRX78LRB85TyR+4EvbSyrNV/vSJyJ5AGPNNs8SDXiGp3AL8Xka/df2+MWWqMSTPGpMXFxblRkrqQrUfK2H/iND+Yk0JAD5/NnzMmIZJ/v2Y4n+4/xdvb89veQSnVrdxJggKgeSfsROBEy41EZC7wc2CBMeb8ACjGmBOun7nAWmBiJ+pVbVixs4CwIDvXjx9gaR33zRrKzJQYfvlhJjnFVZbWopS/cyfotwOpIpIkIkHAbcBXes+IyETgRZpCvqjZ8j4iEux6HgvMBPR++W5S2+DgX/tOMW9MPKFBXXsXbHvZbMJvb55AcKCNH729G6dTm3CUskqbQW+MaQQeAj4DDgDLjTH7RWSJiJzrRfMMEA6806Ib5UggQ0T2AGuA/zbGaNB3k1WZhZypa+Sbk3q+bb41/SND+MX1o9lTUMm7OwusLkcpv+XWVILGmE+AT1ose7zZ87kX2G8TMLYzBSr3rdhZQHxkCNOGxlhdynkLJwzg9c1HeeazQ1w7Np7wYJ29UqmepnfG+ojiM3WsO1zCwgkJ2GytXT+3hojwxPWjKT5Tx3Nrsq0uRym/pEHvIz7ccwKH03CThzTbNDd+YBQ3TUrgb+uPcKxUx8JRqqdp0PuIFbsKGJPQm2H9IqwupVX/MW8EAXbh15/oJRqlepoGvQ84XHiGL4+f5saJnntDcr/eITw4J5nP9heyKUeHR1CqJ2nQ+4AVu45jtwkLLO4735b7Zg8lISqUJR9m4tDulkr1GA16L+d0Gv656zizU2OJiwi2upyLCgm089i1Izh46gzLtuuMVEr1FA16L7clt5STlbU9OkplZ1w3Np4pQ6L53f9mUVXXaHU5SvkFDXovt2LXccKDA7h6VD+rS3GLiPDYtSMora7ntU1HrS5HKb+gQe/FztY7+Ne+k8wf05+QQGuHPGiPiYP6MHdkX15Mz6HybIPV5Sjl8zTovdjaQ0VU1zu4wYLhiDvrh1cN43RtI39bn2t1KUr5PA16L7bqQCGRoYFMTYq2upR2Gz0gkuvGxvO3DUco0wlKlOpWGvReqtHhZPXBIq4Y0deycec764dXpXK2wcGL6TlWl6KUT/POhFDsyCunoqaBuSO94yJsa1L6RnDDhARe23yUotO1VpejlM/SoPdSqzILCbLbuGy4d8/I9fDcVBochr+s1bN6pbqLBr0XMsaw6kAh05NjvH7Y38ExYdySlshbW49xvOKs1eUo5ZM06L1QdlEVeaU1zPWSvvNteeiKVAD+vPqwxZUo5Zs06L3QqgOFAMwd2dfiSrpGQlQod0wdxPKMAk7oWb1SXU6D3gutyixkbEIk8ZGhVpfSZe6dlYQxhje35FldilI+R4PeyxSdqWV3fgVX+UizzTkDo3tx1ah+/GPbMWobHFaXo5RP0aD3MqsPFGEMPhf0APfMTKK8poF/7jpudSlK+RQNei/z+YFCEqJCGdHfM2eS6oypSdGM6B/BKxuPYoyOV69UV9Gg9yI19Y2sP1zCVaP6IeI5E4B3FRHhuzOTOFR4hs25pVaXo5TP0KD3IhsOl1DX6PTJZptzFkwYQHRYEK9sPGp1KUr5DA16L7Iqs5DeIQFM8cJBzNwVEmjnjimD+PxAIcdKa6wuRymfoEHvJRxOw+qDRVw+oi+BXjqImbvunDYYuwivbT5qdSlK+QTfTgwfsju/nNLqeq8exMxd/SNDmD82nuXb86nW6QaV6jQNei+x9lAxdptw6TDvHsTMXffMHMKZukbe21lgdSlKeT0Nei+RnlXMpEFRRIYGWl1Kj5g0qA/jB0bx6sajOJ3a1VKpznAr6EVknog
cEpFsEXm0lfU/EpFMEdkrIl+IyOBm6+4WkcOux91dWby/KKmqY29BJZf5ydn8OXdPH0xuSTXbjpZZXYpSXq3NoBcRO/AcMB8YBdwuIqNabLYLSDPGjAPeBZ527RsNPAFMBaYAT4hIn64r3z9sOFwCwGXDfGMQM3fNG9OfsCA77+/UO2WV6gx3zuinANnGmFxjTD2wDFjYfANjzBpjzLm+cFuARNfza4BVxpgyY0w5sAqY1zWl+4/0rGJiwoIYPaC31aX0qF5BAcwbE88n+07q+DdKdYI7QZ8A5Dd7XeBadiH3Av9qz74islhEMkQko7i42I2S/IfTaViXVczs1FhsNt+7G7Yt35yUwJm6RlZlFlpdilJey52gby1dWr06JiJ3AmnAM+3Z1xiz1BiTZoxJi4vzr3botuw/cZrS6nqvnzKwo6YNjSE+MoQV2vtGqQ5zJ+gLgIHNXicCJ1puJCJzgZ8DC4wxde3ZV11YelYRALNT/TPobTbhhokJrDtcQvGZurZ3UEp9jTtBvx1IFZEkEQkCbgNWNt9ARCYCL9IU8kXNVn0GXC0ifVwXYa92LVNuSs8qZmxCJLHhwVaXYpmbJibgcBpW7tFzBKU6os2gN8Y0Ag/RFNAHgOXGmP0iskREFrg2ewYIB94Rkd0istK1bxnwJE1fFtuBJa5lyg2VZxvYeazC77pVtpTaL4KxCZHafKNUBwW4s5Ex5hPgkxbLHm/2fO5F9n0ZeLmjBfqzTdklOJzGb9vnm7txYgJLPsrk0KkzDPfBsfiV6k56Z6wHS88qJiIkgIkDo6wuxXILJgzAbhNW7NKzeqXaS4PeQxljSM8qZlZKLAE+PlqlO2LDg7lsWBwf7DqBQ4dEUKpdNEE81OGiKk5W1vp9+3xzN01K4NTpWjbn6OxTSrWHBr2HSj/UdOOYv4xW6Y65I/sRERygzTdKtZMGvYdKzypmWL9wBkSFWl2KxwgJtHPduHg+/fIUNfU6Tr1S7tKg90A19Y1sO1KmzTatWDB+ADX1jvMDvSml2qZB74G25JZS73D63WiV7kgbEk1YkJ21WTomklLu0qD3QOmHigkJtJE2REd0bikowMaMlFjSDxVjjPa+UcodGvQeaN3hEqYPjSEk0G51KR5pzvA4jlecJae4yupSlPIKGvQeJq+0miMl1do+fxHn/mzWHtLmG6XcoUHvYdZlabfKtiT26UVK33ANeqXcpEHvYdKzShgYHUpSbJjVpXi0OcPi2HakTLtZKuUGDXoPUt/oZFNOCZcNi0PE/2aTao85w/tS73DqXbJKuUGD3oNk5JVRU+/gUj+dZKQ9LknqQ2igXZtvlHKDBr0HWZdVQoBNmJESa3UpHi84wM6M5BjWZhVpN0ul2qBB70HSs4pJG9KH8GC3pgnwe3OGx5FfdpYjJdVWl6KUR9Og9xBFp2s5cPK09rZph3N3DmvzjVIXp0HvIda5xm7R/vPuGxTTi6GxYaTrcAhKXZQGvYdIzyomNjyYkf17W12KV7lseBxbckupbXBYXYpSHkuD3gM4nIb1h4u5dFgsNpt2q2yPy4bFUdfoZHOudrNU6kI06D3AvuOVVNQ0aLNNB0wbGkNwgO38RC1Kqa/ToPcA6YeKEYHZ2n++3UIC7UxPjtF2eqUuQoPeA6RnFTEuIZLosCCrS/FKlw2L40hJNXml2s1SqdZo0FussqaB3fkV2mzTCef+7NbrrFNKtUqD3mIbsktwGh2tsjOSYsMYEBnCxmwNeqVao0FvsQ3ZxUQEBzBhYJTVpXgtEWFmSiybckpxOHU4BKVa0qC32IbsEqYlxxBg17+KzpiVGkvl2Qb2n6i0uhSlPI5b6SIi80TkkIhki8ijray/VER2ikijiCxqsc4hIrtdj5VdVbgvOFZaQ37ZWWbpIGadNiO56c9wgzbfKPU1bQa9iNiB54D5wCjgdhEZ1WKzY8B3gLdaeYuzxpgJrseCTtbrU86F0kwN+k6LiwhmRP8IbadXqhXunNFPAbKNMbnGmHpgGbCw+QbGmKPGmL2Asxtq9Fkbs0vo3zuE5DidTaorzEyJZfvRch0OQakW3An6BCC/2esC1zJ3hYhIhohsEZEbWttARBa7tskoLvaPG1+cTsPGnBJmpsTqbFJdZFZKLPWNTjKOlltdilIexZ2gby2F2tO1YZAxJg24A/i9iCR/7c2MWWqMSTPGpMXF+Uc3w8yTp6moaWBWaozVpfiMKUnRBNhE2+mVasGdoC8ABjZ7nQiccPcDjDEnXD9zgbXAxHbU57POt88na/t8VwkLDmDSoD7aTq9UC+4E/XYgVUSSRCQIuA1wq/eMiPQRkWDX81hgJpDZ0WJ9ycbsEob1C6dv7xCrS/Eps1Jj+fJEJeXV9VaXopTHaDPojTGNwEPAZ8ABYLkxZr+ILBGRBQAicomIFAA3Ay+KyH7X7iOBDBHZA6wB/tsY4/dBX9vgYNuRMu1t0w1mpsRiDGzK0WGLlTrHrclJjTGfAJ+0WPZ4s+fbaWrSabnfJmBsJ2v0OTvzyqlrdGr/+W4wPjGS8OAANmSXcN24eKvLUcoj6O2YFtiQXYLdJkwdqhdiu1qA3ca0oTHaTq9UMxr0FtiYXcLEgVGEB7v1HyrVTrNSYjhWVsOx0hqrS1HKI2jQ97DKmgb2Hq/U9vluNCu16c92Y46e1SsFGvQ9bnNuCcb8XxiprpccF06/3sHan14pFw36HrYhu4SwILsOS9yNzg9bnF2CU4ctVkqDvqdtzC5l6tAYAnVY4m41KyWW8poGMk+etroUpSynadODCsprOFJSre3zPeDcn/EmbadXSoO+J23KbrqJR/vPd79+rlFB9cYppTToe9TGnBJiw4MY1i/c6lL8wozkWLYdKaPBoaNnK/+mQd9DjDFsyillerIOS9xTZiTHUFPvYG9BhdWlKGUpDfoeklNcRfGZOmYm692wPWXa0BhEmi6AK+XPNOh7yLm24hk6LHGP6RMWxKj43npBVvk9Dfoesim7lISoUAZGh1pdil+ZkRzDzrwKnV5Q+TUN+h7gdBo255YyIzlG2+d72IzkWOodTnbk6fSCyn9p0PeAzJOnqTzbwIwUbZ/vaZckRWO3iTbfKL+mQd8DzoWMts/3vPDgAMYnRmp/euXXNOh7wKacUpLjwuin0wZaYmZKLHsLKjlT22B1KUpZQoO+mzU4nGw7UqZn8xaanhyDw2nYdqTM6lKUsoQGfTfbW1BBTb2DGdp/3jKTBvUhKMCmzTfKb2nQd7NN2aWINN28o6wREmgnbXAfDXrltzTou9mmnFJGxfemT1iQ1aX4tRnJMRw4eZqy6nqrS1Gqx2nQd6PaBgc7jpVrs40HmOEaMXRLrp7VK/+jQd+NduSVU9/o1AuxHmBcQiThwQFs1OkFlR/SoO9Gm3JKsNuES5KirS7F7wXYbUxJimazttMrP6RB34025ZQyPrHpTFJZb0ZyDLkl1ZysPGt1KUr1KA36bnKmtoG9BZU6baAHme66VvLFgSKLK1GqZ2nQd5PtR8twOM35cFHWGxXfmzEJvfnbhiM4nMbqcpTqMRr03W
RjdilBATYmDepjdSnKRUT4wZwUjpRU88m+k1aXo1SPcSvoRWSeiBwSkWwRebSV9ZeKyE4RaRSRRS3W3S0ih12Pu7uqcE+3MbuES4b0ISTQbnUpqplrRvcnOS6M59ZkY4ye1Sv/0GbQi4gdeA6YD4wCbheRUS02OwZ8B3irxb7RwBPAVGAK8ISI+PwpbklVHQdPndFulR7IZhO+PyeFg6fOsOaQttUr/+DOGf0UINsYk2uMqQeWAQubb2CMOWqM2Qs4W+x7DbDKGFNmjCkHVgHzuqBuj3auC59eiPVMCycMICEqlD+v1rN65R/cCfoEIL/Z6wLXMne4ta+ILBaRDBHJKC4udvOtPdemnBIiQgIYmxBpdSmqFYF2G9+7bCg7j1WwVUe0VH7AnaBvbe47d0+D3NrXGLPUGJNmjEmLi4tz860918bsUqYNjcFu02kDPdUtaQOJDQ/iuTXZVpeiVLdzJ+gLgIHNXicCJ9x8/87s65Xyy2o4VlbDTO1W6dFCAu3cO2so6w+XsLegwupylOpW7gT9diBVRJJEJAi4DVjp5vt/BlwtIn1cF2Gvdi3zWefGUtH2ec9357RBRIQE8Jc1OVaXolS3ajPojTGNwEM0BfQBYLkxZr+ILBGRBQAicomIFAA3Ay+KyH7XvmXAkzR9WWwHlriW+ayNOaX0jQgmpW+41aWoNkSEBPKdGUP4dP8pDheesbocpbqNW/3ojTGfGGOGGWOSjTG/di173Biz0vV8uzEm0RgTZoyJMcaMbrbvy8aYFNfjle45DM9gjGFzTgkzkmMQ0fZ5b3DPzCSCAmy8sSXP6lKU6jZ6Z2wXOlR4hpKq+vNjnyvPFx0WxFWj+vHhnhPUN7bsHayUb9Cg70Ibs7X/vDe6aWIC5TUNpGd5f9depVqjQd+FNmWXkBQbRkJUqNWlqHa4dFgcMWFBvL+rwOpSlOoWGvRdpNHhZOuRMp020AsF2m1cP34An2cWUVnTYHU5SnU5Dfousqegkqq6Rm228VLfnJRIvcPJxzqqpfJBGvRdZFN2CSIwfaie0XujMQm9Sekbrs03yidp0HeRDdkljIrvTZ+wIKtLUR0gItw0KYHtR8s5VlpjdTlKdSkN+i5wtt7BrmMV2mzj5W6YkIAIvL/ruNWlKNWlNOi7wPajZdQ7nHoh1ssNiApl+tAYVuwq0OGLlU/RoO8CG3NKCLQLU5KirS5FddKNExPIK61h5zEd6Ez5Dg36LpB+qJjJg/vQKyjA6lJUJ80fG09IoE0vyiqfokHfSccrznLw1BmuGNHX6lJUFwgPDuCa0f35cM9J6hodVpejVJfQoO+kNQeb5h3VoPcdN05MoPJsw/m/W6W8nQZ9J605WMTA6FCS43RYYl8xKyWWhKhQfrcqSwc6Uz5Bg74TahscbMwp4coR/XRYYh8SYLfx5A2jySqsYuk6nZREeT8N+k7YnFNKbYOTy7XZxudcMaIf142N54+rszlSUm11OUp1igZ9J6w+WERooJ2p2q3SJz1x/SiCA2z8bMU+7VevvJoGfQcZY1h9sIhZqbGEBNqtLkd1g769Q3h0/gg255by7g7tbqm8lwZ9B2UVVnG84qz2tvFxt18yiLTBffj1JwcoraqzuhylOkSDvoNWu7reXT5cg96X2WzCf900luq6Rn718QGry1GqQzToO2jNwSJGxfemf2SI1aWobpbaL4IHLkvm/V3HWX9YpxtU3keDvgMqaurZcaycK0fq2by/+MHlKQyNDePn73/J2fqL3zGbX1ZDfpkOdaw8hwZ9B6RnFeNwGu1W6UdCAu38+saxHCur4Y+rD19wu8LTtSx8biPX/3kDR7VbpvIQGvQdsOZgEdFhQYxPjLK6FNWDpifHcPPkRJauy+XAydNfW+9wGh5Ztpuz9Q4E+O5r23UOWuURNOjbyeE0pGcVM2dYHHab3g3rb3527UiiQgN5dMU+HM6v9q3/8+psNueWsmThaF78dhr5ZTU8+NYOGhw6jIKylgZ9O+3OL6e8poErtH3eL/UJC+Lx60exJ7+CN7fknV++JbeUP3yRxY0TE1g0OZEpSdH8103j2JhdyuMf7NcbrpSlNOjb6YsDRdhtwuzUOKtLURZZMH4Alw6L4+lPD3Ki4ixl1fU8vGwXg2PCePKGMefHPVo0OZEH5yTzj23HeHnjUWuLbqb4TB1XP5vOhsMlVpeieohbQS8i80TkkIhki8ijrawPFpG3Xeu3isgQ1/IhInJWRHa7Hi90bfk9yxjDJ/tOMmVINJGhgVaXoywiIvz6hjE4jOHxD/bzk3f2UF7dwJ9un0h48Fcnn/nJ1cOZN7o/v/o4k1WZhRZV/FWrDxaSVVjFD5fv1pvA/ESbQS8iduA5YD4wCrhdREa12OxeoNwYkwI8C/ym2bocY8wE1+OBLqrbEtuPlnO0tIZFkxOtLkVZbGB0L344dxifHyhk9cEifnbtCMYkRH5tO5tNePbWCYxNiGTxGxn86qPMNrtndrd1h0uIDA2ksqaB/3hPx/HxB+6c0U8Bso0xucaYemAZsLDFNguB11zP3wWuFB8ct3d5Rj7hwQHMH9vf6lKUB7h3VhJTk6K5aWICd88YcsHtQoPs/P2+qdw+ZRAvbTjC/D+sY9uRsp4rtBmH07Axu4S5I/vxH/NH8PmBQt7adqzT73uqspYztV3Xw2hHXhkvbziiX0JdxJ2gTwDym70ucC1rdRtjTCNQCcS41iWJyC4RSReR2a19gIgsFpEMEckoLvbMOw+r6hr5eO9Jrh8fr3PDKqBp3Ppli6fxu1sntDkfQURIIE/dOJa37puKwxhueXEzT3zwJdV1jT1UbZP9JyqpqGlgdmos98wYwuzUWJ78KJPsoqoOv+eZ2gbm/2Ed8/+wnpzijr/POTX1jfzg77tY8lEmL6Tndvr9lHtB39pvcMuv2QttcxIYZIyZCPwIeEtEen9tQ2OWGmPSjDFpcXGeeZHz470nONvg4Oa0gVaXojxIe//jOiMllk8fvpTvzBjCa5vzuP5PG3r0Ltr1rguwM1NisdmE3948ntBAOw8v29Xh2bTe2JJHeU0DZ2obWfT8JnYdK+9UjUvX5XLqdC1pg/vw9GcH+fTLU516P+Ve0BcAzdMtEThxoW1EJACIBMqMMXXGmFIAY8wOIAcY1tmirbA8o4CUvuFMHKg3SanOCQsO4BcLRvPW/VMpqarj5hc2c7jwTI989vrDxYyK701cRDDQNBTzb745jv0nTvPbVYfa/X419Y28tP4Ilw2L44MfzCQiJJA7/rqV1Qc7duH5ZOVZXkjP4bpx8bx531TGJ0bxw7d38+Xxyg69n2riTtBvB1JFJElEgoDbgJUttlkJ3O16vghYbYwxIhLnupiLiAwFUgGv+79YdlEVO/LKuSUtUacMVF1mRnIsb39vOg5juPnFzezOr2h1u9oGB2XV9Z3+vOq6RnbklTM7NfYry68e3Z87pg5i6bpcfrcqq109cd7aeoyy6nr+7coUhsSG8d73Z5DcN4z7X9/B8oz8r2xbU99IflkNtQ0Xvhj99KeHcBp4bP4IQgLtLL1rM
tFhQdz72nZOVda274DVeW02NhtjGkXkIeAzwA68bIzZLyJLgAxjzErgb8AbIpINlNH0ZQBwKbBERBoBB/CAMcaaq1Cd8M6OfOw24caJ2ttGda2R8b1574EZ3Pm3rdzx1y0s/XYas1JjMcawO7+Cd3cUsHLPCWrqHSxZOJpvTR3c4c/adqSMBodp9R6Q/7xuJMVn6vjjF4d5MT2HmyYlcu+sJFL6XnjS+9oGBy+k5zIzJYbJg5tmWYuLCGbZ4ul8/80d/PTdvbyxOY/Ksw2UVNVR4+pt1L93CK/ccwkj47/airvrWDnv7zrOQ5enkNinFwB9I0J46e40Fj2/iftfz2D596YTGqQT/bSXeNpV7bS0NJORkWF1Gec1OJxM/6/VTBwUxV/vSrO6HOWjik7XctfL28gtrubOaYNJzyoip7iakEAb88fEU1pdz7qsYhZfOpRH543A1oHhN3754X7e2nqMPSc4IuUAAA8wSURBVE9cfcFZ0bKLzvC3DUd4b+dx6hudXDGiLz+7dmSrgf/qxiP84sNMli2exrShMV9ZV9/o5DefHuTgqdPEhgcTGx5MTHgQvUMC+fPqbKrqGnn+zknnv3SMMdz0/CYKys+y9idzCGtxP8Lqg4Xc91oGV43qx3N3TCLArvd6tiQiO4wxrYaUBn0bPs8s5L7XM/jrXWlcNaqf1eUoH1ZZ08B3X9vOjrxyLhnSh0WTE7l2bDwRIYE0Opws+SiT1zfnMW90f569dUK7z2zn/i6dAVGhvP7dKW1uW1JVx5tb8nh101EAXv7OJUwa1Of8+rpGB5c9vZZB0b1Y/sD0dtVxsvIs97yyneyiKp66aSy3pA3kg93HeXjZbp5ZNO6CHR5e2XiEX36YyXVj4/n9bRMI1LD/iosFvfYTbMPyjHxiw4OZM9wzewMp3xHZK5B/3D+N0uo64iNDv7IuwG7jlwtGMyQmjCc/zuS2pZv5691p9I1wb+Kbk5VnyS6q4lY3e43FhgfzyNxh3DQxkW+/vJVv/XUrz985iTmuGdXe3VHAqdO1PHPzuPYdJBAfGco7D0znwb/v5Kfv7uVoSTX/3HWcMQm9+eakCzeP3jMzCYfT8KuPD1DvcPLnOyYSHODel13h6VqeX5vDjOQY5gzvS1CAf31J+NfRtlPxmTpWHyzim5MS9OxB9YigANvXQv4cEeG7s5J48c7JZBVWseBPG3khPYeiM21fpDzXrXL2sNg2tvyqQTG9eOeB6STFhnHfaxl8sPs4DQ4nz6/NYcLAKGaltO/9zokICeTl71zCzZMT+cvaHE5U1vL4N0a32SR13+yh/HLBaFZlFvLAGzsuemH3HKfT8OPle3h101EWv7GDqU99zuMffMmuY+V+c0OWntFfxD93HafRabg5TS/CKs9x9ej+LP/edH754X7++18HeeazQ1wxoi+3pg1kzvC4Vtuv1x8uIS4imOH9Itr9eX0jQlj2vWnc/1oGDy/bzcrdJygoP8uShaM71Qst0G7j6UXjGBHfm5q6RqYkRbu1390zhhBot/Hzf+7j/tczWPrttIs2Y725NY8N2SUsWTiagX16sWLXcd7ens/rm/NIjgvjr3elMTTuwhedfYG20V9AfaOTub9LJzY8iBUPzrS6HKValV1UxTsZ+by3s4CSqnriI0P48x2TmDz4/9rTnU5D2q8/Z86wOH5364QOf1Ztg4OHl+3is/2FjEnozYcPzbK0u/E7Gfn89L29TE2KZuldafQO+fpAg7nFVVz7x/VMTYrh1XsuOV/v6doGPt13iqf+dYBB0b147/szvP5/7Rdro/fuI+tGr28+yrGyGv7tylSrS1HqglL6hvPYtSPZ/NiVvPjtyQQF2PjWS1tYc7Do/DaZJ09TVl3f7mablkIC7fzlW5P5z+tG8syi8ZbfU3Jz2kB+f+sEMo6Wc8sLmzlRcfYr6xsdTn60fA/BAXaeXjTuK/X2DgnklksG8tSNY9lbUMlza7J7uvwepUHfioqaev60OpvZqbHnLz4p5ckC7TauGd2fdx+YQUrfcO57PYP3dhQAsO5w0/hRMzvYnt6c3SbcN3vo1/rAW2XhhARevWcKx8vPcuNfNrL/xP/dQfviulx251fwqxvG0K936xetrx0bz40TE/jT6mz2XOCGNV+gQd+KP63O5kxtAz+/bqTVpSjVLnERwfzj/mlMGxrNj9/Zw9J1OWw4XMKI/hFu99DxNrNSY3nn+9OxiXDLC5tZe6iIL49X8uyqLL4xLp7rxw+46P6/WDCavhHB/HD5bsuHkO4uGvQt5JVW8/rmo9w8eSAj+nvGWYtS7XGuR8t14+J56pODbMop5dJhvt09eET/3rz/4EwGxYRx72sZ3P96BtFhQTy5cEyb+0aGBvI/N48nt7ia33x6sAeq7Xka9C385tODBNhs/Phqrxx7TSkAggPs/Om2idw9vWnIhLkjff9mv/6RIbzzwHRmpcRysrKW3ywaR5+wILf2nZkSyz0zh/DqpqOsP+yZQ6V3hva6aSbjaBmLXtjMI3NTeWSuBr3yfsYYis/U0fcCbdS+yOE0nKg4y8DoXu3ar7bBwXV/XE91nYPPHrmUyF7eNV2o9rpxgzFNd9z16x3M4kuHWl2OUl1CRPwq5KHpgnF7Qx6aehU9e+sEiqvqeOqTA91QmXU06F0+2nuS3fkV/Pjq4TqDlFJ+alxiFPfPHsrbGflsyimxupwuo0EPlLq+wUf0j7joWBtKKd/3yNxUBsf04mcr9rk1xII38Pugb3Q4+bdluyitrueZReOxd2D4V6WU7wgJtPNfN47laGkNf/jisNXldAm/D/r/+d8sNmaX8qsbxjA2MdLqcpRSHmBGSiw3T05k6bpcMk+ctrqcTvProP/XvpO8kJ7DHVMHcYtO+q2Uaubn142kT69AHl2xl0ZHxyZO9xR+G/TZRWf4yTt7mDAwiieuH2V1OUopDxPVK4gnrh/N3oLK8xOweCu/DPqquka+98YOQgLtPH/nJLcnL1BK+ZdvjIvnyhF9+e3/ZpFfVmN1OR3md0FfWlXH/3trJ0dKqvnTHRMvOMmDUkqJCE/e0DSMwtOfHbK4mo7zm6Cvb3Ty0vpc5vzPWtYdLuEXC0YzI7nzo/kppXzbgKhQ7p2VxId7TvDl8cq2d/BAPh/0xhg+zyzkmt+v41cfH2DSoD589shs7po+xOrSlFJeYvFlQ4nqFcgzXnpW75O3gJZV17Mnv4Jd+RVsyi4hI6+c5LgwXrnnEi7X8eWVUu3UOySQB+ck89QnB9mcU8r05BirS2oXnwn64jN1PPlRJnsKKsgrbbpoYhNI7RvBE9eP4s5pg71+qjCllHXumj6Elzcc5enPDrLi+zMsn2GrPXwm6CNCAth5rJwxAyK5fcogxidGMTYxkvBgnzlEpZSFQgLtPDI3lUdX7GNVZiFXj+5vdUlu02GKlVLKTY0OJ1c/uw67Tfj0kUs9asgUHaZYKaW6QIDdxk+uGc7hoire33Xc6nLcpkGvlFLtMH9Mf8YmRPLsqizqGr1jdEu3gl5E5onIIRHJFpFHW1kfLCJvu9ZvFZEh
zdY95lp+SESu6brSlVKq54kI/zFvBMcrznLnS1tZvj2f07UNbu1rjCGnuIrl2/P5aO+Jbq70/7TZRi8idiALuAooALYDtxtjMptt8yAwzhjzgIjcBtxojLlVREYB/wCmAAOAz4FhxpgLfg1qG71Syhu8tD6XN7bkkVdaQ1CAjStH9GXhhAQGx/SivtFJXaOT+kYntQ0ODhdVsSOvjB155ZTXNH0pzE6N5Y17p3ZZPRdro3enS8oUINsYk+t6s2XAQiCz2TYLgV+4nr8L/Fma+h4tBJYZY+qAIyKS7Xq/zR05kDb961E4ta9b3loppZq7D7g31lAV0UhpVT2l2XU0ZBma3zsbAIQDE4HpgXYiQgKIiAogIiSQEJsNXmlxMbf/WJj/311eqztBnwDkN3tdALT8Gjq/jTGmUUQqgRjX8i0t9k1o+QEishhYDDBo0CB3a1dKKUsJQkRwIBHBgQyO6cXps400Op3YRLCJIAI2EYIDbQTarLsk6k7Qt9Z/qGV7z4W2cWdfjDFLgaXQ1HTjRk2t64ZvQqWUcocAnjp1kTtfMQVA81k5EoGWVxHObyMiATQdb5mb+yqllOpG7gT9diBVRJJEJAi4DVjZYpuVwN2u54uA1abpKu9K4DZXr5wkIBXY1jWlK6WUckebTTeuNveHgM8AO/CyMWa/iCwBMowxK4G/AW+4LraW0fRlgGu75TRduG0EfnCxHjdKKaW6ng6BoJRSPkCHQFBKKT+mQa+UUj5Og14ppXycBr1SSvk4j7sYKyLFQF4n3iIWKOmicqzkK8cBeiyeyleOxVeOAzp3LIONMXGtrfC4oO8sEcm40JVnb+IrxwF6LJ7KV47FV44Duu9YtOlGKaV8nAa9Ukr5OF8M+qVWF9BFfOU4QI/FU/nKsfjKcUA3HYvPtdErpZT6Kl88o1dKKdWMBr1SSvk4nwn6tiYw92Qi8rKIFInIl82WRYvIKhE57PrZx8oa3SUiA0VkjYgcEJH9IvKwa7lXHY+IhIjINhHZ4zqOX7qWJ4nIVtdxvO0autsriIhdRHaJyEeu1155LCJyVET2ichuEclwLfOq3y8AEYkSkXdF5KDr38v07joOnwh61wTmzwHzgVHA7a6Jyb3Fq8C8FsseBb4wxqQCX7hee4NG4MfGmJHANOAHrr8LbzueOuAKY8x4YAIwT0SmAb8BnnUdRzlwr4U1ttfDwIFmr735WC43xkxo1ufc236/AP4AfGqMGQGMp+nvpnuOwxjj9Q9gOvBZs9ePAY9ZXVc7j2EI8GWz14eAeNfzeOCQ1TV28Lg+AK7y5uMBegE7aZoruQQIcC3/yu+dJz9omt3tC+AK4COaZr7z1mM5CsS2WOZVv19Ab+AIrg4x3X0cPnFGT+sTmH9tEnIv088YcxLA9bOvxfW0m4gMASYCW/HC43E1dewGioBVQA5QYYxpdG3iTb9nvwd+Cjhdr2Pw3mMxwP+KyA4RWexa5m2/X0OBYuAVV3PaSyISRjcdh68EvVuTkKueIyLhwHvAI8aY01bX0xHGGIcxZgJNZ8NTgJGtbdazVbWfiHwDKDLG7Gi+uJVNPf5YXGYaYybR1FT7AxG51OqCOiAAmAQ8b4yZCFTTjc1NvhL0vjgJeaGIxAO4fhZZXI/bRCSQppD/uzFmhWux1x6PMaYCWEvTNYcoETk3Bae3/J7NBBaIyFFgGU3NN7/HO48FY8wJ188i4H2avoS97ferACgwxmx1vX6XpuDvluPwlaB3ZwJzb9N8wvW7aWrr9ngiIjTNIXzAGPO7Zqu86nhEJE5EolzPQ4G5NF0sWwMscm3m8ccBYIx5zBiTaIwZQtO/jdXGmG/hhcciImEiEnHuOXA18CVe9vtljDkF5IvIcNeiK2maW7t7jsPqixJdeHHjWiCLpnbUn1tdTztr/wdwEmig6Zv+XpraUL8ADrt+Rltdp5vHMoumJoC9wG7X41pvOx5gHLDLdRxfAo+7lg8FtgHZwDtAsNW1tvO45gAfeeuxuGre43rsP/dv3dt+v1w1TwAyXL9j/wT6dNdx6BAISinl43yl6UYppdQFaNArpZSP06BXSikfp0GvlFI+ToNeKaV8nAa9Ukr5OA16pZTycf8/zo/XmpZcbZgAAAAASUVORK5CYII=\n",
3945
      "text/plain": [
3946
       "<Figure size 432x288 with 1 Axes>"
3947
      ]
3948
     },
3949
     "metadata": {
3950
      "needs_background": "light"
3951
     },
3952
     "output_type": "display_data"
3953
    }
3954
   ],
3955
   "source": [
3956
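    "# ensemble 'any' probability averaged per pos_idx (assumed to be the slice position\n",
    "# within a series), as a check for position-dependent bias in the OOF predictions\n",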
    "zz = preds_all.mean(0)[:,0]\n",
3957
    "\n",
3958
    "train_md['prob'] = zz\n",
3959
    "\n",
3960
    "plt.plot(train_md[['prob','pos_idx']].groupby('pos_idx').mean())\n",
3961
    "plt.plot([0,60],[0,0])"
3962
   ]
3963
  },
3964
  {
3965
   "cell_type": "code",
3966
   "execution_count": 26,
3967
   "metadata": {},
3968
   "outputs": [
3969
    {
3970
     "data": {
3971
      "text/plain": [
3972
       "0.0934667744853716"
3973
      ]
3974
     },
3975
     "execution_count": 26,
3976
     "metadata": {},
3977
     "output_type": "execute_result"
3978
    }
3979
   ],
3980
   "source": [
3981
    "log_loss(train_md['any'],train_md['prob'])"
3982
   ]
3983
  },
3984
  {
3985
   "cell_type": "code",
3986
   "execution_count": 27,
3987
   "metadata": {},
3988
   "outputs": [
3989
    {
3990
     "data": {
3991
      "text/plain": [
3992
       "(0, 5)"
3993
      ]
3994
     },
3995
     "execution_count": 27,
3996
     "metadata": {},
3997
     "output_type": "execute_result"
3998
    },
3999
    {
4000
     "data": {
4001
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA5gAAAGfCAYAAAA6Un0sAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deZxbZ33v8e8j6Wg8ktdZ7DhxNseOnUDIgkkglCSEBJKQErgECJQW0tCwXCilG1Da21K4L9py27RlTaBspRBCIGVpQjAEE7YkOITs8RI7i2PHHu/2SDOSjp77x3M00ow1I2nmaI6Wz/v1Oq+jdfyMjqXR9/yexVhrBQAAAADATMWibgAAAAAAoDMQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQikQ9DzLGPCHpkCRfUsFau6aZjQIAAAAAtJ+6Ambgpdba3U1rCQAAAACgrdFFFgAAAAAQCmOtrf0gY7ZK2ifJSrreWntDlcdcK+laSUqn089fvXp1yE0FAKA17Tw4MuX9S+bPmaWWAADQfPfee+9ua+1gtfvqDZhHW2u3G2MWS1or6T3W2jsne/yaNWvs+vXrp91gAADayXVrN055//suPnmWWgIAQPMZY+6dbF6eurrIWmu3B/tdkm6RdHZ4zQMAAAAAdIKaAdMYkzbGzCtdlvRySQ81u2EAAAAAgPZSzyyySyTdYowpPf5r1tofNLVVAAAAAIC2UzNgWmu3SDp9FtoCAAAAAGhjLFMCAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKACQAAAAAIBQETAAAAABAKAiYAAAAAIBQETAAAAABAKAiYAAAAAIBQEDABAAAAAKEgYAIAAAAAQkHABAAAAACEgoAJAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKACQAAAAAIBQETAAAAABAKAiYAAAAAIBQETAAAAABAKAiYAAAAAIBQEDABAAAAAKEgYAIAAAAAQkHABAAAAACEgoAJAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKACQAAAAAIBQETAAAAABAKAiYAAAAAIBR1B0xjTNwYc58x5vvNbBAAAAAAoD01UsF8r6RHm9UQAAAAAEB7qytgGmOWSXqlpM83tzkAAAAAgHZVbwXzXyX9paTiZA8wxlxrjFlvjFk/NDQUSuMAAAAAAO2jZsA0xlwuaZe19t6pHmetvcFau8Zau2ZwcDC0BgIAAAAA2kM9FcwXS3qVMeYJSTdKutAY89WmtgoAAAAA0HZqBkxr7QettcustSdIukrSHdbaNze9ZQAAAACAtsI6mAAAAACAUCQaebC1dp2kdU1pCQAAAACgrVHBBAAAAACEgoAJAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKACQAAAAAIBQETAAAAABAKAiYAAAAAIBQETAAAAABAKAiYAAAAAIBQEDABAAAAAKEgYAIAAAAAQkHABAAAAACEgoAJAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKACQAAAAAIBQETAAAAABAKAiYAAAAAIBQETAAAAABAKAiYAAAAAIBQEDABAAAAAKEgYAIAAAAAQkHABAAAAACEgoAJAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKACQAAAAAIBQETAAAAABCKmgHTGDPHGHOPMeZ+Y8zDxpgPz0bDAAAAAADtJVHHY0YlXWitPWyM8ST93Bhzm7X2ria3DQAAAADQRmoGTGutlXQ4uOoFm21mowAAAAAA7aeuMZjGmLgx5reSdklaa629u8pjrjXGrDfGrB8aGgq7nQAAAACAFldXwLTW+tbaMyQtk3S2Mea5VR5zg7V2jbV2zeDgYNjtBAAAAAC0uIZmkbXW7pe0TtIlTWkNAAAAAKBt1TOL7KAxZmFwuVfSRZIea3bDAAAAAADtpZ5ZZJdK+rIxJi4XSG+y1n6/uc0CAAAAALSbemaRfUDSmbPQFgAAAABAG2toDCYAAAAAAJMhYAIAAAAAQkHABAAAAACEgoAJAAAAAAgFARMAAAAAEAoCJgAAAAAgFARMAAAAAEAoCJgAAAAAgFAQMAEAAAAAoSBgAgAAAABCQcAEAAAAAISCgAkAAAAACAUBEwAAAAAQCgImAAAAACAUBEwAAAAAQCgImAAAAACAUBAwAQAAAAChIGACAAAAAEJBwAQAAAAAhIKAeWCb9NRdUbcCAAAAANoeAfPu66WvXCFZG3VLAAAAAKCtETBT/VJhRMpnom4JAAAAALQ1Amaqz+0ze6JtBwAAAAC0OQJmqt/tCZgAAAAAMCMETAImAAAAAISCgDkWMPdG2w4AAAAAaHMETCqYAAAAABAKAuacBZKJUcEEAAAAgBlKRN2AyMXiUu8iKpgAgEldt3Zj1E0AAKAtUMGUXDdZAiYAAAAAzAgBUyJgAgAAAEAICJiS1NvHGEwAAAAAmCECpiSl+qhgAgAAAMAMETClchdZa6NuCQAAAAC0LQKm5AJmMS+NHoq6JQAAAADQtgiYkguYEt1kAQAAAGAGCJhSOWBmmegHAAAAAKaLgClVVDAJmAAAAAAwXQRMyc0iK9FFFgAAAABmgIApMQYTABCaJYce0dEH74+6GQAARIKAKUlzFkgmTsAEAMzYuU99Rudv/ZeomwEAQCQImJJkjOsmS8AEAMxQT+G
wkoXDUTcDAIBIEDBLUv0ETADAjHnFjHr84aibAQBAJAiYJal+ZpEFAMxY0s/I8zNRNwMAgEgQMEvoIgsACIHnZ5UsZmWsH3VTAACYdQTMEiqYAIAQlKqXnp+NuCUAAMw+AmZJaQymtVG3BADQpmLFvBI2L0lKMg4TANCFCJglqX7J+tLIgahbAgBoU5VjLwmYAIBuRMAsSfW7PeMwAQDTlCyWu8UmmegHANCFCJglvX1uzzhMAMA0VVYtqWACALoRAbOECiYAYIYqJ/YhYAIAuhEBsyRVqmASMAEA0zO+gkkXWQBA9yFgllDBBADMUGUF0yNgAgC6EAGzpGeeFPMImACAaRs3i2yBLrIAgO5DwCwxprwWJgAA05BkmRIAQJcjYFZK9UvZfVG3AgDQpkpdZAuxHsZgAgC6EgGzUqqPCiYAYNqSxYysjIa9PiqYAICuRMCsRBdZAMAMeH5G+VivcvE0FUwAQFciYFaiggkAmIGkn1E+XgqYVDABAN2HgFmpNAaz6EfdEgBAG/L8rHLxtHLxFAETANCVCJiVUv2SLUojB6JuCQCgDSX9YeXivcrH06yDCQDoSgTMSql+t6ebLABgGjw/q3w8pVHGYAIAuhQBs1Kqz+0JmACAaUj6GeXiKeXpIgsA6FIEzEpUMAEAM+AVS7PIplwF0xajbhIAALOKgFlpLGDujbYdAIC2NDbJTyItIyvPz0bdJAAAZhUBsxIVTADADCT94bFlStx1xmECALoLAbOSl5LiPQRMAEDjrB2b5CcXT0kS4zABAF2HgFnJGFfFpIssAKBB8eKoYioqF09VVDAJmACA7lIzYBpjjjXG/MQY86gx5mFjzHtno2GRSfVTwQQANKzUHdZ1kU2Nuw0AgG6RqOMxBUl/Zq39jTFmnqR7jTFrrbWPNLlt0Uj1ETABAA1LFl2YzMXTygcVTI+ACQDoMjUrmNbaHdba3wSXD0l6VNIxzW5YZKhgAgCmwauoYI4GAbOHLrIAgC7T0BhMY8wJks6UdHeV+641xqw3xqwfGhoKp3VRIGACAKYhGSxJkoullA+6yHoETABAl6k7YBpj5kr6lqQ/sdYenHi/tfYGa+0aa+2awcHBMNs4u1L90sh+yS9E3RIAQBspVzBTYxVMxmACALpNXQHTGOPJhcv/stZ+u7lNilhpLcyR/dG2AwDQVkphMhdPyY/1qKg4s8gCALpOPbPIGkn/IelRa+2/NL9JEUv1uT3dZAEADagcgyljlIunqGACALpOPRXMF0v6fUkXGmN+G2yXNbld0SFgAgCmwSsGYzCD7rG5RJoKJgCg69RcpsRa+3NJZhba0hpKXWQJmACABpTCZD7eK0lBBZOACQDoLg3NItsVCJgAgGnw/KyKiss3SUmukkkXWQBAtyFgTtRLF1kAQOOSfka5eEoyrtNPLp5WskAFEwDQXQiYEyVTkpeSMnujbgkAoI14fmase6zkliuhiywAoNsQMKtJ9VPBBAA0JOlnxyb4kdwYTI8usgCALkPArCbVR8AEADTE84fHVTDdGEwqmACA7kLArCbVTxdZAEBDksWsG4MZGJvkx9oIWwUAwOwiYFZDF1kAQIPcGMzKgJlSTEUliqMRtgoAgNlFwKymt48KJgCgIUk/o1xsfBdZd/vhqJoEAMCsI2BWk+qXRg9Ifj7qlgAA2kS1CqYk1sIEAHQVAmY1qdJamFQxAQD18fwjx2BKBEwAQHchYFaT6nd7xmECAOphi0oWsxMqmKWAyUyyAIDuQcCshoAJAGiA52claVwFMz/WRZaACQDoHgTMagiYAIAGJIsuYFZWMEcTroLp0UUWANBFCJjVEDABAA3wgiplvmIW2XzQRbanQAUTANA9CJjVMMkPAKABySpdZEuXqWACALoJAbOaRI+UnCdlCZgAgNrGKpiVYzBjvbIyjMEEAHQVAuZkUovoIgsAqEu1CqaMUS6eImACALpKIuoGtKxUPwETAFCXUjfYygqm5JYqSfoZXbd245TPf9/FJzetbQAAzCYqmJMhYAIA6pQMAmYu3jvudhcwqWACALoHAXMyBEwAQJ28YqmCmR53O11kAQDdhoA5mVQ/s8gCAOriBWMwK5cpkVyX2SSzyAIAuggBczKpPil3WMqPRN0SAECLS/oZFUxSxdj4qQ1G6SILAOgyBMzJpPrdnqVKAAA1eH5m/AyygXw8zTqYAICuQsCcTClgMg4TAFBD0s8eMYOsVBqDScAEAHQPAuZkevvcnnGYAIAaPH9Y+QkzyEpSLpFW0j8sWRtBqwAAmH0EzMlQwQQA1ClZzFbtIpuLpxS3vuI2F0GrAACYfQTMyRAwAQB18vzMJF1k3bIldJMFAHQLAuZkUnSRBQDUJ+lnlItV6SJbCpgFZpIFAHQHAuZk4p7Us4AKJgCgJs/PKh+EyUqlbrMsVQIA6BYEzKmk+giYAICakv6wctUm+aGLLACgyxAwp5LqJ2ACAGryJl2mpBQwqWACALoDAXMqBEwAQA2xYkEJm6s6i2wpdHpUMAEAXYKAOZVUP5P8AACm5BVdeKy2DuYoFUwAQJchYE4l1SdlCZgAgMmVqpO5KpP85JnkBwDQZQiYU0n1SfmMlKNrEwCgutIEPvmqy5Skxj0GAIBOR8CcSqrf7aliAgAm4flZSao6yY9MTLlYigomAKBrEDCnUgqYTPQDAJhEcqyLbJWAGdxOwAQAdAsC5lQImACAGkpjMKtWMFUKmHSRBQB0BwLmVMYCJl1kAQDV1axgJtJUMAEAXYOAORUqmACAGrziFGMw5WaXpYIJAOgWBMypzFkoyRAwAQCTKlUnc1VmkZVc8PSoYAIAugQBcyrxhNS7kIAJAJhUeRbZ6gEzF0+rp0DABAB0BwJmLb19jMEEAEzK8zOuemmq/0nNxVNjEwEBANDpCJi1pPqpYAIAJpX0M5NWLyXGYAIAugsBs5ZUPxVMAMCkPD+jXDw96f25eFoJm1OsmJ/FVgEAEA0CZi1UMAEAU6hdwUwFj2McJgCg8xEwa0n1uYBpbdQtAQC0IK+YrVnBlEQ3WQBAVyBg1pLql/xRafRQ1C0BALSgesZgusdRwQQAdD4CZi2LT3H7bfdE2w4AQEvy/IzysdSk95e7yFLBBAB0PgJmLSeeJ3kpacNtUbcEANCCkn5mLERWk0u4CqZHBRMA0AUImLV4vdJJF7qAyThMAMAEnp+dsotsPgifPQRMAEAXIGDWY9Wl0sFnpGcfiLolAIBWYm3tCma8VMGkiywAoPMRMOux8hWSDN1kAQDjxG1OMfljVcpqxib5KVDBBAB0PgJmPeYOSseeLW24NeqWAABaiOdnJalGBdN1n2UWWQBANyBg1mvVpdKO+6UDz0TdEgBAiyiFxqkqmNYklI/1MIssAKArEDDrteoyt99IN1kAgDNWwZximRLJdZOlggkA6AYEzHoNnCz1LWccJgBgTLmCOfksslIpYFLBBAB0PgJmvYxxVcytd0qjh6JuDQCgBdQzBrN0PxVMAEA3IGA2YtVlkp+THr8j6pYAAFpAsuiqklONwZToIgsA6B4EzEYce4
7Uu4husgAASRrr9lqri2w+nmIdTABAVyBgNiKecGtibrxd8gtRtwYAELFSaCytdTmZUcZgAgC6BAGzUasulbJ7pW33RN0SAEDEyrPI1qpg0kUWANAdCJiNWvEyKZ6UHvufqFsCAIhY0s+oqJj8WM+Uj2OSHwBAtyBgNqpnnnTCS6QNt0rWRt0aAECEPD/jZpA1ZsrH5RJpecVRGcvwCgBAZyNgTseqS6W9W6Tdm6JuCQAgQp6frTmDrFRexoRxmACATkfAnI5Vl7r9hlujbQcAIFJJf7jmGphSeRIgAiYAoNMRMKdjwTLpqOexXAkAdDmvmK25RIlUETALh5vdJAAAIlUzYBpjvmCM2WWMeWg2GtQ2Vr9SevpuaXh31C0BAEQk6WeUj9FFFgCAknoqmF+SdEmT29F+Vl0qybo1MQEAXWlskp8a8kEF0yNgAgA6XM2Aaa29U9LeWWhLeznqedL8YxiHCQBdLFnnJD+jQcDsYakSAECHC20MpjHmWmPMemPM+qGhobB+bOsyxlUxH79Dyo9E3RoAQASS/nBdYzBLIdQjYAIAOlxoAdNae4O1do21ds3g4GBYP7a1rbpUymekrXdG3RIAQAQ8P1vnLLKMwQQAdAdmkZ2JE14iJefSTRYAupEtBrPINrJMCRVMAEBnI2DORKJHWvEyaeMPpGIx6tYAAGaRVxyRka2rglmMeSqYJBVMAEDHq2eZkq9L+pWkVcaYbcaYa5rfrDay6pXSoR3S1nVRtwQAMItKM8LWs0yJ5LrJUsEEAHS6emaRfaO1dqm11rPWLrPW/sdsNKxtnHqFtOA4ae3fUsUEgC7i+VlJUq6OSX4kKZdIEzABAB0vEXUD2p43R3rZ30jf/iPpwW9Kp78h6hYBABp03dqNDT+n1N21NL6yFlfBpIssAKCzMQYzDM+9Ulp6unTHR1iyBAC6RCks1rNMiSTl4nPHutUCANCpCJhhiMWkiz8iHXhauuf6qFsDAJgFY2Mw65jkR3IVzJ4CXWQBAJ2NgBmW5edLK18u3fnPUmZv1K0BADRZsljqIltfwMzHU/IYgwkA6HAEzDBd/PdS7pB058ejbgkAoMlKk/zUX8FMMwYTANDxCJhhWnyKdOabpXs+J+3dEnVrAABNVJoRtt4KJsuUAAC6AQEzbBf8lRT3pB9/JOqWAACaaKyCGat3kp+0ksWsjPWb2SwAACJFwAzb/KXSi94tPfxtadu9UbcGANAknp9RwXgqxry6Hl9azqQUTAEA6ESsg9kML/5j6d4vSj/8a+nqWyVjom4RACBkST9T9/hLqdyVNukPK5eYO+6+Wutwvu/ikxtvIAAAEaCC2Qw986QLPig99Utpw61RtwYA0AReMVP3+EupXMFkoh8AQCcjYDbLWW+R+ldKa/9W8vNRtwYAEDJXwaxv/KVUGTCZ6AcA0LkImM0ST0gXf1jas0n6zVeibg0AIGSenx0LjfXIV3SRBQBgzDP3Sn4h6laEhoDZTKsuk447V1r3MSm7L+rWAABClPQzdc8gK5XHYHp0kQUAlAxtkL74SunHH466JaEhYDaTMdIrPipl9krXnyc9+cuoWwQACInX6CQ/CVft7KGCCQCQpMKodPM1UjLtVqHoEATMZjvm+dLVt0kmJn3xMulHfycVclG3CgAwQ0l/epP8UMEEAEiSfvz30s4HpSs+Jc1bEnVrQkPAnA3HnSO94+fSWb8v/fw66fMXSrsejbpVAIAZ8IrZBif5YQwmACCw+cfSrz4pveCPpFWXRN2aUBEwZ0vPPOlVn5Cu+pp0cId0/fnSXZ+RisWoWwYAmAZXwax/kh/fJOWbhJIFAiYAdLXh3dJ/v1MaPEV6+Ueibk3oCJizbfUrpXf9Slp+gfSDD0hffY10cHvUrQIANMDYghLF0YYqmDJGuXiadTABoJtZK33n3W4C0Nd+XvIa+DvSJgiYUZi7WHrTN6TL/1V6+h7p0y+S7vkcYzMBoE14/ogkKRerfwymJAImAHS79f8hbbxNuujD0lHPjbo1TUHAjIox0pqr3djMJc+Vbv1z6ZPPl+77r45aBwcAOlFpHGUjs8hKbhwmYzABoEvteky6/UPSSS+TznlH1K1pGgJm1PpPkt76fenN35J6+6TvvEv6zIukh29hfCYAtCjPz0pSQ7PISi6QEjABoAsVRqVvvU1KzpVe/Rkp1rkxrHN/s3ZijLTiIunaddLr/9MtafLNt0o3nCdtvN311QYAtIxyBbOxsTOj8TTLlABAN+rQJUmqIWC2EmOkU18lvfOX0muul0YPSV97vfSFV0hbfxZ16wAAgelXMNNUMAGg23TwkiTVEDBbUSwunX6V9O710uXXSfufkr58ufSVV0vP3Bt16wCg6yWLrgo5vTGYVDABoGvseKCjlySphoDZyuKetOYPpT++T3r5R6Ud90ufu1C68fekXY9G3ToA6Fqlbq6NVjBzVDABoDtY61aJ+PxFbvjblV/oyCVJqiFgtgOvVzr3PdJ775cu+Ctpy0/d0ibffru0d2vUrQOArlOqQuYbXqYkqGAyth4AOld2v3TT77tVIpafH6wacWrUrZo1BMx2Mme+dMH7pT95wAXOR/5b+uQa6fvvkw49G3XrAKBrlMZgNjrJTy6elpGVV8w2o1kAgKg9/Wvpsy+RNtzmeiC+8RtSeiDqVs0qAmY7SvW5Ptx//FvprLdIv/mK9Kmzpftv5Kw4AMyCsQpmowEzkXbPL9BNFgA6SrEo/eLfpC9eIhlJf3i7Kwh18HIkk+m+37iTzF8qXf4v0rvudgOHb3m7dOObpEM7o24ZAHQ0z88oH5sja+INPa80ZpNxmADQQYZ3S197nbT2/0irXym9/WfSsjVRtyoyiagbgBAMrJCuvlW66zPSHR+RPn2OdOnHpdOudEufAABC5RWzDU/wI5VnnW10Jtnr1m6c8v73XXxyw20BAIRg01rpO++WsvukV/6ztOaarv/+TQWzU8Ti0rnvdmdM+k6Svv026Rtvlg7virplANBxkv5ww0uUSNJoPD32fABAG8vslW55h/RfV0q9C6U/+rH0grd1fbiUCJidZ/Bk6ZofShd9WNr0Q+lT50gPfTvqVgFAR/H8rHINjr+UpHwQMD3WwgSA9vXY/0iffqH0wE3SeX8hvf1O6ajTom5Vy6CLbCeKxaXf+RPp5Evcwq43Xy2t+5jkpdzamvGkFEu4fdxz2/xjpFNfLR17NmdeAKAGV8FMN/w8xmACQBsb3iPd9hfSQ9+Slpwm/d43paWnR92qlkPA7GSLV0vXrJXu/oz05K+kYl7yg60wIo0elPyC5OekDT+Q7vq0tOA46bTXSs+9UlryHMImAFTh+VmNJOY3/LwcXWQBoP1YKz18i3TrX0gjB6SXfkj6nfe5Ig2OQMDsdPGEmyL53PdM/biRg67c/9DN0i/+Xfr5ddLgajdR0HOvlPpOnJ32AkAbSPoZHeo5quHnlQMmXWQBoC1k9krf+2Pp0e9JR58pXfE9acmpUbeqpREw4cyZL53xRrcN75Ye+W/pwZulOz7qtr6T3FkaW3RncWyxYrMuy
PavdG+4xcE2sFJK9ET9mwFAzVlYG+X5mWnNIluI9aioOAETANrBrsekr79BOrhduujvpBe9x33nxZR4hXCk9ICbBesFb5P2Py09/G1p23rXXdbEJAX7yi2fkXZvlB7/sVQsuJ9j4lL/Chc6lzxHWnGx66dOt1sAbS5ZzE5rFlkZo1w8RRdZAGh1m37k5jFJzJGuvq2r17VsFAETU1t4rPTi99b/+EJO2rNZ2vWItOtRt22/z/Vbv+Ojbozn6ldKp/yudNwL3YREANBmPD+j/DRmkZVEwASAVmatdPf10u0flBY/R3rTjdKCZVG3qq0QMBGuRDKoWE7omz68R9p4m+u/vv4LbuKh1IC0+jJp9e9Ky8+nOy2AthAv5hS3hWl1kZWkXCJNwASAVuTnpVv/XLr3S9Lqy6XXXC/1zI26VW2HgInZke6Xznyz20YPSZt/5MLmQ7dIv/mK5KWluYulZNptXuulR7oAABjKSURBVOrIy/OWuorqwuNcJTTVR3dbALOutIZlPjbNgBlPMwYTAFpNZq900x9IT/xM+p0/lS78GykWi7pVbYmAidnXM096zmvcVhiVtt7pAmdmr5QblvLDUu6wdHiX2+czLpQWRsb/HC9dDpwLj5NOPE9adRlTRgNoKs/PStK0K5j5eGospAIAWsDuTdLXXi8d2OaqlqdfFXWL2hoBE9FK9EgrL3bbVKyVRvZL+59yEw/tf0o6EOz3PyU9dbf068+7Kufz3yqd9RZp/tJZ+RUAdJdS99ZpTfIjV8GcO7orzCbVNUvu+y4+OdR/EwDaSrEoZXZLh3ZIh56t2HZID33bFSje8n3puHOibmnbI2CiPRgj9S5y29LTj7y/6EubfuhC5rqPST/9J+mUy91MuCe8hK60AELjFUsVzOlN8jMaT2tubpeWHHpYO+c9J8ymAUB3GT3kwuHOh12Pt8KIlM+6rTDibsuPSCMHpMM7Jesf+TNSA9LRZ0hXfNL1iMOMETDRGWJxadWlbtvzuHTvF6X7vio98h1pYJX0gmuk017nxm0CwAyUxk/m4+lpPf+Bo16rE/bfpTc+cLXuP+pK/eL4dymXYBIJAKjbjvul9V+UHvymG07Vs0BKpiSvV0r0St4cN4dHasDd1jNfmndUxbZUmrvEbYlk1L9NxyFgovP0nyS9/KPSSz/klkf59eel2/5Suu390tLnubGaJ54vHfei+mcGs9at78n4TqDrlcZPTreCuXPec/TlM2/SuU99VmfsuEkr9vxE65b/mTb1v4zeFgAwmdHD0kPfcjO8bv+NC5LP/V/S8692a1Ty+dkyCJjoXF6vdMab3Lb9t9LG292EQndfL/3yE1IsIR3z/CBwnifNP8YN7h7bnnL7/U+7fTEvLTxeGlgp9a+UBlYE+5XuDBgfbEBXSAaT/Ex3FllJyiXmat3yP9ejg5fpZY9/TJdv+KC2LjpXdyz/Sx2cc0xYTQWA9lHIBRM9ZoJursHl3GFpw23SAzdJuUPS4CnSpf8kPe8NUu/CqFuNKgiY6A5Hn+G2C97vPqyevtuFza13Sj/7Z+nOjx/5nLlHuYV1lz7PrdcZ75H2bHbb1jvHz2rbM98FzaVnSEef6bbB1VKctxjQabxgkp/pziJbaee8U/X107+oM3bcpHOfvF5/cN8bdPexb9O9R79ZxRifHwBaULHoJl7M7JUye9z4Rm+OWyWgZ36wnycl5pRPvheLbjKdfU8E29aKy09I2X2up9hk4j1u9YE1V0vHnsNJ/RZnrLWh/9A1a9bY9evXh/5zgaYYOSA9+Sv3IblgmVv6ZP4xbobbyRSL0sFtblrrPZvdftejbkxA7pB7TKJXOuq0cuBcerrrvjvVzwUwLfXMohqWNdu+opc8+Ql94oU/UyE+J7SfO3d0py7Y8v+0cu86Heg5Wtvnnabd6ZO1K32ydqdXKpPsD+3fqoZZZgGMKRalnQ9JW9ZJ2+6Rhne770mZPS4M2mLtnxFLuKDppaXhIckfLd9nYu4716IT3Jbqd49Lpsrrn3up4HpaGjzZTfSIlmGMuddau6bafZweBeYskFZd0thzYrHy+psrXla+vViU9j4ubb/Pdcvdfp+bbOie6939Jnhe/0qpf4ULnAPB5XlHs6Av0AY8P6OiYirEwj1ZdLhnib5/yse1fO+dOu3ZW7Ts4H06ZfftY/cPe30aCsLmUGqlhtIrta/3eBVjjA0HOopfcBW+oQ3S7o3S6EFXwUskg32PFE+6CmEi6WbSz+6TsvuD/YRNVho4WVp8iutdtfgU971j4gnv/U+5QLllnbTlp25JD0nqW+5OvC8+xQXB0tbb5/ZzFrjwOHoo2A5WXD7kxk6mB8phsu9EacGxzGvRwQiYQJhiMRcYB1ZKz3u9u63ouyrnjvvL1c49m6Unf+nGF5Qk5gQf1Avdh3XvQne5t3R9kTT/6HKw5UweEImkn1E+3tu0Llpb+s7Tlr7zJElz8vs1MLxZg5mNGhzepMHhjTpj+41K2LwkyTcJ7e09QbvTK7Q7tWJsfzi5mC5k6F7Fogtoux51S1MsONYFm4XHua6c05XdJ+3dIu3dGuy3SPuedPd5c4LA1xPsJ1z35pRnNy3d5/W6+w/vCsLkBmloo/u5xXz53415469PJTk3WNZtodsvXu2+hwxtcOMYS8t0mLgLjotXu+8aT/7C/buSGyK04iJp+QXS8vPddw+gAQRMoNlicWlwldsqWSsd3B6M69zk/mCVzkCOHHBnErMPuMulbreVehaUw2Zpm7t4/NnFVJ/7AwYgNF4xo3wI4y/rMeIt1LaFa7RtYbkXUqxY0KLsExrIbNbA8GYNZDbrmAP36ZShH4w9ZjSeVi6elh/z5JukCrGk/FhSvkm622JJFU1CvkmoaBIqmrg00u++yMYSwfhxIykYRlNtOE2q3/XC6DvJfVFNhviaWOu64MXi4f1MdB5rXTjb9Yjbdgb7ocfc+ofVzDu6XElbdLwLT35eKoxKhazb57Pl67lhFyL3Ph5UAyvMX+Z+hom5St3wUPD8ETdPQ+ln+Lnav4uJu8rewCo378PAKld1HFgpzZnvQrOfc5XCQrD5Obc3Mff3fs7CqZfcKIy6k9xDj7nwPfSYe80ye6TjXiid/XYXKgdXcYIKM0LABKJijLTgGLctP3/qx/oFN6D+wDYXPPc/Je1/0u33bXXdWSqroZW8VDlsJudNOHsanFFN9Lgg2rvIjYmYv8y1K7148m67IweDM7iPS3uC/egh1+1mcJX74zh4sht/AXSQpJ8JZYKf6SrGEtqTXqE96RXaMFi+vadwcCxwLso+Kc8fUbw4qoTNK17MKR7sewqHFLc5xYsFxayvmC0oZgsa3jf+epn7ounFg7BnjPtiP/EzZ95SFzb7l7t9etB1gYslXHe+uBdcD/aFEenQs27ij0PPVmzBdT/nPrfSi93Js7mL3Yzd6cHyybRY3H0xjyUmXI65/VjFqGI/k6EIY5ObBGPRDu9yFbLDu6ThXRXXh9x+znxp8anSkue47oWLn+M+H+tdIisK+RHp4DPSgafdcUj0uF40cxYEPWwW
ut9rqu6NpaW9/Lz72zKdsDJy0AWgfU+64Da8y72upcvDu93rXTmuLz3oXu/nv7X8es87yv3trJxQZv+T0tafSvc/U/3fHqs89rr2LzxOOvXVwQmV5W5bdEL9J3CLxSBwBls+W3F5xP1f7ls+dTiMxaTYnJlVYBM90lHPdRvQRARMoB3EE278QnrAzYY7kbXuzOrwUPmLz9i2t3w5N+yuV/5hqzxjqwlVipjnzu4uONYFzlhC2vO4C5PDQ+MfO+9oFyY33j6+K8/8Y8qBc2Bl+czxgmVMeIS25PnZGS1R0iyjifl6ZsFZembBWU35+UdMAlR5kmnvlvKJpsduLY/dqldyXnkB9GPPkeYtcSfHhoeC0LZL2vZrt5+sMlWveLJiIfbe6iG01HVx9KD7zCxNcJLdW31yExNzC7rPXSLNHXSVp/SA+1ze+Yhbt6+y3YtOcEFo3lEqBfiqrO9OMBYL7nPVz5eDW7Hg2lIZ2uOe+/1iifGXS9dLl0vXrXWB/sDT5SW5hnfV9zp6aRc0JdceP1/Rxoq/AV6qHMoqt/6TXFfMYsGNM9z1qLTr4aAK+ahbKqxSzHMBMj3gTjAMrnbXS2MDF5/qXvtqFhwjHXfOkbfnR9zvOzaeMQiWYVfvYjFX4Q+zyg+0MGaRBeCUQurBZ8avB3rwGenAM+W1QCu/HPRVnM0t/eGsnJxg6DH3xaE0UcG4L4bGVTwWHe/WF110vAuy6YGKyQP63Fnzid3krHXV0kPPSoeflQ7tdPvhIfclY8VF7uegI8zmDLGSNHf0WcVtQZnEItcVdsKXzSsffLsk6ebTrp/VdrW6cQF05ID7PPFLwSg34XLefZGft9SFskYqeqOHXSjI7nNjy4p+ELaCfbFYDmRj3R0rqkb5TPnk2rj9hMcURt1Js9SA+yxK9Zc/n0q3laqqpWrqZIpFaf8T5fBUClKZPVP/riYWBMN4OUTGPHfSMea5/5tjwa7gXttSyPMrAmmxIpBOlOh1s6cvWBacTDy2PKP6vKXudRg5UGULhnNI5Qp1ZbU6FrR7eKhivOIT47uLJnrLbZPc8wdOdp/ji09xld++5e51nrOQbptAC2EWWQC1GRN8iepzy6tMVzxRnujolMvLtxeL0qHtrrvTWPfe4PITP5ce+IaOqKC6hgWTFQRhM7vPdT2rVsUw8WACAyMdc5a08uXSyoulpWcyQy8mZ636s1u0YvcdWrH3J1o8vGnsroJJKustVMbrU9ZbqKy3SH3ZJ7Rz7qkRNrgNlLpUNkPP3NbuYlpNLFY+GVf5uTjbKsN3seBO1s1ZMHvBrei7k5WlwLl3izvRsPhUt/WvmLqbKIC2QMAEMDtiwZpXC5ZJevGR9xdyLoBm9kiZfeXuaKUuvtm97mx534muW1WpO93cJeXLyXnSs/dLm9a6bd0/SOs+5qoNKy5yYXPBsgkVjwkVEJkjqwSlSU9inqsA5IaD7XCwDZf3Jua6bJWqAAuWlbuRoXVYqyWHH9GKPT/Rij0/Ud/IU7Iy2j7/dP30hPdqxFuo3vw+pfL71Btsqfy+YGxjVjvnnhL1bwA0LhaTYklJEYW4WNz1Vll0vHTSS6NpA4Cmo4ssgM41vEd6/A5p0w+lzT9yIbWZYp4kW+7uVdKzoBw25w66EDpORfXAxMrT15e2xBw3jsmb48Y9zV/qAmzvoo7oMta0LrDWqqdwUHNzQ0rn97h9brfmj+zQCft+ofm5nfJNXNsWrNHmvpdqc//5yiTpWt0sR4zhBAC0LbrIAuhO6X7pea9zW9GXdvzWVUFNPBjTlKi4HMxAKTthIo18xdixvAuAPXPdWmPJ9Ph9acHrw7uCMaxPjx/PeuBp6dkHNa4r8MSTfNYPxo1ljgyqEyXnuqA5bvzUsvIMn/lsMN4sK+Uy5cuJZNDuirb3lC6npZ75rttcaV/PLJB+wf2buaDdpQlGxqrBySO7KVv3Wnt+RrFi3s1yGsxgWlRcRROXNW5fWkrDBseoN79P6dwepfN7lMrtUTq3u3w5v8fdl9uthD1yeYCR+FxtW/B8/bL/ndqy6Hc06jWpKycAAF2ICiYAtCo/P36CknzWdcU9+Ex5xscDT7vxrAeePnKNthITc5XPZMpVQ/180K33UPVJPyaKJcYHTmPKgTV32F2uXCpgMibuAqeJHTnTZAiyiQUaTvYr4/VpODmow8lBDSf7NZwc1LA3EFwfUCE+g2n+0TRUOAGgfVDBBIB2VFp2QBPHcFb9PHezax7c7qqxY11s00Goq1KBtNaF18oxpKOH3Qy9I/vdEg0jB9xyFCMHytdt0a0L5wUVz2TKVUG9YBr+WKKiCpzTzzdsD9ZcdBVKY335xlMx5qlgkirGEu668eTHPBVNXMb6wZqMfsXajL6M9WVklfEWKZPs17DXH4TKfhVjU6zLh5ZXq6s0ARQA2gMBEwA6Rc9cabCBL+HGlIPoDJZ1uW7tRikntw1XecCyaf9oYAwBFADaAwETAAC0PQIoALQGAiYAdLmmzeIKtJB6/p8TQgFg5giYANDhCJBAfZr9XiHAAugGXRkw6UYDoFUQ/oDuwfcPAN2gKwMmgM4x0y9sBDwArYIKKoBO0JXrYM7GF0o+xNEKCE8AgFbS7O9HVImB2THVOph1BUxjzCWS/k1SXNLnrbX/MNXjCZgAAADA7CNEYzZMFTBrdpE1xsQlfUrSxZK2Sfq1Mea71tpHwm0mAAAAgJmgkNIZ2vlEQayOx5wtabO1dou1NifpRklXNLdZAAAAAIB2U88kP8dIerri+jZJ50x8kDHmWknXBlcPG2M2zLx5TTMgaXfUjcAROC6th2PSmjgurYdj0po4Lq2HY9KaOC4t5k9b/5gcP9kd9QRMU+W2IwZuWmtvkHRDA42KjDFm/WR9hhEdjkvr4Zi0Jo5L6+GYtCaOS+vhmLQmjkvraedjUk8X2W2Sjq24vkzS9uY0BwAAAADQruoJmL+WtNIYc6IxJinpKknfbW6zAAAAAADtpmYXWWttwRjzbkm3yy1T8gVr7cNNb1lztUVX3i7EcWk9HJPWxHFpPRyT1sRxaT0ck9bEcWk9bXtM6loHEwAAAACAWurpIgsAAAAAQE0ETAAAAABAKDo2YBpjXmeMedgYUzTGTDrFrzHmEmPMBmPMZmPMBypuP9EYc7cxZpMx5hvBBEeYAWNMnzFmbfCarjXGLKrymJcaY35bsY0YY14d3PclY8zWivvOmP3fovPUc1yCx/kVr/13K27nvRKyOt8rZxhjfhV8zj1gjHlDxX28V0I02d+Jivt7gv/7m4P3wgkV930wuH2DMeYVs9nuTlbHMflTY8wjwXvjx8aY4yvuq/pZhpmr47i81RgzVPH6v63ivrcEn3mbjDFvmd2Wd646jsl1FcdjozFmf8V9vFeawBjzBWPMLmPMQ5Pcb4wx/x4csweMMWdV3Nce7xNrbUdukk6RtErSOklrJnlMXNLjkpZLSkq6X9KpwX03SboquPxZSe+M+ndq903SP0n6QHD5A5L+scbj+yTtlZQKrn9J0pVR/x6dttV7XCQdnuR23isRHBNJJ0t
aGVw+WtIOSQuD67xXwjsWk/6dqHjMuyR9Nrh8laRvBJdPDR7fI+nE4OfEo/6d2n2r85i8tOJvxztLxyS4XvWzjG1WjstbJX2yynP7JG0J9ouCy4ui/p3afavnmEx4/HvkJvMsXee90pzjcp6ksyQ9NMn9l0m6TZKR9EJJdwe3t837pGMrmNbaR621G2o87GxJm621W6y1OUk3SrrCGGMkXSjp5uBxX5b06ua1tmtcIfdaSvW9pldKus1am2lqq9DocRnDe6Vpah4Ta+1Ga+2m4PJ2SbskDc5aC7tH1b8TEx5TebxulvSy4L1xhaQbrbWj1tqtkjYHPw8zU/OYWGt/UvG34y65NbzRXPW8VybzCklrrbV7rbX7JK2VdEmT2tlNGj0mb5T09VlpWRez1t4pV0CZzBWSvmKduyQtNMYsVRu9Tzo2YNbpGElPV1zfFtzWL2m/tbYw4XbMzBJr7Q5JCvaLazz+Kh35Qfd/g+4C1xljeprRyC5U73GZY4xZb4y5q9RtWbxXmqWh94ox5my5s9OPV9zMeyUck/2dqPqY4L1wQO69Uc9z0bhGX9dr5KoBJdU+yzBz9R6X1wafTTcbY45t8LloTN2va9CN/ERJd1TczHslGpMdt7Z5n9RcB7OVGWN+JOmoKnd9yFr7nXp+RJXb7BS3o4apjkmDP2eppNPk1l8t+aCkZ+W+SN8g6f2S/n56Le0uIR2X46y1240xyyXdYYx5UNLBKo/jvVKHkN8r/ynpLdbaYnAz75Xw1PP3gL8ls6vu19UY82ZJaySdX3HzEZ9l1trHqz0fDannuHxP0tettaPGmHfIVf4vrPO5aFwjr+tVkm621voVt/FeiUbb/01p64Bprb1ohj9im6RjK64vk7Rd0m65cnQiOBtduh01THVMjDE7jTFLrbU7gi/Fu6b4Ua+XdIu1Nl/xs3cEF0eNMV+U9OehNLoLhHFcgm6YstZuMcask3SmpG+J98q0hHFMjDHzJf2PpL8OutGUfjbvlfBM9nei2mO2GWMSkhbIdX+q57loXF2vqzHmIrkTNudba0dLt0/yWcaX5pmreVystXsqrn5O0j9WPPeCCc9dF3oLu08jn0FXSfrflTfwXonMZMetbd4n3d5F9teSVho3C2ZS7s31XetG0v5EbgygJL1FUj0VUUztu3KvpVT7NT1iHEDwRbs07u/VkqrOvoWG1TwuxphFpW6WxpgBSS+W9Ajvlaap55gkJd0iN07jmxPu470Snqp/JyY8pvJ4XSnpjuC98V1JVxk3y+yJklZKumeW2t3Jah4TY8yZkq6X9Cpr7a6K26t+ls1ayztbPcdlacXVV0l6NLh8u6SXB8dnkaSXa3wPJkxPPZ9fMsaskps05lcVt/Feic53Jf1BMJvsCyUdCE4ct8/7JOpZhpq1SXqNXNIflbRT0u3B7UdLurXicZdJ2ih3RuZDFbcvl/sisFnSNyX1RP07tfsmNybpx5I2Bfu+4PY1kj5f8bgTJD0jKTbh+XdIelDuy/JXJc2N+nfqhK2e4yLp3OC1vz/YX1PxfN4r0RyTN0vKS/ptxXZGcB/vlXCPxxF/J+S6HL8quDwn+L+/OXgvLK947oeC522QdGnUv0unbHUckx8Ff/tL743vBrdP+lnGNivH5WOSHg5e/59IWl3x3D8M3kObJV0d9e/SKVutYxJc/ztJ/zDhebxXmndMvi4383teLqtcI+kdkt4R3G8kfSo4Zg+qYjWMdnmfmKCxAAAAAADMSLd3kQUAAAAAhISACQAAAAAIBQETAAAAABAKAiYAAAAAIBQETAAAAABAKAiYAAAAAIBQEDABAAAAAKH4/8WYwXdu8iB1AAAAAElFTkSuQmCC\n",
4002
      "text/plain": [
4003
       "<Figure size 1152x504 with 1 Axes>"
4004
      ]
4005
     },
4006
     "metadata": {
4007
      "needs_background": "light"
4008
     },
4009
     "output_type": "display_data"
4010
    }
4011
   ],
4012
   "source": [
4013
    "#zz = preds_all.mean(1)[0,:,0]\n",
4014
    "k=0\n",
4015
    "zz = preds_all.mean(0)[:,k]\n",
4016
    "#zz = preds_all[0,0,:,k]\n",
4017
    "#zz = scalePreds(zz,power=1.3)\n",
4018
    "\n",
4019
    "\n",
4020
    "plt.figure(figsize=(16, 7))\n",
4021
    "a = plt.hist(zz - train_md[all_ich[k]],bins=100,alpha=0.5,density=True)\n",
4022
    "b = 0.5*(a[1][1:] + a[1][:-1])\n",
4023
    "plt.plot(b,-7*np.log(1-abs(b))*a[0])\n",
4024
    "plt.ylim([0,5])"
4025
   ]
4026
  },
4027
  {
4028
   "cell_type": "code",
4029
   "execution_count": 28,
4030
   "metadata": {},
4031
   "outputs": [
4032
    {
4033
     "name": "stdout",
4034
     "output_type": "stream",
4035
     "text": [
4036
      "0 [3.57e-05 6.13e-05 9.31e-05 1.79e-03 9.95e-01 9.98e-01 9.99e-01]\n",
4037
      "1 [7.12e-06 9.84e-06 1.44e-05 9.54e-05 8.47e-02 4.76e-01 8.64e-01]\n",
4038
      "2 [1.66e-05 2.25e-05 3.18e-05 3.06e-04 9.82e-01 9.94e-01 9.97e-01]\n",
4039
      "3 [8.39e-06 1.13e-05 1.72e-05 1.02e-04 9.76e-01 9.92e-01 9.95e-01]\n",
4040
      "4 [2.32e-05 3.56e-05 5.27e-05 4.59e-04 9.57e-01 9.92e-01 9.96e-01]\n",
4041
      "5 [2.51e-05 4.16e-05 6.20e-05 9.25e-04 9.68e-01 9.93e-01 9.96e-01]\n"
4042
     ]
4043
    }
4044
   ],
4045
   "source": [
4046
    "np.set_printoptions(precision=2)\n",
4047
    "zz = preds_all.mean(0)\n",
4048
    "for k in range(6):\n",
4049
    "    print(k,np.quantile(zz[:,k],[0.0001,0.001,0.01,0.5,0.99,0.999,0.9999]))"
4050
   ]
4051
  },
4052
  {
4053
   "cell_type": "markdown",
4054
   "metadata": {},
4055
   "source": [
4056
    "## Bounding"
4057
   ]
4058
  },
4059
  {
4060
   "cell_type": "code",
4061
   "execution_count": 29,
4062
   "metadata": {
4063
    "scrolled": true
4064
   },
4065
   "outputs": [
4066
    {
4067
     "data": {
4068
      "text/plain": [
4069
       "(10, 752797, 6)"
4070
      ]
4071
     },
4072
     "execution_count": 29,
4073
     "metadata": {},
4074
     "output_type": "execute_result"
4075
    }
4076
   ],
4077
   "source": [
4078
    "preds_all.shape"
4079
   ]
4080
  },
4081
  {
4082
   "cell_type": "code",
4083
   "execution_count": 30,
4084
   "metadata": {},
4085
   "outputs": [
4086
    {
4087
     "name": "stdout",
4088
     "output_type": "stream",
4089
     "text": [
4090
      "0.057490836031309125\n"
4091
     ]
4092
    }
4093
   ],
4094
   "source": [
4095
    "loss = ((- train_md[all_ich].values * np.log(preds_all.mean(0)) \\\n",
4096
    "        - (1 - train_md[all_ich].values) * np.log(1 - preds_all.mean(0)))*class_weights).mean()\n",
4097
    "print(loss)"
4098
   ]
4099
  },
4100
  {
4101
   "cell_type": "code",
4102
   "execution_count": 31,
4103
   "metadata": {
4104
    "scrolled": true
4105
   },
4106
   "outputs": [
4107
    {
4108
     "name": "stdout",
4109
     "output_type": "stream",
4110
     "text": [
4111
      "initial score 0.057490836031309125\n",
4112
      "any too low inconsistencies\n",
4113
      "1 class: 0.004179878506423379\n",
4114
      "2 class: 0.025429033325053103\n",
4115
      "3 class: 0.012410782720972586\n",
4116
      "4 class: 0.033147714456885455\n",
4117
      "5 class: 0.09211925658577279\n",
4118
      "total 0.14343999776832267\n",
4119
      "any too low corrected score 0.05748886375218989\n",
4120
      "any too high inconsistencies\n",
4121
      "total 0.24964844440134593\n",
4122
      "any too high corrected score 0.0574848864789516\n"
4123
     ]
4124
    }
4125
   ],
4126
   "source": [
4127
    "preds_all = predBounding(preds_all, target=train_md[all_ich].values)"
4128
   ]
4129
  },
4130
  {
4131
   "cell_type": "code",
4132
   "execution_count": 32,
4133
   "metadata": {
4134
    "scrolled": true
4135
   },
4136
   "outputs": [
4137
    {
4138
     "name": "stdout",
4139
     "output_type": "stream",
4140
     "text": [
4141
      "0.0574848864789516\n"
4142
     ]
4143
    }
4144
   ],
4145
   "source": [
4146
    "loss = ((- train_md[all_ich].values * np.log(preds_all.mean(0)) \\\n",
4147
    "        - (1 - train_md[all_ich].values) * np.log(1 - preds_all.mean(0)))*class_weights).mean()\n",
4148
    "print(loss)"
4149
   ]
4150
  },
4151
  {
4152
   "cell_type": "markdown",
4153
   "metadata": {},
4154
   "source": [
4155
    "## Models behavior per groups"
4156
   ]
4157
  },
4158
  {
4159
   "cell_type": "code",
4160
   "execution_count": 33,
4161
   "metadata": {
4162
    "scrolled": true
4163
   },
4164
   "outputs": [
4165
    {
4166
     "name": "stdout",
4167
     "output_type": "stream",
4168
     "text": [
4169
      " 0: 452096  84109 [0.0744 0.0723 0.0724 0.0725 0.0742 0.0729 0.073  0.072  0.0718 0.0718]\n",
4170
      " 1: 300701  37123 [0.0418 0.0406 0.0404 0.0408 0.0425 0.0415 0.0417 0.041  0.0405 0.0406]\n"
4171
     ]
4172
    }
4173
   ],
4174
   "source": [
4175
    "np.set_printoptions(precision=4)\n",
4176
    "for col in ['PxlMin_zero']:\n",
4177
    "    for i in train_md[col].unique():\n",
4178
    "        res = ((- train_md[all_ich].values * np.log(preds_all) - (1 - train_md[all_ich].values) \\\n",
4179
    "                * np.log(1 - preds_all)) * class_weights)[:,(train_md[col] == i)].mean((1,2))\n",
4180
    "        sz = (train_md[col] == i).sum()\n",
4181
    "        sz_test = (test_md[col] == i).sum()\n",
4182
    "        print('{:2d}: {:6d} {:6d} {}'.format(i,sz,sz_test,res))"
4183
   ]
4184
  },
4185
  {
4186
   "cell_type": "markdown",
4187
   "metadata": {},
4188
   "source": [
4189
    "# Inference"
4190
   ]
4191
  },
4192
  {
4193
   "cell_type": "code",
4194
   "execution_count": 84,
4195
   "metadata": {
4196
    "scrolled": true
4197
   },
4198
   "outputs": [
4199
    {
4200
     "name": "stdout",
4201
     "output_type": "stream",
4202
     "text": [
4203
      "completed epochs: 13\n",
4204
      "loading model model.b13.f0.d14.v35\n",
4205
      "adding dummy serieses 2\n",
4206
      "DataSet 14 test size 3520 fold 0\n",
4207
      "dataset test: 3520 loader test: 110 anum: 0\n",
4208
      "setFeats, augmentation -1\n"
4209
     ]
4210
    },
4211
    {
4212
     "ename": "FileNotFoundError",
4213
     "evalue": "[Errno 2] No such file or directory: '/mnt/edisk/running/features/se_resnet101_5n/test2/test.f0.a0'",
4214
     "output_type": "error",
4215
     "traceback": [
4216
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
4217
      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
4218
      "\u001b[0;32m<ipython-input-84-de854db3530e>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      8\u001b[0m         \u001b[0mpreds2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      9\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0manum\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 10\u001b[0;31m             \u001b[0mpredictions\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minference_one\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfold\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfold\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0manum\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0manum\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mbs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdataset\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     11\u001b[0m             \u001b[0mpreds2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpredictions\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     12\u001b[0m         \u001b[0mpreds\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpreds2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
4219
      "\u001b[0;32m<ipython-input-82-5f73bc7ddaa9>\u001b[0m in \u001b[0;36minference_one\u001b[0;34m(dataset, bs, add_seed, fold, anum)\u001b[0m\n\u001b[1;32m     28\u001b[0m     \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'dataset test:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtst_ds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'loader test:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mloader_tst\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'anum:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0manum\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     29\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 30\u001b[0;31m     \u001b[0mtst_ds\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msetFeats\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepoch\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0manum\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     31\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     32\u001b[0m     \u001b[0mloc_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtst_ds\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmetadata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcopy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
4220
      "\u001b[0;32m<ipython-input-82-44e0138eb17e>\u001b[0m in \u001b[0;36msetFeats\u001b[0;34m(self, anum, epoch)\u001b[0m\n\u001b[1;32m     81\u001b[0m                                    getAPathFeats('test', self.metadata.test.sum(), self.test_mask)] ,axis=0)\n\u001b[1;32m     82\u001b[0m             \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 83\u001b[0;31m                 \u001b[0mfeats\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetAPathFeats\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msz\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     84\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     85\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdataset\u001b[0m \u001b[0;34m<=\u001b[0m \u001b[0;36m13\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmode\u001b[0m \u001b[0;32min\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m'train'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'valid'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mTRAIN_ON_STAGE_1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
4221
      "\u001b[0;32m<ipython-input-82-44e0138eb17e>\u001b[0m in \u001b[0;36mgetAPathFeats\u001b[0;34m(mode, sz, mask)\u001b[0m\n\u001b[1;32m     62\u001b[0m         \u001b[0;32mdef\u001b[0m \u001b[0mgetAPathFeats\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msz\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmask\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     63\u001b[0m             \u001b[0mmax_a\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m8\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mmode\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'test'\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;36m4\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 64\u001b[0;31m             \u001b[0mfeats2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mpickle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgetAPath\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0man\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'rb'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0man\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmax_a\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     65\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mmask\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     66\u001b[0m                 \u001b[0mfeats2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfeats2\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
4222
      "\u001b[0;32m<ipython-input-82-44e0138eb17e>\u001b[0m in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m     62\u001b[0m         \u001b[0;32mdef\u001b[0m \u001b[0mgetAPathFeats\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msz\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmask\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     63\u001b[0m             \u001b[0mmax_a\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m8\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mmode\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'test'\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;36m4\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 64\u001b[0;31m             \u001b[0mfeats2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mpickle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgetAPath\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0man\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'rb'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0man\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmax_a\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     65\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mmask\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     66\u001b[0m                 \u001b[0mfeats2\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfeats2\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
4223
      "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '/mnt/edisk/running/features/se_resnet101_5n/test2/test.f0.a0'"
4224
     ]
4225
    }
4226
   ],
4227
   "source": [
4228
    "stg = time.time()\n",
4229
    "\n",
4230
    "#for ds in (my_datasets3 + my_datasets5):\n",
4231
    "for ds in [14]:\n",
4232
    "    folds = getNFolds(ds)\n",
4233
    "    preds = []\n",
4234
    "    for fold in range(folds):\n",
4235
    "        preds2 = []\n",
4236
    "        for anum in range(32):\n",
4237
    "            predictions = inference_one(fold = fold, anum = anum, bs=bs, dataset=ds)\n",
4238
    "            preds2.append(predictions)\n",
4239
    "        preds.append(np.stack(preds2))\n",
4240
    "    preds = np.stack(preds)\n",
4241
    "    print('total time', time.time() - stg)\n",
4242
    "    \n",
4243
    "    pickle.dump(preds, open(PATH_DISK/'preds_d{}_v{}'.format(ds, VERSION),'wb'))"
4244
   ]
4245
  },
4246
  {
4247
   "cell_type": "code",
4248
   "execution_count": null,
4249
   "metadata": {},
4250
   "outputs": [],
4251
   "source": [
4252
    "#11221.892060995102"
4253
   ]
4254
  },
4255
  {
4256
   "cell_type": "code",
4257
   "execution_count": null,
4258
   "metadata": {},
4259
   "outputs": [],
4260
   "source": [
4261
    "#total time 1466.092379808426 5x8\n",
4262
    "#total time 5399.404406309128 5x32"
4263
   ]
4264
  },
4265
  {
4266
   "cell_type": "markdown",
4267
   "metadata": {},
4268
   "source": [
4269
    "# Files transfer"
4270
   ]
4271
  },
4272
  {
4273
   "cell_type": "code",
4274
   "execution_count": 68,
4275
   "metadata": {},
4276
   "outputs": [
4277
    {
4278
     "name": "stdout",
4279
     "output_type": "stream",
4280
     "text": [
4281
      "Copying file:///home/zahar_chikishev/running/oof_d6_f0_v20 [Content-Type=application/octet-stream]...\n",
4282
      "Copying file:///home/zahar_chikishev/running/oof_d6_f1_v20 [Content-Type=application/octet-stream]...\n",
4283
      "Copying file:///home/zahar_chikishev/running/oof_d6_f2_v20 [Content-Type=application/octet-stream]...\n",
4284
      "Copying file:///home/zahar_chikishev/running/oof_d7_f0_v20 [Content-Type=application/octet-stream]...\n",
4285
      "- [4 files][164.5 MiB/164.5 MiB]                                                \n",
4286
      "==> NOTE: You are performing a sequence of gsutil operations that may\n",
4287
      "run significantly faster if you instead use gsutil -m cp ... Please\n",
4288
      "see the -m section under \"gsutil help options\" for further information\n",
4289
      "about when gsutil -m can be advantageous.\n",
4290
      "\n",
4291
      "Copying file:///home/zahar_chikishev/running/oof_d7_f1_v20 [Content-Type=application/octet-stream]...\n",
4292
      "Copying file:///home/zahar_chikishev/running/oof_d7_f2_v20 [Content-Type=application/octet-stream]...\n",
4293
      "Copying file:///home/zahar_chikishev/running/oof_d8_f0_v20 [Content-Type=application/octet-stream]...\n",
4294
      "Copying file:///home/zahar_chikishev/running/oof_d8_f1_v20 [Content-Type=application/octet-stream]...\n",
4295
      "Copying file:///home/zahar_chikishev/running/oof_d8_f2_v20 [Content-Type=application/octet-stream]...\n",
4296
      "Copying file:///home/zahar_chikishev/running/oof_d9_f0_v20 [Content-Type=application/octet-stream]...\n",
4297
      "Copying file:///home/zahar_chikishev/running/oof_d9_f1_v20 [Content-Type=application/octet-stream]...\n",
4298
      "Copying file:///home/zahar_chikishev/running/oof_d9_f2_v20 [Content-Type=application/octet-stream]...\n",
4299
      "| [12 files][493.8 MiB/493.8 MiB]   35.9 MiB/s                                  \n",
4300
      "Operation completed over 12 objects/493.8 MiB.                                   \n"
4301
     ]
4302
    }
4303
   ],
4304
   "source": [
4305
    "!gsutil cp /home/zahar_chikishev/running/oof* gs://rsna-hemorrhage/results"
4306
   ]
4307
  },
4308
  {
4309
   "cell_type": "code",
4310
   "execution_count": 69,
4311
   "metadata": {},
4312
   "outputs": [
4313
    {
4314
     "name": "stdout",
4315
     "output_type": "stream",
4316
     "text": [
4317
      "Copying file:///home/zahar_chikishev/running/preds_d6_v20 [Content-Type=application/octet-stream]...\n",
4318
      "Copying file:///home/zahar_chikishev/running/preds_d7_v20 [Content-Type=application/octet-stream]...\n",
4319
      "Copying file:///home/zahar_chikishev/running/preds_d8_v20 [Content-Type=application/octet-stream]...\n",
4320
      "Copying file:///home/zahar_chikishev/running/preds_d9_v20 [Content-Type=application/octet-stream]...\n",
4321
      "\\ [4 files][172.6 MiB/172.6 MiB]                                                \n",
4322
      "Operation completed over 4 objects/172.6 MiB.                                    \n"
4323
     ]
4324
    }
4325
   ],
4326
   "source": [
4327
    "!gsutil cp /home/zahar_chikishev/running/preds* gs://rsna-hemorrhage/results"
4328
   ]
4329
  },
4330
  {
4331
   "cell_type": "code",
4332
   "execution_count": null,
4333
   "metadata": {},
4334
   "outputs": [],
4335
   "source": [
4336
    "!gsutil -m cp gs://rsna-hemorrhage/results/* C:\\StudioProjects\\Hemorrhage\\running\\ensemble"
4337
   ]
4338
  },
4339
  {
4340
   "cell_type": "code",
4341
   "execution_count": null,
4342
   "metadata": {},
4343
   "outputs": [],
4344
   "source": [
4345
    "!gsutil -m cp gs://rsna-hemorrhage/yuvals/model_Densenet161_3_version_classifier_splits_fullhead_resmodel_type_OOF_pred_split_* ."
4346
   ]
4347
  },
4348
  {
4349
   "cell_type": "code",
4350
   "execution_count": null,
4351
   "metadata": {},
4352
   "outputs": [],
4353
   "source": [
4354
    "!gsutil -m cp gs://rsna-hemorrhage/yuvals/model_*_version_classifier_splits_fullhead_resmodel_type_OOF_pred_split_* ."
4355
   ]
4356
  },
4357
  {
4358
   "cell_type": "code",
4359
   "execution_count": null,
4360
   "metadata": {},
4361
   "outputs": [],
4362
   "source": [
4363
    "!gsutil -m cp gs://rsna-hemorrhage/yuvals/model_Densenet161_3_version_classifier_splits_fullhead_resmodel_type_test_pred_ensamble_split_* ."
4364
   ]
4365
  },
4366
  {
4367
   "cell_type": "code",
4368
   "execution_count": null,
4369
   "metadata": {},
4370
   "outputs": [],
4371
   "source": [
4372
    "!gsutil cp gs://rsna-hemorrhage/yuvals/OOF_validation_image_ids.pkl .\n",
4373
    "!gsutil cp gs://rsna-hemorrhage/yuvals/ensemble_test_image_ids.pkl ."
4374
   ]
4375
  },
4376
  {
4377
   "cell_type": "code",
4378
   "execution_count": null,
4379
   "metadata": {},
4380
   "outputs": [],
4381
   "source": []
4382
  },
4383
  {
4384
   "cell_type": "code",
4385
   "execution_count": null,
4386
   "metadata": {},
4387
   "outputs": [],
4388
   "source": []
4389
  },
4390
  {
4391
   "cell_type": "code",
4392
   "execution_count": 19,
4393
   "metadata": {},
4394
   "outputs": [],
4395
   "source": [
4396
    "!rm /home/zahar_chikishev/running/*v53"
4397
   ]
4398
  },
4399
  {
4400
   "cell_type": "code",
4401
   "execution_count": 18,
4402
   "metadata": {},
4403
   "outputs": [
4404
    {
4405
     "name": "stdout",
4406
     "output_type": "stream",
4407
     "text": [
4408
      "/home/zahar_chikishev/running/preds_se_resnext101_32x4d_v53\r\n",
4409
      "/home/zahar_chikishev/running/stats.f0.v53\r\n",
4410
      "/home/zahar_chikishev/running/stats.f1.v53\r\n",
4411
      "/home/zahar_chikishev/running/stats.f2.v53\r\n"
4412
     ]
4413
    }
4414
   ],
4415
   "source": [
4416
    "!ls /home/zahar_chikishev/running/*v53"
4417
   ]
4418
  },
4419
  {
4420
   "cell_type": "code",
4421
   "execution_count": 20,
4422
   "metadata": {},
4423
   "outputs": [
4424
    {
4425
     "name": "stdout",
4426
     "output_type": "stream",
4427
     "text": [
4428
      "/home/zahar_chikishev/running/oof_Densenet161_f0_v72\r\n",
4429
      "/home/zahar_chikishev/running/oof_Densenet161_f1_v72\r\n",
4430
      "/home/zahar_chikishev/running/oof_Densenet161_f2_v72\r\n",
4431
      "/home/zahar_chikishev/running/oof_Densenet169_f0_v73\r\n",
4432
      "/home/zahar_chikishev/running/oof_Densenet169_f1_v73\r\n",
4433
      "/home/zahar_chikishev/running/oof_Densenet169_f2_v73\r\n",
4434
      "/home/zahar_chikishev/running/oof_Densenet201_f0_v74\r\n",
4435
      "/home/zahar_chikishev/running/oof_Densenet201_f1_v74\r\n",
4436
      "/home/zahar_chikishev/running/oof_Densenet201_f2_v74\r\n",
4437
      "/home/zahar_chikishev/running/oof_se_resnext101_32x4d_f0_v75\r\n",
4438
      "/home/zahar_chikishev/running/oof_se_resnext101_32x4d_f1_v75\r\n",
4439
      "/home/zahar_chikishev/running/oof_se_resnext101_32x4d_f2_v75\r\n"
4440
     ]
4441
    }
4442
   ],
4443
   "source": [
4444
    "!ls /home/zahar_chikishev/running/oof*"
4445
   ]
4446
  },
4447
  {
4448
   "cell_type": "code",
4449
   "execution_count": 21,
4450
   "metadata": {
4451
    "scrolled": true
4452
   },
4453
   "outputs": [
4454
    {
4455
     "name": "stdout",
4456
     "output_type": "stream",
4457
     "text": [
4458
      "/home/zahar_chikishev/running/preds_Densenet161_v72\r\n",
4459
      "/home/zahar_chikishev/running/preds_Densenet169_v73\r\n",
4460
      "/home/zahar_chikishev/running/preds_Densenet201_v74\r\n",
4461
      "/home/zahar_chikishev/running/preds_se_resnext101_32x4d_v75\r\n"
4462
     ]
4463
    }
4464
   ],
4465
   "source": [
4466
    "!ls /home/zahar_chikishev/running/preds*"
4467
   ]
4468
  },
4469
  {
4470
   "cell_type": "markdown",
4471
   "metadata": {},
4472
   "source": [
4473
    "# Ensembling"
4474
   ]
4475
  },
4476
  {
4477
   "cell_type": "code",
4478
   "execution_count": 34,
4479
   "metadata": {
4480
    "scrolled": true
4481
   },
4482
   "outputs": [
4483
    {
4484
     "data": {
4485
      "text/plain": [
4486
       "(10, 752797, 6)"
4487
      ]
4488
     },
4489
     "execution_count": 34,
4490
     "metadata": {},
4491
     "output_type": "execute_result"
4492
    }
4493
   ],
4494
   "source": [
4495
    "preds_all.shape"
4496
   ]
4497
  },
4498
  {
4499
   "cell_type": "code",
4500
   "execution_count": null,
4501
   "metadata": {},
4502
   "outputs": [],
4503
   "source": []
4504
  },
4505
  {
4506
   "cell_type": "code",
4507
   "execution_count": 35,
4508
   "metadata": {},
4509
   "outputs": [
4510
    {
4511
     "data": {
4512
      "text/plain": [
4513
       "[<matplotlib.lines.Line2D at 0x7f8298f99450>]"
4514
      ]
4515
     },
4516
     "execution_count": 35,
4517
     "metadata": {},
4518
     "output_type": "execute_result"
4519
    },
4520
    {
4521
     "data": {
4522
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deVxUVR/H8c+ZAUWQJUAUFcQF913cTcsl0UzULJfKrfKp57GetseszNQ2bc8yy8wyLS2Xcsktl0xzRU3FBcUNcEEURZR95jx/XExSTNTBYWZ+79eLF9x7DzO/24Vv18O55yitNUIIIRyfyd4FCCGEsA0JdCGEcBIS6EII4SQk0IUQwklIoAshhJNws9cbBwYG6rCwMHu9vRBCOKStW7ee1lqXKeiY3QI9LCyM6Ohoe729EEI4JKXU0Wsdky4XIYRwEhLoQgjhJCTQhRDCSUigCyGEk5BAF0IIJ3HdQFdKTVVKnVJKxVzjuFJKTVBKxSmldiqlGtu+TCGEENdTmDv0b4DIfzjeBQjP+xgKTLr1soQQQtyo6wa61vp3IOUfmkQB32rDRsBPKRVsqwKFEMJZZGRbeHvJXhLPphfJ69uiD70CkJBvOzFv31WUUkOVUtFKqejk5GQbvLUQQjiG9QdP0/mj3/lizSFWxxZN/tniSVFVwL4CV83QWk8GJgNERETIyhpCCKd3PjOHtxfvZebmBMICPJk1tAUtqgQUyXvZItATgZB82xWB4zZ4XSGEcEzJ+yFmDodPprD6QApls2Fk0x483L0tHu7mIntbWwT6AmCYUmoW0BxI1VqfsMHrCiGE49AaDq6CjZ9B3AqsmCivTQxUVsxuVgiPhCIMcyhEoCulZgJ3AYFKqUTgNcDdqF9/DiwGugJxQDowuKiKFUKIYik3Cxa/ANu+JbNkIFNVH77Nbs9D7Zvwr3ZVMd+mJ36uG+ha637XOa6B/9isIiGEcCRpSfDjI5CwiUW+/Xk2KZK6oYFMv78+4WW9b2spdps+VwghHN7hteh5Q7GkpzBcP8uSlBa81K0GA1uFYTYVNF6kaEmgCyHEjcq+CCvGwOYvOGkuz5D01wio2oTlveoR4u9pt7Ik0IUQ4kYc346eMwSVcohvrZF8wkP8r1cjHoioiFK3/648Pwl0IYQoDK1h2zSsv/yP0/jydPZIfGrezaIedSnr42Hv6gAJdCGEuLbsi3D+BKSdwLL9O8w7Z/KHtT6j3Z/hub6t6FqvnN3vyvOTQBdCiIKs+xBWjgVtBcAMfJzbi/h6w5jTrR53eJWwb30FkEAXQogrbZgIK0aTW/1eFmY3ZvZ+K5neoTzdrz3/rRFk7+quSQJdCCHy2zIFlr1McmhXescP5ui5bAa0rMTwyJqULlm8I7N4VyeEEEXtXDxs+sL4nJoIx7exx7sV3ff3JTTQnR//1YRmlf3tXWWhSKALIVyXJQdmPQTJ++COME6byvCLqQfjzvTk8btq8N8O4UU6mZatSaALIVzXHx/ByZ2kdv+al/eG8cuuE9QO9mH2o/WpW8HX3tXdMAl0IYRrStqD/m08ieW70G1haTKyk/hf5xoMbVsF99s1m5aNSaALIVyPJZfseU+QiRdRh6KoVqk04++vT7Wg0vau7JZIoAshXIr13DHiZz1DWNIORlif4b/dW/JIi0qY7DCZlq1JoAshXENOBim/vofnlk8ItlqY6zuAZwcOJyTAy96V2YwEuhDC6eXkWkj8vA+Vz6xhOS3I7TiGXm2aF6vH9m1BAl0I4dRijqXy6/cf8OzFNcwN+Bd3DhpLkHfxmEzL1iTQhRBOKTPHwierDjB/zRaWlphMSmAE9/97HJgccwRLYUigCyGcTvSRFIbP3cmh5AssD/gGr2wo3X+KU4c5SKALIZzIhaxc3l26jxUbt9Kp9GFm1Ygj6Gg03PsB+Fe2d3lFTgJdCOEU1uw7yYq5X/Jg1hzGlDwCOcAJb2gyGCKG2Lm620MCXQjh0M6lZzN75lQ6Hf2A101JZPhVgZZvQ1gbKFsHTI4zF8utkkAXQjisJbtOMOHn3/kxdzTZXuXI7jqNUnXuc6kQz08CXQjhcE6dz2TU/N0s3X2S732+xgsr3o/PA/8q9i7NriTQhRAOQ2vNnK2JvL5oD5m5ViZFnKRVzAbo8JrLhzlIoAshHERCSjoLZ3xMVtIB7i3blqG97qHyD89BUB1o9ZS9yysWJNCFEMWaxar5dsMRvlu2jsXqPUq450LKXPi6JFiyoc90MLvbu8xiQQJdCFFsxZ1K48W5u9h69CzTA+bjnmmGx1ZDcizE/Qrl6kHFCHuXWWxIoAship0ci5Uv1hxkwso4PEuamRLpSZvfVqBaDYPg+sZH/QfsXWaxI4EuhChWdiWmMnzuTvaeOM+99YMZfV8dyiwaBCW9oc1z9i6vWJNAF0IUC5k5Fj5acYAv1x4iwKsEXzzShM51ykH8JohdDO1fBU9/e5dZrBUq0JVSkcDHgBmYorUed8XxUGAa4JfXZoTWerGNaxVCOKlNh84wYt4uDp++SJ+IEF6+txa+pdxBa1gxGryCoMWT9i6z2LtuoCulzMBEoBOQCGxRSi3QWu/J12wk8KPWepJSqjawGAgrgnqFEE4kLTOHd5bGMn3jUUL8S/HdY81pXS3wcoN9iyB+vTG5VgnnWVmoqBTmDr0ZEKe1PgSglJoFRAH5A10DPnlf+wLHbVmkEML5rI49xSvzdnHifCZDWlfmhc7V8SyRL5Jys+HXUVCmJjQeaL9CHUhhAr0CkJBvOxFofkWb0cBypdRTgBfQsaAXUkoNBYYChIaG3mitQggncPZiNq8v2sO87ccIDyrN3Cdb0Tj0jqsbbpkCKYfgoTlglj/3FUZh/isVtOievmK7H/CN1vp9pVRLYLpSqq7W2vq3b9J6MjAZICIi4srXEEI4Ma01v+w6wWvzd5OakcPTHcL5T4QnJUtYrm6cngJrxkPV9lCtwPtDUYDCBHoiEJJvuyJXd6k8CkQCaK03KKU8gEDglC2KFEI4tqTzmYz8OYZf9yRRv6IvMx5rTq0AM7xfC7JS4Y7KULEp+ASDyR1O7ICs83DPG+BkCzkXpcIE+hYgXClVGTgG9AX6X9EmHugAfKOUqgV4AMm2LFQI4Xi01vwYncAbv+wlO9fKy11rMqR1ZdzMJtj9sxHmTR+DC0lwZB2knzEe50dDm2eN+cxFoV030LXWuUqpYcAyjCGJU7XWu5VSY4ForfUC4HngS6XUsxjdMYO01tKlIoQLiz+Tzoh5O1l/8AzNK/sz/v76hAXmG6myZz54BkLk+Kv7yK0Wl53T/FYU6i8NeWPKF1+xb1S+r/cArW1bmhDCEVmsmm/WH+G9ZbGYTYo3e9alX9NQTKZ8XSc5GbB/mfH4fkF/8JQwvynyp2MhhM3sT0pj+Jyd/JlwjvY1g3izZ12CfUtd3TBuBeRchNo9bn+RTkwCXQhxy7JzrUz67SCfrt5P6ZLufNy3Id0blEdd6w+ae+ZDKX8Iu/P2FurkJNCFEDf
PauXIulnsWPcLd2fu5okSJ7A2Gkipuu0uj07RGk7tAf+q4O4BOZkQuxTq9pTx5TYm/zWFEDclIyuHfVOG0Ch5AUF4kFmuISUDG8DWL+DYerh/KpyJg3UfQOIWKFsXHvgGTh+A7DSoHWXvU3A6EuhCiBu24cApUn8YSmTualYHPUKTQe/i75nXVx67FOb/GyY2Nbb9QqHtcOPJzy/aQUAV8PCDyu3sdwJOSgJdCFFo5zNzeGdxDE23v0yUeT3xDZ7h7p5j/t6oRiQ88Qds+BSCG0CdXkbXSpNBMGcIJGyEhg/LsnFFQAJdCFEoK/cm8cpPMfROn0WU23py7n6N0HbXWHDCJxg6v/n3fb4VYNAvsGMmVOtQ9AW7IAl0IcQ/OnMhizEL97Bgx3G6BCbzXO5PUKsX7tcK839idoPGj9i+SAFIoAshrkFrzYIdxxmzcA9pmTm80D6Mfx94HRP+cO/79i5PFEACXQhxlROpGYz8KYaV+07RIMSPd+6vT41d70HyXug/W5aCK6Yk0IUQf7FaNbO2JPD24r3kWK2MvLcWg1tVwrxlMqyfAI0HQPV77F2muAYJdCEEAEdOX2TEvJ1sPJRCyyoBjLu/HpU4CdO6GcvAhd8Dnd+yd5niH0igC+Hici1Wvv7jCO//Gou7ycS4XvXo0zQEFb8RpvcEcwnoMQka9JO5yYs5CXQhXNi+k+d5cc5OdiSm0rFWWd7oUZdyvh6QnW48HORdFgYvAZ/y9i5VFIIEuhAuKCvXwsTVB/lsdRwNPE7xa/0YqvV4GeXpYTT47S1jPc+BCyXMHYgEuhAuZnv8WV6cu5P9SRcYVLcEryaNx7z/OExeDn2mgyUXNkyEJoOhclt7lytugAS6EC4iPTuX95fvZ+ofhynn48G0h2rSbt1AY+3OqImw6k346h7wKgPewdBprL1LFjdIAl0IF7A+7jQj5u0iPiWdh1uE8mKnqnjPe8iY1vahH6FaRwjvDHMGw5G1xlhzDx97ly1ukAS6EE4sNSOHtxfvZdaWBCoHejFraAtaVAmAVW/AwVXQ/RMjzAFKl4FHfobUBPCvbN/CxU2RQBfCSS3ffZKRP8dw+kIW/2pXhWc7VsfD3QxnDsIfH0O9B40HhfIzu0mYOzAJdCGczOkLWYxesJtFO09Qs5w3UwZGUL+in3FQa1jyIphLwj2v27dQYXMS6EI4Ca01P/95jDEL95CeZeH5TtV54q6quJtNlxvFLoa4X40nPr3L2a9YUSQk0IVwAsfPZfDKT7tYHZtMo1BjMq3wst5/b5STAUtHQJla0GyofQoVRUoCXQgHZrVqvtscz/gl+7BYNaO61WZgqzDMpgIe0d/0BZyLNxaZkNWCnJIEuhAO6lDyBUbM3cXmIym0qRbI273qEeLvCTtnw/FtRrfKpblXrFbY+g1UagNhbexatyg6EuhCOJhci5Up6w7z4a/7Kelm4p3e9XmgSUWUUnB0Pfz8BFhzoUZXqHyn8U1H18HZw3D3y/YtXhQpCXQhHMie4+cZPncHMcfO07lOWV6PqkuQT978K+ePw48Dwa8SZKXBug8vB/q26VDSF2rdZ7/iRZGTQBfCAWTlWvh0VRyTfjuIn6c7nz3UmC51yxl35QC5WfDjAMi+CAMXGKNZVo6FEzvBLwT2zDfGnLuXsu+JiCIlgS5EMbf1aAovzt1F2ql4vgleSYtSibhtcYNoM1hzIOMsXEw2Pj8wDYJqGXOxrP3QeIAopDlYsq5+iEg4HQl0IYqpi1m5vLsslgUbdvI/z8X08VyKKVWDbwujgbaCmwcE1YZSdxh/7KzTwzhWyg8iBhmzJiZuhuCGEFzfbucibo9CBbpSKhL4GDADU7TW4wpo8yAwGtDADq11fxvWKYRLWXsgmZfm7SLw3C7WeH1MactZVIN+0O5FuKNS4V6kxb9h4+fGUMXWzxRtwaJYuG6gK6XMwESgE5AIbFFKLdBa78nXJhx4CWittT6rlAoqqoKFcGap6Tm88cseZm9N5DG/aF4u9Smm0sHQdz6Uq3djL+ZTHhr2g5ifoF7voilYFCuFuUNvBsRprQ8BKKVmAVHAnnxtHgcmaq3PAmitT9m6UCGc0roPISAcanVjacxJXp0fQ8rFbL4NX0vbhElQqTU8OB28Am7u9bu8A22Hg4evbesWxVJhAr0CkJBvOxFofkWb6gBKqT8wumVGa62XXvlCSqmhwFCA0NDQm6lXCOdxOg5WjEa7efB6+c+Yut+D2sE+zO5wkbClk6DeAxD1GbiVuPn3cC9ljHIRLqEwgV7QMt+6gNcJB+4CKgJrlVJ1tdbn/vZNWk8GJgNERERc+RpCuBQd/RVauXEu153eR8dSptMsHmvqj/sXbYz5Vrp/cmthLlxOYQI9Ecj/v/iKwPEC2mzUWucAh5VSsRgBv8UmVQrhZI6dOo3v5umszo1gt39nRqSOobZ1JiyKhcxUGPCzjBkXN6wwgb4FCFdKVQaOAX2BK0ew/Az0A75RSgVidMEcsmWhQjic+E0QVPNv/ddWq2b6xqPsXzqJN01puDd/nOFd74dfjhpjxgEix0PZOnYqWjiy6wa61jpXKTUMWIbRPz5Va71bKTUWiNZaL8g7do9Sag9gAf6ntT5TlIULUaxdPA1fd4Gw1saybiYzB5Mv8OKcnUQfPctKn9XklK5O5L33GxNodX4LEqON1YKa/8ve1QsHVahx6FrrxcDiK/aNyve1Bp7L+xBCHFwF2gKHf8ey7mM+t3Tn45UHKOVu5qtOZqqujYXm716eDbGEF/zrd1Cmy/uEuEHypKgQRSFuJZTyJ7VcC7xWvcHyLDMd67Zh9H21CFrxDLh7QoM+f/8ek9k+tQqnIYEuhK1ZreiDq9jrGUH/2F4sKbGZ7/2n4FUhA74aBKnxxopBMjZc2Jjp+k2EEDdi9/Y/UBdP8dWJKnRqVB3v/tPwSk+E396GgCrQe6rRZy6EjckduhA2ciErl3eW7sNr8wzquMMDfQfQon7eaJXHVoKnP9wRZtcahXOTQBfCBtbsT+blebs4nprBqoD9WDzrXg5zgAqN7VeccBnS5SLELTiXns1zP/7JwKmb8XA3Me/RelRO34U5vIO9SxMuSO7QhbgJWmuWxJxk1PwYzqXnMOzuagxrXw2Pg8uMRSeqSqCL208CXYhrycmA6K+hQV+j/zvPqfOZvDo/hmW7k6hbwYdpQ5pRp3zeiJWDK8HdC0Jb2Klo4cok0IW4lq3TYNlLxvqcj/yENrkxe2sibyzaQ1aulRFdavJYm8q4YYGUQ5ByGPYvMxZmditp7+qFC5JAF6IgWkP0VPAMhCNrSfv5BZ482591cadpFubPuPvrUaVMaUhNhC/bw4Wky99710v2q1u4NAl0IXKzIW4FVI8EU944gaN/wOlYrN0/ZfeOzdTb9Q3hVjc693iah5qFYjLlPZ6/6k3IOAfdPoLA6uBfBXyC7XcuwqXJKBchds6CWf1g/ceX9235CktJX/pvKE9U7D3sKNWMUW7f8EiZQ5fD/OQu2DHTmEwrYrAxEZeEubAjCXQhDvxqfF71BhzbRk7qSS
x7FvBteiv2nbHwfp9G1H96NiqwOvzwCJzYabRf/iqU8oM7n7df7ULkI10uwrVZcuHQGqjVHY5tJeuHIczLiqCfziWhal9W9G5HYOm8P3A+NAe+6gTf9Tb6yQ+ths5vG6EuRDEgd+jCtR2LhqxUsmv3YkaFV3BPPUK/rNmcCWrBqEE9Loc5gG8FeHgu5GbComeMx/ibPma30oW4kgS6cG1xK9DKTK+l7ozc7seaoIcACLjr3wW3D6oF/X6A0uWMu3NZ81MUI9LlIlzHgRWwZDgMXgze5UjLzCF1y0JOWKqSqj357rH6tK4SCScehfKNrv06lVrC8/tkIQpR7MgdunAdO76HlIOw6g1W7zvFgx8spHx6LBcq3sWyZ9rSulqgschEhcbXD2sJc1EMyR26cA2WXKN7xc0DvX0G4zfWoa1fMialufvevlBCfhWE45M7dOESdMJGyEzlDcsgzmtPPi/7Ey9WSwTPAAj+h+4VIRyI3JYIp5d0PpNtP31LB20mxr8DWTXLErZ+DKRth9pRl58OFcLByU+ycFpaa2ZtjqfjB2uodu4Pkv2b8N1/OlK2/TDjEX1LtkxzK5yKBLpwLL+Ng6mRYLX+Y7P4M+k8NGUTI+btol2ZdMJVIhWa9cDNbDKGGnZ51wj18E63qXAhip50uQjHsvNHY6TKgeVQI/Kqwxar5us/DvPe8ljcTCbe6lmPviyFJRiTb10S3hHCt9++uoW4DSTQheNITTTCHGD9hKsCPfZkGsPn7mRHwjna1wzizZ51CfYtBTOWgX9VCKhqh6KFuH0k0IXjOPy78blBf2NMeeJWqNiE7Fwrn/0Wx8TVcXh7uPNx34Z0b1AepRRkX4TDa6Hpo/atXYjbQAJdOI7DvxvDDLuMg32/wPoJ7Gj5McPn7CQ2KY2ohuUZ1a02AZfmXzl7xFhCzpIF4ffYtXQhbgcJdOEYtDZmRQy7Ezx8yWk8CPOGT3h6ezuyvCsxZUAEHWuXNR4g2jIFNn8JyfuM7w1tCZVa27d+IW4DCXThGM4chLTjUKUd6w+e5r0/GzBLKyaU/YWqUS9SOlAb63kufxVOx0LFpsbkWdU7S9+5cBkS6MIxHP4NgPcOBPPpjk1UCvDnTI1+NNg/A6atuNzOvyr0/R5qdJX5VoTLkUAXDiFpx69oAvlsp4WhbavybMfqlHJrByeGQNpJOH8cSnhB3d4ypa1wWYUKdKVUJPAxYAamaK3HXaNdb2A20FRrHW2zKoVryDhrfC51x1+7zlzIYuyCGF5LWEd0yeb89GgbGoTkWyGoQpPbXKQQxdd1A10pZQYmAp2ARGCLUmqB1nrPFe28gaeBTUVRqHByudnwZQdj+tonN6BNZhbsOM7oBbsJyYrD3/0C7SN74xYiy70JcS2FuUNvBsRprQ8BKKVmAVHAnivavQ68A7xg0wqFa4ie+tdDQ+c2fM3zcQ1Zue8UDUP8+LJaOmwAt2p32bdGIYq5wszlUgFIyLedmLfvL0qpRkCI1nrRP72QUmqoUipaKRWdnJx8w8UKJ5VxDtaMQ1duR7JfQ7J/fZOtB48x8t5azB1YkzJ7v4UytcCnvL0rFaJYK0ygFzRUQP91UCkT8CHw/PVeSGs9WWsdobWOKFOmTOGrFM5t3QfojHO8mPYg/07qTpA6y29tYnmsVQjmuYMg7QR0/8TeVQpR7BWmyyURCMm3XRE4nm/bG6gL/KaMYWLlgAVKqe7yh1FxPblnjsCGSSy03smS02UY2astev8m/LZNhIx44+nQHpMgpKm9SxWi2CtMoG8BwpVSlYFjQF+g/6WDWutUIPDStlLqN+AFCXNRIK2Nx/aPbeXcqXjOH9xEkEWzIexJVvRuR1kfDwh9DSa1hj9nQMth0LD/9V9XCHH9QNda5yqlhgHLMIYtTtVa71ZKjQWitdYLirpI4SSO/AHLR8LxbViUmXSrH6nKnzMRYxnfrQvq0oNAZevAnc/B+RPQaax9axbCgSit9fVbFYGIiAgdHS038S5Ba5j/H/jzO7K9gvnI8iCfn2tKVKMQRnWrzR1e8iCQEIWllNqqtY4o6Jg8KSqK3v5l8Od3bAx6kEEJXbnDx4evBtXj7ppB9q5MCKcigS6KliWX9F9e5owK5uH4bvRtUYUXI2vi7eFu78qEcDoS6KLIpGbksHL62/Q6f5C3PF7iu0fa0LxKgL3LEsJpSaAL27JaQJlYvieJt3/azJycL4n3acTIp17Ao4T8uAlRlOQ3TNhG5nnY+BnW9Z9wweJOTnY4b3qYCFDnCej7AUiYC1Hk5LdM3LrNX6JXv4XKSOE3mnLe6kF7zwP4ZJ2E+n2hQmN7VyiES5BAF7dm1xxY/AJ7PRoyIutZ3EKa8E7v+vgEeRtzlHsGXv81hBA2IYEurk3nLetWri74VrzqsDXlKLnzn2G3rs4jF4fzfLfaDGgZhtmU94CQTKYlxG0lgS6u7dBqmNkHlBlqdIFmj0NYWzCZOJSUSuaU/oTk5DK9/CsseeBuQvw97V2xEC5NAl1cW/TXUMofGj8C26bDvkXo0uXY492aHcdS6W+KYXOjN3k/KuryY/tCCLspzPS5whWlJUHsYmNirE5j4bm9JLafwB/ZVQk7voj+phVk1oiiWdR/JMyFKCbkDl0U7M8ZYM2FJoPJzLHw6aqjfL6mDH6ez/Fmz2p09k3Ao2IESJgLUWxIoIurWa2wdRqE3cnWi/4Mn7aWg8kXub9xRV7tVgs/zxJAmL2rFEJcQQJdXO3Qajh3lDl+Q/jf5xso71uKaUOa0a66rDIlRHEmgS6ukrzmC9zw4ZXYMAa0qMT/ImtSuqT8qAhR3MlvqfhLanoOn/60iuHxK5hX4j5mDGpL0zB/e5clhCgkCXQBwNKYE7z2804+yR6FditJ1L/G4BEoYS6EI5FAd3Gn0jJ5bf5ulsScZIzfEprl7oPun0NgmL1LE0LcIAl0F6W1Zs7WRN74ZS8ZORbea5nN/X9+D3V7Q4O+9i5PCHETJNBdUEJKOi//tIu1B04TUekO3ulakSo/dQPfCtDtAxlbLoSDkkB3FanHsCZGs2VnDC/srUoKfoyNqsPD1XIwzYyCtJMwcCF4+Nq7UiHETZJAd2ZaQ/RU+P09SDuOCWgOrDB7kN34Ubz9NUx9EpQJBi2CkGb2rlgIcQsk0J3V+ROwYBjEreCYbxOmWu4h1lydAXfVodOZ7yi59TPYOhECwuGhH8G/ir0rFkLcIgl0Z3RoDcweiDU7g89LPcG7SW3oUq88H3avSxnvksBdcOfzsG8hRDwKnjI8UQhnIIHuyE7GwO550HLY5VA+tAb9fR9OuwfTL2MkqaYwJj1ch8i6wX//3qCaxocQwmlIoDuyZS/B4d+NibS6vgNeZbB89yBHdVl6nx1Ox4javNK1Nr6e7vauVAhxG0igO6pTe40wbzwQTu6COUOwYuKgNZjnPEczoV9b2oTLep5CuBIJdEe16Qtw84AOr7E6Pptdc8ZRJ3sH2xu9zg/3tsRLJtMSwuXIb31xlXHW6CN3L2V8+Fa8PEY84yzsmEVWrft5aVEC87Yfo1pQL1oPG
M0Lle6wb91CCLuRQC+u5gyBg6sub5f0gQenQdX26G3TUbkZDNrTiC0Zx3mqfTWGta9GSTez/eoVQthdoQJdKRUJfAyYgSla63FXHH8OeAzIBZKBIVrrozau1XXEbzLCvOUwqHIXZF+ANe/CjN6cb/822WsmEmetxYWAWix4tD61y/vYu2IhRDFw3UBXSpmBiUAnIBHYopRaoLXek6/ZdiBCa52ulHoSeAfoUxQFu4Q148AzEO5+GUp4AaCrtufkVw8RvHI4ANvqP89PPVvhZpZ1voUQhsKkQTMgTmt9SGudDcwCovI30Fqv1lqn521uBCratkwXkrDFuDtv9dRfYZ6Qks4jM/bRJmEoi7x6klGhNff0HCJhLoT4m8J0uVQAEvJtJ2JMCXItjwJLCjqglBoKDAUIDQ0tZIkuZs048AyApo9hsWqmrT/Cu8tiMZsUY3o0oGuz+zCZZDZEIcTVChPoBazgH1sAAAvUSURBVKWHLrChUg8DEUC7go5rrScDkwEiIiIKfA2npbXxYfqHu+qELRC3Ajq8xoFzmuFz17M9/hx31SjDWz3rUd6v1O2rVwjhcAoT6IlASL7tisDxKxsppToCrwDttNZZtinPiXzbHRI2G5NhlakBXoFgtYC2QFoSJMXAuaPoUv58nt6eDyesw6ukmY/6NCSqYXmUzFEuhLiOwgT6FiBcKVUZOAb0Bfrnb6CUagR8AURqrU/ZvEpHd2yb8VRntU7GduJmyEg17taV2ZiHpUITTlR7kFGxlfl19THua1Ce1+6rTWDpkvatXQjhMK4b6FrrXKXUMGAZxrDFqVrr3UqpsUC01noB8C5QGpiddycZr7XuXoR1O5YtU6BEaeg9FTyuHmKYmWPhw1/38+XaQ5TxLsmXA+rRqXZZOxQqhHBkhRqHrrVeDCy+Yt+ofF93tHFdzuPiGdg1Bxo9XGCYbzx0hhFzd3LkTDr9moUwokstfEvJZFpCiBsnT4oWte3TwZIFzR7/2+60zBzGLdnHd5viCfX35PvHmtOqmkymJYS4eRLoRclqgeivoFIbCKr11+5V+5J45acYks5n8libyjx3T3U8S8ilEELcGkmRonRgOZyLh05jAUi5mM3Yhbv5+c/jhAeV5rMnW9EoVCbTEkLYhgR6UclOh7UfgHcwusa9LNxxnNELdpOWmcN/O4Tz77urymRaQgibkkC/VZZc+PlJ8CoDbV8whiBeSIaZfeDYNs51nsAL3+1kxd4kGlT0ZXzv5tQsJ5NpCSFsTwL9Vm2aBLt+BBRsnwEtnoQdM9EXTrG28Yf8Z2lZcqzJvNK1FkPaVMYsj+0LIYqIzO50K1IOw6o3oXoXeHI9hDaHNeOwZF1kpN94BqwPok4FH5b+ty2Pt60iYS6EKFJyh36ztIZFz4LJDPe+B74VsfT7kUWLF/L+pjTOpgfyVs9a9G0aIpNpCSFuCwn0G2W1GuPKY+bCodXQ1Qjz2JNpDJ+7kx0JZjrUrM4bPesS7CuTaQkhbh8J9MLKOAtTIyF53+V9FZuR3Wgwn63Yz8TVcXh7uDOhXyPuqx8sk2kJIW47CfTC+m0cnN4Pdz5vzMvi7kmMf0ee/3Q9sUlpRDUsz2v31cHfq4S9KxVCuCgJ9MI4tRc2fwlNBkGHUWRkW3h/eSxT58cS5O3BVwMj6FBLJtMSQtiXBPqVUo/B112g1n3Q4TUwu8OSF6Fkabh7JOsPnmbE3F3Ep6TTv3koI7rUxMdDJtMSQtifBPqV1r4HqQmw4VM4uh7qPQCH15DR8W3GLjvGzM0JVArwZObjLWhZNcDe1QohxF8k0PM7ewS2TYcmg6FKO5j/FCx7iQs+1bhnTRVOXkhgaNsqPNuxOqVKyGP7QojiRQI9vzXvgjIZj/D7lOesb20O/PAybya3waesJ5MG1KdBiJ+9qxRCiAJJoF9yOg52fA/Nn0B7BzN/+zHGLDzMhazBPNUxnCfaVaWEmzxYK4QoviTQL1kzDtw8OFnvSV6eFs2qfadoGOLHO73rU72st72rE0KI65JAB9g5G71rDrsrD6bv5L1YrJpXu9VmUKswmX9FCOEwJNA3fAbLXmJPiXr02duahtV8ebtnfUIDPO1dmRBC3BDXCnSrFRI3Q1YaWHOxHvwN0+bPWWptxivZTzPq/gY8GBEij+0LIRyS6wR6ymFY8BQcWfvXLhMwI7cDv4e/yOKeDSjr42G/+oQQ4hY5f6BbrbB5MqwcAyY3ciLfZfaxAH7YdgKzhzeP9uzEFzKZlhDCCTh3oOdmGcvDxcyF8HvY2WgMzy1NJu7UBXo1asOr3Wpzh0ymJYRwEs4b6Bnn4IeH4chasu96lbfPR/LN9MME+3jw9eCm3F0jyN4VCiGETTlnoJ8/DjPuh9MHiG31Po9uqkLi2aM80qISwyNr4C2TaQkhnJDzBfqZgzC9Bzo9hS9Dx/PWqnJUDjTxw9AWNK8ik2kJIZyXcwX6yRiY3pPsnGwet7zKuv3BPHlXFf7bIRwPd5lMSwjh3Jwn0A+twfrDI6RaStD74iuUKFeLnwfXp15FX3tXJoQQt4XjB7rVgl7zDqwZz2HK82jOCB64pxVD21bB3SyTaQkhXIdjB/qFU2T+MASPhLXMtdzJnHLPMuWBZlQLksm0hBCup1C3sEqpSKVUrFIqTik1ooDjJZVSP+Qd36SUCrN1oVeyHtlA+ietIH4jI61PkNZ5AjOebC9hLoRwWde9Q1dKmYGJQCcgEdiilFqgtd6Tr9mjwFmtdTWlVF9gPNCnKApGa06v/Ai/da9zyhrIl8ETeKJPD0L8ZTItIYRrK0yXSzMgTmt9CEApNQuIAvIHehQwOu/rOcCnSimltdY2rBWAmJkjqbv/U1YTQWqXCbzRorY8ti+EEBQu0CsACfm2E4Hm12qjtc5VSqUCAcDp/I2UUkOBoQChoaE3VXBug/7MTrHQbsAognzkrlwIIS4pTKAXdPt75Z13YdqgtZ4MTAaIiIi4qbv3hnXq0LDOuJv5ViGEcGqF+aNoIhCSb7sicPxabZRSboAvkGKLAoUQQhROYQJ9CxCulKqslCoB9AUWXNFmATAw7+vewKqi6D8XQghxbdftcsnrEx8GLAPMwFSt9W6l1FggWmu9APgKmK6UisO4M+9blEULIYS4WqEeLNJaLwYWX7FvVL6vM4EHbFuaEEKIGyHPxgshhJOQQBdCCCchgS6EEE5CAl0IIZyEstfoQqVUMnD0Jr89kCueQnUBcs6uQc7ZNdzKOVfSWpcp6IDdAv1WKKWitdYR9q7jdpJzdg1yzq6hqM5ZulyEEMJJSKALIYSTcNRAn2zvAuxAztk1yDm7hiI5Z4fsQxdCCHE1R71DF0IIcQUJdCGEcBIOF+jXW7DaGSilQpRSq5VSe5VSu5VS/83b76+U+lUpdSDv8x32rtWWlFJmpdR2pdSivO3KeYuOH8hbhLyEvWu0JaWUn1JqjlJqX961bukC1/jZvJ/pGKXUTKWUh7NdZ6XUVKXUKaVUTL59BV5XZZiQl2c7lVKNb+W9HSrQ8y1Y3QWoDfRTStW2b1VFIhd4XmtdC2gB
/CfvPEcAK7XW4cDKvG1n8l9gb77t8cCHeed7FmMxcmfyMbBUa10TaIBx7k57jZVSFYCngQitdV2M6bgvLSrvTNf5GyDyin3Xuq5dgPC8j6HApFt5Y4cKdPItWK21zgYuLVjtVLTWJ7TW2/K+TsP4Ra+Aca7T8ppNA3rYp0LbU0pVBO4FpuRtK6A9xqLj4Hzn6wO0xVhLAK11ttb6HE58jfO4AaXyVjbzBE7gZNdZa/07V6/Ydq3rGgV8qw0bAT+lVPDNvrejBXpBC1ZXsFMtt4VSKgxoBGwCymqtT4AR+kCQ/SqzuY+A4YA1bzsAOKe1zs3bdrZrXQVIBr7O62aaopTywomvsdb6GPAeEI8R5KnAVpz7Ol9yretq00xztEAv1GLUzkIpVRqYCzyjtT5v73qKilKqG3BKa701/+4CmjrTtXYDGgOTtNaNgIs4UfdKQfL6jaOAykB5wAujy+FKznSdr8emP+eOFuiFWbDaKSil3DHC/Dut9by83UmX/jmW9/mUveqzsdZAd6XUEYxutPYYd+x+ef80B+e71olAotZ6U972HIyAd9ZrDNAROKy1TtZa5wDzgFY493W+5FrX1aaZ5miBXpgFqx1eXv/xV8BerfUH+Q7lX4x7IDD/dtdWFLTWL2mtK2qtwzCu6Sqt9UPAaoxFx8GJzhdAa30SSFBK1cjb1QHYg5Ne4zzxQAullGfez/ilc3ba65zPta7rAmBA3miXFkDqpa6Zm6K1dqgPoCuwHzgIvGLveoroHNtg/LNrJ/Bn3kdXjH7llcCBvM/+9q61CM79LmBR3tdVgM1AHDAbKGnv+mx8rg2B6Lzr/DNwh7NfY2AMsA+IAaYDJZ3tOgMzMf5GkINxB/7ota4rRpfLxLw824UxAuim31se/RdCCCfhaF0uQgghrkECXQghnIQEuhBCOAkJdCGEcBIS6EII4SQk0IUQwklIoAshhJP4P6267PfBKD5nAAAAAElFTkSuQmCC\n",
4523
      "text/plain": [
4524
       "<Figure size 432x288 with 1 Axes>"
4525
      ]
4526
     },
4527
     "metadata": {
4528
      "needs_background": "light"
4529
     },
4530
     "output_type": "display_data"
4531
    }
4532
   ],
4533
   "source": [
4534
    "#dd = pd.DataFrame(preds_all.mean(1)[4], columns=all_ich)\n",
4535
    "dd = pd.DataFrame(preds_all.mean(0), columns=all_ich)\n",
4536
    "\n",
4537
    "k=5\n",
4538
    "plt.plot([0,100],[0,1])\n",
4539
    "plt.plot(train_md[[all_ich[k]]].groupby(pd.cut(dd[all_ich[k]],np.arange(101)/100)).mean().values)"
4540
   ]
4541
  },
4542
  {
4543
   "cell_type": "code",
4544
   "execution_count": 36,
4545
   "metadata": {},
4546
   "outputs": [
4547
    {
4548
     "data": {
4549
      "text/plain": [
4550
       "[<matplotlib.lines.Line2D at 0x7f8298d8b790>]"
4551
      ]
4552
     },
4553
     "execution_count": 36,
4554
     "metadata": {},
4555
     "output_type": "execute_result"
4556
    },
4557
    {
4558
     "data": {
4559
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXxM1//H8dfJJLLZkiCIpIJYItbGUqp2tRRVtKiuSjfdF1TpQtdvtdVWteqHLlpqqTWqqK22CiWILYQkEpKIJGRP5vz+uGlFBEGSMTOf5+ORR+beOTPzud/bvL/XmXPPUVprhBBCWD8HSxcghBCiZEigCyGEjZBAF0IIGyGBLoQQNkICXQghbISjpT64SpUqunbt2pb6eCGEsEq7du1K1FpXLeo5iwV67dq1CQ0NtdTHCyGEVVJKnbzSc9LlIoQQNkICXQghbIQEuhBC2AgJdCGEsBES6EIIYSOuGehKqVlKqXil1P4rPK+UUl8opSKUUmFKqZYlX6YQQohrKc4V+hyg51We7wUE5P+MAqbffFlCCCGu1zUDXWu9CUi6SpP+wA/asB2orJSqUVIFCiGErcjIzuODVQeJOZdeKu9fEjcW+QDRBbZj8vfFFW6olBqFcRWPn59fCXy0EELc4hIOw1+fsbv2CF5cc4GopHRqebjxUNvbSvyjSiLQVRH7ilw1Q2s9A5gBEBwcLCtrCCFsV+we2PwJ+uAKcpQz/7ezKg4eXZg3qi1t63iVykeWRKDHAL4FtmsBsSXwvkIIYX3On4E1EyFsHjlOFfjRYSDTMrox6K7mTOlWHxcnU6l9dEkE+jJgtFJqHtAGSNFaX9bdIoQQNi0vF/6eARs+QOdk8IfnMF6N7YJPdW9mP96UprUql3oJ1wx0pdQvQCegilIqBngLcALQWn8DhAC9gQggHXistIoVQohbUuweWP48xO3lTLU7GZUwmINnvHmuez2e7FiXco5lc8vPNQNdaz30Gs9r4NkSq0gIIaxFTiasfw+2TSPP1Ytvqk7kf1ENaOHnwcqBTQnwrlCm5Vhs+lwhhLBqKTEw/yGI3c3RWgN5JLoP5867M/GeBjzSrjYmh6LGi5QuCXQhhLhekZthwaOYczKZ4jGRaRENubNeFT64rwm+nm4WK0sCXQghrsfeeeglz5Ds6suwjHHE5Pjy8cBABgfXQqmyvyovSAJdCCGKa8/P6CXPEObYlAeTnqNdoD9z7g3Cu6KLpSsDJNCFEKJYckK/x3HFC2wxN2aMfp2Pht1O7ybVLX5VXpAEuhBCXMPJ1V/iu20Cm/OCWNl4Civ6tsDDvZyly7qMBLoQQlxBelYO274fT9fYb/nLIRjun8NHgb7XfqGFSKALIUQR/joST8yvrzAkdxl7PO6m+cjvKe/maumyrkoCXQghCkhNiGXzwi8IjFvMnQ5nON3wEZrf/zk43PoLvEmgCyEEgNnM0YUTqR0+nT7kEl2pOdldJ1O92WC4hb74vBoJdCGE3UuMP82ZOQ/TOH0HG5zuwqf/RAKCWlm6rOsmgS6EsFtaa9atX0ejTU8RoJPYUH8c7R94DSfH0pvitjRJoAsh7NKp5Ax+mTuLp+LfJcvkzpkBS+jU5C5Ll3VTJNCFEHbFbNb8tOMkEaumMVHNJLViAB4jfsOrso+lS7tpEuhCCLtxLOECbyz8hy6npvOu40oy/Drh+eBP4Fy209yWFgl0IYTNy8kz893m4/y4didfOH5JK8cD6OARuPb6CExOli6vxEigCyFsl9YcPnyAn1asxelcBKtcQqik0qHvt6hmQyxdXYmTQBdC2KTMnDwOzhxJizOLmATGwpmVA2DwUqjexMLVlQ4JdCGEzQk9kcScX3/lq/RFhFa6mwa9n6GCT2Nwr2I1NwndCAl0IYTNuJCVy/9+P8RP2yNZ6TKDLFdvgp+ZBc7lLV1amZBAF0LYhI1HEnhj8T5iUzKYGrCfhlHHoNdMuwlzkEAXQli55PRsJq04yKLdMdSt6s7iRwNpsfQ58GsHTQZZurwyJYEuhLBaq/bFMWHpAc6lZzO6cz2ea3QB5z+fg4xz0Osjm+4vL4oEuhDC6sSnZjJx6QF+P3CaIJ+KzO/nRt2w8bBtDbhUgl4fQ42mli6zzEmgCyGshtaahbtimLQinMxcM2N6NmRUlX2YljwJ5dyh60RoNRJcKlq6VIuQQBdCWIXopHTeWryL0IhYGt7my4cDm1Dn6GxYOBFqtYKhvxjDEu2YBLoQ4paWZ9b8sO0En64OZ756g0CXE+gkd9TPVSD5JATeCwO+Aadbe3m4siCBLoS4ZUXEn2fMon3sOnmOd2tsJfDcCWjzNEopSImB4Meg3QtWsTxcWZBAF0LccnLyzHy78RhfrIvAzdnEV/feRp8Nz4B/R+j5gd2NXikuCXQhxC1lX0wKry8K42BcKn2a1uDtvo2punEcZJ23y6GI10MCXQhxS8jMyePztUf5bvNxvNzL8e1Dt3N34+oQFwa7ZkPrUVCtkaXLvKUVK9CVUj2BqYAJmKm1/rDQ837A90Dl/DZjtdYhJVyrEMJG7Th+lrGL9xGZmMYDwb680acRlVydQGtYNQZcPaDTWEuXecu7ZqArpUzANKA7EAPsVEot01qHF2j2JvCr1nq6UioQCAFql0K9Qggbcj4zh49/P8yP209S3wPmjmhF+4BqFxuE/QpRW6HvVCPUxVUV5wq9NRChtT4OoJSaB/QHCga6Bv4dyV8JiC3JIoUQtmf94XjGL95HXGomL7Ry58VjI1BbG0PtX8HJBTJTYM0E8LkdWjxs6XKtQnHG+vgA0QW2Y/L3FfQ2MFwpFYNxdf5cUW+klBqllApVSoUmJCTcQLlCCGt3Li2bl+fv4bHZO3F3dmTRk2146fwnqOwLELkRFj8B5jzY8CFciIfen8iwxGIqzhV6UV8p60LbQ4E5WuspSqk7gB+VUkFaa/MlL9J6BjADIDg4uPB7CCFsmNaalfvieGvpAVIycni+awDPdq6L89bP4MRm6P81ZCbD6jdg/nA4stoYZ+7T0tKlW43iBHoM4FtguxaXd6mMAHoCaK23KaVcgCpAfEkUKYSwbmdSM3lzyX62hJ9kpfskqt1WDfdKg+F4JKx/H4IGQvNhxpDE9LOweQq4ekKXCZYu3aoUJ9B3AgFKKX/gFDAEGFaoTRTQFZijlGoEuADSpyKEndNa82toNJNXHiQ718xPgaH4H4+ELAcIedVoVMkP+nx6cXx5lwng5gXeQeDmabnirdA1A11rnauUGg2sxhiSOEtrfUAp9S4QqrVeBrwCfKeUegmjO+ZRrbV0qQhhx6LOpjN2cRhbj52ljb8n/+vji9+PT0KD3jDkZ4g/CIdDoH5PcK188YVKwR3PWq5wK1ascej5Y8pDCu2bWOBxONC+ZEsTQlijPLNmztYTfLL6MCYHxXsDghjayg+HP9+BrFToPN4Ibe9A40eUGLlTVAhRYo6cOc/rC8PYE51Ml4bVeG9AEDUqucL5M7D9G2NJuOpBli7TZkmgCyF
uWnaumekbjvHV+qOUd3Zk6pDm9GtW05gVEWDzJ5CXDZ3GWbZQGyeBLoS4PnF74ccBMOQX8GvD3uhkxiwK49Dp8/RrVpO3+gbiVd75YvvUOAidDS2Gg1ddy9VtByTQhRDXZ90kSD9L3pq3+Kj6p8z8K5JqFVyY+XAw3QK9L2+/8zsw58KdL5V9rXZGAl0IUXzRf0PEGtI8AnGP3kb4sWU80KoP43o3pKKL0+Xts9MhdBY07AOe/mVfr52R+2mFEMWWu24yFxwr0y7uRc6oKkyvuYoPBgQVHeYAYfMg4xy0faZsC7VTEuhCiGLZtXEFjic2MjWzD/d3aIJHz/FUSNxj3KJfFLMZtk+HGs3gtnZlW6ydki4XIcRVnb2QxTvLDjDs0GTOmjy4Z8QEmvnXgLwA2D4V1k+GvCyI2gFn9kODXnD7Y8b8LIlHYMAMWWWojEigCyGKpLVm2d5Y3lkeTt2scNo6HiS3+4d4+dcwGpicoONYWPIU/PowOLpAJV/4fSxsmQrOFaF8dWg8wLIHYkck0IUQl4lLyeDN3/az7lA8zXwrM933LPzjgGPLQtM4NX0AHJ2NIK/RDBzLQeQm+PM9iN4O3d429okyIYEuhPiP2ayZtzOaD0IOkmM282afRjzW3h/T/000FppwqXTpCxwcIOi+S/f53wWPd4CEw1AloOyKFxLoQgjDicQ0xi4OY/vxJO6o48WHA5twm5c7ZCRD7G7o8Grx30wpqNaw9IoVRZJAF8LO5eaZmb3lBFPWHMbJwYEP72vCA618L962f2IzaDPU6WTJMkUxSKALYccOnU5lzMIw9sak0K2RN5PvDaJ6JZdLGx1bD07uUKuVZYoUxSaBLoQdysrNY9r6Y3y9PoJKrk58ObQF9zStcfGqvKDjG6D2nfLlphWQG4uEsDP/RJ2j75d/8cW6o/RtVpM1L3ek778zIx5cAVObGYtPACRHQdIx6W6xEnKFLoSdSM/OZcofR5i1JZLqFV2Y9WgwXRoWmEwr7SwsfwHSE2HRSBi5zrg6B6jb2SI1i+sjgS6EHdgakcjYxfuISkpneFs/xvRsSIXC86+sHgeZycbY8bVvw5+TISXGuDmoqoxYsQYS6ELYsJSMHD4IOci8ndH4V3Fn3qi2tK3jdXnDo2sgbD50HGNMc5scBVu/BCdXaNRPbt23EhLoQtioPw6c5s0l+0m8kMWTHevwUrf6uDiZLm94IQGWv2hchXd4xdjXY7Jxx+fZCOlusSIS6ELYmMQLWby97AArwuJoWL0CMx8Jpmmtypc2ysmEfb/CgSUX+8kH/27cxg9Qzh0GzYJ170JAjzKtX9w4CXQhbITWmiV7TvHO8nDSs/J4pXt9nupUFydTocFsmSkw935jrhWP2tD+eWhyP3gHXtquRjMYvqjM6hc3TwJdCBsQm5zB+N/2sf5wAi38KvPxwKYEeFeAU7shfCk0vR+8G0NaIvx0H5w5APfNhCaDpH/chkigC2HFzGbN3L+j+GjVIfLMmon3BPJIu9qYHBRoDStfMeZh2fI5+LYxVg9KjjIWeK4vXSm2RgJdCCt1POECYxft4+8TSfT2NzGha3Vq1CuwbueJzUaYd30LTOVg12w4fwYeXAj+HSxXuCg1EuhCWJncPDMz/4rkszVHcHZ04ONBTRl89HXU/A3w5KaLU9b+9Tm4V4W2TxvDD+94FnIzjcfCJkmgC2FFwmNTeX3RXvafSuXuxt5M6h9ENcd0CPkDzLmw6AkYsQYSD8OxddBlwsUAV0rC3MZJoAthBbJy8/jqzwimbzhGZTcnvn6wJb2Cqhvzr4T+YoR5xzGw8SNjjc/UWChXHlqNsHTpogxJoAtxqzi6Ftw8waflJbt3nUxizKJ9RMRf4L6WPkzoE4iHe4GZD/cvAq8A6DQOLsTDli+Mq/G2z4CrRxkfhLAkCXQhbgVaw29PgnN5GL0LTI6kZeXyv9WH+X7bCWpWcmXOY63o1KDapa9LOQUn/jLCXCm4+z1j+1ykEejCrhQr0JVSPYGpgAmYqbX+sIg29wNvAxrYq7UeVriNEOIKkqOMWQ7TE2H/Ija7dWHc4n3EnMvgkTtu47WeDSnvXMSf64HFgDbGk4Nxh+cjyyElGir5lOkhCMu7ZqArpUzANKA7EAPsVEot01qHF2gTAIwD2mutzymlqhX9bkKIIsXuBsDs4kFCyHs8nOKGf9UKLHjqDlrV9rzy6/YtgJotwKvuxX0Vaxg/wu4UZ4GL1kCE1vq41jobmAf0L9RmJDBNa30OQGsdX7JlCmHjTu3C7ODE2znD8c46yWdNogl5vsPlYZ6bBelJxuPEoxC3F5oMLvt6xS2pOF0uPkB0ge0YoE2hNvUBlFJbMLpl3tZa/174jZRSo4BRAH5+fjdSrxA2J/58Jsn/bCI914/dnt3I0iu59/wv4Pj0pQ21hrmDjFkQ3byMUSwoaHyfReoWt57iXKEXNdGDLrTtCAQAnYChwEylVOXLXqT1DK11sNY6uGrVqtdbqxA2RWvNwl0x3D1lPT7ph3CoFcxvz92Fc6dX4XSYMUd5QUdWG2HebBg0vAcqVIfWI6V7RfynOFfoMYBvge1aQGwRbbZrrXOASKXUYYyA31kiVQphY2LOpfPGb/vZdCSBe31ScT+bRdPWncHkYEyktfFDYwUh39bgWhnMecYqQp51od8XYHK65mcI+1OcK/SdQIBSyl8pVQ4YAiwr1GYJ0BlAKVUFowvmeEkWKoQtMJs13289QY/PNhF6Iol3+jXm0/Z5xpM+txu/TU5w7zdw7gQsfBzycmHPz5BwELpOlDAXV3TNK3Stda5SajSwGqN/fJbW+oBS6l0gVGu9LP+5HkqpcCAPeE1rfbY0CxfC2hxLuMCYhWEQtY3u/g15bXBnanm4wYovwLkieNW72Lh2e+j9Cax4EX4fC4dWgk8wBBYejyDERcUah661DgFCCu2bWOCxBl7O/xFCFJCTZ2bGpuNMXXcUT8dstrh+hINDU1Tl3kaD2N3GYhIOhf7BHPwYxIfD3zOM7YEzZe5ycVVyp6gQpWj/qRReXxhGeFwqvZtU54OGkZiWZ0HMTghfAg16w+n9xkyIRbn7A+N2fufyxlW7EFchgS5EKcjMyWPquqPM2HQcD7dyfDO8JT2DasDiGcb8KhVqGl9ylq8O5pzL5m/5j8kR7v++TGsX1ksCXYiSFBPK7vQqvLrsBMcT0xh8ey3e7BNIJTcn48vNo6sh4G5jJMtP98HK/F7Kf78QFeImSKALUULSzhzHdWY3nMy34eQ2mR9HtKVDQIH7LaK2GUvANewN9bpC3a7GnOXu1aCizLsibl5xhi0KIa5h45EEvvxuBg5oGjtEEVL1azrULn9po8MhYHI2ghygxyRQDkZ3i3zZKUqABLoQNyE5PZuXf93DI7P+pg1hZLt54zDwO0zR2+DXhyE322iotTH0sE4n4wtOAO/GxsiVzm9YqnxhY6TLRYgboLVm1f7TTFy6n+T0HEZ3qkOnvQdRAT2NqWyzUmHFSzBvKAyabUyPm3wSOhQa2Rs00DIHIGySBLoQ1yk+NZMJS/
ez+sAZgnwq8v3jrWnMcdieBHU7G42CHze6U1a+Av/XA/zaAArq97Jo7cK2SaALUUw6OYoFRzWTVx4iK9fM2F4NeeJOfxxNDvDXBqORf8eLL7j9UfCoDfMfhl1zoFYrqOBd9oULuyF96EIUw9mtP6A+b4L/8kH08Epk1QsdeKpjXSPMAY6th2qNLw/sOp3gibVGmLceVdZlCzsjV+hCXEWeWbP8j9Xcve0VDnEbTZzjCU56DrU7HLq8CU6ukJMBUduh1RNFv0nV+kaoC1HKJNCFuIKI+PO88+sWJsc/R4ZTRSo9vgIXj/Kw7h3YNs1YMWjIXGN8eV7Wxf5zISxEAl2IQnLyzHyz4Rhz/tzL505f4WtKQj26CuWTv8pW36lQo7kxE+LS0VC+Kjg4wW3tLFu4sHsS6EIUsP9kAmvnfU6LC5v42+kAJvKg9xRjoYmCgh+D9ET4czI4OIJvWyjnbpmihcgngS4ExmRa36zaSdvQF3nR4SDpFf0wNR9tzD9+pXlWOrwKaYmw4xuo26lM6xWiKBLowu7tOH6W6QtX8s6FSdQ0JZPe+2vcgodd+3Z8pYzpbW9rD/W6lU2xQlyFBLqwW+czc/jo90Mc+3sV35X7jHJubjg9GIKTb6viv4mDAwT2K70ihbgOEujCLq0/FM8bv+2jwYXt/OD8OSavOjg8tAgq1bJ0aULcMAl0YVeS0rJ5d/kBluyJ5TGPMCY4f4aDdyAM/w3cvSxdnhA3RQJd2AWtNTvWLMBv6xuM19m8V8GEW2YyyicYHlwArpUtXaIQN00CXdi8M6mZvLMolDdOjEOZnHBseDfurk7g5gl3vnxxOlshrJwEurBZWmvm74zmvZCDPGmeTy2HRHKHr8CxTgdLlyZEqZBAFzYp6mw6YxeHsfXYWe7xzebZpBXQaKCEubBpEujCpuSZNbO3RPLJH4dxdHDg/QFNGBo5DpVsgu6TLF2eEKVKAl3YjMOnz/P6ojD2RifTpWE13u9Vi+qHfoRDK6DrRKgkCzEL2yaBLqzXzv+DsPlk3zuTr//JZNr6CCq4OPFtP296nP0JNXM+5KRDve7Q9llLVytEqZNAF9br7xmQcIhzX3VhaeZYeje7nUmBsVRcNQiy06Hp/dDmSajexNKVClEmJNCFVco8fRSXhEPMy+tMD9Mufq/4Ps4VBsDimeDdBO7/HrzqWrpMIcqULEEnrM7WY4nMnPU1ANGNn6bcyNU4O7vAzpnQ8mF4Yo2EubBLcoUurEZqZg4fhBzil7+j+M1tB2mVG/DakLuNJ0f+CYlHwP8uyxYphAVJoIuyl5FsrMXp6Fzsl6wNP8P4JftIOJ/FC3d40XzPIVSTly42qFDd+BHCjhWry0Up1VMpdVgpFaGUGnuVdoOUUlopFVxyJQqb80M/mN4OUmOv2fTshSye/+UfnvghFA+3cvz2THteqn0SpfOgYe8yKFYI63HNQFdKmYBpQC8gEBiqlAosol0F4HlgR0kXKWxIxjmI2wtnI2B2b0iONvZHrIOv28GCxwDjtv2le07R7dONrNofx0vd6rNs9J00860Mh0OgfHWo0cKCByLErac4XS6tgQit9XEApdQ8oD8QXqjdJOBj4NUSrVDYlri9xu/O42HrVzCnN3gHGSHt5A7xB0hsOpIx25xYdyie5r6V+XhQU+p7VzBel5sFEWuhySBjcQkhxH+K8xfhA0QX2I7J3/cfpVQLwFdrveJqb6SUGqWUClVKhSYkJFx3scIGxP5j/G71BDyyFDJTIXITdHsb8wthZDlVJOznCWw5lsibfRqx6Ol2F8Mc4MRmyL4ADaS7RYjCinOFXtTCivq/J5VyAD4DHr3WG2mtZwAzAIKDg/U1mgtbdGo3ePgbU9e6ecKzO8DBkcgMV8bODaNNeg9edlrI+gerUKNhHdAa/voc9i00umvSE8HJTUazCFGE4lyhxwC+BbZrAQW/zaoABAEblFIngLbAMvliVBQpdg/UvNj3netWjRm7Uuj5+SbC41Kp3ecltHNFaoR9aYT52rdh7VtQzh3qdITgEXDfd8YoGSHEJYpzhb4TCFBK+QOngCHAsH+f1FqnAFX+3VZKbQBe1VqHlmypwuqlJUJKFLQeCcDBuFTGLAojLCaF7oHeTL43CO+KLpAxCjZPgcUjYd8CCH4cek+RPnMhruGaga61zlVKjQZWAyZgltb6gFLqXSBUa72stIsUNiK//zzbuxlfrTnC1+sjqOTqxFfDWtCnSQ2Uyu/da/sMbJ9uhHnrUdDrY1BF9fwJIQoq1o1FWusQIKTQvolXaNvp5ssSNin2HzSKwUvT2ZtwlAEtfJh4TyAe7uUubefuBX0/h/Nx0O55CXMhiknuFBUlR2v47Slo0BMaD7jkqfTsXKJ3b8JkrkF8djlmP9qEzg2rXfm9mt5fysUKYXsk0EXJidsDYfMgfClUC4SqDQDYEpHI2MVhLEjbz2mv1vzx5F1UcHGycLFC2B75lkmUnPCloExQzg0WPEpKaipjFobx4MwdeHOO6uoczdt0kTAXopRIoIuSobUR6P53wYAZEB/Ous9HsHB3DE91rMvPffIn4qopt+sLUVok0EXJOLMfko5zvm4fnt3pyTe593Cf+Q+2tt3J2M41KHdmr3H1LqsHCVFqpA9dlAgdvhRwoN/aypzKPkPjzm9ijs/De/ensP87cC4P1RoZ3TFCiFIhgS5u2qnkDNg2n5N5DfGoUZPvBjWlXrUKwDzjVv/tX8P+xRDY39KlCmHTJNDFDTObNXN3nGTBqrUsc4giqvE4FtzfDpNDgXHjPi1h4Ezo/T9jDhYhRKmRQBc35HjCBcYu2sffJ5KYUm0vOlVxR59HweEKNwG5epRpfULYIwl0cV1y88x8tzmSRWs30dgxhkWtXGkZswnl11aWgBPCwiTQxaW0hlO78r/AdL/kqfDYVF5ftBcdu5fVzhMxkQf7AOUAHa+4MqEQooxIoItLbfsK/ngTnCsaqwLd/iiZVYL46s8Ivtl4DE9XE39Un4cp2wuGzoOKNcG9CpjkZiEhLE0C3d6YzVeehjZiHayZCAE9wM0L9vwMobNYUu4+vkodyMCWvkzy2Ybbmn1w30yodXvZ1i6EuCq5scieHN8IH/hAYsTlzyUdh4WPQ9VGMGg2ab2/4sPAJfyc14Uh2YvZ1ngpU3pWw23T++Df0bh6F0LcUiTQ7cmenyEn3ZhAq6DsNPhlmDFN7ZC5bDqZQY/PNvHtziQO3/4u2e1fpcaxX2H6HZCbAX2myJS2QtyCpMvFXuRmw+FVxuN9C6Dz+Iuh/M9PkHCQtEHzeWtdKgt3hVOnqju/PnkHrWp7Ak2gQhX4fazx5WeVAIsdhhDiyiTQ7UXkRshKgUb94OAyYyRLrWCjT33HtyR7NqP7UkeS0k7xTKe6PN81ABcn08XXt33auNOzQg3LHYMQ4qqky8VehC8xRq7c8xmYnI2rdCB5/++QdIyJp++kanlnlj7bntd7Nrw0zP9VsaZ0tQhxC5MrdHuQlwOHVkL9nsYQw/p3o/cvZpHXU3iHfEwDXZlGXR9iSqcGOJnk/+OFs
Fby12sPTvwFGef+mxwr0b8fKi2ePUun0oF/MLUewdNdG0mYC2Hl5C/YHoQvBSd3zHW6MGdLJN1WOHNeu/JOuR/RDk543fWkpSsUQpQACXRbpDWcPwN5uWDOg0MrOO/Xhftn7eHt5eE0rV0dh8b9MelcVOMBUMHb0hULIUqA9KHboq1fwpoJoExo96qotATGH67LUacLTBncjPta+qCiNRxZAnc8a+lqhRAlRALd1iRHwYYP4LY7ifdowb7w/VzIq4Nq0IO19wZTtUL+2p5+beCNWHAoYjSLEMIqSaDbmt/HoYFvPF/lkx0ZeLp3YtIDjZkaVMT4cQlzIWyKBLo1i91jjCcPfhy86sKR1XBoBTPLPcRH29K5P7gW43sHUslNZkIUwh5IoFsrsxmWPQenw2D7dHKaPMD5QxtIMtdkrkNffhpxO3cGVLF0lUKIMiSBbq0OLjPCvPskok9GUC3sZzzJYUY+WzkAAAxuSURBVHmDL1g5qCvuznJqhbA38ldvDbIuQFoCePob2+Y8WP8+eV71eT26PYvC6tKmSifevdOFR9r2tmytQgiLkUC/1WkN84fDic3Q80No9QQ6bD4q8TBvOLzC0rgzPNelHqO71MPZUb7kFMKeFSvQlVI9gamACZiptf6w0PMvA08AuUAC8LjW+mQJ12qfDi6H4+vBsw6EvEpG9B4uHFzHGXNtDnp2Ytmg5gTWrGjpKoUQt4Br3imqlDIB04BeQCAwVCkVWKjZP0Cw1ropsBD4uKQLtUvZ6bB6PFRrjH5mOwfrPoHrvp+omhtHVPOXWfzsnRLmQoj/FOcKvTUQobU+DqCUmgf0B8L/baC1Xl+g/XZgeEkWabe2TIWUKOLvW8TLc/bwV0QXRlevzoh6afTu87BMZSuEuERxAt0HiC6wHQO0uUr7EcCqop5QSo0CRgH4+fkVs0Q7de4EesvnRHr3oM+CXEwOyUy+N4hhrXvj4CBBLoS4XHECvaj00EU2VGo4EAx0LOp5rfUMYAZAcHBwke9h1+LCYM9ciNmJjgsjy+zAgyf70qaBJ+8PaELNyq6WrlAIcQsrTqDHAL4FtmsBsYUbKaW6AeOBjlrrrJIpz46c2AJzB6O1mVi3RqzM6cVGx3aMeaAr/ZvXREn3ihDiGooT6DuBAKWUP3AKGAIMK9hAKdUC+BboqbWOL/EqbV3kJvj5ATLda/KYeSLb4h3p26wmU/sGUqW8s6WrE0JYiWsGutY6Vyk1GliNMWxxltb6gFLqXSBUa70M+B9QHliQfyUZpbXuV4p1W7+8HEg8ClHb0KvHk+hUgz5nXkFVcOe7h5vQPVDmKBdCXJ9ijUPXWocAIYX2TSzwuFsJ12W7stNh+QvGKkJ5Rs9UhIM/D5x7nbtbN2Zsr0ZUcpXJtIQQ10/uFC1LaYnwyxCICSW7xaMsOevLd0fLk1u5Ll8Na067ejKZlhDixkmgl5Wzx2DuIEiNZV/7LxgV6sOZ1Eweb+/Pyz3q41ZOToUQ4uZIipSF3CyYOwhzRjJf1PyEz9d5ElDNka+fbkcLPw9LVyeEsBES6GVA7/gWlXSc5xze5I9jXrzQtR7PdK4rk2kJIUqUBHopOxMXQ4W1H7IjrxkxVe5g+aCmNKwu868IIUqeBHop0Vozb2c0auUrDCKDxHYTWHx3e0xy274QopRIoJeCk2fTGLtoHwmRe1ntvJa0Jg8xuFd3S5clhLBxEuglKM+smb0lkk/+OIyzA6yuuQiHC+Wp2PMtS5cmhLADEug3Ky8XTmzmmK7Jy6sT2RudTNeG1ZjquYDyu3dA36ng7mXpKoUQdkAC/UZlpsDuH4wRLCnReGtXmjiMZMTQp+mbsxq14lto8xTc/qilKxVC2AkJ9BuxfzGseAkykwkzBTE7uy/PVdrM5IwvYP9hOLYO6nWHHu9ZulIhhB2RQL8emSkQ8hqEzeeUeyBPZ79KfPlA3nsoiLr1J8HmKbDxQ6jSAAbNApP8zyuEKDuSOMWVFAk/9EenxDDbaQjvne3DA238GdurIRVd8ifT6jQGgu4D9yrgImPNhRBlSwK9OJIiMc/pQ2baeYZnTuCsZ3N+GtmUO+oW8WVnlYCyr08IIZBAv7akSDJn9iIr/TzDst+gfYcuvNStPq7l5LZ9IcStRQL9KlIOb8a84DHIyeCNiu/z/v39aeZb2dJlCSFEkSTQi6Dzcjm04G0CDk0jTldh0+3fMbV3T8o5Oli6NCGEuCIJ9IJS40gOCyFx80waZYWzwbkztYZ/zYO+NS1dmRBCXJMEOkDCYfRvT6Nid1EZSNdebAyaTIeBo2UyLSGE1ZBAj9xE3rzhnM9x4NucIZyr1YlnBvejYxV3S1cmhBDXxa4DPe+fn2HZc0SavXlWjePxezvyerAvSslVuRDC+thnoJ/cxoU/3qP8qc1szQtkXp0P+GFgW7wruli6MiGEuGH2Fehnj2Fe/iIOJzaRqSsy0/QQ9e99janN/OSqXAhh9ewn0A/8Rt6S0aTlwNScB7kQ9DBj+7XEw72cpSsTQogSYfuBnpNBzu9v4rRrJnvN9Zjk8hrPD+1C5wbVLF2ZEEKUKNsNdK3h4DIyV47DJe0U3+X25lTL1/mhdxAV/p1MSwghbIhtBvqZcHJDxuB4chORZl++dX+PoYOHMrKOrBwkhLBdthXoaYmw/j106BzSceXT3Edxaz+SD7s1wsVJJtMSQtg22wn0Pb9gXvU6OusCP+R2Z5XXI0wYfCdNalWydGVCCFEmrD/Qs9PQIa+i9vzMbhoxIecJ7unWibl31cHJJJNpCSHsh3UHesIRcn55EFPSUb7IHcDmGo/z5eAW1KtWwdKVCSFEmSvWJaxSqqdS6rBSKkIpNbaI552VUvPzn9+hlKpd0oUWZj70O9nfdib17GlGmt+gUq+3mP90BwlzIYTduuYVulLKBEwDugMxwE6l1DKtdXiBZiOAc1rrekqpIcBHwAOlUTBac3b1R3hs/5Aj5tuY6TOZtwd3xdfTrVQ+TgghrEVxulxaAxFa6+MASql5QH+gYKD3B97Of7wQ+EoppbTWugRrBWD/L+MJOjKNVbQjo89UPmsdILftCyEExQt0HyC6wHYM0OZKbbTWuUqpFMALSCzYSCk1ChgF4Ofnd0MF5zYbzoIkTceHJ1KtousNvYcQQtii4gR6UZe/ha+8i9MGrfUMYAZAcHDwDV29N28cSPPGH9zIS4UQwqYV50vRGMC3wHYtIPZKbZRSjkAlIKkkChRCCFE8xQn0nUCAUspfKVUOGAIsK9RmGfBI/uNBwJ+l0X8uhBDiyq7Z5ZLfJz4aWA2YgFla6wNKqXeBUK31MuD/gB+VUhEYV+ZDSrNoIYQQlyvWjUVa6xAgpNC+iQUeZwKDS7Y0IYQQ10PujRdCCBshgS6EEDZCAl0IIWyEBLoQQtgIZanRhUqpBODkDb68CoXuQrUDcsz2QY7ZPtzMMd+mta5a1BMWC/SboZQK1VoHW7qOsiTHbB/kmO1DaR2zdLkIIYSNkEAXQggbYa2BPsPSBViAHLN9kGO2D6VyzFbZhy6EEOJy1nqFLoQQohAJ
dCGEsBFWF+jXWrDaFiilfJVS65VSB5VSB5RSL+Tv91RKrVFKHc3/7WHpWkuSUsqklPpHKbUif9s/f9Hxo/mLkJezdI0lSSlVWSm1UCl1KP9c32EH5/il/P+m9yulflFKudjaeVZKzVJKxSul9hfYV+R5VYYv8vMsTCnV8mY+26oCvcCC1b2AQGCoUirQslWVilzgFa11I6At8Gz+cY4F1mmtA4B1+du25AXgYIHtj4DP8o/3HMZi5LZkKvC71roh0Azj2G32HCulfIDngWCtdRDGdNz/LipvS+d5DtCz0L4rnddeQED+zyhg+s18sFUFOgUWrNZaZwP/LlhtU7TWcVrr3fmPz2P8oftgHOv3+c2+B+61TIUlTylVC+gDzMzfVkAXjEXHwfaOtyJwF8ZaAmits7XWydjwOc7nCLjmr2zmBsRhY+dZa72Jy1dsu9J57Q/8oA3bgcpKqRo3+tnWFuhFLVjtY6FayoRSqjbQAtgBeGut48AIfaCa5SorcZ8DrwPm/G0vIFlrnZu/bWvnug6QAMzO72aaqZRyx4bPsdb6FPAJEIUR5CnALmz7PP/rSue1RDPN2gK9WItR2wqlVHlgEfCi1jrV0vWUFqXUPUC81npXwd1FNLWlc+0ItASma61bAGnYUPdKUfL7jfsD/kBNwB2jy6EwWzrP11Ki/51bW6AXZ8Fqm6CUcsII87la68X5u8/8+8+x/N/xlqqvhLUH+imlTmB0o3XBuGKvnP9Pc7C9cx0DxGitd+RvL8QIeFs9xwDdgEitdYLWOgdYDLTDts/zv650Xks006wt0IuzYLXVy+8//j/goNb60wJPFVyM+xFgaVnXVhq01uO01rW01rUxzumfWusHgfUYi46DDR0vgNb6NBCtlGqQv6srEI6NnuN8UUBbpZRb/n/j/x6zzZ7nAq50XpcBD+ePdmkLpPzbNXNDtNZW9QP0Bo4Ax4Dxlq6nlI7xTox/doUBe/J/emP0K68Djub/9rR0raVw7J2AFfmP6wB/AxHAAsDZ0vWV8LE2B0Lzz/MSwMPWzzHwDnAI2A/8CDjb2nkGfsH4jiAH4wp8xJXOK0aXy7T8PNuHMQLohj9bbv0XQggbYW1dLkIIIa5AAl0IIWyEBLoQQtgICXQhhLAREuhCCGEjJNCFEMJGSKALIYSN+H87mAnrTHyEpQAAAABJRU5ErkJggg==\n",
4560
      "text/plain": [
4561
       "<Figure size 432x288 with 1 Axes>"
4562
      ]
4563
     },
4564
     "metadata": {
4565
      "needs_background": "light"
4566
     },
4567
     "output_type": "display_data"
4568
    }
4569
   ],
4570
   "source": [
4571
    "k = 0\n",
4572
    "dd = pd.DataFrame((preds_all.mean(0)), columns=all_ich)\n",
4573
    "vals = (train_md[all_ich[k]]*train_md['weights']).groupby(pd.cut(dd[all_ich[k]],np.arange(101)/100)).mean()/ \\\n",
4574
    "    train_md['weights'].groupby(pd.cut(dd[all_ich[k]],np.arange(101)/100)).mean()\n",
4575
    "\n",
4576
    "#dd = pd.DataFrame(preds_all.mean(1)[4], columns=all_ich)\n",
4577
    "\n",
4578
    "plt.plot([0,100],[0,1])\n",
4579
    "plt.plot(vals.values)"
4580
   ]
4581
  },
4582
  {
4583
   "cell_type": "code",
4584
   "execution_count": null,
4585
   "metadata": {},
4586
   "outputs": [],
4587
   "source": []
4588
  },
4589
  {
4590
   "cell_type": "code",
4591
   "execution_count": 37,
4592
   "metadata": {},
4593
   "outputs": [
4594
    {
4595
     "name": "stdout",
4596
     "output_type": "stream",
4597
     "text": [
4598
      "0.05430755335203294\n"
4599
     ]
4600
    }
4601
   ],
4602
   "source": [
4603
    "res = np.zeros(6)\n",
4604
    "for k in range(6):\n",
4605
    "    res[k] = log_loss(train_md[all_ich[k]], preds_all.mean(0)[:,k], eps=1e-7, labels=[0,1], \\\n",
4606
    "                      sample_weight=train_md.weights)\n",
4607
    "print((res*class_weights).mean())"
4608
   ]
4609
  },
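  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The cell above scores the averaged OOF predictions with what appears to be the competition metric: a sample-weighted binary log loss per target (predictions clipped at `eps=1e-7`), averaged over the 6 targets with `class_weights`. Writing `train_md.weights` as $s_i$ and `class_weights` as $w_c$, this is\n",
    "\n",
    "$$\\mathrm{score} = \\frac{1}{6} \\sum_{c=1}^{6} w_c \\left( -\\frac{\\sum_i s_i \\left[ y_{ic} \\log p_{ic} + (1 - y_{ic}) \\log (1 - p_{ic}) \\right]}{\\sum_i s_i} \\right)$$"
   ]
  },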
4610
  {
4611
   "cell_type": "code",
4612
   "execution_count": 38,
4613
   "metadata": {},
4614
   "outputs": [
4615
    {
4616
     "name": "stdout",
4617
     "output_type": "stream",
4618
     "text": [
4619
      "0.05507244939620463\n"
4620
     ]
4621
    }
4622
   ],
4623
   "source": [
4624
    "res = np.zeros(6)\n",
4625
    "for k in range(6):\n",
4626
    "    res[k] = log_loss(train_md[all_ich[k]], preds_all.mean(0)[:,k]**(0.9), eps=1e-7, labels=[0,1], \\\n",
4627
    "                      sample_weight=train_md.weights)\n",
4628
    "print((res*class_weights).mean())"
4629
   ]
4630
  },
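  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The cell above tries a single power correction (`p**0.9`), which here scores slightly worse than the raw blend. A minimal sketch (not part of the original pipeline) of extending that check to a small sweep of exponents, reusing the same `preds_all`, `all_ich`, `class_weights` and `log_loss` call as above:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch: weighted loss for several calibration exponents (1.0 = no correction)\n",
    "for gamma in [0.8, 0.9, 1.0, 1.1, 1.2]:\n",
    "    res = np.zeros(6)\n",
    "    for k in range(6):\n",
    "        res[k] = log_loss(train_md[all_ich[k]], preds_all.mean(0)[:,k]**gamma, eps=1e-7, labels=[0,1], \\\n",
    "                          sample_weight=train_md.weights)\n",
    "    print(gamma, (res*class_weights).mean())"
   ]
  },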
4631
  {
4632
   "cell_type": "code",
4633
   "execution_count": null,
4634
   "metadata": {},
4635
   "outputs": [],
4636
   "source": []
4637
  },
4638
  {
4639
   "cell_type": "code",
4640
   "execution_count": 71,
4641
   "metadata": {},
4642
   "outputs": [],
4643
   "source": [
4644
    "%run ./Code.ipynb"
4645
   ]
4646
  },
4647
  {
4648
   "cell_type": "code",
4649
   "execution_count": null,
4650
   "metadata": {},
4651
   "outputs": [],
4652
   "source": []
4653
  },
4654
  {
4655
   "cell_type": "code",
4656
   "execution_count": 39,
4657
   "metadata": {},
4658
   "outputs": [],
4659
   "source": [
4660
    "stats_fn = PATH_DISK/'ensemble'/'stats.v{}'.format(VERSION)\n",
4661
    "if stats_fn.is_file():\n",
4662
    "    stats_fn.unlink()"
4663
   ]
4664
  },
4665
  {
4666
   "cell_type": "code",
4667
   "execution_count": 43,
4668
   "metadata": {
4669
    "scrolled": true
4670
   },
4671
   "outputs": [
4672
    {
4673
     "name": "stdout",
4674
     "output_type": "stream",
4675
     "text": [
4676
      "starting fold 0 target 0\n",
4677
      "my_len 4\n",
4678
      "obj  0.08727681341480276\n",
4679
      "obj  0.08727680961247067\n",
4680
      "obj  0.08727681635930583\n",
4681
      "obj  0.08727683790936251\n",
4682
      "obj  0.08727727172526722\n",
4683
      "obj  0.0872790250770156\n",
4684
      "obj  0.08728207769630063\n",
4685
      "obj  0.08730524551710207\n",
4686
      "obj  0.08741163374725006\n",
4687
      "obj  0.08737828492011111\n",
4688
      "obj  0.08728987139098107\n",
4689
      "obj  0.08727957160159115\n",
4690
      "obj  0.08727743735897546\n",
4691
      "obj  0.08727694643886197\n",
4692
      "obj  0.08727683642000757\n",
4693
      "obj  0.08727680763252753\n",
4694
      "obj  0.08727680110920166\n",
4695
      "obj  0.0872767999865726\n",
4696
      "model [0.5022 0.4978] sum 0.9999676813453886\n",
4697
      "my_len 4\n",
4698
      "v34 f0 t0: original ll 0.0935/0.0886, ensemble ll 0.0935/0.0886\n",
4699
      "running time 3.408698320388794\n",
4700
      "starting fold 0 target 1\n",
4701
      "my_len 4\n",
4702
      "obj  0.012081623796648948\n",
4703
      "obj  0.012064844467400441\n",
4704
      "obj  0.012069603621018268\n",
4705
      "obj  0.012057825517191024\n",
4706
      "obj  0.012063435473538067\n",
4707
      "obj  0.012078578108915878\n",
4708
      "obj  0.012068894688607414\n",
4709
      "obj  0.012071291095892334\n",
4710
      "obj  0.012059318504672595\n",
4711
      "obj  0.012042443367714269\n",
4712
      "obj  0.012035341615015361\n",
4713
      "obj  0.012028032305076937\n",
4714
      "obj  0.01202777543489789\n",
4715
      "obj  0.012027740476985575\n",
4716
      "model [0.2831 0.6969] sum 0.9800013450345408\n",
4717
      "my_len 4\n",
4718
      "v34 f0 t1: original ll 0.0150/0.0139, ensemble ll 0.0149/0.0138\n",
4719
      "running time 2.795881509780884\n",
4720
      "starting fold 0 target 2\n",
4721
      "my_len 4\n",
4722
      "obj  0.03643239100707699\n",
4723
      "obj  0.03645343904689349\n",
4724
      "obj  0.0364534366143448\n",
4725
      "obj  0.036453513478217306\n",
4726
      "obj  0.036453516366294375\n",
4727
      "obj  0.03645341604099942\n",
4728
      "obj  0.03645330420145025\n",
4729
      "obj  0.03645267895293359\n",
4730
      "obj  0.036450097188809066\n",
4731
      "obj  0.03641199892754806\n",
4732
      "obj  0.036373801711966164\n",
4733
      "obj  0.03637281406397558\n",
4734
      "obj  0.036372806451137295\n",
4735
      "obj  0.03637272605629483\n",
4736
      "model [0.2802 0.7198] sum 0.9999991225720998\n",
4737
      "my_len 4\n",
4738
      "v34 f0 t2: original ll 0.0415/0.0392, ensemble ll 0.0415/0.0392\n",
4739
      "running time 2.8015294075012207\n",
4740
      "starting fold 0 target 3\n",
4741
      "my_len 4\n",
4742
      "obj  0.022513923350337257\n",
4743
      "obj  0.022514823788354697\n",
4744
      "obj  0.02251479174800453\n",
4745
      "obj  0.022514828025576614\n",
4746
      "obj  0.022514807377993946\n",
4747
      "obj  0.022514793548033103\n",
4748
      "obj  0.02251478234170425\n",
4749
      "obj  0.0225148540628997\n",
4750
      "obj  0.02251532492335685\n",
4751
      "obj  0.02252397376897954\n",
4752
      "obj  0.0224612699479598\n",
4753
      "obj  0.022460291244167933\n",
4754
      "obj  0.022460281207617266\n",
4755
      "obj  0.02246025802669623\n",
4756
      "model [0.2562 0.7438] sum 0.9999997939535652\n",
4757
      "my_len 4\n",
4758
      "v34 f0 t3: original ll 0.0243/0.0239, ensemble ll 0.0243/0.0239\n",
4759
      "running time 2.816469669342041\n",
4760
      "starting fold 0 target 4\n",
4761
      "my_len 4\n",
4762
      "obj  0.05866908744304866\n",
4763
      "obj  0.058655421224092855\n",
4764
      "obj  0.05865799778032658\n",
4765
      "obj  0.05865917796177444\n",
4766
      "obj  0.05867525787484774\n",
4767
      "obj  0.058666700956343514\n",
4768
      "obj  0.05866190108829274\n",
4769
      "obj  0.05867066520315169\n",
4770
      "obj  0.058622009226577926\n",
4771
      "obj  0.05857392212815765\n",
4772
      "obj  0.058552161805164274\n",
4773
      "obj  0.05854793797715716\n",
4774
      "obj  0.0585477836014417\n",
4775
      "obj  0.05854778093821152\n",
4776
      "obj  0.05854778093568046\n",
4777
      "model [0.1849 0.8112] sum 0.9961132357622775\n",
4778
      "my_len 4\n",
4779
      "v34 f0 t4: original ll 0.0617/0.0596, ensemble ll 0.0617/0.0596\n",
4780
      "running time 2.985300064086914\n",
4781
      "starting fold 0 target 5\n",
4782
      "my_len 4\n",
4783
      "obj  0.07111888085217989\n",
4784
      "obj  0.07109943900079979\n",
4785
      "obj  0.07109626046895302\n",
4786
      "obj  0.07109808791436022\n",
4787
      "obj  0.07111092588847316\n",
4788
      "obj  0.0710957678702807\n",
4789
      "obj  0.07109475918862561\n",
4790
      "obj  0.07109280184395775\n",
4791
      "obj  0.07107889783936418\n",
4792
      "obj  0.07107915998501899\n",
4793
      "obj  0.07106262999212018\n",
4794
      "obj  0.07106191775845142\n",
4795
      "obj  0.07106191688882099\n",
4796
      "model [0.3466 0.6453] sum 0.9918766105998069\n",
4797
      "my_len 4\n",
4798
      "v34 f0 t5: original ll 0.0789/0.0757, ensemble ll 0.0789/0.0757\n",
4799
      "running time 2.694082736968994\n",
4800
      "starting fold 1 target 0\n",
4801
      "my_len 4\n",
4802
      "obj  0.086462634155086\n",
4803
      "obj  0.08646150298320077\n",
4804
      "obj  0.0864614802906338\n",
4805
      "obj  0.08646149569706231\n",
4806
      "obj  0.08646172873837056\n",
4807
      "obj  0.08646289351196788\n",
4808
      "obj  0.08646556301640597\n",
4809
      "obj  0.08648759494216639\n",
4810
      "obj  0.0866088306497439\n",
4811
      "obj  0.08657377479849461\n",
4812
      "obj  0.08646196862069534\n",
4813
      "obj  0.08644958080609667\n",
4814
      "obj  0.08644661615687646\n",
4815
      "obj  0.0864458232439799\n",
4816
      "obj  0.08644566400280582\n",
4817
      "obj  0.08644565114567859\n",
4818
      "model [0.5941 0.4059] sum 0.9999997875126851\n",
4819
      "my_len 4\n",
4820
      "v34 f1 t0: original ll 0.0964/0.0902, ensemble ll 0.0965/0.0902\n",
4821
      "running time 3.123203992843628\n",
4822
      "starting fold 1 target 1\n",
4823
      "my_len 4\n",
4824
      "obj  0.013554873239082691\n",
4825
      "obj  0.013536021601660043\n",
4826
      "obj  0.013540780340295681\n",
4827
      "obj  0.013566508884523805\n",
4828
      "obj  0.013564699748967186\n",
4829
      "obj  0.013552392374760967\n",
4830
      "obj  0.013547296797121735\n",
4831
      "obj  0.01354698457004907\n",
4832
      "obj  0.01350746236715416\n",
4833
      "obj  0.013447856802878057\n",
4834
      "obj  0.013419140435866268\n",
4835
      "obj  0.013415370101089641\n",
4836
      "obj  0.01341048435640438\n",
4837
      "obj  0.013410353069281782\n",
4838
      "obj  0.013410352471037947\n",
4839
      "obj  0.013410330155687895\n",
4840
      "model [0.1071 0.8929] sum 0.9999981188620239\n",
4841
      "my_len 4\n",
4842
      "v34 f1 t1: original ll 0.0128/0.0110, ensemble ll 0.0129/0.0111\n",
4843
      "running time 3.057985782623291\n",
4844
      "starting fold 1 target 2\n",
4845
      "my_len 4\n",
4846
      "obj  0.03700983338270924\n",
4847
      "obj  0.03700942511496985\n",
4848
      "obj  0.03700941857757188\n",
4849
      "obj  0.037009493333549054\n",
4850
      "obj  0.03701008954460971\n",
4851
      "obj  0.037016731112905855\n",
4852
      "obj  0.037024638902028846\n",
4853
      "obj  0.037049487602528804\n",
4854
      "obj  0.03702979731288853\n",
4855
      "obj  0.037045004627654254\n",
4856
      "obj  0.0369870605249707\n",
4857
      "obj  0.036984237597943385\n",
4858
      "obj  0.036983631845849435\n",
4859
      "obj  0.03698346188992404\n",
4860
      "obj  0.036983414748853076\n",
4861
      "obj  0.036983401729567406\n",
4862
      "obj  0.036983398096645255\n",
4863
      "obj  0.036983397111147245\n",
4864
      "model [0.351 0.649] sum 0.9999845409128088\n",
4865
      "my_len 4\n",
4866
      "v34 f1 t2: original ll 0.0401/0.0381, ensemble ll 0.0400/0.0380\n",
4867
      "running time 3.355295419692993\n",
4868
      "starting fold 1 target 3\n",
4869
      "my_len 4\n",
4870
      "obj  0.023432825312353096\n",
4871
      "obj  0.023430311446247156\n",
4872
      "obj  0.023430223179945912\n",
4873
      "obj  0.023430262634468815\n",
4874
      "obj  0.0234308175057871\n",
4875
      "obj  0.023438794235891954\n",
4876
      "obj  0.02345313737719687\n",
4877
      "obj  0.02348204017957184\n",
4878
      "obj  0.023460208851731834\n",
4879
      "obj  0.023476355780291332\n",
4880
      "obj  0.02340827851846919\n",
4881
      "obj  0.023406000871457035\n",
4882
      "obj  0.023405403449075084\n",
4883
      "obj  0.023405297323883776\n",
4884
      "obj  0.0234052886876152\n",
4885
      "model [0.3264 0.6736] sum 0.9999998723930892\n",
4886
      "my_len 4\n",
4887
      "v34 f1 t3: original ll 0.0233/0.0221, ensemble ll 0.0233/0.0220\n",
4888
      "running time 2.9708051681518555\n",
4889
      "starting fold 1 target 4\n",
4890
      "my_len 4\n",
4891
      "obj  0.05876162822408629\n",
4892
      "obj  0.05877796616366613\n",
4893
      "obj  0.058777955484026846\n",
4894
      "obj  0.058778224218930426\n",
4895
      "obj  0.0587782146454934\n",
4896
      "obj  0.05877773096065451\n",
4897
      "obj  0.05877722216040082\n",
4898
      "obj  0.05877439407851651\n",
4899
      "obj  0.058760743102489674\n",
4900
      "obj  0.05868165579294521\n",
4901
      "obj  0.05865935503477886\n",
4902
      "obj  0.05865909561692464\n",
4903
      "obj  0.05865909552127853\n",
4904
      "obj  0.05865908163335363\n",
4905
      "model [0.2072 0.7928] sum 0.9999997236086989\n",
4906
      "my_len 4\n",
4907
      "v34 f1 t4: original ll 0.0632/0.0594, ensemble ll 0.0632/0.0594\n",
4908
      "running time 2.8280203342437744\n",
4909
      "starting fold 1 target 5\n",
4910
      "my_len 4\n",
4911
      "obj  0.07183112972231193\n",
4912
      "obj  0.07182105040716405\n",
4913
      "obj  0.07182017839075075\n",
4914
      "obj  0.07182183010763957\n",
4915
      "obj  0.07183346670238054\n",
4916
      "obj  0.07181975923420639\n",
4917
      "obj  0.07182105338358573\n",
4918
      "obj  0.07182045801712832\n",
4919
      "obj  0.07180832229135492\n",
4920
      "obj  0.07180906614397237\n",
4921
      "obj  0.07179208329104021\n",
4922
      "obj  0.07178991670375105\n",
4923
      "obj  0.07178988693665318\n",
4924
      "obj  0.07178988689714585\n",
4925
      "model [0.3481 0.6455] sum 0.9935939570813068\n",
4926
      "my_len 4\n",
4927
      "v34 f1 t5: original ll 0.0780/0.0742, ensemble ll 0.0780/0.0742\n",
4928
      "running time 2.8228495121002197\n",
4929
      "starting fold 2 target 0\n",
4930
      "my_len 4\n",
4931
      "obj  0.08937950117764316\n",
4932
      "obj  0.08937672934263124\n",
4933
      "obj  0.08937644247528813\n",
4934
      "obj  0.08937645539574389\n",
4935
      "obj  0.08937661990672387\n",
4936
      "obj  0.08937751986380715\n",
4937
      "obj  0.08938010041832908\n",
4938
      "obj  0.0894013641490466\n",
4939
      "obj  0.08953362229177689\n",
4940
      "obj  0.08950694179309915\n",
4941
      "obj  0.08939116686116341\n",
4942
      "obj  0.08937686219260021\n",
4943
      "obj  0.08937344427477412\n",
4944
      "obj  0.08937270519391056\n",
4945
      "obj  0.08937262922565642\n",
4946
      "model [0.5579 0.4421] sum 0.9999993467855719\n",
4947
      "my_len 4\n",
4948
      "v34 f2 t0: original ll 0.0905/0.0844, ensemble ll 0.0905/0.0844\n",
4949
      "running time 2.990800380706787\n",
4950
      "starting fold 2 target 1\n",
4951
      "my_len 4\n",
4952
      "obj  0.012436140247391692\n",
4953
      "obj  0.012426658871830904\n",
4954
      "obj  0.012428926899146497\n",
4955
      "obj  0.012424188907848488\n",
4956
      "obj  0.012426993325575834\n",
4957
      "obj  0.012433845116420816\n",
4958
      "obj  0.012423954674312862\n",
4959
      "obj  0.01242496008015588\n",
4960
      "obj  0.012400535807525133\n",
4961
      "obj  0.01236383429098519\n",
4962
      "obj  0.012348143529084353\n",
4963
      "obj  0.012343410505174578\n",
4964
      "obj  0.012343410238957164\n",
4965
      "obj  0.012343391580164224\n",
4966
      "model [0.1901 0.7899] sum 0.9800025514053641\n",
4967
      "my_len 4\n",
4968
      "v34 f2 t1: original ll 0.0150/0.0132, ensemble ll 0.0149/0.0131\n",
4969
      "running time 2.79952335357666\n",
4970
      "starting fold 2 target 2\n",
4971
      "my_len 4\n",
4972
      "obj  0.03864261774481127\n",
4973
      "obj  0.03864184039486668\n",
4974
      "obj  0.038641796041188246\n",
4975
      "obj  0.038642060704163234\n",
4976
      "obj  0.03864494722268341\n",
4977
      "obj  0.03864537483997842\n",
4978
      "obj  0.03864934154151825\n",
4979
      "obj  0.03866640020838346\n",
4980
      "obj  0.0386494241163526\n",
4981
      "obj  0.03865924189909652\n",
4982
      "obj  0.03862338492653919\n",
4983
      "obj  0.03861377744256826\n"
4984
     ]
4985
    },
4986
    {
4987
     "name": "stdout",
4988
     "output_type": "stream",
4989
     "text": [
4990
      "obj  0.03861288644457599\n",
4991
      "obj  0.03861282478178117\n",
4992
      "obj  0.03861282377857218\n",
4993
      "obj  0.03861282377493841\n",
4994
      "model [0.3423 0.6557] sum 0.998008898781132\n",
4995
      "my_len 4\n",
4996
      "v34 f2 t2: original ll 0.0370/0.0348, ensemble ll 0.0369/0.0348\n",
4997
      "running time 3.07651686668396\n",
4998
      "starting fold 2 target 3\n",
4999
      "my_len 4\n",
5000
      "obj  0.022995224518522383\n",
5001
      "obj  0.02299340932717575\n",
5002
      "obj  0.022993385766960923\n",
5003
      "obj  0.0229934166686739\n",
5004
      "obj  0.022994249545199564\n",
5005
      "obj  0.0230039489232303\n",
5006
      "obj  0.02301358839178314\n",
5007
      "obj  0.023040209826170773\n",
5008
      "obj  0.023015709942233124\n",
5009
      "obj  0.02303114245854406\n",
5010
      "obj  0.022971532575813594\n",
5011
      "obj  0.022959230997852857\n",
5012
      "obj  0.02295682416350499\n",
5013
      "obj  0.022956168474470892\n",
5014
      "obj  0.022955983453470243\n",
5015
      "obj  0.02295594581037717\n",
5016
      "obj  0.022955942947855967\n",
5017
      "model [0.2945 0.7055] sum 0.9999997552090265\n",
5018
      "my_len 4\n",
5019
      "v34 f2 t3: original ll 0.0236/0.0230, ensemble ll 0.0235/0.0229\n",
5020
      "running time 3.2023861408233643\n",
5021
      "starting fold 2 target 4\n",
5022
      "my_len 4\n",
5023
      "obj  0.05949662701641982\n",
5024
      "obj  0.059496427471729306\n",
5025
      "obj  0.059496342460381116\n",
5026
      "obj  0.059496656778369565\n",
5027
      "obj  0.05949867889401739\n",
5028
      "obj  0.05949875155855878\n",
5029
      "obj  0.05950472150471753\n",
5030
      "obj  0.059513989228462204\n",
5031
      "obj  0.05949473181554839\n",
5032
      "obj  0.05949902808253696\n",
5033
      "obj  0.05946635705740825\n",
5034
      "obj  0.05945508305105207\n",
5035
      "obj  0.05945429089413502\n",
5036
      "obj  0.05945422305596472\n",
5037
      "obj  0.059454221242697204\n",
5038
      "obj  0.059454221233066123\n",
5039
      "model [0.3094 0.6888] sum 0.9982120655965214\n",
5040
      "my_len 4\n",
5041
      "v34 f2 t4: original ll 0.0608/0.0579, ensemble ll 0.0606/0.0578\n",
5042
      "running time 3.11377215385437\n",
5043
      "starting fold 2 target 5\n",
5044
      "my_len 4\n",
5045
      "obj  0.07494273297957547\n",
5046
      "obj  0.07494193934979902\n",
5047
      "obj  0.0749418088452629\n",
5048
      "obj  0.07494215623220447\n",
5049
      "obj  0.07494534611048678\n",
5050
      "obj  0.07494407120248021\n",
5051
      "obj  0.07494607787971268\n",
5052
      "obj  0.07495872278249605\n",
5053
      "obj  0.07493779709805286\n",
5054
      "obj  0.0749435731885866\n",
5055
      "obj  0.0749166852814284\n",
5056
      "obj  0.07490477996495228\n",
5057
      "obj  0.0749038705129651\n",
5058
      "obj  0.07490382029340723\n",
5059
      "obj  0.07490381954831983\n",
5060
      "obj  0.07490381954523084\n",
5061
      "model [0.3463 0.6515] sum 0.9977752843293807\n",
5062
      "my_len 4\n",
5063
      "v34 f2 t5: original ll 0.0719/0.0680, ensemble ll 0.0718/0.0680\n",
5064
      "running time 3.083256244659424\n",
5065
      "total running time 54.07902908325195\n"
5066
     ]
5067
    }
5068
   ],
5069
   "source": [
5070
    "stg = time.time()\n",
5071
    "for fold in range(3):\n",
5072
    "    for target in range(6):\n",
5073
    "        train_ensemble(train_md, preds_all, fold=fold, target=target, weighted=True)\n",
5074
    "print('total running time', time.time() - stg)"
5075
   ]
5076
  },
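  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`train_ensemble` is defined in `Code.ipynb` and not shown in this notebook. Judging from the `obj ...` / `model [w0 w1]` log lines above, it searches for two blend weights per fold and target that minimize the validation log loss of a two-way blend. The cell below is only a hypothetical, self-contained sketch of that kind of search on toy data, not the actual implementation:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# hypothetical sketch: minimize the log loss of a two-way blend over its two weights\n",
    "from scipy.optimize import minimize\n",
    "from sklearn.metrics import log_loss\n",
    "\n",
    "def blend_objective(w, p1, p2, y, sw=None):\n",
    "    p = np.clip(w[0]*p1 + w[1]*p2, 1e-7, 1 - 1e-7)\n",
    "    return log_loss(y, p, labels=[0,1], sample_weight=sw)\n",
    "\n",
    "# toy inputs; the real inputs would be the per-fold, per-target OOF predictions\n",
    "rng = np.random.RandomState(0)\n",
    "y_toy = rng.randint(0, 2, 1000)\n",
    "p1_toy = y_toy*0.7 + rng.rand(1000)*0.3\n",
    "p2_toy = y_toy*0.6 + rng.rand(1000)*0.4\n",
    "opt = minimize(blend_objective, x0=[0.5, 0.5], args=(p1_toy, p2_toy, y_toy), method='Nelder-Mead')\n",
    "print('model', opt.x, 'sum', opt.x.sum())"
   ]
  },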
5077
  {
5078
   "cell_type": "code",
5079
   "execution_count": 40,
5080
   "metadata": {
5081
    "scrolled": true
5082
   },
5083
   "outputs": [
5084
    {
5085
     "name": "stdout",
5086
     "output_type": "stream",
5087
     "text": [
5088
      "starting fold 0 target 0\n",
5089
      "my_len 4\n",
5090
      "obj  0.0933852150101332\n",
5091
      "obj  0.09338046248442476\n",
5092
      "obj  0.09338029486041102\n",
5093
      "obj  0.09338035972731744\n",
5094
      "obj  0.0933811143108087\n",
5095
      "obj  0.09338210596694844\n",
5096
      "obj  0.09338474272802534\n",
5097
      "obj  0.09340376645538785\n",
5098
      "obj  0.0934854721225016\n",
5099
      "obj  0.09345651504702965\n",
5100
      "obj  0.09338703775747244\n",
5101
      "obj  0.09338029268632698\n",
5102
      "obj  0.09337958969300378\n",
5103
      "obj  0.09337955385335427\n",
5104
      "obj  0.0933795535198863\n",
5105
      "model [0.5182 0.4808] sum 0.9989492472281913\n",
5106
      "my_len 4\n",
5107
      "v33 f0 t0: original ll 0.0935/0.0887, ensemble ll 0.0935/0.0887\n",
5108
      "running time 3.2507238388061523\n",
5109
      "starting fold 0 target 1\n",
5110
      "my_len 4\n",
5111
      "obj  0.013897395265353997\n",
5112
      "obj  0.013881880847430866\n",
5113
      "obj  0.013885991780512735\n",
5114
      "obj  0.013876246790549903\n",
5115
      "obj  0.013881127990830817\n",
5116
      "obj  0.013894008184175567\n",
5117
      "obj  0.013882457611996777\n",
5118
      "obj  0.013884478813005323\n",
5119
      "obj  0.013863346223612806\n",
5120
      "obj  0.01383540008841151\n",
5121
      "obj  0.013825250237213925\n",
5122
      "obj  0.013818474145475493\n",
5123
      "obj  0.01381843625969093\n",
5124
      "obj  0.013818436182581805\n",
5125
      "obj  0.013818405670253893\n",
5126
      "model [0.2427 0.7373] sum 0.9800015590878185\n",
5127
      "my_len 4\n",
5128
      "v33 f0 t1: original ll 0.0149/0.0138, ensemble ll 0.0148/0.0137\n",
5129
      "running time 3.0066232681274414\n",
5130
      "starting fold 0 target 2\n",
5131
      "my_len 4\n",
5132
      "obj  0.03855627126313789\n",
5133
      "obj  0.03856925253658012\n",
5134
      "obj  0.03856923323281781\n",
5135
      "obj  0.03856931508023857\n",
5136
      "obj  0.038569302973706525\n",
5137
      "obj  0.03856915496260997\n",
5138
      "obj  0.03856899304326355\n",
5139
      "obj  0.03856816332340819\n",
5140
      "obj  0.0385645791222383\n",
5141
      "obj  0.03852293886610801\n",
5142
      "obj  0.03849000992229287\n",
5143
      "obj  0.038489241766752426\n",
5144
      "obj  0.038489236663254565\n",
5145
      "obj  0.038489183204291004\n",
5146
      "model [0.2783 0.7217] sum 0.9999990774048544\n",
5147
      "my_len 4\n",
5148
      "v33 f0 t2: original ll 0.0415/0.0392, ensemble ll 0.0415/0.0393\n",
5149
      "running time 2.8931853771209717\n",
5150
      "starting fold 0 target 3\n",
5151
      "my_len 4\n",
5152
      "obj  0.023471819011950737\n",
5153
      "obj  0.02346308538936012\n",
5154
      "obj  0.02346306370142104\n",
5155
      "obj  0.023463096870067094\n",
5156
      "obj  0.023463084589733965\n",
5157
      "obj  0.023463085675027997\n",
5158
      "obj  0.023463090035115736\n",
5159
      "obj  0.02346320596213014\n",
5160
      "obj  0.02346382293862838\n",
5161
      "obj  0.02348155081080937\n",
5162
      "obj  0.02343353697215318\n",
5163
      "obj  0.023433088692115203\n",
5164
      "obj  0.02343308800612711\n",
5165
      "obj  0.023433059211329016\n",
5166
      "model [0.306 0.694] sum 0.9999997220069807\n",
5167
      "my_len 4\n",
5168
      "v33 f0 t3: original ll 0.0243/0.0239, ensemble ll 0.0243/0.0239\n",
5169
      "running time 2.9580657482147217\n",
5170
      "starting fold 0 target 4\n",
5171
      "my_len 4\n",
5172
      "obj  0.06201436184726342\n",
5173
      "obj  0.061994016197883355\n",
5174
      "obj  0.061996952747678406\n",
5175
      "obj  0.061997831217601926\n",
5176
      "obj  0.06201332307115508\n",
5177
      "obj  0.06200539452619764\n",
5178
      "obj  0.061997782770088716\n",
5179
      "obj  0.061999809977407665\n",
5180
      "obj  0.061946341140497775\n",
5181
      "obj  0.06189433265221826\n",
5182
      "obj  0.06187496883012553\n",
5183
      "obj  0.06187358174569984\n",
5184
      "obj  0.061873550371914346\n",
5185
      "obj  0.06187354965251176\n",
5186
      "obj  0.06187354965195954\n",
5187
      "model [0.1759 0.8163] sum 0.9922753054062554\n",
5188
      "my_len 4\n",
5189
      "v33 f0 t4: original ll 0.0617/0.0596, ensemble ll 0.0617/0.0596\n",
5190
      "running time 3.1102256774902344\n",
5191
      "starting fold 0 target 5\n",
5192
      "my_len 4\n",
5193
      "obj  0.07485970317009938\n",
5194
      "obj  0.07484302022808972\n",
5195
      "obj  0.07484070942818977\n",
5196
      "obj  0.07484219496860454\n",
5197
      "obj  0.07485295263606241\n",
5198
      "obj  0.07484035286995884\n",
5199
      "obj  0.07484141900156345\n",
5200
      "obj  0.07484183422896018\n",
5201
      "obj  0.0748343331646704\n",
5202
      "obj  0.07483511386213709\n",
5203
      "obj  0.07482549187014455\n",
5204
      "obj  0.0748237146789029\n",
5205
      "obj  0.07482368467518487\n",
5206
      "obj  0.07482367083879939\n",
5207
      "obj  0.07482367083046154\n",
5208
      "model [0.3925 0.6007] sum 0.9931778590255735\n",
5209
      "my_len 4\n",
5210
      "v33 f0 t5: original ll 0.0789/0.0757, ensemble ll 0.0789/0.0757\n",
5211
      "running time 3.0676307678222656\n",
5212
      "starting fold 1 target 0\n",
5213
      "my_len 4\n",
5214
      "obj  0.0919894615986076\n",
5215
      "obj  0.09198934066490368\n",
5216
      "obj  0.09198934060039965\n",
5217
      "obj  0.0919893485003721\n",
5218
      "obj  0.09198977183141734\n",
5219
      "obj  0.09199123504674073\n",
5220
      "obj  0.09199391461415146\n",
5221
      "obj  0.09201565951872456\n",
5222
      "obj  0.09211729091908993\n",
5223
      "obj  0.09208098074046837\n",
5224
      "obj  0.0919837225645165\n",
5225
      "obj  0.0919736624585904\n",
5226
      "obj  0.09197157155893894\n",
5227
      "obj  0.09197109088607663\n",
5228
      "obj  0.09197098268778833\n",
5229
      "obj  0.09197095367445211\n",
5230
      "obj  0.0919709467643533\n",
5231
      "model [0.5953 0.4047] sum 0.9999621194280907\n",
5232
      "my_len 4\n",
5233
      "v33 f1 t0: original ll 0.0963/0.0902, ensemble ll 0.0963/0.0902\n",
5234
      "running time 3.3472983837127686\n",
5235
      "starting fold 1 target 1\n",
5236
      "my_len 4\n",
5237
      "obj  0.014958787142420498\n",
5238
      "obj  0.014937330106503348\n",
5239
      "obj  0.014942650987592622\n",
5240
      "obj  0.014975860772633154\n",
5241
      "obj  0.014972362994514807\n",
5242
      "obj  0.014957564541716397\n",
5243
      "obj  0.014951577023955265\n",
5244
      "obj  0.014951063187963957\n",
5245
      "obj  0.01491070342318423\n",
5246
      "obj  0.014853979550418115\n",
5247
      "obj  0.014829774294376331\n",
5248
      "obj  0.014821112462190839\n",
5249
      "obj  0.014821058186807711\n",
5250
      "obj  0.0148210268020669\n",
5251
      "obj  0.01482102580854534\n",
5252
      "model [0.1376 0.8624] sum 0.9999999409483264\n",
5253
      "my_len 4\n",
5254
      "v33 f1 t1: original ll 0.0128/0.0110, ensemble ll 0.0128/0.0110\n",
5255
      "running time 2.9967260360717773\n",
5256
      "starting fold 1 target 2\n",
5257
      "my_len 4\n",
5258
      "obj  0.039245835968578474\n",
5259
      "obj  0.03924574127471867\n",
5260
      "obj  0.039245713624497434\n",
5261
      "obj  0.03924588587989087\n",
5262
      "obj  0.03924803573797128\n",
5263
      "obj  0.03925147467544376\n",
5264
      "obj  0.0392563703952373\n",
5265
      "obj  0.03927551263462263\n",
5266
      "obj  0.03925650976537233\n",
5267
      "obj  0.03926796310975214\n",
5268
      "obj  0.0392185324397789\n",
5269
      "obj  0.039216537422768455\n",
5270
      "obj  0.039216281028530475\n",
5271
      "obj  0.039216255398415924\n",
5272
      "obj  0.03921625454996835\n",
5273
      "obj  0.039216254539564024\n",
5274
      "model [0.3469 0.6519] sum 0.9988259427442799\n",
5275
      "my_len 4\n",
5276
      "v33 f1 t2: original ll 0.0401/0.0381, ensemble ll 0.0401/0.0381\n",
5277
      "running time 3.1578216552734375\n",
5278
      "starting fold 1 target 3\n",
5279
      "my_len 4\n",
5280
      "obj  0.02395696038549021\n",
5281
      "obj  0.02395486428261125\n",
5282
      "obj  0.023954835163552384\n",
5283
      "obj  0.023954877532169596\n",
5284
      "obj  0.023955568418781947\n",
5285
      "obj  0.023963995166198675\n",
5286
      "obj  0.023978023975066425\n",
5287
      "obj  0.024002845343395824\n",
5288
      "obj  0.023983817519113094\n",
5289
      "obj  0.023997071439984398\n",
5290
      "obj  0.02394045500310195\n",
5291
      "obj  0.023937978579431436\n",
5292
      "obj  0.0239372920681272\n",
5293
      "obj  0.02393711483122209\n",
5294
      "obj  0.02393708937017656\n",
5295
      "model [0.359 0.641] sum 0.9999985792555948\n",
5296
      "my_len 4\n",
5297
      "v33 f1 t3: original ll 0.0233/0.0221, ensemble ll 0.0233/0.0220\n",
5298
      "running time 3.0442991256713867\n",
5299
      "starting fold 1 target 4\n",
5300
      "my_len 4\n",
5301
      "obj  0.061258507345704066\n",
5302
      "obj  0.061265689746832726\n",
5303
      "obj  0.06126562077837275\n",
5304
      "obj  0.061265854859751424\n",
5305
      "obj  0.0612658046238735\n",
5306
      "obj  0.061265351617187\n",
5307
      "obj  0.061264866771111565\n",
5308
      "obj  0.061262444514258725\n",
5309
      "obj  0.061251536610214256\n",
5310
      "obj  0.06115744444115026\n",
5311
      "obj  0.061149983693155184\n",
5312
      "obj  0.061149771919456586\n",
5313
      "obj  0.061149771087428396\n",
5314
      "obj  0.06114975345151202\n",
5315
      "model [0.2158 0.7842] sum 0.9999996248180504\n",
5316
      "my_len 4\n",
5317
      "v33 f1 t4: original ll 0.0632/0.0594, ensemble ll 0.0632/0.0594\n",
5318
      "running time 2.8806824684143066\n",
5319
      "starting fold 1 target 5\n",
5320
      "my_len 4\n",
5321
      "obj  0.07535926071665933\n",
5322
      "obj  0.07534861733988092\n",
5323
      "obj  0.07534762674133844\n",
5324
      "obj  0.07534904493869508\n",
5325
      "obj  0.07535936082490458\n",
5326
      "obj  0.0753471265997745\n",
5327
      "obj  0.07534426077367337\n",
5328
      "obj  0.07534711702093719\n",
5329
      "obj  0.07533211331536023\n",
5330
      "obj  0.07533317814561168\n",
5331
      "obj  0.07531220405379302\n",
5332
      "obj  0.07530996229389857\n",
5333
      "obj  0.07530993777029112\n",
5334
      "obj  0.07530993774604199\n",
5335
      "model [0.3331 0.6604] sum 0.993527447026961\n",
5336
      "my_len 4\n",
5337
      "v33 f1 t5: original ll 0.0779/0.0742, ensemble ll 0.0779/0.0742\n",
5338
      "running time 2.8882410526275635\n",
5339
      "starting fold 2 target 0\n",
5340
      "my_len 4\n",
5341
      "obj  0.09489490438158145\n",
5342
      "obj  0.09489430912501066\n",
5343
      "obj  0.09489431102390815\n",
5344
      "obj  0.09489431535393611\n",
5345
      "obj  0.0948945806962745\n",
5346
      "obj  0.09489608391205885\n",
5347
      "obj  0.09489866734487919\n",
5348
      "obj  0.0949200247646217\n",
5349
      "obj  0.09503918400778456\n",
5350
      "obj  0.09501153296540055\n",
5351
      "obj  0.09490249627913898\n",
5352
      "obj  0.09488951860739707\n",
5353
      "obj  0.09488647087827386\n",
5354
      "obj  0.09488564556714987\n",
5355
      "obj  0.0948854679188669\n",
5356
      "obj  0.09488545200417911\n",
5357
      "model [0.5661 0.4339] sum 0.9999997699565845\n",
5358
      "my_len 4\n",
5359
      "v33 f2 t0: original ll 0.0905/0.0845, ensemble ll 0.0905/0.0845\n",
5360
      "running time 3.214533567428589\n",
5361
      "starting fold 2 target 1\n",
5362
      "my_len 4\n",
5363
      "obj  0.013868197433815987\n",
5364
      "obj  0.01386027871457582\n",
5365
      "obj  0.013862065714890364\n",
5366
      "obj  0.013858828553407412\n",
5367
      "obj  0.013860879366817503\n",
5368
      "obj  0.013866290527273424\n",
5369
      "obj  0.013857530201794333\n",
5370
      "obj  0.013858298202236378\n",
5371
      "obj  0.013836364815367146\n",
5372
      "obj  0.013804921280345871\n",
5373
      "obj  0.01379261828630837\n",
5374
      "obj  0.013789536265747205\n",
5375
      "obj  0.013789525064620644\n",
5376
      "model [0.2207 0.7593] sum 0.980004158654775\n",
5377
      "my_len 4\n",
5378
      "v33 f2 t1: original ll 0.0150/0.0133, ensemble ll 0.0149/0.0131\n",
5379
      "running time 2.7753772735595703\n",
5380
      "starting fold 2 target 2\n",
5381
      "my_len 4\n",
5382
      "obj  0.04081306058254699\n",
5383
      "obj  0.040812031203401905\n",
5384
      "obj  0.040811937510774186\n",
5385
      "obj  0.04081223601044173\n",
5386
      "obj  0.04081528137789779\n",
5387
      "obj  0.04081538347060062\n",
5388
      "obj  0.04081682793135578\n",
5389
      "obj  0.04083251985984981\n",
5390
      "obj  0.040810479353506623\n",
5391
      "obj  0.04081936079563107\n",
5392
      "obj  0.04077962409069144\n",
5393
      "obj  0.04077101062103325\n",
5394
      "obj  0.0407701494171347\n",
5395
      "obj  0.04077012004624905\n"
5396
     ]
5397
    },
5398
    {
5399
     "name": "stdout",
5400
     "output_type": "stream",
5401
     "text": [
5402
      "obj  0.040770119816992766\n",
5403
      "model [0.3174 0.68  ] sum 0.9973906231915857\n",
5404
      "my_len 4\n",
5405
      "v33 f2 t2: original ll 0.0370/0.0348, ensemble ll 0.0370/0.0348\n",
5406
      "running time 3.0440049171447754\n",
5407
      "starting fold 2 target 3\n",
5408
      "my_len 4\n",
5409
      "obj  0.023823516135291243\n",
5410
      "obj  0.02382017911708748\n",
5411
      "obj  0.023820106378154727\n",
5412
      "obj  0.023820136798804403\n",
5413
      "obj  0.023820572725337157\n",
5414
      "obj  0.02383469183098074\n",
5415
      "obj  0.02384809531847105\n",
5416
      "obj  0.023873357339416913\n",
5417
      "obj  0.02385181419582523\n",
5418
      "obj  0.02386641578011786\n",
5419
      "obj  0.023805227006125536\n",
5420
      "obj  0.023802547496832128\n",
5421
      "obj  0.023801797197722675\n",
5422
      "obj  0.02380161177878747\n",
5423
      "obj  0.02380158861610797\n",
5424
      "model [0.3532 0.6468] sum 0.9999989006282439\n",
5425
      "my_len 4\n",
5426
      "v33 f2 t3: original ll 0.0236/0.0230, ensemble ll 0.0236/0.0229\n",
5427
      "running time 3.017551898956299\n",
5428
      "starting fold 2 target 4\n",
5429
      "my_len 4\n",
5430
      "obj  0.06243179531025402\n",
5431
      "obj  0.062427099035585125\n",
5432
      "obj  0.06242713613481103\n",
5433
      "obj  0.062430316328200154\n",
5434
      "obj  0.06243658002881348\n",
5435
      "obj  0.062430816202982437\n",
5436
      "obj  0.06243507738993893\n",
5437
      "obj  0.06244554339226623\n",
5438
      "obj  0.062427434946619766\n",
5439
      "obj  0.06243147007524259\n",
5440
      "obj  0.06240271238770716\n",
5441
      "obj  0.06239411214025914\n",
5442
      "obj  0.062393648133430374\n",
5443
      "obj  0.06239362889706076\n",
5444
      "obj  0.06239362873376792\n",
5445
      "model [0.3265 0.6706] sum 0.9971639400865144\n",
5446
      "my_len 4\n",
5447
      "v33 f2 t4: original ll 0.0608/0.0580, ensemble ll 0.0607/0.0578\n",
5448
      "running time 3.0167412757873535\n",
5449
      "starting fold 2 target 5\n",
5450
      "my_len 4\n",
5451
      "obj  0.07838134560788608\n",
5452
      "obj  0.07838076163206673\n",
5453
      "obj  0.07838071177563007\n",
5454
      "obj  0.07838080623175392\n",
5455
      "obj  0.07838065017243255\n",
5456
      "obj  0.07838512557832436\n",
5457
      "obj  0.07839379106272942\n",
5458
      "obj  0.07841541302344214\n",
5459
      "obj  0.07839227446721775\n",
5460
      "obj  0.0784030307958988\n",
5461
      "obj  0.07835324790612748\n",
5462
      "obj  0.078350745139307\n",
5463
      "obj  0.07835024106315232\n",
5464
      "obj  0.07835010723361426\n",
5465
      "obj  0.07835007445229823\n",
5466
      "obj  0.07835006754602017\n",
5467
      "obj  0.0783500665027853\n",
5468
      "obj  0.07835006642985422\n",
5469
      "model [0.366  0.6339] sum 0.999906731906863\n",
5470
      "my_len 4\n",
5471
      "v33 f2 t5: original ll 0.0718/0.0680, ensemble ll 0.0718/0.0680\n",
5472
      "running time 3.3957345485687256\n",
5473
      "total running time 55.21563124656677\n"
5474
     ]
5475
    }
5476
   ],
5477
   "source": [
5478
    "stg = time.time()\n",
5479
    "for fold in range(3):\n",
5480
    "    for target in range(6):\n",
5481
    "        train_ensemble(train_md, preds_all, fold=fold, target=target, weighted=False)\n",
5482
    "print('total running time', time.time() - stg)"
5483
   ]
5484
  },
5485
  {
5486
   "cell_type": "code",
5487
   "execution_count": 41,
5488
   "metadata": {
5489
    "scrolled": false
5490
   },
5491
   "outputs": [
5492
    {
5493
     "data": {
5494
      "text/html": [
5495
       "<div>\n",
5496
       "<style scoped>\n",
5497
       "    .dataframe tbody tr th:only-of-type {\n",
5498
       "        vertical-align: middle;\n",
5499
       "    }\n",
5500
       "\n",
5501
       "    .dataframe tbody tr th {\n",
5502
       "        vertical-align: top;\n",
5503
       "    }\n",
5504
       "\n",
5505
       "    .dataframe thead th {\n",
5506
       "        text-align: right;\n",
5507
       "    }\n",
5508
       "</style>\n",
5509
       "<table border=\"1\" class=\"dataframe\">\n",
5510
       "  <thead>\n",
5511
       "    <tr style=\"text-align: right;\">\n",
5512
       "      <th></th>\n",
5513
       "      <th></th>\n",
5514
       "      <th>valid_loss</th>\n",
5515
       "      <th>valid_loss_ens</th>\n",
5516
       "      <th>valid_w_loss</th>\n",
5517
       "      <th>valid_w_loss_ens</th>\n",
5518
       "    </tr>\n",
5519
       "    <tr>\n",
5520
       "      <th>weighted</th>\n",
5521
       "      <th>target</th>\n",
5522
       "      <th></th>\n",
5523
       "      <th></th>\n",
5524
       "      <th></th>\n",
5525
       "      <th></th>\n",
5526
       "    </tr>\n",
5527
       "  </thead>\n",
5528
       "  <tbody>\n",
5529
       "    <tr>\n",
5530
       "      <td rowspan=\"6\" valign=\"top\">False</td>\n",
5531
       "      <td>0</td>\n",
5532
       "      <td>0.093420</td>\n",
5533
       "      <td>0.093428</td>\n",
5534
       "      <td>0.087781</td>\n",
5535
       "      <td>0.087797</td>\n",
5536
       "    </tr>\n",
5537
       "    <tr>\n",
5538
       "      <td>1</td>\n",
5539
       "      <td>0.014243</td>\n",
5540
       "      <td>0.014183</td>\n",
5541
       "      <td>0.012694</td>\n",
5542
       "      <td>0.012633</td>\n",
5543
       "    </tr>\n",
5544
       "    <tr>\n",
5545
       "      <td>2</td>\n",
5546
       "      <td>0.039539</td>\n",
5547
       "      <td>0.039512</td>\n",
5548
       "      <td>0.037398</td>\n",
5549
       "      <td>0.037367</td>\n",
5550
       "    </tr>\n",
5551
       "    <tr>\n",
5552
       "      <td>3</td>\n",
5553
       "      <td>0.023751</td>\n",
5554
       "      <td>0.023728</td>\n",
5555
       "      <td>0.022988</td>\n",
5556
       "      <td>0.022953</td>\n",
5557
       "    </tr>\n",
5558
       "    <tr>\n",
5559
       "      <td>4</td>\n",
5560
       "      <td>0.061900</td>\n",
5561
       "      <td>0.061866</td>\n",
5562
       "      <td>0.058991</td>\n",
5563
       "      <td>0.058947</td>\n",
5564
       "    </tr>\n",
5565
       "    <tr>\n",
5566
       "      <td>5</td>\n",
5567
       "      <td>0.076200</td>\n",
5568
       "      <td>0.076194</td>\n",
5569
       "      <td>0.072653</td>\n",
5570
       "      <td>0.072625</td>\n",
5571
       "    </tr>\n",
5572
       "  </tbody>\n",
5573
       "</table>\n",
5574
       "</div>"
5575
      ],
5576
      "text/plain": [
5577
       "                 valid_loss  valid_loss_ens  valid_w_loss  valid_w_loss_ens\n",
5578
       "weighted target                                                            \n",
5579
       "False    0         0.093420        0.093428      0.087781          0.087797\n",
5580
       "         1         0.014243        0.014183      0.012694          0.012633\n",
5581
       "         2         0.039539        0.039512      0.037398          0.037367\n",
5582
       "         3         0.023751        0.023728      0.022988          0.022953\n",
5583
       "         4         0.061900        0.061866      0.058991          0.058947\n",
5584
       "         5         0.076200        0.076194      0.072653          0.072625"
5585
      ]
5586
     },
5587
     "execution_count": 41,
5588
     "metadata": {},
5589
     "output_type": "execute_result"
5590
    }
5591
   ],
5592
   "source": [
5593
    "stats = pd.read_csv(PATH_DISK/'ensemble'/'stats.v{}'.format(VERSION))\n",
5594
    "stats.groupby(['weighted','target'])[['valid_loss','valid_loss_ens','valid_w_loss','valid_w_loss_ens']].mean()"
5595
   ]
5596
  },
5597
  {
5598
   "cell_type": "code",
5599
   "execution_count": 46,
5600
   "metadata": {},
5601
   "outputs": [
5602
    {
5603
     "data": {
5604
      "text/html": [
5605
       "<div>\n",
5606
       "<style scoped>\n",
5607
       "    .dataframe tbody tr th:only-of-type {\n",
5608
       "        vertical-align: middle;\n",
5609
       "    }\n",
5610
       "\n",
5611
       "    .dataframe tbody tr th {\n",
5612
       "        vertical-align: top;\n",
5613
       "    }\n",
5614
       "\n",
5615
       "    .dataframe thead th {\n",
5616
       "        text-align: right;\n",
5617
       "    }\n",
5618
       "</style>\n",
5619
       "<table border=\"1\" class=\"dataframe\">\n",
5620
       "  <thead>\n",
5621
       "    <tr style=\"text-align: right;\">\n",
5622
       "      <th></th>\n",
5623
       "      <th>not weighted</th>\n",
5624
       "      <th>weighted</th>\n",
5625
       "    </tr>\n",
5626
       "  </thead>\n",
5627
       "  <tbody>\n",
5628
       "    <tr>\n",
5629
       "      <td>valid_loss</td>\n",
5630
       "      <td>0.057518</td>\n",
5631
       "      <td>0.057518</td>\n",
5632
       "    </tr>\n",
5633
       "    <tr>\n",
5634
       "      <td>valid_w_loss</td>\n",
5635
       "      <td>0.054293</td>\n",
5636
       "      <td>0.054293</td>\n",
5637
       "    </tr>\n",
5638
       "    <tr>\n",
5639
       "      <td>valid_loss_ens</td>\n",
5640
       "      <td>0.057500</td>\n",
5641
       "      <td>0.057498</td>\n",
5642
       "    </tr>\n",
5643
       "    <tr>\n",
5644
       "      <td>valid_w_loss_ens</td>\n",
5645
       "      <td>0.054274</td>\n",
5646
       "      <td>0.054269</td>\n",
5647
       "    </tr>\n",
5648
       "  </tbody>\n",
5649
       "</table>\n",
5650
       "</div>"
5651
      ],
5652
      "text/plain": [
5653
       "                  not weighted  weighted\n",
5654
       "valid_loss            0.057518  0.057518\n",
5655
       "valid_w_loss          0.054293  0.054293\n",
5656
       "valid_loss_ens        0.057500  0.057498\n",
5657
       "valid_w_loss_ens      0.054274  0.054269"
5658
      ]
5659
     },
5660
     "execution_count": 46,
5661
     "metadata": {},
5662
     "output_type": "execute_result"
5663
    }
5664
   ],
5665
   "source": [
5666
    "# STAGE2 weighted models\n",
5667
    "tt = pd.concat([\n",
5668
    "stats.loc[stats.weighted == False].groupby('target')[['valid_loss','valid_w_loss',\n",
5669
    "                                                      'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5670
    "    .apply(lambda x: x*class_weights).mean(),\n",
5671
    "stats.loc[stats.weighted == True].groupby('target')[['valid_loss','valid_w_loss',\n",
5672
    "                                                     'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5673
    "    .apply(lambda x: x*class_weights).mean()\n",
5674
    "],axis=1)\n",
5675
    "tt.columns = ['not weighted','weighted']\n",
5676
    "tt"
5677
   ]
5678
  },
5679
  {
5680
   "cell_type": "code",
5681
   "execution_count": 52,
5682
   "metadata": {},
5683
   "outputs": [
5684
    {
5685
     "data": {
5686
      "text/html": [
5687
       "<div>\n",
5688
       "<style scoped>\n",
5689
       "    .dataframe tbody tr th:only-of-type {\n",
5690
       "        vertical-align: middle;\n",
5691
       "    }\n",
5692
       "\n",
5693
       "    .dataframe tbody tr th {\n",
5694
       "        vertical-align: top;\n",
5695
       "    }\n",
5696
       "\n",
5697
       "    .dataframe thead th {\n",
5698
       "        text-align: right;\n",
5699
       "    }\n",
5700
       "</style>\n",
5701
       "<table border=\"1\" class=\"dataframe\">\n",
5702
       "  <thead>\n",
5703
       "    <tr style=\"text-align: right;\">\n",
5704
       "      <th></th>\n",
5705
       "      <th>not weighted</th>\n",
5706
       "      <th>weighted</th>\n",
5707
       "    </tr>\n",
5708
       "  </thead>\n",
5709
       "  <tbody>\n",
5710
       "    <tr>\n",
5711
       "      <td>valid_loss</td>\n",
5712
       "      <td>0.057496</td>\n",
5713
       "      <td>0.057496</td>\n",
5714
       "    </tr>\n",
5715
       "    <tr>\n",
5716
       "      <td>valid_w_loss</td>\n",
5717
       "      <td>0.054326</td>\n",
5718
       "      <td>0.054326</td>\n",
5719
       "    </tr>\n",
5720
       "    <tr>\n",
5721
       "      <td>valid_loss_ens</td>\n",
5722
       "      <td>0.057477</td>\n",
5723
       "      <td>0.057475</td>\n",
5724
       "    </tr>\n",
5725
       "    <tr>\n",
5726
       "      <td>valid_w_loss_ens</td>\n",
5727
       "      <td>0.054303</td>\n",
5728
       "      <td>0.054299</td>\n",
5729
       "    </tr>\n",
5730
       "  </tbody>\n",
5731
       "</table>\n",
5732
       "</div>"
5733
      ],
5734
      "text/plain": [
5735
       "                  not weighted  weighted\n",
5736
       "valid_loss            0.057496  0.057496\n",
5737
       "valid_w_loss          0.054326  0.054326\n",
5738
       "valid_loss_ens        0.057477  0.057475\n",
5739
       "valid_w_loss_ens      0.054303  0.054299"
5740
      ]
5741
     },
5742
     "execution_count": 52,
5743
     "metadata": {},
5744
     "output_type": "execute_result"
5745
    }
5746
   ],
5747
   "source": [
5748
    "# STAGE2 non-wegihted models\n",
5749
    "tt = pd.concat([\n",
5750
    "stats.loc[stats.weighted == False].groupby('target')[['valid_loss','valid_w_loss',\n",
5751
    "                                                      'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5752
    "    .apply(lambda x: x*class_weights).mean(),\n",
5753
    "stats.loc[stats.weighted == True].groupby('target')[['valid_loss','valid_w_loss',\n",
5754
    "                                                     'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5755
    "    .apply(lambda x: x*class_weights).mean()\n",
5756
    "],axis=1)\n",
5757
    "tt.columns = ['not weighted','weighted']\n",
5758
    "tt"
5759
   ]
5760
  },
5761
  {
5762
   "cell_type": "code",
5763
   "execution_count": 41,
5764
   "metadata": {},
5765
   "outputs": [
5766
    {
5767
     "data": {
5768
      "text/html": [
5769
       "<div>\n",
5770
       "<style scoped>\n",
5771
       "    .dataframe tbody tr th:only-of-type {\n",
5772
       "        vertical-align: middle;\n",
5773
       "    }\n",
5774
       "\n",
5775
       "    .dataframe tbody tr th {\n",
5776
       "        vertical-align: top;\n",
5777
       "    }\n",
5778
       "\n",
5779
       "    .dataframe thead th {\n",
5780
       "        text-align: right;\n",
5781
       "    }\n",
5782
       "</style>\n",
5783
       "<table border=\"1\" class=\"dataframe\">\n",
5784
       "  <thead>\n",
5785
       "    <tr style=\"text-align: right;\">\n",
5786
       "      <th></th>\n",
5787
       "      <th>not weighted</th>\n",
5788
       "      <th>weighted</th>\n",
5789
       "    </tr>\n",
5790
       "  </thead>\n",
5791
       "  <tbody>\n",
5792
       "    <tr>\n",
5793
       "      <td>valid_loss</td>\n",
5794
       "      <td>0.057750</td>\n",
5795
       "      <td>0.057750</td>\n",
5796
       "    </tr>\n",
5797
       "    <tr>\n",
5798
       "      <td>valid_w_loss</td>\n",
5799
       "      <td>0.061831</td>\n",
5800
       "      <td>0.061831</td>\n",
5801
       "    </tr>\n",
5802
       "    <tr>\n",
5803
       "      <td>valid_loss_ens</td>\n",
5804
       "      <td>0.057658</td>\n",
5805
       "      <td>0.058210</td>\n",
5806
       "    </tr>\n",
5807
       "    <tr>\n",
5808
       "      <td>valid_w_loss_ens</td>\n",
5809
       "      <td>0.062176</td>\n",
5810
       "      <td>0.061514</td>\n",
5811
       "    </tr>\n",
5812
       "  </tbody>\n",
5813
       "</table>\n",
5814
       "</div>"
5815
      ],
5816
      "text/plain": [
5817
       "                  not weighted  weighted\n",
5818
       "valid_loss            0.057750  0.057750\n",
5819
       "valid_w_loss          0.061831  0.061831\n",
5820
       "valid_loss_ens        0.057658  0.058210\n",
5821
       "valid_w_loss_ens      0.062176  0.061514"
5822
      ]
5823
     },
5824
     "execution_count": 41,
5825
     "metadata": {},
5826
     "output_type": "execute_result"
5827
    }
5828
   ],
5829
   "source": [
5830
    "# wegihted models\n",
5831
    "tt = pd.concat([\n",
5832
    "stats.loc[stats.weighted == False].groupby('target')[['valid_loss','valid_w_loss',\n",
5833
    "                                                      'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5834
    "    .apply(lambda x: x*class_weights).mean(),\n",
5835
    "stats.loc[stats.weighted == True].groupby('target')[['valid_loss','valid_w_loss',\n",
5836
    "                                                     'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5837
    "    .apply(lambda x: x*class_weights).mean()\n",
5838
    "],axis=1)\n",
5839
    "tt.columns = ['not weighted','weighted']\n",
5840
    "tt"
5841
   ]
5842
  },
5843
  {
5844
   "cell_type": "code",
5845
   "execution_count": 51,
5846
   "metadata": {},
5847
   "outputs": [
5848
    {
5849
     "data": {
5850
      "text/html": [
5851
       "<div>\n",
5852
       "<style scoped>\n",
5853
       "    .dataframe tbody tr th:only-of-type {\n",
5854
       "        vertical-align: middle;\n",
5855
       "    }\n",
5856
       "\n",
5857
       "    .dataframe tbody tr th {\n",
5858
       "        vertical-align: top;\n",
5859
       "    }\n",
5860
       "\n",
5861
       "    .dataframe thead th {\n",
5862
       "        text-align: right;\n",
5863
       "    }\n",
5864
       "</style>\n",
5865
       "<table border=\"1\" class=\"dataframe\">\n",
5866
       "  <thead>\n",
5867
       "    <tr style=\"text-align: right;\">\n",
5868
       "      <th></th>\n",
5869
       "      <th>not weighted</th>\n",
5870
       "      <th>weighted</th>\n",
5871
       "    </tr>\n",
5872
       "  </thead>\n",
5873
       "  <tbody>\n",
5874
       "    <tr>\n",
5875
       "      <td>valid_loss</td>\n",
5876
       "      <td>0.057661</td>\n",
5877
       "      <td>0.057661</td>\n",
5878
       "    </tr>\n",
5879
       "    <tr>\n",
5880
       "      <td>valid_w_loss</td>\n",
5881
       "      <td>0.062715</td>\n",
5882
       "      <td>0.062715</td>\n",
5883
       "    </tr>\n",
5884
       "    <tr>\n",
5885
       "      <td>valid_loss_ens</td>\n",
5886
       "      <td>0.057638</td>\n",
5887
       "      <td>0.057799</td>\n",
5888
       "    </tr>\n",
5889
       "    <tr>\n",
5890
       "      <td>valid_w_loss_ens</td>\n",
5891
       "      <td>0.062705</td>\n",
5892
       "      <td>0.062648</td>\n",
5893
       "    </tr>\n",
5894
       "  </tbody>\n",
5895
       "</table>\n",
5896
       "</div>"
5897
      ],
5898
      "text/plain": [
5899
       "                  not weighted  weighted\n",
5900
       "valid_loss            0.057661  0.057661\n",
5901
       "valid_w_loss          0.062715  0.062715\n",
5902
       "valid_loss_ens        0.057638  0.057799\n",
5903
       "valid_w_loss_ens      0.062705  0.062648"
5904
      ]
5905
     },
5906
     "execution_count": 51,
5907
     "metadata": {},
5908
     "output_type": "execute_result"
5909
    }
5910
   ],
5911
   "source": [
5912
    "# non-weighted models\n",
5913
    "tt = pd.concat([\n",
5914
    "stats.loc[stats.weighted == False].groupby('target')[['valid_loss','valid_w_loss',\n",
5915
    "                                                      'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5916
    "    .apply(lambda x: x*class_weights).mean(),\n",
5917
    "stats.loc[stats.weighted == True].groupby('target')[['valid_loss','valid_w_loss',\n",
5918
    "                                                     'valid_loss_ens','valid_w_loss_ens']].mean()\\\n",
5919
    "    .apply(lambda x: x*class_weights).mean()\n",
5920
    "],axis=1)\n",
5921
    "tt.columns = ['not weighted','weighted']\n",
5922
    "tt"
5923
   ]
5924
  },
5925
  {
5926
   "cell_type": "code",
5927
   "execution_count": null,
5928
   "metadata": {},
5929
   "outputs": [],
5930
   "source": []
5931
  },
5932
  {
5933
   "cell_type": "code",
5934
   "execution_count": 43,
5935
   "metadata": {
5936
    "scrolled": false
5937
   },
5938
   "outputs": [
5939
    {
5940
     "name": "stdout",
5941
     "output_type": "stream",
5942
     "text": [
5943
      "0 [0.5599 0.4398] 0.9996370455376221\n",
5944
      "1 [0.2003 0.7864] 0.9866685528969734\n",
5945
      "2 [0.3142 0.6845] 0.99873854778024\n",
5946
      "3 [0.3394 0.6606] 0.9999990672969397\n",
5947
      "4 [0.2394 0.7571] 0.9964796234369401\n",
5948
      "5 [0.3639 0.6317] 0.9955373459864658\n",
5949
      "total [0.3362 0.66  ] 0.9961766971558637\n"
5950
     ]
5951
    }
5952
   ],
5953
   "source": [
5954
    "np.set_printoptions(precision=4)\n",
5955
    "res2_all = []\n",
5956
    "for target in range(6):\n",
5957
    "    res2 = np.zeros((3, 2))\n",
5958
    "    for fold in range(3):\n",
5959
    "        model = pickle.load(open(PATH_DISK/'ensemble'/'model.f{}.t{}.v{}'\n",
5960
    "                                 .format(fold,target,VERSION),'rb'))\n",
5961
    "        res2[fold] = model.x\n",
5962
    "        #print(fold,target,model.x)\n",
5963
    "    print(target, res2.mean(0), res2.mean(0).sum())\n",
5964
    "    res2_all.append(res2)\n",
5965
    "print('total', np.stack(res2_all).mean((0,1)), np.stack(res2_all).mean((0,1)).sum())"
5966
   ]
5967
  },
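  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The fold-averaged weights above give one pair of blend coefficients per target. A hypothetical sketch of how such weights could be applied (the actual blending is handled elsewhere in the pipeline; `pA` and `pB` are placeholder names for two prediction arrays of shape `(n_samples, 6)`):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# fold-averaged weights per target, shape (6, 2)\n",
    "w_avg = np.stack(res2_all).mean(1)\n",
    "\n",
    "def apply_blend(pA, pB, w=w_avg):\n",
    "    # pA, pB: hypothetical prediction arrays of shape (n_samples, 6)\n",
    "    out = np.zeros_like(pA)\n",
    "    for t in range(6):\n",
    "        out[:, t] = w[t, 0]*pA[:, t] + w[t, 1]*pB[:, t]\n",
    "    return out"
   ]
  },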
5968
  {
5969
   "cell_type": "raw",
5970
   "metadata": {},
5971
   "source": [
5972
    "# STAGE2 weighted\n",
5973
    "0 [0.5514 0.4486] 0.999988938547882\n",
5974
    "1 [0.1934 0.7932] 0.9866673384339762\n",
5975
    "2 [0.3245 0.6748] 0.99933085408868\n",
5976
    "3 [0.2924 0.7076] 0.999999807185227\n",
5977
    "4 [0.2338 0.7643] 0.9981083416558326\n",
5978
    "5 [0.347  0.6474] 0.9944152840034981\n",
5979
    "total [0.3237 0.6727] 0.9964184273191827\n",
5980
    "\n",
5981
    "\n",
5982
    "# STAGE2 non-weighted\n",
5983
    "0 [0.5599 0.4398] 0.9996370455376221\n",
5984
    "1 [0.2003 0.7864] 0.9866685528969734\n",
5985
    "2 [0.3142 0.6845] 0.99873854778024\n",
5986
    "3 [0.3394 0.6606] 0.9999990672969397\n",
5987
    "4 [0.2394 0.7571] 0.9964796234369401\n",
5988
    "5 [0.3639 0.6317] 0.9955373459864658\n",
5989
    "total [0.3362 0.66  ] 0.9961766971558637\n",
5990
    "\n",
5991
    "# weighted + focal both\n",
5992
    "0 [9.8936e-01 4.6289e-06] 0.9893636389192056\n",
5993
    "1 [0.1052 0.8948] 0.9999984169006635\n",
5994
    "2 [0.4988 0.49  ] 0.9887665083664696\n",
5995
    "3 [0.3443 0.6427] 0.9869733391806492\n",
5996
    "4 [0.4923 0.4959] 0.9882284866416893\n",
5997
    "5 [0.7736 0.217 ] 0.9906108307629572\n",
5998
    "total [0.5339 0.4567] 0.9906568701286057\n",
5999
    "\n",
6000
    "# weighted + focal\n",
6001
    "0 [0.9758 0.0133] 0.9890208236733224\n",
6002
    "1 [0.0755 0.9245] 0.99999750757956\n",
6003
    "2 [0.5121 0.4765] 0.9885906315242545\n",
6004
    "3 [0.3465 0.6394] 0.9858337972624043\n",
6005
    "4 [0.4733 0.5144] 0.987734118671811\n",
6006
    "5 [0.6697 0.3208] 0.9904312808036815\n",
6007
    "total [0.5088 0.4815] 0.990268026585839\n",
6008
    "\n",
6009
    "# weighted\n",
6010
    "0 [0.9856 0.0034] 0.9890322827488863\n",
6011
    "1 [0.1394 0.8606] 0.9999977090921796\n",
6012
    "2 [0.5308 0.4582] 0.9890278807370222\n",
6013
    "3 [0.3542 0.6317] 0.9858961392052805\n",
6014
    "4 [0.4851 0.5028] 0.9879196279125524\n",
6015
    "5 [0.7677 0.2223] 0.9900299923215763\n",
6016
    "total [0.5438 0.4465] 0.9903172720029163\n",
6017
    "\n",
6018
    "# non-weighted\n",
6019
    "0 [0.3504 0.6483] 0.9987279422120475\n",
6020
    "1 [0.16   0.8267] 0.9866673030259768\n",
6021
    "2 [0.2285 0.7706] 0.9990219638127198\n",
6022
    "3 [0.2355 0.7636] 0.9991180197159109\n",
6023
    "4 [0.1587 0.8378] 0.9965445852859403\n",
6024
    "5 [0.2654 0.7281] 0.9934664823990461\n",
6025
    "total [0.2331 0.7625] 0.995591049408607"
6026
   ]
6027
  },
6028
  {
6029
   "cell_type": "code",
6030
   "execution_count": null,
6031
   "metadata": {},
6032
   "outputs": [],
6033
   "source": []
6034
  },
6035
  {
6036
   "cell_type": "code",
6037
   "execution_count": 44,
6038
   "metadata": {},
6039
   "outputs": [],
6040
   "source": [
6041
    "preds3 = np.stack([pickle.load(open(PATH_DISK/'preds_d{}_v{}'.format(ds, VERSION),'rb')) for ds in my_datasets3])\n",
6042
    "preds5 = np.stack([pickle.load(open(PATH_DISK/'preds_d{}_v{}'.format(ds, VERSION),'rb')) for ds in my_datasets5])"
6043
   ]
6044
  },
6045
  {
6046
   "cell_type": "code",
6047
   "execution_count": 45,
6048
   "metadata": {},
6049
   "outputs": [],
6050
   "source": [
6051
    "preds = np.concatenate([preds3.mean((1,2)), preds5.mean((1,2))],axis=0)"
6052
   ]
6053
  },
6054
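  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`preds3` and `preds5` are stacked per-dataset prediction arrays, and `.mean((1,2))` collapses their second and third axes (per-fold and per-pass predictions, under the layout assumed to be written by Code.ipynb) so that a single averaged prediction per model remains before concatenation. A toy version of the same reduction:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "# dummy array with an assumed (models, folds, passes, images, targets) layout\n",
    "dummy = np.random.rand(2, 3, 4, 5, 6)\n",
    "\n",
    "collapsed = dummy.mean((1, 2))  # average over folds and passes\n",
    "print(collapsed.shape)          # (2, 5, 6): one averaged prediction set per model"
   ]
  },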
  {
6055
   "cell_type": "code",
6056
   "execution_count": 50,
6057
   "metadata": {},
6058
   "outputs": [],
6059
   "source": [
6060
    "#del test_md['yuval_idx']"
6061
   ]
6062
  },
6063
  {
6064
   "cell_type": "code",
6065
   "execution_count": 46,
6066
   "metadata": {},
6067
   "outputs": [],
6068
   "source": [
6069
    "yuval_test = pickle.load(open(PATH_DISK/'yuval/ensemble_test_image_ids_stage2.pkl','rb'))\n",
6070
    "assert len(yuval_test) == len(test_md)\n",
6071
    "\n",
6072
    "df = pd.DataFrame(np.arange(len(yuval_test)), columns=['yuval_idx'])\n",
6073
    "df.index = yuval_test\n",
6074
    "test_md = test_md.join(df, on = 'img_id')"
6075
   ]
6076
  },
6077
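  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The join above aligns the external predictions with this notebook's test ordering: `yuval_test` lists image ids in the order used by the external arrays, so mapping each id to its position and joining on `img_id` gives every test row a `yuval_idx` into those arrays. A self-contained sketch of the same alignment pattern (the names here are illustrative only):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "\n",
    "external_ids = ['img_c', 'img_a', 'img_b']  # order used by the external predictions\n",
    "test = pd.DataFrame({'img_id': ['img_a', 'img_b', 'img_c']})\n",
    "\n",
    "idx = pd.DataFrame(np.arange(len(external_ids)), columns=['ext_idx'])\n",
    "idx.index = external_ids\n",
    "test = test.join(idx, on='img_id')\n",
    "\n",
    "external_preds = np.array([0.9, 0.1, 0.5])  # one prediction per external id\n",
    "print(external_preds[test.ext_idx])         # reordered to the test ordering: [0.1 0.5 0.9]"
   ]
  },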
  {
6078
   "cell_type": "code",
6079
   "execution_count": 47,
6080
   "metadata": {},
6081
   "outputs": [],
6082
   "source": [
6083
    "names_y3 = [\n",
6084
    "    #'model_Densenet201_3_version_classifier_splits_fullhead_resmodel_pool2_3_type_OOF_pred_split_{}.pkl',\n",
6085
    "    #'model_Densenet161_3_version_classifier_splits_fullhead_resmodel_pool2_3_type_OOF_pred_split_{}.pkl',\n",
6086
    "'model_Densenet169_3_version_classifier_splits_fullhead_resmodel_pool2_stage2_3_type_test_pred_ensemble_split_{}.pkl',\n",
6087
    "'model_se_resnext101_32x4d_version_classifier_splits_fullhead_resmodel_pool2_stage2_3_type_test_pred_ensemble_split_{}.pkl',\n",
6088
    "'model_se_resnet101_version_classifier_splits_fullhead_resmodel_pool2_stage2_3_type_test_pred_ensemble_split_{}.pkl'\n",
6089
    "]"
6090
   ]
6091
  },
6092
  {
6093
   "cell_type": "code",
6094
   "execution_count": 48,
6095
   "metadata": {},
6096
   "outputs": [],
6097
   "source": [
6098
    "names_y5 = [\n",
6099
    "'model_se_resnext101_32x4d_version_new_splits_fullhead_resmodel_pool2_stage2_3_type_test_pred_ensemble_split_{}.pkl',\n",
6100
    "'model_se_resnet101_version_new_splits_fullhead_resmodel_pool2_stage2_3_type_test_pred_ensemble_split_{}.pkl',\n",
6101
    "'model_se_resnet101_version_new_splits_focal_fullhead_resmodel_pool2_stage2_3_type_test_pred_ensemble_split_{}.pkl',\n",
6102
    "]"
6103
   ]
6104
  },
6105
  {
6106
   "cell_type": "code",
6107
   "execution_count": 49,
6108
   "metadata": {},
6109
   "outputs": [],
6110
   "source": [
6111
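    "# sigmoid converts the stored per-fold outputs (assumed to be raw logits) to probabilities\n",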
    "preds_y3 = np.stack([torch.sigmoid(torch.stack([torch.stack(pickle.load(\n",
6112
    "    open(PATH_DISK/'yuval/OOF_stage2'/name.format(fold),'rb'))) for fold in range(3)])).numpy() for name in names_y3])"
6113
   ]
6114
  },
6115
  {
6116
   "cell_type": "code",
6117
   "execution_count": 50,
6118
   "metadata": {},
6119
   "outputs": [],
6120
   "source": [
6121
    "preds_y5 = np.stack([torch.sigmoid(torch.stack([torch.stack(pickle.load(\n",
6122
    "    open(PATH_DISK/'yuval/OOF_stage2'/name.format(fold),'rb'))) for fold in range(5)])).numpy() for name in names_y5])"
6123
   ]
6124
  },
6125
  {
6126
   "cell_type": "code",
6127
   "execution_count": 51,
6128
   "metadata": {},
6129
   "outputs": [],
6130
   "source": [
6131
    "preds_y = np.concatenate([preds_y3.mean((1,2)), preds_y5.mean((1,2))],axis=0)"
6132
   ]
6133
  },
6134
  {
6135
   "cell_type": "code",
6136
   "execution_count": 52,
6137
   "metadata": {},
6138
   "outputs": [],
6139
   "source": [
6140
    "preds_y = preds_y[:,test_md.yuval_idx]\n",
6141
    "preds_y = preds_y[:,:,np.array([5,0,1,2,3,4])]"
6142
   ]
6143
  },
6144
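  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Two reindexing steps happen above: `test_md.yuval_idx` puts the external predictions into this notebook's row order, and the `[5,0,1,2,3,4]` permutation moves the class columns into the order used here, with `any` first. The permutation assumes the external arrays keep `any` in the last column; a toy illustration:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "# one row in the assumed external order: five subtypes followed by 'any'\n",
    "row = np.array([[0.01, 0.02, 0.03, 0.04, 0.05, 0.60]])\n",
    "\n",
    "reordered = row[:, np.array([5, 0, 1, 2, 3, 4])]\n",
    "print(reordered)  # [[0.6  0.01 0.02 0.03 0.04 0.05]] -- 'any' moved to the front"
   ]
  },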
  {
6145
   "cell_type": "code",
6146
   "execution_count": 53,
6147
   "metadata": {},
6148
   "outputs": [],
6149
   "source": [
6150
    "preds = np.concatenate([preds, preds_y], axis=0)"
6151
   ]
6152
  },
6153
  {
6154
   "cell_type": "code",
6155
   "execution_count": 54,
6156
   "metadata": {},
6157
   "outputs": [],
6158
   "source": [
6159
    "preds = preds[ds_mask]"
6160
   ]
6161
  },
6162
  {
6163
   "cell_type": "code",
6164
   "execution_count": 55,
6165
   "metadata": {
6166
    "scrolled": false
6167
   },
6168
   "outputs": [
6169
    {
6170
     "data": {
6171
      "text/plain": [
6172
       "(10, 121232, 6)"
6173
      ]
6174
     },
6175
     "execution_count": 55,
6176
     "metadata": {},
6177
     "output_type": "execute_result"
6178
    }
6179
   ],
6180
   "source": [
6181
    "preds.shape"
6182
   ]
6183
  },
6184
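  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A reading of the shape above: after `ds_mask` is applied, 10 model rows remain, which matches `my_len = 4` rows from this notebook's own models (see the printout further down) plus the 6 external models listed in `names_y3` and `names_y5`; the other two axes are the 121,232 test images and the 6 targets."
   ]
  },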
  {
6185
   "cell_type": "code",
6186
   "execution_count": 56,
6187
   "metadata": {},
6188
   "outputs": [
6189
    {
6190
     "name": "stdout",
6191
     "output_type": "stream",
6192
     "text": [
6193
      "any too low inconsistencies\n",
6194
      "1 class: 0.000837237693018345\n",
6195
      "2 class: 0.016652369011482118\n",
6196
      "3 class: 0.008991850336544807\n",
6197
      "4 class: 0.016291903127887027\n",
6198
      "5 class: 0.05360135937706216\n",
6199
      "total 0.08837930579384981\n",
6200
      "any too high inconsistencies\n",
6201
      "total 0.21430810347103074\n"
6202
     ]
6203
    }
6204
   ],
6205
   "source": [
6206
    "preds = predBounding(preds)"
6207
   ]
6208
  },
6209
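  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`predBounding` is defined in Code.ipynb; the log above indicates it reports (and adjusts) images whose `any` probability is inconsistent with the five subtype probabilities. As a rough sketch of that kind of consistency clipping, and not the notebook's actual implementation, `any` could be bounded between the largest subtype probability and the capped subtype sum:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "def bound_any(p):\n",
    "    # p: (..., 6) array of probabilities with 'any' assumed in column 0\n",
    "    p = p.copy()\n",
    "    sub_max = p[..., 1:].max(-1)\n",
    "    sub_sum = p[..., 1:].sum(-1)\n",
    "    p[..., 0] = np.maximum(p[..., 0], sub_max)                   # 'any' too low\n",
    "    p[..., 0] = np.minimum(p[..., 0], np.minimum(sub_sum, 1.0))  # 'any' too high\n",
    "    return p\n",
    "\n",
    "print(bound_any(np.array([[0.1, 0.05, 0.4, 0.0, 0.0, 0.0]])))   # 'any' raised to 0.4"
   ]
  },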
  {
6210
   "cell_type": "code",
6211
   "execution_count": 70,
6212
   "metadata": {},
6213
   "outputs": [],
6214
   "source": [
6215
    "#predictions = preds.mean((0,1))"
6216
   ]
6217
  },
6218
  {
6219
   "cell_type": "code",
6220
   "execution_count": 57,
6221
   "metadata": {},
6222
   "outputs": [
6223
    {
6224
     "name": "stdout",
6225
     "output_type": "stream",
6226
     "text": [
6227
      "version 33 my_len 4\n",
6228
      "total running time 0.07065796852111816\n"
6229
     ]
6230
    }
6231
   ],
6232
   "source": [
6233
    "stg = time.time()\n",
6234
    "\n",
6235
    "test_preds_trgt = []\n",
6236
    "print('version', VERSION, 'my_len', my_len)\n",
6237
    "for target in range(6):\n",
6238
    "    \n",
6239
    "    test_preds_fold = []\n",
6240
    "    for fold in range(3):\n",
6241
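    "        # X stacks two group means: this notebook's models (first my_len rows) and the external models\n",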
    "        X = np.stack([preds[:my_len,:,target].mean(0), \n",
6242
    "                      preds[my_len:,:,target].mean(0)], axis=0)\n",
6243
    "        \n",
6244
    "        model = pickle.load(open(PATH_DISK/'ensemble'/'model.f{}.t{}.v{}'.format(fold,target,VERSION),'rb'))\n",
6245
    "        test_preds_fold.append((X*np.expand_dims(model.x, axis=1)).sum(0))\n",
6246
    "    \n",
6247
    "    test_preds_trgt.append(np.stack(test_preds_fold).mean(0))\n",
6248
    "\n",
6249
    "predictions = np.stack(test_preds_trgt,axis=1)\n",
6250
    "\n",
6251
    "print('total running time', time.time() - stg)"
6252
   ]
6253
  },
6254
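  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For each target, the cell above averages the two model groups separately, blends the two group means with the per-fold weights `model.x`, and finally averages the three fold-level blends. The same arithmetic with made-up numbers for a single image:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "group_means = np.array([0.30, 0.10])            # own-model mean, external-model mean\n",
    "fold_weights = np.array([[0.55, 0.44],          # hypothetical model.x for folds 0..2\n",
    "                         [0.20, 0.79],\n",
    "                         [0.34, 0.66]])\n",
    "\n",
    "per_fold = (fold_weights * group_means).sum(1)  # one blended value per fold\n",
    "print(per_fold.mean())                          # final prediction, roughly 0.172"
   ]
  },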
  {
6255
   "cell_type": "code",
6256
   "execution_count": 58,
6257
   "metadata": {},
6258
   "outputs": [
6259
    {
6260
     "data": {
6261
      "text/plain": [
6262
       "array([[1.    , 0.9956, 0.9954, 0.9953, 0.9958, 0.9937, 0.9942, 0.9947,\n",
6263
       "        0.9946, 0.9943],\n",
6264
       "       [0.9956, 1.    , 0.9978, 0.9977, 0.9951, 0.997 , 0.9964, 0.9986,\n",
6265
       "        0.997 , 0.9967],\n",
6266
       "       [0.9954, 0.9978, 1.    , 0.9992, 0.9951, 0.9956, 0.9972, 0.9966,\n",
6267
       "        0.9984, 0.9978],\n",
6268
       "       [0.9953, 0.9977, 0.9992, 1.    , 0.995 , 0.9957, 0.9973, 0.9967,\n",
6269
       "        0.9982, 0.9985],\n",
6270
       "       [0.9958, 0.9951, 0.9951, 0.995 , 1.    , 0.9944, 0.9949, 0.9954,\n",
6271
       "        0.9955, 0.9951],\n",
6272
       "       [0.9937, 0.997 , 0.9956, 0.9957, 0.9944, 1.    , 0.9965, 0.998 ,\n",
6273
       "        0.9968, 0.9967],\n",
6274
       "       [0.9942, 0.9964, 0.9972, 0.9973, 0.9949, 0.9965, 1.    , 0.9971,\n",
6275
       "        0.9983, 0.9982],\n",
6276
       "       [0.9947, 0.9986, 0.9966, 0.9967, 0.9954, 0.998 , 0.9971, 1.    ,\n",
6277
       "        0.9979, 0.9977],\n",
6278
       "       [0.9946, 0.997 , 0.9984, 0.9982, 0.9955, 0.9968, 0.9983, 0.9979,\n",
6279
       "        1.    , 0.9993],\n",
6280
       "       [0.9943, 0.9967, 0.9978, 0.9985, 0.9951, 0.9967, 0.9982, 0.9977,\n",
6281
       "        0.9993, 1.    ]])"
6282
      ]
6283
     },
6284
     "execution_count": 58,
6285
     "metadata": {},
6286
     "output_type": "execute_result"
6287
    }
6288
   ],
6289
   "source": [
6290
    "np.corrcoef(preds[:,:,0])"
6291
   ]
6292
  },
6293
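  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The correlation matrix above shows the ten prediction sets agree closely on the `any` target (every pairwise correlation is above 0.99), suggesting the individual models carry largely overlapping signal and that a two-weight blend over group means gives up little relative to per-model weights."
   ]
  },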
  {
6294
   "cell_type": "code",
6295
   "execution_count": 59,
6296
   "metadata": {
6297
    "scrolled": true
6298
   },
6299
   "outputs": [
6300
    {
6301
     "data": {
6302
      "text/plain": [
6303
       "(121232, 6)"
6304
      ]
6305
     },
6306
     "execution_count": 59,
6307
     "metadata": {},
6308
     "output_type": "execute_result"
6309
    }
6310
   ],
6311
   "source": [
6312
    "predictions.shape"
6313
   ]
6314
  },
6315
  {
6316
   "cell_type": "markdown",
6317
   "metadata": {},
6318
   "source": [
6319
    "# Submitting"
6320
   ]
6321
  },
6322
  {
6323
   "cell_type": "code",
6324
   "execution_count": 60,
6325
   "metadata": {},
6326
   "outputs": [],
6327
   "source": [
6328
    "id_column = np.array([a + '_' + b for a in test_md.SOPInstanceUID for b in all_ich])\n",
6329
    "sub = pd.DataFrame({'ID': id_column, 'Label': predictions.reshape(-1)})\n",
6330
    "sub.to_csv(PATH/'sub.csv', index=False)"
6331
   ]
6332
  },
6333
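  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Each submission row pairs one image with one of the six classes: the ID column concatenates the SOPInstanceUID with the class name (taken from `all_ich`, which is assumed to match the order of the prediction columns), and `Label` is the corresponding entry of the flattened `predictions`. A few cheap checks of the kind worth running before uploading:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# lightweight sanity checks on the submission frame built above\n",
    "assert len(sub) == len(test_md) * 6\n",
    "assert sub.ID.str.count('_').min() >= 2  # e.g. 'ID_68c2b8b03_epidural'\n",
    "assert sub.Label.between(0, 1).all()\n",
    "sub.head(6)"
   ]
  },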
  {
6334
   "cell_type": "markdown",
6335
   "metadata": {},
6336
   "source": [
6337
    "## Sanity checks"
6338
   ]
6339
  },
6340
  {
6341
   "cell_type": "code",
6342
   "execution_count": 61,
6343
   "metadata": {},
6344
   "outputs": [],
6345
   "source": [
6346
    "test_md['pred_any'] = predictions[:,0]"
6347
   ]
6348
  },
6349
  {
6350
   "cell_type": "code",
6351
   "execution_count": 62,
6352
   "metadata": {},
6353
   "outputs": [
6354
    {
6355
     "data": {
6356
      "text/html": [
6357
       "<div>\n",
6358
       "<style scoped>\n",
6359
       "    .dataframe tbody tr th:only-of-type {\n",
6360
       "        vertical-align: middle;\n",
6361
       "    }\n",
6362
       "\n",
6363
       "    .dataframe tbody tr th {\n",
6364
       "        vertical-align: top;\n",
6365
       "    }\n",
6366
       "\n",
6367
       "    .dataframe thead th {\n",
6368
       "        text-align: right;\n",
6369
       "    }\n",
6370
       "</style>\n",
6371
       "<table border=\"1\" class=\"dataframe\">\n",
6372
       "  <thead>\n",
6373
       "    <tr style=\"text-align: right;\">\n",
6374
       "      <th></th>\n",
6375
       "      <th>img_id</th>\n",
6376
       "      <th>SOPInstanceUID</th>\n",
6377
       "      <th>Modality</th>\n",
6378
       "      <th>PatientID</th>\n",
6379
       "      <th>StudyInstanceUID</th>\n",
6380
       "      <th>SeriesInstanceUID</th>\n",
6381
       "      <th>StudyID</th>\n",
6382
       "      <th>ImagePositionPatient</th>\n",
6383
       "      <th>ImageOrientationPatient</th>\n",
6384
       "      <th>SamplesPerPixel</th>\n",
6385
       "      <th>PhotometricInterpretation</th>\n",
6386
       "      <th>Rows</th>\n",
6387
       "      <th>Columns</th>\n",
6388
       "      <th>PixelSpacing</th>\n",
6389
       "      <th>BitsAllocated</th>\n",
6390
       "      <th>BitsStored</th>\n",
6391
       "      <th>HighBit</th>\n",
6392
       "      <th>PixelRepresentation</th>\n",
6393
       "      <th>WindowCenter</th>\n",
6394
       "      <th>WindowWidth</th>\n",
6395
       "      <th>RescaleIntercept</th>\n",
6396
       "      <th>RescaleSlope</th>\n",
6397
       "      <th>PxlMin</th>\n",
6398
       "      <th>PxlMax</th>\n",
6399
       "      <th>PxlStd</th>\n",
6400
       "      <th>PxlMean</th>\n",
6401
       "      <th>test</th>\n",
6402
       "      <th>test2</th>\n",
6403
       "      <th>ImageOrientationPatient_0</th>\n",
6404
       "      <th>ImageOrientationPatient_1</th>\n",
6405
       "      <th>ImageOrientationPatient_2</th>\n",
6406
       "      <th>ImageOrientationPatient_3</th>\n",
6407
       "      <th>ImageOrientationPatient_4</th>\n",
6408
       "      <th>ImageOrientationPatient_5</th>\n",
6409
       "      <th>ImagePositionPatient_0</th>\n",
6410
       "      <th>ImagePositionPatient_1</th>\n",
6411
       "      <th>ImagePositionPatient_2</th>\n",
6412
       "      <th>PixelSpacing_0</th>\n",
6413
       "      <th>PixelSpacing_1</th>\n",
6414
       "      <th>WindowCenter_0</th>\n",
6415
       "      <th>WindowCenter_1</th>\n",
6416
       "      <th>WindowCenter_1_NAN</th>\n",
6417
       "      <th>WindowWidth_0</th>\n",
6418
       "      <th>WindowWidth_1</th>\n",
6419
       "      <th>WindowWidth_0_le</th>\n",
6420
       "      <th>WindowWidth_1_le</th>\n",
6421
       "      <th>WindowCenter_1_le</th>\n",
6422
       "      <th>BitType_le</th>\n",
6423
       "      <th>ImageOrientationPatient_4_f</th>\n",
6424
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
6425
       "      <th>...</th>\n",
6426
       "      <th>ImageOrientationPatient_5_f</th>\n",
6427
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
6428
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
6429
       "      <th>ImagePositionPatient_0_f</th>\n",
6430
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
6431
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
6432
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
6433
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
6434
       "      <th>ImagePositionPatient_1_f</th>\n",
6435
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
6436
       "      <th>ImagePositionPatient_2_f</th>\n",
6437
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
6438
       "      <th>PixelSpacing_1_f</th>\n",
6439
       "      <th>PixelSpacing_1_enc_0</th>\n",
6440
       "      <th>PixelSpacing_1_enc_1</th>\n",
6441
       "      <th>WindowCenter_0_le</th>\n",
6442
       "      <th>pos_max</th>\n",
6443
       "      <th>pos_min</th>\n",
6444
       "      <th>pos_size</th>\n",
6445
       "      <th>pos_idx1</th>\n",
6446
       "      <th>pos_idx</th>\n",
6447
       "      <th>pos_idx2</th>\n",
6448
       "      <th>pos_inc1</th>\n",
6449
       "      <th>pos_inc2</th>\n",
6450
       "      <th>pos_inc1_grp_le</th>\n",
6451
       "      <th>pos_inc2_grp_le</th>\n",
6452
       "      <th>pos_inc1_r1</th>\n",
6453
       "      <th>pos_inc1_r0001</th>\n",
6454
       "      <th>pos_inc1_enc_0</th>\n",
6455
       "      <th>pos_inc2_enc_0</th>\n",
6456
       "      <th>pos_inc1_enc_1</th>\n",
6457
       "      <th>pos_inc2_enc_1</th>\n",
6458
       "      <th>pos_size_le</th>\n",
6459
       "      <th>pos_range</th>\n",
6460
       "      <th>pos_rel</th>\n",
6461
       "      <th>pos_zeros</th>\n",
6462
       "      <th>pos_inc_rng</th>\n",
6463
       "      <th>pos_zeros_le</th>\n",
6464
       "      <th>PxlMin_grp_le</th>\n",
6465
       "      <th>PxlMin_zero</th>\n",
6466
       "      <th>any</th>\n",
6467
       "      <th>epidural</th>\n",
6468
       "      <th>intraparenchymal</th>\n",
6469
       "      <th>intraventricular</th>\n",
6470
       "      <th>subarachnoid</th>\n",
6471
       "      <th>subdural</th>\n",
6472
       "      <th>any_series</th>\n",
6473
       "      <th>SeriesPP</th>\n",
6474
       "      <th>yuval_idx</th>\n",
6475
       "      <th>pred_any</th>\n",
6476
       "    </tr>\n",
6477
       "  </thead>\n",
6478
       "  <tbody>\n",
6479
       "    <tr>\n",
6480
       "      <td>12436</td>\n",
6481
       "      <td>68c2b8b03</td>\n",
6482
       "      <td>ID_68c2b8b03</td>\n",
6483
       "      <td>CT</td>\n",
6484
       "      <td>ID_db5b61c1</td>\n",
6485
       "      <td>ID_451abcb4a1</td>\n",
6486
       "      <td>ID_36778f2a4a</td>\n",
6487
       "      <td>NaN</td>\n",
6488
       "      <td>['-125.000', '-148.300', '135.250']</td>\n",
6489
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
6490
       "      <td>1</td>\n",
6491
       "      <td>MONOCHROME2</td>\n",
6492
       "      <td>512</td>\n",
6493
       "      <td>512</td>\n",
6494
       "      <td>['0.488281', '0.488281']</td>\n",
6495
       "      <td>16</td>\n",
6496
       "      <td>16</td>\n",
6497
       "      <td>15</td>\n",
6498
       "      <td>1</td>\n",
6499
       "      <td>40</td>\n",
6500
       "      <td>150</td>\n",
6501
       "      <td>-1024.0</td>\n",
6502
       "      <td>1.0</td>\n",
6503
       "      <td>-0.064000</td>\n",
6504
       "      <td>-1.548000</td>\n",
6505
       "      <td>-1.402099</td>\n",
6506
       "      <td>-1.620352</td>\n",
6507
       "      <td>False</td>\n",
6508
       "      <td>True</td>\n",
6509
       "      <td>1.0</td>\n",
6510
       "      <td>0.0</td>\n",
6511
       "      <td>0.0</td>\n",
6512
       "      <td>0.0</td>\n",
6513
       "      <td>1.0</td>\n",
6514
       "      <td>0.0</td>\n",
6515
       "      <td>-125.0</td>\n",
6516
       "      <td>-148.3</td>\n",
6517
       "      <td>135.250000</td>\n",
6518
       "      <td>0.488281</td>\n",
6519
       "      <td>0.488281</td>\n",
6520
       "      <td>40.0</td>\n",
6521
       "      <td>NaN</td>\n",
6522
       "      <td>True</td>\n",
6523
       "      <td>150.0</td>\n",
6524
       "      <td>NaN</td>\n",
6525
       "      <td>1</td>\n",
6526
       "      <td>1</td>\n",
6527
       "      <td>3</td>\n",
6528
       "      <td>0</td>\n",
6529
       "      <td>-1.333333</td>\n",
6530
       "      <td>1.0</td>\n",
6531
       "      <td>...</td>\n",
6532
       "      <td>-0.666667</td>\n",
6533
       "      <td>1.0</td>\n",
6534
       "      <td>False</td>\n",
6535
       "      <td>-0.720000</td>\n",
6536
       "      <td>1.0</td>\n",
6537
       "      <td>0.0</td>\n",
6538
       "      <td>1.0</td>\n",
6539
       "      <td>1.0</td>\n",
6540
       "      <td>-1.110667</td>\n",
6541
       "      <td>0.0</td>\n",
6542
       "      <td>-0.045487</td>\n",
6543
       "      <td>0.0</td>\n",
6544
       "      <td>-0.480</td>\n",
6545
       "      <td>1.0</td>\n",
6546
       "      <td>False</td>\n",
6547
       "      <td>2</td>\n",
6548
       "      <td>0.5810</td>\n",
6549
       "      <td>-0.1190</td>\n",
6550
       "      <td>0.5</td>\n",
6551
       "      <td>1.355932</td>\n",
6552
       "      <td>37</td>\n",
6553
       "      <td>-1.016949</td>\n",
6554
       "      <td>-1.5</td>\n",
6555
       "      <td>-1.5</td>\n",
6556
       "      <td>3</td>\n",
6557
       "      <td>3</td>\n",
6558
       "      <td>1.0</td>\n",
6559
       "      <td>1.0</td>\n",
6560
       "      <td>0.0</td>\n",
6561
       "      <td>0.0</td>\n",
6562
       "      <td>1.0</td>\n",
6563
       "      <td>1.0</td>\n",
6564
       "      <td>1</td>\n",
6565
       "      <td>0.266667</td>\n",
6566
       "      <td>1.771429</td>\n",
6567
       "      <td>1.6</td>\n",
6568
       "      <td>-0.600000</td>\n",
6569
       "      <td>1</td>\n",
6570
       "      <td>1</td>\n",
6571
       "      <td>False</td>\n",
6572
       "      <td>NaN</td>\n",
6573
       "      <td>NaN</td>\n",
6574
       "      <td>NaN</td>\n",
6575
       "      <td>NaN</td>\n",
6576
       "      <td>NaN</td>\n",
6577
       "      <td>NaN</td>\n",
6578
       "      <td>False</td>\n",
6579
       "      <td>-0.5</td>\n",
6580
       "      <td>80726</td>\n",
6581
       "      <td>0.000082</td>\n",
6582
       "    </tr>\n",
6583
       "    <tr>\n",
6584
       "      <td>82308</td>\n",
6585
       "      <td>7f95e978e</td>\n",
6586
       "      <td>ID_7f95e978e</td>\n",
6587
       "      <td>CT</td>\n",
6588
       "      <td>ID_ae6fa62a</td>\n",
6589
       "      <td>ID_3a1815c27a</td>\n",
6590
       "      <td>ID_64db061397</td>\n",
6591
       "      <td>NaN</td>\n",
6592
       "      <td>['-108.000', '-116.300', '114.000']</td>\n",
6593
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
6594
       "      <td>1</td>\n",
6595
       "      <td>MONOCHROME2</td>\n",
6596
       "      <td>512</td>\n",
6597
       "      <td>512</td>\n",
6598
       "      <td>['0.421875', '0.421875']</td>\n",
6599
       "      <td>16</td>\n",
6600
       "      <td>16</td>\n",
6601
       "      <td>15</td>\n",
6602
       "      <td>1</td>\n",
6603
       "      <td>40</td>\n",
6604
       "      <td>100</td>\n",
6605
       "      <td>-1024.0</td>\n",
6606
       "      <td>1.0</td>\n",
6607
       "      <td>-0.064000</td>\n",
6608
       "      <td>-1.572000</td>\n",
6609
       "      <td>-1.392679</td>\n",
6610
       "      <td>-1.599246</td>\n",
6611
       "      <td>False</td>\n",
6612
       "      <td>True</td>\n",
6613
       "      <td>1.0</td>\n",
6614
       "      <td>0.0</td>\n",
6615
       "      <td>0.0</td>\n",
6616
       "      <td>0.0</td>\n",
6617
       "      <td>1.0</td>\n",
6618
       "      <td>0.0</td>\n",
6619
       "      <td>-108.0</td>\n",
6620
       "      <td>-116.3</td>\n",
6621
       "      <td>114.000000</td>\n",
6622
       "      <td>0.421875</td>\n",
6623
       "      <td>0.421875</td>\n",
6624
       "      <td>40.0</td>\n",
6625
       "      <td>NaN</td>\n",
6626
       "      <td>True</td>\n",
6627
       "      <td>100.0</td>\n",
6628
       "      <td>NaN</td>\n",
6629
       "      <td>2</td>\n",
6630
       "      <td>1</td>\n",
6631
       "      <td>3</td>\n",
6632
       "      <td>0</td>\n",
6633
       "      <td>-1.333333</td>\n",
6634
       "      <td>1.0</td>\n",
6635
       "      <td>...</td>\n",
6636
       "      <td>-0.666667</td>\n",
6637
       "      <td>1.0</td>\n",
6638
       "      <td>False</td>\n",
6639
       "      <td>1.733333</td>\n",
6640
       "      <td>0.0</td>\n",
6641
       "      <td>0.0</td>\n",
6642
       "      <td>1.0</td>\n",
6643
       "      <td>1.0</td>\n",
6644
       "      <td>-0.684000</td>\n",
6645
       "      <td>0.0</td>\n",
6646
       "      <td>-0.075931</td>\n",
6647
       "      <td>1.0</td>\n",
6648
       "      <td>1.295</td>\n",
6649
       "      <td>0.0</td>\n",
6650
       "      <td>False</td>\n",
6651
       "      <td>2</td>\n",
6652
       "      <td>0.4760</td>\n",
6653
       "      <td>-0.1840</td>\n",
6654
       "      <td>-0.1</td>\n",
6655
       "      <td>1.016949</td>\n",
6656
       "      <td>32</td>\n",
6657
       "      <td>-1.084746</td>\n",
6658
       "      <td>-1.5</td>\n",
6659
       "      <td>-1.5</td>\n",
6660
       "      <td>3</td>\n",
6661
       "      <td>3</td>\n",
6662
       "      <td>1.0</td>\n",
6663
       "      <td>1.0</td>\n",
6664
       "      <td>0.0</td>\n",
6665
       "      <td>0.0</td>\n",
6666
       "      <td>1.0</td>\n",
6667
       "      <td>1.0</td>\n",
6668
       "      <td>4</td>\n",
6669
       "      <td>0.000000</td>\n",
6670
       "      <td>1.878788</td>\n",
6671
       "      <td>0.0</td>\n",
6672
       "      <td>-0.600000</td>\n",
6673
       "      <td>0</td>\n",
6674
       "      <td>1</td>\n",
6675
       "      <td>False</td>\n",
6676
       "      <td>NaN</td>\n",
6677
       "      <td>NaN</td>\n",
6678
       "      <td>NaN</td>\n",
6679
       "      <td>NaN</td>\n",
6680
       "      <td>NaN</td>\n",
6681
       "      <td>NaN</td>\n",
6682
       "      <td>False</td>\n",
6683
       "      <td>-0.5</td>\n",
6684
       "      <td>68171</td>\n",
6685
       "      <td>0.000082</td>\n",
6686
       "    </tr>\n",
6687
       "    <tr>\n",
6688
       "      <td>60800</td>\n",
6689
       "      <td>84735b84a</td>\n",
6690
       "      <td>ID_84735b84a</td>\n",
6691
       "      <td>CT</td>\n",
6692
       "      <td>ID_ddcad7d4</td>\n",
6693
       "      <td>ID_d7e80c40be</td>\n",
6694
       "      <td>ID_11c94b7b33</td>\n",
6695
       "      <td>NaN</td>\n",
6696
       "      <td>['-155', '23', '138.699997']</td>\n",
6697
       "      <td>['1', '0', '0', '0', '1', '0']</td>\n",
6698
       "      <td>1</td>\n",
6699
       "      <td>MONOCHROME2</td>\n",
6700
       "      <td>512</td>\n",
6701
       "      <td>512</td>\n",
6702
       "      <td>['0.48828125', '0.48828125']</td>\n",
6703
       "      <td>16</td>\n",
6704
       "      <td>12</td>\n",
6705
       "      <td>11</td>\n",
6706
       "      <td>0</td>\n",
6707
       "      <td>['00036', '00036']</td>\n",
6708
       "      <td>['00080', '00080']</td>\n",
6709
       "      <td>-1024.0</td>\n",
6710
       "      <td>1.0</td>\n",
6711
       "      <td>1.314667</td>\n",
6712
       "      <td>-1.914667</td>\n",
6713
       "      <td>-2.872322</td>\n",
6714
       "      <td>-0.693297</td>\n",
6715
       "      <td>False</td>\n",
6716
       "      <td>True</td>\n",
6717
       "      <td>1.0</td>\n",
6718
       "      <td>0.0</td>\n",
6719
       "      <td>0.0</td>\n",
6720
       "      <td>0.0</td>\n",
6721
       "      <td>1.0</td>\n",
6722
       "      <td>0.0</td>\n",
6723
       "      <td>-155.0</td>\n",
6724
       "      <td>23.0</td>\n",
6725
       "      <td>138.699997</td>\n",
6726
       "      <td>0.488281</td>\n",
6727
       "      <td>0.488281</td>\n",
6728
       "      <td>36.0</td>\n",
6729
       "      <td>36.0</td>\n",
6730
       "      <td>False</td>\n",
6731
       "      <td>80.0</td>\n",
6732
       "      <td>80.0</td>\n",
6733
       "      <td>0</td>\n",
6734
       "      <td>0</td>\n",
6735
       "      <td>0</td>\n",
6736
       "      <td>1</td>\n",
6737
       "      <td>-1.333333</td>\n",
6738
       "      <td>1.0</td>\n",
6739
       "      <td>...</td>\n",
6740
       "      <td>-0.666667</td>\n",
6741
       "      <td>1.0</td>\n",
6742
       "      <td>False</td>\n",
6743
       "      <td>0.480000</td>\n",
6744
       "      <td>0.0</td>\n",
6745
       "      <td>0.0</td>\n",
6746
       "      <td>1.0</td>\n",
6747
       "      <td>1.0</td>\n",
6748
       "      <td>1.173333</td>\n",
6749
       "      <td>1.0</td>\n",
6750
       "      <td>-0.040544</td>\n",
6751
       "      <td>0.0</td>\n",
6752
       "      <td>-0.480</td>\n",
6753
       "      <td>1.0</td>\n",
6754
       "      <td>False</td>\n",
6755
       "      <td>1</td>\n",
6756
       "      <td>0.5748</td>\n",
6757
       "      <td>-0.1252</td>\n",
6758
       "      <td>0.1</td>\n",
6759
       "      <td>1.152542</td>\n",
6760
       "      <td>34</td>\n",
6761
       "      <td>-1.084746</td>\n",
6762
       "      <td>-1.5</td>\n",
6763
       "      <td>-1.5</td>\n",
6764
       "      <td>3</td>\n",
6765
       "      <td>3</td>\n",
6766
       "      <td>1.0</td>\n",
6767
       "      <td>1.0</td>\n",
6768
       "      <td>0.0</td>\n",
6769
       "      <td>0.0</td>\n",
6770
       "      <td>1.0</td>\n",
6771
       "      <td>1.0</td>\n",
6772
       "      <td>2</td>\n",
6773
       "      <td>0.266666</td>\n",
6774
       "      <td>1.885714</td>\n",
6775
       "      <td>0.0</td>\n",
6776
       "      <td>-0.599994</td>\n",
6777
       "      <td>0</td>\n",
6778
       "      <td>2</td>\n",
6779
       "      <td>False</td>\n",
6780
       "      <td>NaN</td>\n",
6781
       "      <td>NaN</td>\n",
6782
       "      <td>NaN</td>\n",
6783
       "      <td>NaN</td>\n",
6784
       "      <td>NaN</td>\n",
6785
       "      <td>NaN</td>\n",
6786
       "      <td>False</td>\n",
6787
       "      <td>-0.5</td>\n",
6788
       "      <td>27981</td>\n",
6789
       "      <td>0.000083</td>\n",
6790
       "    </tr>\n",
6791
       "    <tr>\n",
6792
       "      <td>102453</td>\n",
6793
       "      <td>d6a5e0432</td>\n",
6794
       "      <td>ID_d6a5e0432</td>\n",
6795
       "      <td>CT</td>\n",
6796
       "      <td>ID_73887cfd</td>\n",
6797
       "      <td>ID_4cc0b3574d</td>\n",
6798
       "      <td>ID_bd88957d37</td>\n",
6799
       "      <td>NaN</td>\n",
6800
       "      <td>['-125.000', '-131.700', '105.000']</td>\n",
6801
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
6802
       "      <td>1</td>\n",
6803
       "      <td>MONOCHROME2</td>\n",
6804
       "      <td>512</td>\n",
6805
       "      <td>512</td>\n",
6806
       "      <td>['0.488281', '0.488281']</td>\n",
6807
       "      <td>16</td>\n",
6808
       "      <td>16</td>\n",
6809
       "      <td>15</td>\n",
6810
       "      <td>1</td>\n",
6811
       "      <td>40</td>\n",
6812
       "      <td>150</td>\n",
6813
       "      <td>-1024.0</td>\n",
6814
       "      <td>1.0</td>\n",
6815
       "      <td>-0.064000</td>\n",
6816
       "      <td>-1.558667</td>\n",
6817
       "      <td>-1.401150</td>\n",
6818
       "      <td>-1.616006</td>\n",
6819
       "      <td>False</td>\n",
6820
       "      <td>True</td>\n",
6821
       "      <td>1.0</td>\n",
6822
       "      <td>0.0</td>\n",
6823
       "      <td>0.0</td>\n",
6824
       "      <td>0.0</td>\n",
6825
       "      <td>1.0</td>\n",
6826
       "      <td>0.0</td>\n",
6827
       "      <td>-125.0</td>\n",
6828
       "      <td>-131.7</td>\n",
6829
       "      <td>105.000000</td>\n",
6830
       "      <td>0.488281</td>\n",
6831
       "      <td>0.488281</td>\n",
6832
       "      <td>40.0</td>\n",
6833
       "      <td>NaN</td>\n",
6834
       "      <td>True</td>\n",
6835
       "      <td>150.0</td>\n",
6836
       "      <td>NaN</td>\n",
6837
       "      <td>1</td>\n",
6838
       "      <td>1</td>\n",
6839
       "      <td>3</td>\n",
6840
       "      <td>0</td>\n",
6841
       "      <td>-1.333333</td>\n",
6842
       "      <td>1.0</td>\n",
6843
       "      <td>...</td>\n",
6844
       "      <td>-0.666667</td>\n",
6845
       "      <td>1.0</td>\n",
6846
       "      <td>False</td>\n",
6847
       "      <td>-0.720000</td>\n",
6848
       "      <td>1.0</td>\n",
6849
       "      <td>0.0</td>\n",
6850
       "      <td>1.0</td>\n",
6851
       "      <td>1.0</td>\n",
6852
       "      <td>-0.889333</td>\n",
6853
       "      <td>0.0</td>\n",
6854
       "      <td>-0.088825</td>\n",
6855
       "      <td>1.0</td>\n",
6856
       "      <td>-0.480</td>\n",
6857
       "      <td>1.0</td>\n",
6858
       "      <td>False</td>\n",
6859
       "      <td>2</td>\n",
6860
       "      <td>0.4400</td>\n",
6861
       "      <td>-0.1800</td>\n",
6862
       "      <td>-0.3</td>\n",
6863
       "      <td>0.881356</td>\n",
6864
       "      <td>30</td>\n",
6865
       "      <td>-1.084746</td>\n",
6866
       "      <td>-1.5</td>\n",
6867
       "      <td>-1.5</td>\n",
6868
       "      <td>3</td>\n",
6869
       "      <td>3</td>\n",
6870
       "      <td>1.0</td>\n",
6871
       "      <td>1.0</td>\n",
6872
       "      <td>0.0</td>\n",
6873
       "      <td>0.0</td>\n",
6874
       "      <td>1.0</td>\n",
6875
       "      <td>1.0</td>\n",
6876
       "      <td>0</td>\n",
6877
       "      <td>-0.266667</td>\n",
6878
       "      <td>1.870968</td>\n",
6879
       "      <td>0.0</td>\n",
6880
       "      <td>-0.600000</td>\n",
6881
       "      <td>0</td>\n",
6882
       "      <td>1</td>\n",
6883
       "      <td>False</td>\n",
6884
       "      <td>NaN</td>\n",
6885
       "      <td>NaN</td>\n",
6886
       "      <td>NaN</td>\n",
6887
       "      <td>NaN</td>\n",
6888
       "      <td>NaN</td>\n",
6889
       "      <td>NaN</td>\n",
6890
       "      <td>False</td>\n",
6891
       "      <td>-0.5</td>\n",
6892
       "      <td>90163</td>\n",
6893
       "      <td>0.000085</td>\n",
6894
       "    </tr>\n",
6895
       "    <tr>\n",
6896
       "      <td>10097</td>\n",
6897
       "      <td>6df94672e</td>\n",
6898
       "      <td>ID_6df94672e</td>\n",
6899
       "      <td>CT</td>\n",
6900
       "      <td>ID_39c82642</td>\n",
6901
       "      <td>ID_9f4b3b7a4d</td>\n",
6902
       "      <td>ID_81c1365f46</td>\n",
6903
       "      <td>NaN</td>\n",
6904
       "      <td>['-125', '18', '-120.099976']</td>\n",
6905
       "      <td>['1', '0', '0', '0', '1', '0']</td>\n",
6906
       "      <td>1</td>\n",
6907
       "      <td>MONOCHROME2</td>\n",
6908
       "      <td>512</td>\n",
6909
       "      <td>512</td>\n",
6910
       "      <td>['0.48828125', '0.48828125']</td>\n",
6911
       "      <td>16</td>\n",
6912
       "      <td>12</td>\n",
6913
       "      <td>11</td>\n",
6914
       "      <td>0</td>\n",
6915
       "      <td>['00036', '00036']</td>\n",
6916
       "      <td>['00080', '00080']</td>\n",
6917
       "      <td>-1024.0</td>\n",
6918
       "      <td>1.0</td>\n",
6919
       "      <td>1.301333</td>\n",
6920
       "      <td>0.108000</td>\n",
6921
       "      <td>-2.544276</td>\n",
6922
       "      <td>-0.594726</td>\n",
6923
       "      <td>False</td>\n",
6924
       "      <td>True</td>\n",
6925
       "      <td>1.0</td>\n",
6926
       "      <td>0.0</td>\n",
6927
       "      <td>0.0</td>\n",
6928
       "      <td>0.0</td>\n",
6929
       "      <td>1.0</td>\n",
6930
       "      <td>0.0</td>\n",
6931
       "      <td>-125.0</td>\n",
6932
       "      <td>18.0</td>\n",
6933
       "      <td>-120.099976</td>\n",
6934
       "      <td>0.488281</td>\n",
6935
       "      <td>0.488281</td>\n",
6936
       "      <td>36.0</td>\n",
6937
       "      <td>36.0</td>\n",
6938
       "      <td>False</td>\n",
6939
       "      <td>80.0</td>\n",
6940
       "      <td>80.0</td>\n",
6941
       "      <td>0</td>\n",
6942
       "      <td>0</td>\n",
6943
       "      <td>0</td>\n",
6944
       "      <td>1</td>\n",
6945
       "      <td>-1.333333</td>\n",
6946
       "      <td>1.0</td>\n",
6947
       "      <td>...</td>\n",
6948
       "      <td>-0.666667</td>\n",
6949
       "      <td>1.0</td>\n",
6950
       "      <td>False</td>\n",
6951
       "      <td>-0.720000</td>\n",
6952
       "      <td>1.0</td>\n",
6953
       "      <td>0.0</td>\n",
6954
       "      <td>1.0</td>\n",
6955
       "      <td>1.0</td>\n",
6956
       "      <td>1.106667</td>\n",
6957
       "      <td>1.0</td>\n",
6958
       "      <td>-0.411318</td>\n",
6959
       "      <td>0.0</td>\n",
6960
       "      <td>-0.480</td>\n",
6961
       "      <td>1.0</td>\n",
6962
       "      <td>False</td>\n",
6963
       "      <td>1</td>\n",
6964
       "      <td>-0.4404</td>\n",
6965
       "      <td>-1.4004</td>\n",
6966
       "      <td>1.4</td>\n",
6967
       "      <td>1.966102</td>\n",
6968
       "      <td>46</td>\n",
6969
       "      <td>-1.016949</td>\n",
6970
       "      <td>-1.5</td>\n",
6971
       "      <td>-1.5</td>\n",
6972
       "      <td>3</td>\n",
6973
       "      <td>3</td>\n",
6974
       "      <td>1.0</td>\n",
6975
       "      <td>1.0</td>\n",
6976
       "      <td>0.0</td>\n",
6977
       "      <td>0.0</td>\n",
6978
       "      <td>1.0</td>\n",
6979
       "      <td>1.0</td>\n",
6980
       "      <td>10</td>\n",
6981
       "      <td>2.000000</td>\n",
6982
       "      <td>1.833333</td>\n",
6983
       "      <td>0.0</td>\n",
6984
       "      <td>-0.600000</td>\n",
6985
       "      <td>0</td>\n",
6986
       "      <td>2</td>\n",
6987
       "      <td>False</td>\n",
6988
       "      <td>NaN</td>\n",
6989
       "      <td>NaN</td>\n",
6990
       "      <td>NaN</td>\n",
6991
       "      <td>NaN</td>\n",
6992
       "      <td>NaN</td>\n",
6993
       "      <td>NaN</td>\n",
6994
       "      <td>False</td>\n",
6995
       "      <td>-0.5</td>\n",
6996
       "      <td>14678</td>\n",
6997
       "      <td>0.000086</td>\n",
6998
       "    </tr>\n",
6999
       "  </tbody>\n",
7000
       "</table>\n",
7001
       "<p>5 rows × 101 columns</p>\n",
7002
       "</div>"
7003
      ],
7004
      "text/plain": [
7005
       "           img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
7006
       "12436   68c2b8b03   ID_68c2b8b03       CT  ID_db5b61c1    ID_451abcb4a1   \n",
7007
       "82308   7f95e978e   ID_7f95e978e       CT  ID_ae6fa62a    ID_3a1815c27a   \n",
7008
       "60800   84735b84a   ID_84735b84a       CT  ID_ddcad7d4    ID_d7e80c40be   \n",
7009
       "102453  d6a5e0432   ID_d6a5e0432       CT  ID_73887cfd    ID_4cc0b3574d   \n",
7010
       "10097   6df94672e   ID_6df94672e       CT  ID_39c82642    ID_9f4b3b7a4d   \n",
7011
       "\n",
7012
       "       SeriesInstanceUID  StudyID                 ImagePositionPatient  \\\n",
7013
       "12436      ID_36778f2a4a      NaN  ['-125.000', '-148.300', '135.250']   \n",
7014
       "82308      ID_64db061397      NaN  ['-108.000', '-116.300', '114.000']   \n",
7015
       "60800      ID_11c94b7b33      NaN         ['-155', '23', '138.699997']   \n",
7016
       "102453     ID_bd88957d37      NaN  ['-125.000', '-131.700', '105.000']   \n",
7017
       "10097      ID_81c1365f46      NaN        ['-125', '18', '-120.099976']   \n",
7018
       "\n",
7019
       "                                  ImageOrientationPatient  SamplesPerPixel  \\\n",
7020
       "12436   ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
7021
       "82308   ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
7022
       "60800                      ['1', '0', '0', '0', '1', '0']                1   \n",
7023
       "102453  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
7024
       "10097                      ['1', '0', '0', '0', '1', '0']                1   \n",
7025
       "\n",
7026
       "       PhotometricInterpretation  Rows  Columns                  PixelSpacing  \\\n",
7027
       "12436                MONOCHROME2   512      512      ['0.488281', '0.488281']   \n",
7028
       "82308                MONOCHROME2   512      512      ['0.421875', '0.421875']   \n",
7029
       "60800                MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
7030
       "102453               MONOCHROME2   512      512      ['0.488281', '0.488281']   \n",
7031
       "10097                MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
7032
       "\n",
7033
       "        BitsAllocated  BitsStored  HighBit  PixelRepresentation  \\\n",
7034
       "12436              16          16       15                    1   \n",
7035
       "82308              16          16       15                    1   \n",
7036
       "60800              16          12       11                    0   \n",
7037
       "102453             16          16       15                    1   \n",
7038
       "10097              16          12       11                    0   \n",
7039
       "\n",
7040
       "              WindowCenter         WindowWidth  RescaleIntercept  \\\n",
7041
       "12436                   40                 150           -1024.0   \n",
7042
       "82308                   40                 100           -1024.0   \n",
7043
       "60800   ['00036', '00036']  ['00080', '00080']           -1024.0   \n",
7044
       "102453                  40                 150           -1024.0   \n",
7045
       "10097   ['00036', '00036']  ['00080', '00080']           -1024.0   \n",
7046
       "\n",
7047
       "        RescaleSlope    PxlMin    PxlMax    PxlStd   PxlMean   test  test2  \\\n",
7048
       "12436            1.0 -0.064000 -1.548000 -1.402099 -1.620352  False   True   \n",
7049
       "82308            1.0 -0.064000 -1.572000 -1.392679 -1.599246  False   True   \n",
7050
       "60800            1.0  1.314667 -1.914667 -2.872322 -0.693297  False   True   \n",
7051
       "102453           1.0 -0.064000 -1.558667 -1.401150 -1.616006  False   True   \n",
7052
       "10097            1.0  1.301333  0.108000 -2.544276 -0.594726  False   True   \n",
7053
       "\n",
7054
       "        ImageOrientationPatient_0  ImageOrientationPatient_1  \\\n",
7055
       "12436                         1.0                        0.0   \n",
7056
       "82308                         1.0                        0.0   \n",
7057
       "60800                         1.0                        0.0   \n",
7058
       "102453                        1.0                        0.0   \n",
7059
       "10097                         1.0                        0.0   \n",
7060
       "\n",
7061
       "        ImageOrientationPatient_2  ImageOrientationPatient_3  \\\n",
7062
       "12436                         0.0                        0.0   \n",
7063
       "82308                         0.0                        0.0   \n",
7064
       "60800                         0.0                        0.0   \n",
7065
       "102453                        0.0                        0.0   \n",
7066
       "10097                         0.0                        0.0   \n",
7067
       "\n",
7068
       "        ImageOrientationPatient_4  ImageOrientationPatient_5  \\\n",
7069
       "12436                         1.0                        0.0   \n",
7070
       "82308                         1.0                        0.0   \n",
7071
       "60800                         1.0                        0.0   \n",
7072
       "102453                        1.0                        0.0   \n",
7073
       "10097                         1.0                        0.0   \n",
7074
       "\n",
7075
       "        ImagePositionPatient_0  ImagePositionPatient_1  \\\n",
7076
       "12436                   -125.0                  -148.3   \n",
7077
       "82308                   -108.0                  -116.3   \n",
7078
       "60800                   -155.0                    23.0   \n",
7079
       "102453                  -125.0                  -131.7   \n",
7080
       "10097                   -125.0                    18.0   \n",
7081
       "\n",
7082
       "        ImagePositionPatient_2  PixelSpacing_0  PixelSpacing_1  \\\n",
7083
       "12436               135.250000        0.488281        0.488281   \n",
7084
       "82308               114.000000        0.421875        0.421875   \n",
7085
       "60800               138.699997        0.488281        0.488281   \n",
7086
       "102453              105.000000        0.488281        0.488281   \n",
7087
       "10097              -120.099976        0.488281        0.488281   \n",
7088
       "\n",
7089
       "        WindowCenter_0  WindowCenter_1  WindowCenter_1_NAN  WindowWidth_0  \\\n",
7090
       "12436             40.0             NaN                True          150.0   \n",
7091
       "82308             40.0             NaN                True          100.0   \n",
7092
       "60800             36.0            36.0               False           80.0   \n",
7093
       "102453            40.0             NaN                True          150.0   \n",
7094
       "10097             36.0            36.0               False           80.0   \n",
7095
       "\n",
7096
       "        WindowWidth_1  WindowWidth_0_le  WindowWidth_1_le  WindowCenter_1_le  \\\n",
7097
       "12436             NaN                 1                 1                  3   \n",
7098
       "82308             NaN                 2                 1                  3   \n",
7099
       "60800            80.0                 0                 0                  0   \n",
7100
       "102453            NaN                 1                 1                  3   \n",
7101
       "10097            80.0                 0                 0                  0   \n",
7102
       "\n",
7103
       "        BitType_le  ImageOrientationPatient_4_f  \\\n",
7104
       "12436            0                    -1.333333   \n",
7105
       "82308            0                    -1.333333   \n",
7106
       "60800            1                    -1.333333   \n",
7107
       "102453           0                    -1.333333   \n",
7108
       "10097            1                    -1.333333   \n",
7109
       "\n",
7110
       "        ImageOrientationPatient_4_enc_0  ...  ImageOrientationPatient_5_f  \\\n",
7111
       "12436                               1.0  ...                    -0.666667   \n",
7112
       "82308                               1.0  ...                    -0.666667   \n",
7113
       "60800                               1.0  ...                    -0.666667   \n",
7114
       "102453                              1.0  ...                    -0.666667   \n",
7115
       "10097                               1.0  ...                    -0.666667   \n",
7116
       "\n",
7117
       "        ImageOrientationPatient_5_enc_0  ImageOrientationPatient_5_enc_1  \\\n",
7118
       "12436                               1.0                            False   \n",
7119
       "82308                               1.0                            False   \n",
7120
       "60800                               1.0                            False   \n",
7121
       "102453                              1.0                            False   \n",
7122
       "10097                               1.0                            False   \n",
7123
       "\n",
7124
       "        ImagePositionPatient_0_f  ImagePositionPatient_0_enc_0  \\\n",
7125
       "12436                  -0.720000                           1.0   \n",
7126
       "82308                   1.733333                           0.0   \n",
7127
       "60800                   0.480000                           0.0   \n",
7128
       "102453                 -0.720000                           1.0   \n",
7129
       "10097                  -0.720000                           1.0   \n",
7130
       "\n",
7131
       "        ImagePositionPatient_0_enc_1  ImagePositionPatient_0_f_r1  \\\n",
7132
       "12436                            0.0                          1.0   \n",
7133
       "82308                            0.0                          1.0   \n",
7134
       "60800                            0.0                          1.0   \n",
7135
       "102453                           0.0                          1.0   \n",
7136
       "10097                            0.0                          1.0   \n",
7137
       "\n",
7138
       "        ImagePositionPatient_0_f_r05  ImagePositionPatient_1_f  \\\n",
7139
       "12436                            1.0                 -1.110667   \n",
7140
       "82308                            1.0                 -0.684000   \n",
7141
       "60800                            1.0                  1.173333   \n",
7142
       "102453                           1.0                 -0.889333   \n",
7143
       "10097                            1.0                  1.106667   \n",
7144
       "\n",
7145
       "        ImagePositionPatient_1_enc_0  ImagePositionPatient_2_f  \\\n",
7146
       "12436                            0.0                 -0.045487   \n",
7147
       "82308                            0.0                 -0.075931   \n",
7148
       "60800                            1.0                 -0.040544   \n",
7149
       "102453                           0.0                 -0.088825   \n",
7150
       "10097                            1.0                 -0.411318   \n",
7151
       "\n",
7152
       "        ImagePositionPatient_2_f_r05  PixelSpacing_1_f  PixelSpacing_1_enc_0  \\\n",
7153
       "12436                            0.0            -0.480                   1.0   \n",
7154
       "82308                            1.0             1.295                   0.0   \n",
7155
       "60800                            0.0            -0.480                   1.0   \n",
7156
       "102453                           1.0            -0.480                   1.0   \n",
7157
       "10097                            0.0            -0.480                   1.0   \n",
7158
       "\n",
7159
       "        PixelSpacing_1_enc_1  WindowCenter_0_le  pos_max  pos_min  pos_size  \\\n",
7160
       "12436                  False                  2   0.5810  -0.1190       0.5   \n",
7161
       "82308                  False                  2   0.4760  -0.1840      -0.1   \n",
7162
       "60800                  False                  1   0.5748  -0.1252       0.1   \n",
7163
       "102453                 False                  2   0.4400  -0.1800      -0.3   \n",
7164
       "10097                  False                  1  -0.4404  -1.4004       1.4   \n",
7165
       "\n",
7166
       "        pos_idx1  pos_idx  pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  \\\n",
7167
       "12436   1.355932       37 -1.016949      -1.5      -1.5                3   \n",
7168
       "82308   1.016949       32 -1.084746      -1.5      -1.5                3   \n",
7169
       "60800   1.152542       34 -1.084746      -1.5      -1.5                3   \n",
7170
       "102453  0.881356       30 -1.084746      -1.5      -1.5                3   \n",
7171
       "10097   1.966102       46 -1.016949      -1.5      -1.5                3   \n",
7172
       "\n",
7173
       "        pos_inc2_grp_le  pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  \\\n",
7174
       "12436                 3          1.0             1.0             0.0   \n",
7175
       "82308                 3          1.0             1.0             0.0   \n",
7176
       "60800                 3          1.0             1.0             0.0   \n",
7177
       "102453                3          1.0             1.0             0.0   \n",
7178
       "10097                 3          1.0             1.0             0.0   \n",
7179
       "\n",
7180
       "        pos_inc2_enc_0  pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  \\\n",
7181
       "12436              0.0             1.0             1.0            1   \n",
7182
       "82308              0.0             1.0             1.0            4   \n",
7183
       "60800              0.0             1.0             1.0            2   \n",
7184
       "102453             0.0             1.0             1.0            0   \n",
7185
       "10097              0.0             1.0             1.0           10   \n",
7186
       "\n",
7187
       "        pos_range   pos_rel  pos_zeros  pos_inc_rng  pos_zeros_le  \\\n",
7188
       "12436    0.266667  1.771429        1.6    -0.600000             1   \n",
7189
       "82308    0.000000  1.878788        0.0    -0.600000             0   \n",
7190
       "60800    0.266666  1.885714        0.0    -0.599994             0   \n",
7191
       "102453  -0.266667  1.870968        0.0    -0.600000             0   \n",
7192
       "10097    2.000000  1.833333        0.0    -0.600000             0   \n",
7193
       "\n",
7194
       "        PxlMin_grp_le  PxlMin_zero  any  epidural  intraparenchymal  \\\n",
7195
       "12436               1        False  NaN       NaN               NaN   \n",
7196
       "82308               1        False  NaN       NaN               NaN   \n",
7197
       "60800               2        False  NaN       NaN               NaN   \n",
7198
       "102453              1        False  NaN       NaN               NaN   \n",
7199
       "10097               2        False  NaN       NaN               NaN   \n",
7200
       "\n",
7201
       "        intraventricular  subarachnoid  subdural  any_series  SeriesPP  \\\n",
7202
       "12436                NaN           NaN       NaN       False      -0.5   \n",
7203
       "82308                NaN           NaN       NaN       False      -0.5   \n",
7204
       "60800                NaN           NaN       NaN       False      -0.5   \n",
7205
       "102453               NaN           NaN       NaN       False      -0.5   \n",
7206
       "10097                NaN           NaN       NaN       False      -0.5   \n",
7207
       "\n",
7208
       "        yuval_idx  pred_any  \n",
7209
       "12436       80726  0.000082  \n",
7210
       "82308       68171  0.000082  \n",
7211
       "60800       27981  0.000083  \n",
7212
       "102453      90163  0.000085  \n",
7213
       "10097       14678  0.000086  \n",
7214
       "\n",
7215
       "[5 rows x 101 columns]"
7216
      ]
7217
     },
7218
     "execution_count": 62,
7219
     "metadata": {},
7220
     "output_type": "execute_result"
7221
    }
7222
   ],
7223
   "source": [
7224
    "test_md.sort_values('pred_any').head()"
7225
   ]
7226
  },
7227
  {
7228
   "cell_type": "code",
7229
   "execution_count": 63,
7230
   "metadata": {},
7231
   "outputs": [
7232
    {
7233
     "data": {
7234
      "text/html": [
7235
       "<div>\n",
7236
       "<style scoped>\n",
7237
       "    .dataframe tbody tr th:only-of-type {\n",
7238
       "        vertical-align: middle;\n",
7239
       "    }\n",
7240
       "\n",
7241
       "    .dataframe tbody tr th {\n",
7242
       "        vertical-align: top;\n",
7243
       "    }\n",
7244
       "\n",
7245
       "    .dataframe thead th {\n",
7246
       "        text-align: right;\n",
7247
       "    }\n",
7248
       "</style>\n",
7249
       "<table border=\"1\" class=\"dataframe\">\n",
7250
       "  <thead>\n",
7251
       "    <tr style=\"text-align: right;\">\n",
7252
       "      <th></th>\n",
7253
       "      <th>img_id</th>\n",
7254
       "      <th>SOPInstanceUID</th>\n",
7255
       "      <th>Modality</th>\n",
7256
       "      <th>PatientID</th>\n",
7257
       "      <th>StudyInstanceUID</th>\n",
7258
       "      <th>SeriesInstanceUID</th>\n",
7259
       "      <th>StudyID</th>\n",
7260
       "      <th>ImagePositionPatient</th>\n",
7261
       "      <th>ImageOrientationPatient</th>\n",
7262
       "      <th>SamplesPerPixel</th>\n",
7263
       "      <th>PhotometricInterpretation</th>\n",
7264
       "      <th>Rows</th>\n",
7265
       "      <th>Columns</th>\n",
7266
       "      <th>PixelSpacing</th>\n",
7267
       "      <th>BitsAllocated</th>\n",
7268
       "      <th>BitsStored</th>\n",
7269
       "      <th>HighBit</th>\n",
7270
       "      <th>PixelRepresentation</th>\n",
7271
       "      <th>WindowCenter</th>\n",
7272
       "      <th>WindowWidth</th>\n",
7273
       "      <th>RescaleIntercept</th>\n",
7274
       "      <th>RescaleSlope</th>\n",
7275
       "      <th>PxlMin</th>\n",
7276
       "      <th>PxlMax</th>\n",
7277
       "      <th>PxlStd</th>\n",
7278
       "      <th>PxlMean</th>\n",
7279
       "      <th>test</th>\n",
7280
       "      <th>test2</th>\n",
7281
       "      <th>ImageOrientationPatient_0</th>\n",
7282
       "      <th>ImageOrientationPatient_1</th>\n",
7283
       "      <th>ImageOrientationPatient_2</th>\n",
7284
       "      <th>ImageOrientationPatient_3</th>\n",
7285
       "      <th>ImageOrientationPatient_4</th>\n",
7286
       "      <th>ImageOrientationPatient_5</th>\n",
7287
       "      <th>ImagePositionPatient_0</th>\n",
7288
       "      <th>ImagePositionPatient_1</th>\n",
7289
       "      <th>ImagePositionPatient_2</th>\n",
7290
       "      <th>PixelSpacing_0</th>\n",
7291
       "      <th>PixelSpacing_1</th>\n",
7292
       "      <th>WindowCenter_0</th>\n",
7293
       "      <th>WindowCenter_1</th>\n",
7294
       "      <th>WindowCenter_1_NAN</th>\n",
7295
       "      <th>WindowWidth_0</th>\n",
7296
       "      <th>WindowWidth_1</th>\n",
7297
       "      <th>WindowWidth_0_le</th>\n",
7298
       "      <th>WindowWidth_1_le</th>\n",
7299
       "      <th>WindowCenter_1_le</th>\n",
7300
       "      <th>BitType_le</th>\n",
7301
       "      <th>ImageOrientationPatient_4_f</th>\n",
7302
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
7303
       "      <th>...</th>\n",
7304
       "      <th>ImageOrientationPatient_5_f</th>\n",
7305
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
7306
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
7307
       "      <th>ImagePositionPatient_0_f</th>\n",
7308
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
7309
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
7310
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
7311
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
7312
       "      <th>ImagePositionPatient_1_f</th>\n",
7313
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
7314
       "      <th>ImagePositionPatient_2_f</th>\n",
7315
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
7316
       "      <th>PixelSpacing_1_f</th>\n",
7317
       "      <th>PixelSpacing_1_enc_0</th>\n",
7318
       "      <th>PixelSpacing_1_enc_1</th>\n",
7319
       "      <th>WindowCenter_0_le</th>\n",
7320
       "      <th>pos_max</th>\n",
7321
       "      <th>pos_min</th>\n",
7322
       "      <th>pos_size</th>\n",
7323
       "      <th>pos_idx1</th>\n",
7324
       "      <th>pos_idx</th>\n",
7325
       "      <th>pos_idx2</th>\n",
7326
       "      <th>pos_inc1</th>\n",
7327
       "      <th>pos_inc2</th>\n",
7328
       "      <th>pos_inc1_grp_le</th>\n",
7329
       "      <th>pos_inc2_grp_le</th>\n",
7330
       "      <th>pos_inc1_r1</th>\n",
7331
       "      <th>pos_inc1_r0001</th>\n",
7332
       "      <th>pos_inc1_enc_0</th>\n",
7333
       "      <th>pos_inc2_enc_0</th>\n",
7334
       "      <th>pos_inc1_enc_1</th>\n",
7335
       "      <th>pos_inc2_enc_1</th>\n",
7336
       "      <th>pos_size_le</th>\n",
7337
       "      <th>pos_range</th>\n",
7338
       "      <th>pos_rel</th>\n",
7339
       "      <th>pos_zeros</th>\n",
7340
       "      <th>pos_inc_rng</th>\n",
7341
       "      <th>pos_zeros_le</th>\n",
7342
       "      <th>PxlMin_grp_le</th>\n",
7343
       "      <th>PxlMin_zero</th>\n",
7344
       "      <th>any</th>\n",
7345
       "      <th>epidural</th>\n",
7346
       "      <th>intraparenchymal</th>\n",
7347
       "      <th>intraventricular</th>\n",
7348
       "      <th>subarachnoid</th>\n",
7349
       "      <th>subdural</th>\n",
7350
       "      <th>any_series</th>\n",
7351
       "      <th>SeriesPP</th>\n",
7352
       "      <th>yuval_idx</th>\n",
7353
       "      <th>pred_any</th>\n",
7354
       "    </tr>\n",
7355
       "  </thead>\n",
7356
       "  <tbody>\n",
7357
       "    <tr>\n",
7358
       "      <td>100029</td>\n",
7359
       "      <td>d3bd67ff1</td>\n",
7360
       "      <td>ID_d3bd67ff1</td>\n",
7361
       "      <td>CT</td>\n",
7362
       "      <td>ID_07aa4e90</td>\n",
7363
       "      <td>ID_19039aeb7f</td>\n",
7364
       "      <td>ID_83a456ed02</td>\n",
7365
       "      <td>NaN</td>\n",
7366
       "      <td>['-125', '-5.28788193', '235.817384']</td>\n",
7367
       "      <td>['1', '0', '0', '0', '0.927183855', '-0.374606...</td>\n",
7368
       "      <td>1</td>\n",
7369
       "      <td>MONOCHROME2</td>\n",
7370
       "      <td>512</td>\n",
7371
       "      <td>512</td>\n",
7372
       "      <td>['0.48828125', '0.48828125']</td>\n",
7373
       "      <td>16</td>\n",
7374
       "      <td>12</td>\n",
7375
       "      <td>11</td>\n",
7376
       "      <td>0</td>\n",
7377
       "      <td>['00040', '00040']</td>\n",
7378
       "      <td>['00080', '00080']</td>\n",
7379
       "      <td>-1024.0</td>\n",
7380
       "      <td>1.0</td>\n",
7381
       "      <td>1.301333</td>\n",
7382
       "      <td>0.093333</td>\n",
7383
       "      <td>-0.618874</td>\n",
7384
       "      <td>1.229975</td>\n",
7385
       "      <td>False</td>\n",
7386
       "      <td>True</td>\n",
7387
       "      <td>1.0</td>\n",
7388
       "      <td>0.0</td>\n",
7389
       "      <td>0.0</td>\n",
7390
       "      <td>0.0</td>\n",
7391
       "      <td>0.927184</td>\n",
7392
       "      <td>-0.374607</td>\n",
7393
       "      <td>-125.000000</td>\n",
7394
       "      <td>-5.287882</td>\n",
7395
       "      <td>235.817384</td>\n",
7396
       "      <td>0.488281</td>\n",
7397
       "      <td>0.488281</td>\n",
7398
       "      <td>40.0</td>\n",
7399
       "      <td>40.0</td>\n",
7400
       "      <td>False</td>\n",
7401
       "      <td>80.0</td>\n",
7402
       "      <td>80.0</td>\n",
7403
       "      <td>0</td>\n",
7404
       "      <td>0</td>\n",
7405
       "      <td>1</td>\n",
7406
       "      <td>1</td>\n",
7407
       "      <td>1.695785</td>\n",
7408
       "      <td>0.0</td>\n",
7409
       "      <td>...</td>\n",
7410
       "      <td>0.835956</td>\n",
7411
       "      <td>0.0</td>\n",
7412
       "      <td>False</td>\n",
7413
       "      <td>-0.72</td>\n",
7414
       "      <td>1.0</td>\n",
7415
       "      <td>0.0</td>\n",
7416
       "      <td>1.0</td>\n",
7417
       "      <td>1.0</td>\n",
7418
       "      <td>0.796162</td>\n",
7419
       "      <td>1.0</td>\n",
7420
       "      <td>0.098592</td>\n",
7421
       "      <td>0.0</td>\n",
7422
       "      <td>-0.480000</td>\n",
7423
       "      <td>1.0</td>\n",
7424
       "      <td>False</td>\n",
7425
       "      <td>2</td>\n",
7426
       "      <td>1.202425</td>\n",
7427
       "      <td>0.502892</td>\n",
7428
       "      <td>-0.7</td>\n",
7429
       "      <td>0.000000</td>\n",
7430
       "      <td>17</td>\n",
7431
       "      <td>-0.474576</td>\n",
7432
       "      <td>2.247192</td>\n",
7433
       "      <td>2.247192</td>\n",
7434
       "      <td>3</td>\n",
7435
       "      <td>3</td>\n",
7436
       "      <td>0.0</td>\n",
7437
       "      <td>0.0</td>\n",
7438
       "      <td>0.0</td>\n",
7439
       "      <td>0.0</td>\n",
7440
       "      <td>0.0</td>\n",
7441
       "      <td>0.0</td>\n",
7442
       "      <td>3</td>\n",
7443
       "      <td>0.263550</td>\n",
7444
       "      <td>0.518123</td>\n",
7445
       "      <td>0.0</td>\n",
7446
       "      <td>-0.575802</td>\n",
7447
       "      <td>0</td>\n",
7448
       "      <td>2</td>\n",
7449
       "      <td>False</td>\n",
7450
       "      <td>NaN</td>\n",
7451
       "      <td>NaN</td>\n",
7452
       "      <td>NaN</td>\n",
7453
       "      <td>NaN</td>\n",
7454
       "      <td>NaN</td>\n",
7455
       "      <td>NaN</td>\n",
7456
       "      <td>False</td>\n",
7457
       "      <td>-0.5</td>\n",
7458
       "      <td>52190</td>\n",
7459
       "      <td>0.998915</td>\n",
7460
       "    </tr>\n",
7461
       "    <tr>\n",
7462
       "      <td>101618</td>\n",
7463
       "      <td>b5c2fbbe1</td>\n",
7464
       "      <td>ID_b5c2fbbe1</td>\n",
7465
       "      <td>CT</td>\n",
7466
       "      <td>ID_877a2214</td>\n",
7467
       "      <td>ID_f5d8b2ad40</td>\n",
7468
       "      <td>ID_c37347c9a3</td>\n",
7469
       "      <td>NaN</td>\n",
7470
       "      <td>['-126.408875', '-126.408875', '92.449158']</td>\n",
7471
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
7472
       "      <td>1</td>\n",
7473
       "      <td>MONOCHROME2</td>\n",
7474
       "      <td>512</td>\n",
7475
       "      <td>512</td>\n",
7476
       "      <td>['0.494750976563', '0.494750976563']</td>\n",
7477
       "      <td>16</td>\n",
7478
       "      <td>16</td>\n",
7479
       "      <td>15</td>\n",
7480
       "      <td>1</td>\n",
7481
       "      <td>35.000000</td>\n",
7482
       "      <td>135.000000</td>\n",
7483
       "      <td>-1024.0</td>\n",
7484
       "      <td>1.0</td>\n",
7485
       "      <td>1.301333</td>\n",
7486
       "      <td>0.148000</td>\n",
7487
       "      <td>-0.975081</td>\n",
7488
       "      <td>1.044669</td>\n",
7489
       "      <td>False</td>\n",
7490
       "      <td>True</td>\n",
7491
       "      <td>1.0</td>\n",
7492
       "      <td>0.0</td>\n",
7493
       "      <td>0.0</td>\n",
7494
       "      <td>0.0</td>\n",
7495
       "      <td>1.000000</td>\n",
7496
       "      <td>0.000000</td>\n",
7497
       "      <td>-126.408875</td>\n",
7498
       "      <td>-126.408875</td>\n",
7499
       "      <td>92.449158</td>\n",
7500
       "      <td>0.494751</td>\n",
7501
       "      <td>0.494751</td>\n",
7502
       "      <td>35.0</td>\n",
7503
       "      <td>NaN</td>\n",
7504
       "      <td>True</td>\n",
7505
       "      <td>135.0</td>\n",
7506
       "      <td>NaN</td>\n",
7507
       "      <td>3</td>\n",
7508
       "      <td>1</td>\n",
7509
       "      <td>3</td>\n",
7510
       "      <td>0</td>\n",
7511
       "      <td>-1.333333</td>\n",
7512
       "      <td>1.0</td>\n",
7513
       "      <td>...</td>\n",
7514
       "      <td>-0.666667</td>\n",
7515
       "      <td>1.0</td>\n",
7516
       "      <td>False</td>\n",
7517
       "      <td>-0.72</td>\n",
7518
       "      <td>0.0</td>\n",
7519
       "      <td>1.0</td>\n",
7520
       "      <td>1.0</td>\n",
7521
       "      <td>1.0</td>\n",
7522
       "      <td>-0.818785</td>\n",
7523
       "      <td>0.0</td>\n",
7524
       "      <td>-0.106806</td>\n",
7525
       "      <td>0.0</td>\n",
7526
       "      <td>-0.480000</td>\n",
7527
       "      <td>0.0</td>\n",
7528
       "      <td>True</td>\n",
7529
       "      <td>3</td>\n",
7530
       "      <td>0.609797</td>\n",
7531
       "      <td>-0.010203</td>\n",
7532
       "      <td>-0.3</td>\n",
7533
       "      <td>0.135593</td>\n",
7534
       "      <td>19</td>\n",
7535
       "      <td>-0.338983</td>\n",
7536
       "      <td>-1.500000</td>\n",
7537
       "      <td>-1.500000</td>\n",
7538
       "      <td>3</td>\n",
7539
       "      <td>3</td>\n",
7540
       "      <td>1.0</td>\n",
7541
       "      <td>1.0</td>\n",
7542
       "      <td>0.0</td>\n",
7543
       "      <td>0.0</td>\n",
7544
       "      <td>1.0</td>\n",
7545
       "      <td>1.0</td>\n",
7546
       "      <td>0</td>\n",
7547
       "      <td>-0.266667</td>\n",
7548
       "      <td>0.451613</td>\n",
7549
       "      <td>0.0</td>\n",
7550
       "      <td>-0.600000</td>\n",
7551
       "      <td>0</td>\n",
7552
       "      <td>2</td>\n",
7553
       "      <td>False</td>\n",
7554
       "      <td>NaN</td>\n",
7555
       "      <td>NaN</td>\n",
7556
       "      <td>NaN</td>\n",
7557
       "      <td>NaN</td>\n",
7558
       "      <td>NaN</td>\n",
7559
       "      <td>NaN</td>\n",
7560
       "      <td>False</td>\n",
7561
       "      <td>-0.5</td>\n",
7562
       "      <td>93649</td>\n",
7563
       "      <td>0.998922</td>\n",
7564
       "    </tr>\n",
7565
       "    <tr>\n",
7566
       "      <td>54912</td>\n",
7567
       "      <td>5519471d4</td>\n",
7568
       "      <td>ID_5519471d4</td>\n",
7569
       "      <td>CT</td>\n",
7570
       "      <td>ID_35384be6</td>\n",
7571
       "      <td>ID_cc5b6c0a29</td>\n",
7572
       "      <td>ID_5d7a4ca229</td>\n",
7573
       "      <td>NaN</td>\n",
7574
       "      <td>['-125', '72.8792912', '193.380843']</td>\n",
7575
       "      <td>['1', '0', '0', '0', '0.920504853', '-0.390731...</td>\n",
7576
       "      <td>1</td>\n",
7577
       "      <td>MONOCHROME2</td>\n",
7578
       "      <td>512</td>\n",
7579
       "      <td>512</td>\n",
7580
       "      <td>['0.48828125', '0.48828125']</td>\n",
7581
       "      <td>16</td>\n",
7582
       "      <td>12</td>\n",
7583
       "      <td>11</td>\n",
7584
       "      <td>0</td>\n",
7585
       "      <td>['00040', '00040']</td>\n",
7586
       "      <td>['00080', '00080']</td>\n",
7587
       "      <td>-1024.0</td>\n",
7588
       "      <td>1.0</td>\n",
7589
       "      <td>1.301333</td>\n",
7590
       "      <td>1.525333</td>\n",
7591
       "      <td>-0.941557</td>\n",
7592
       "      <td>1.166305</td>\n",
7593
       "      <td>False</td>\n",
7594
       "      <td>True</td>\n",
7595
       "      <td>1.0</td>\n",
7596
       "      <td>0.0</td>\n",
7597
       "      <td>0.0</td>\n",
7598
       "      <td>0.0</td>\n",
7599
       "      <td>0.920505</td>\n",
7600
       "      <td>-0.390731</td>\n",
7601
       "      <td>-125.000000</td>\n",
7602
       "      <td>72.879291</td>\n",
7603
       "      <td>193.380843</td>\n",
7604
       "      <td>0.488281</td>\n",
7605
       "      <td>0.488281</td>\n",
7606
       "      <td>40.0</td>\n",
7607
       "      <td>40.0</td>\n",
7608
       "      <td>False</td>\n",
7609
       "      <td>80.0</td>\n",
7610
       "      <td>80.0</td>\n",
7611
       "      <td>0</td>\n",
7612
       "      <td>0</td>\n",
7613
       "      <td>1</td>\n",
7614
       "      <td>1</td>\n",
7615
       "      <td>1.606731</td>\n",
7616
       "      <td>0.0</td>\n",
7617
       "      <td>...</td>\n",
7618
       "      <td>0.728459</td>\n",
7619
       "      <td>0.0</td>\n",
7620
       "      <td>False</td>\n",
7621
       "      <td>-0.72</td>\n",
7622
       "      <td>1.0</td>\n",
7623
       "      <td>0.0</td>\n",
7624
       "      <td>1.0</td>\n",
7625
       "      <td>1.0</td>\n",
7626
       "      <td>1.838391</td>\n",
7627
       "      <td>1.0</td>\n",
7628
       "      <td>0.037795</td>\n",
7629
       "      <td>0.0</td>\n",
7630
       "      <td>-0.480000</td>\n",
7631
       "      <td>1.0</td>\n",
7632
       "      <td>False</td>\n",
7633
       "      <td>2</td>\n",
7634
       "      <td>1.078723</td>\n",
7635
       "      <td>0.490115</td>\n",
7636
       "      <td>-0.7</td>\n",
7637
       "      <td>-0.271186</td>\n",
7638
       "      <td>13</td>\n",
7639
       "      <td>-0.203390</td>\n",
7640
       "      <td>1.726074</td>\n",
7641
       "      <td>1.723938</td>\n",
7642
       "      <td>3</td>\n",
7643
       "      <td>3</td>\n",
7644
       "      <td>0.0</td>\n",
7645
       "      <td>0.0</td>\n",
7646
       "      <td>0.0</td>\n",
7647
       "      <td>0.0</td>\n",
7648
       "      <td>0.0</td>\n",
7649
       "      <td>0.0</td>\n",
7650
       "      <td>3</td>\n",
7651
       "      <td>-0.475944</td>\n",
7652
       "      <td>-0.074042</td>\n",
7653
       "      <td>0.0</td>\n",
7654
       "      <td>-0.598386</td>\n",
7655
       "      <td>0</td>\n",
7656
       "      <td>2</td>\n",
7657
       "      <td>False</td>\n",
7658
       "      <td>NaN</td>\n",
7659
       "      <td>NaN</td>\n",
7660
       "      <td>NaN</td>\n",
7661
       "      <td>NaN</td>\n",
7662
       "      <td>NaN</td>\n",
7663
       "      <td>NaN</td>\n",
7664
       "      <td>False</td>\n",
7665
       "      <td>-0.5</td>\n",
7666
       "      <td>115357</td>\n",
7667
       "      <td>0.998966</td>\n",
7668
       "    </tr>\n",
7669
       "    <tr>\n",
7670
       "      <td>29363</td>\n",
7671
       "      <td>dfc1d30ba</td>\n",
7672
       "      <td>ID_dfc1d30ba</td>\n",
7673
       "      <td>CT</td>\n",
7674
       "      <td>ID_7ed798ca</td>\n",
7675
       "      <td>ID_bca01d4025</td>\n",
7676
       "      <td>ID_bf75646cb6</td>\n",
7677
       "      <td>NaN</td>\n",
7678
       "      <td>['-132.5', '13.0711274', '189.612208']</td>\n",
7679
       "      <td>['1', '0', '0', '0', '0.965925826', '-0.258819...</td>\n",
7680
       "      <td>1</td>\n",
7681
       "      <td>MONOCHROME2</td>\n",
7682
       "      <td>512</td>\n",
7683
       "      <td>512</td>\n",
7684
       "      <td>['0.517578125', '0.517578125']</td>\n",
7685
       "      <td>16</td>\n",
7686
       "      <td>12</td>\n",
7687
       "      <td>11</td>\n",
7688
       "      <td>1</td>\n",
7689
       "      <td>['00040', '00040']</td>\n",
7690
       "      <td>['00080', '00080']</td>\n",
7691
       "      <td>0.0</td>\n",
7692
       "      <td>1.0</td>\n",
7693
       "      <td>1.333333</td>\n",
7694
       "      <td>-0.217333</td>\n",
7695
       "      <td>-0.920688</td>\n",
7696
       "      <td>0.910508</td>\n",
7697
       "      <td>False</td>\n",
7698
       "      <td>True</td>\n",
7699
       "      <td>1.0</td>\n",
7700
       "      <td>0.0</td>\n",
7701
       "      <td>0.0</td>\n",
7702
       "      <td>0.0</td>\n",
7703
       "      <td>0.965926</td>\n",
7704
       "      <td>-0.258819</td>\n",
7705
       "      <td>-132.500000</td>\n",
7706
       "      <td>13.071127</td>\n",
7707
       "      <td>189.612208</td>\n",
7708
       "      <td>0.517578</td>\n",
7709
       "      <td>0.517578</td>\n",
7710
       "      <td>40.0</td>\n",
7711
       "      <td>40.0</td>\n",
7712
       "      <td>False</td>\n",
7713
       "      <td>80.0</td>\n",
7714
       "      <td>80.0</td>\n",
7715
       "      <td>0</td>\n",
7716
       "      <td>0</td>\n",
7717
       "      <td>1</td>\n",
7718
       "      <td>2</td>\n",
7719
       "      <td>2.212344</td>\n",
7720
       "      <td>0.0</td>\n",
7721
       "      <td>...</td>\n",
7722
       "      <td>1.607873</td>\n",
7723
       "      <td>0.0</td>\n",
7724
       "      <td>False</td>\n",
7725
       "      <td>1.08</td>\n",
7726
       "      <td>0.0</td>\n",
7727
       "      <td>0.0</td>\n",
7728
       "      <td>0.0</td>\n",
7729
       "      <td>1.0</td>\n",
7730
       "      <td>1.040948</td>\n",
7731
       "      <td>1.0</td>\n",
7732
       "      <td>0.032396</td>\n",
7733
       "      <td>0.0</td>\n",
7734
       "      <td>2.060625</td>\n",
7735
       "      <td>0.0</td>\n",
7736
       "      <td>False</td>\n",
7737
       "      <td>2</td>\n",
7738
       "      <td>1.026354</td>\n",
7739
       "      <td>0.428849</td>\n",
7740
       "      <td>-0.5</td>\n",
7741
       "      <td>-0.067797</td>\n",
7742
       "      <td>16</td>\n",
7743
       "      <td>-0.271186</td>\n",
7744
       "      <td>1.561829</td>\n",
7745
       "      <td>1.588135</td>\n",
7746
       "      <td>3</td>\n",
7747
       "      <td>3</td>\n",
7748
       "      <td>0.0</td>\n",
7749
       "      <td>0.0</td>\n",
7750
       "      <td>0.0</td>\n",
7751
       "      <td>0.0</td>\n",
7752
       "      <td>0.0</td>\n",
7753
       "      <td>0.0</td>\n",
7754
       "      <td>5</td>\n",
7755
       "      <td>-0.416634</td>\n",
7756
       "      <td>0.206507</td>\n",
7757
       "      <td>0.0</td>\n",
7758
       "      <td>-0.579463</td>\n",
7759
       "      <td>0</td>\n",
7760
       "      <td>2</td>\n",
7761
       "      <td>False</td>\n",
7762
       "      <td>NaN</td>\n",
7763
       "      <td>NaN</td>\n",
7764
       "      <td>NaN</td>\n",
7765
       "      <td>NaN</td>\n",
7766
       "      <td>NaN</td>\n",
7767
       "      <td>NaN</td>\n",
7768
       "      <td>False</td>\n",
7769
       "      <td>-0.5</td>\n",
7770
       "      <td>111444</td>\n",
7771
       "      <td>0.999020</td>\n",
7772
       "    </tr>\n",
7773
       "    <tr>\n",
7774
       "      <td>92120</td>\n",
7775
       "      <td>e33160522</td>\n",
7776
       "      <td>ID_e33160522</td>\n",
7777
       "      <td>CT</td>\n",
7778
       "      <td>ID_7ed798ca</td>\n",
7779
       "      <td>ID_bca01d4025</td>\n",
7780
       "      <td>ID_bf75646cb6</td>\n",
7781
       "      <td>NaN</td>\n",
7782
       "      <td>['-132.5', '13.0711274', '184.488551']</td>\n",
7783
       "      <td>['1', '0', '0', '0', '0.965925826', '-0.258819...</td>\n",
7784
       "      <td>1</td>\n",
7785
       "      <td>MONOCHROME2</td>\n",
7786
       "      <td>512</td>\n",
7787
       "      <td>512</td>\n",
7788
       "      <td>['0.517578125', '0.517578125']</td>\n",
7789
       "      <td>16</td>\n",
7790
       "      <td>12</td>\n",
7791
       "      <td>11</td>\n",
7792
       "      <td>1</td>\n",
7793
       "      <td>['00040', '00040']</td>\n",
7794
       "      <td>['00080', '00080']</td>\n",
7795
       "      <td>0.0</td>\n",
7796
       "      <td>1.0</td>\n",
7797
       "      <td>1.333333</td>\n",
7798
       "      <td>-0.197333</td>\n",
7799
       "      <td>-0.930595</td>\n",
7800
       "      <td>0.931032</td>\n",
7801
       "      <td>False</td>\n",
7802
       "      <td>True</td>\n",
7803
       "      <td>1.0</td>\n",
7804
       "      <td>0.0</td>\n",
7805
       "      <td>0.0</td>\n",
7806
       "      <td>0.0</td>\n",
7807
       "      <td>0.965926</td>\n",
7808
       "      <td>-0.258819</td>\n",
7809
       "      <td>-132.500000</td>\n",
7810
       "      <td>13.071127</td>\n",
7811
       "      <td>184.488551</td>\n",
7812
       "      <td>0.517578</td>\n",
7813
       "      <td>0.517578</td>\n",
7814
       "      <td>40.0</td>\n",
7815
       "      <td>40.0</td>\n",
7816
       "      <td>False</td>\n",
7817
       "      <td>80.0</td>\n",
7818
       "      <td>80.0</td>\n",
7819
       "      <td>0</td>\n",
7820
       "      <td>0</td>\n",
7821
       "      <td>1</td>\n",
7822
       "      <td>2</td>\n",
7823
       "      <td>2.212344</td>\n",
7824
       "      <td>0.0</td>\n",
7825
       "      <td>...</td>\n",
7826
       "      <td>1.607873</td>\n",
7827
       "      <td>0.0</td>\n",
7828
       "      <td>False</td>\n",
7829
       "      <td>1.08</td>\n",
7830
       "      <td>0.0</td>\n",
7831
       "      <td>0.0</td>\n",
7832
       "      <td>0.0</td>\n",
7833
       "      <td>1.0</td>\n",
7834
       "      <td>1.040948</td>\n",
7835
       "      <td>1.0</td>\n",
7836
       "      <td>0.025055</td>\n",
7837
       "      <td>0.0</td>\n",
7838
       "      <td>2.060625</td>\n",
7839
       "      <td>0.0</td>\n",
7840
       "      <td>False</td>\n",
7841
       "      <td>2</td>\n",
7842
       "      <td>1.026354</td>\n",
7843
       "      <td>0.428849</td>\n",
7844
       "      <td>-0.5</td>\n",
7845
       "      <td>-0.135593</td>\n",
7846
       "      <td>15</td>\n",
7847
       "      <td>-0.203390</td>\n",
7848
       "      <td>1.588134</td>\n",
7849
       "      <td>1.561829</td>\n",
7850
       "      <td>3</td>\n",
7851
       "      <td>3</td>\n",
7852
       "      <td>0.0</td>\n",
7853
       "      <td>0.0</td>\n",
7854
       "      <td>0.0</td>\n",
7855
       "      <td>0.0</td>\n",
7856
       "      <td>0.0</td>\n",
7857
       "      <td>0.0</td>\n",
7858
       "      <td>5</td>\n",
7859
       "      <td>-0.416634</td>\n",
7860
       "      <td>0.069305</td>\n",
7861
       "      <td>0.0</td>\n",
7862
       "      <td>-0.579463</td>\n",
7863
       "      <td>0</td>\n",
7864
       "      <td>2</td>\n",
7865
       "      <td>False</td>\n",
7866
       "      <td>NaN</td>\n",
7867
       "      <td>NaN</td>\n",
7868
       "      <td>NaN</td>\n",
7869
       "      <td>NaN</td>\n",
7870
       "      <td>NaN</td>\n",
7871
       "      <td>NaN</td>\n",
7872
       "      <td>False</td>\n",
7873
       "      <td>-0.5</td>\n",
7874
       "      <td>111443</td>\n",
7875
       "      <td>0.999043</td>\n",
7876
       "    </tr>\n",
7877
       "  </tbody>\n",
7878
       "</table>\n",
7879
       "<p>5 rows × 101 columns</p>\n",
7880
       "</div>"
7881
      ],
7882
      "text/plain": [
7883
       "           img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
7884
       "100029  d3bd67ff1   ID_d3bd67ff1       CT  ID_07aa4e90    ID_19039aeb7f   \n",
7885
       "101618  b5c2fbbe1   ID_b5c2fbbe1       CT  ID_877a2214    ID_f5d8b2ad40   \n",
7886
       "54912   5519471d4   ID_5519471d4       CT  ID_35384be6    ID_cc5b6c0a29   \n",
7887
       "29363   dfc1d30ba   ID_dfc1d30ba       CT  ID_7ed798ca    ID_bca01d4025   \n",
7888
       "92120   e33160522   ID_e33160522       CT  ID_7ed798ca    ID_bca01d4025   \n",
7889
       "\n",
7890
       "       SeriesInstanceUID  StudyID  \\\n",
7891
       "100029     ID_83a456ed02      NaN   \n",
7892
       "101618     ID_c37347c9a3      NaN   \n",
7893
       "54912      ID_5d7a4ca229      NaN   \n",
7894
       "29363      ID_bf75646cb6      NaN   \n",
7895
       "92120      ID_bf75646cb6      NaN   \n",
7896
       "\n",
7897
       "                               ImagePositionPatient  \\\n",
7898
       "100029        ['-125', '-5.28788193', '235.817384']   \n",
7899
       "101618  ['-126.408875', '-126.408875', '92.449158']   \n",
7900
       "54912          ['-125', '72.8792912', '193.380843']   \n",
7901
       "29363        ['-132.5', '13.0711274', '189.612208']   \n",
7902
       "92120        ['-132.5', '13.0711274', '184.488551']   \n",
7903
       "\n",
7904
       "                                  ImageOrientationPatient  SamplesPerPixel  \\\n",
7905
       "100029  ['1', '0', '0', '0', '0.927183855', '-0.374606...                1   \n",
7906
       "101618  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
7907
       "54912   ['1', '0', '0', '0', '0.920504853', '-0.390731...                1   \n",
7908
       "29363   ['1', '0', '0', '0', '0.965925826', '-0.258819...                1   \n",
7909
       "92120   ['1', '0', '0', '0', '0.965925826', '-0.258819...                1   \n",
7910
       "\n",
7911
       "       PhotometricInterpretation  Rows  Columns  \\\n",
7912
       "100029               MONOCHROME2   512      512   \n",
7913
       "101618               MONOCHROME2   512      512   \n",
7914
       "54912                MONOCHROME2   512      512   \n",
7915
       "29363                MONOCHROME2   512      512   \n",
7916
       "92120                MONOCHROME2   512      512   \n",
7917
       "\n",
7918
       "                                PixelSpacing  BitsAllocated  BitsStored  \\\n",
7919
       "100029          ['0.48828125', '0.48828125']             16          12   \n",
7920
       "101618  ['0.494750976563', '0.494750976563']             16          16   \n",
7921
       "54912           ['0.48828125', '0.48828125']             16          12   \n",
7922
       "29363         ['0.517578125', '0.517578125']             16          12   \n",
7923
       "92120         ['0.517578125', '0.517578125']             16          12   \n",
7924
       "\n",
7925
       "        HighBit  PixelRepresentation        WindowCenter         WindowWidth  \\\n",
7926
       "100029       11                    0  ['00040', '00040']  ['00080', '00080']   \n",
7927
       "101618       15                    1           35.000000          135.000000   \n",
7928
       "54912        11                    0  ['00040', '00040']  ['00080', '00080']   \n",
7929
       "29363        11                    1  ['00040', '00040']  ['00080', '00080']   \n",
7930
       "92120        11                    1  ['00040', '00040']  ['00080', '00080']   \n",
7931
       "\n",
7932
       "        RescaleIntercept  RescaleSlope    PxlMin    PxlMax    PxlStd  \\\n",
7933
       "100029           -1024.0           1.0  1.301333  0.093333 -0.618874   \n",
7934
       "101618           -1024.0           1.0  1.301333  0.148000 -0.975081   \n",
7935
       "54912            -1024.0           1.0  1.301333  1.525333 -0.941557   \n",
7936
       "29363                0.0           1.0  1.333333 -0.217333 -0.920688   \n",
7937
       "92120                0.0           1.0  1.333333 -0.197333 -0.930595   \n",
7938
       "\n",
7939
       "         PxlMean   test  test2  ImageOrientationPatient_0  \\\n",
7940
       "100029  1.229975  False   True                        1.0   \n",
7941
       "101618  1.044669  False   True                        1.0   \n",
7942
       "54912   1.166305  False   True                        1.0   \n",
7943
       "29363   0.910508  False   True                        1.0   \n",
7944
       "92120   0.931032  False   True                        1.0   \n",
7945
       "\n",
7946
       "        ImageOrientationPatient_1  ImageOrientationPatient_2  \\\n",
7947
       "100029                        0.0                        0.0   \n",
7948
       "101618                        0.0                        0.0   \n",
7949
       "54912                         0.0                        0.0   \n",
7950
       "29363                         0.0                        0.0   \n",
7951
       "92120                         0.0                        0.0   \n",
7952
       "\n",
7953
       "        ImageOrientationPatient_3  ImageOrientationPatient_4  \\\n",
7954
       "100029                        0.0                   0.927184   \n",
7955
       "101618                        0.0                   1.000000   \n",
7956
       "54912                         0.0                   0.920505   \n",
7957
       "29363                         0.0                   0.965926   \n",
7958
       "92120                         0.0                   0.965926   \n",
7959
       "\n",
7960
       "        ImageOrientationPatient_5  ImagePositionPatient_0  \\\n",
7961
       "100029                  -0.374607             -125.000000   \n",
7962
       "101618                   0.000000             -126.408875   \n",
7963
       "54912                   -0.390731             -125.000000   \n",
7964
       "29363                   -0.258819             -132.500000   \n",
7965
       "92120                   -0.258819             -132.500000   \n",
7966
       "\n",
7967
       "        ImagePositionPatient_1  ImagePositionPatient_2  PixelSpacing_0  \\\n",
7968
       "100029               -5.287882              235.817384        0.488281   \n",
7969
       "101618             -126.408875               92.449158        0.494751   \n",
7970
       "54912                72.879291              193.380843        0.488281   \n",
7971
       "29363                13.071127              189.612208        0.517578   \n",
7972
       "92120                13.071127              184.488551        0.517578   \n",
7973
       "\n",
7974
       "        PixelSpacing_1  WindowCenter_0  WindowCenter_1  WindowCenter_1_NAN  \\\n",
7975
       "100029        0.488281            40.0            40.0               False   \n",
7976
       "101618        0.494751            35.0             NaN                True   \n",
7977
       "54912         0.488281            40.0            40.0               False   \n",
7978
       "29363         0.517578            40.0            40.0               False   \n",
7979
       "92120         0.517578            40.0            40.0               False   \n",
7980
       "\n",
7981
       "        WindowWidth_0  WindowWidth_1  WindowWidth_0_le  WindowWidth_1_le  \\\n",
7982
       "100029           80.0           80.0                 0                 0   \n",
7983
       "101618          135.0            NaN                 3                 1   \n",
7984
       "54912            80.0           80.0                 0                 0   \n",
7985
       "29363            80.0           80.0                 0                 0   \n",
7986
       "92120            80.0           80.0                 0                 0   \n",
7987
       "\n",
7988
       "        WindowCenter_1_le  BitType_le  ImageOrientationPatient_4_f  \\\n",
7989
       "100029                  1           1                     1.695785   \n",
7990
       "101618                  3           0                    -1.333333   \n",
7991
       "54912                   1           1                     1.606731   \n",
7992
       "29363                   1           2                     2.212344   \n",
7993
       "92120                   1           2                     2.212344   \n",
7994
       "\n",
7995
       "        ImageOrientationPatient_4_enc_0  ...  ImageOrientationPatient_5_f  \\\n",
7996
       "100029                              0.0  ...                     0.835956   \n",
7997
       "101618                              1.0  ...                    -0.666667   \n",
7998
       "54912                               0.0  ...                     0.728459   \n",
7999
       "29363                               0.0  ...                     1.607873   \n",
8000
       "92120                               0.0  ...                     1.607873   \n",
8001
       "\n",
8002
       "        ImageOrientationPatient_5_enc_0  ImageOrientationPatient_5_enc_1  \\\n",
8003
       "100029                              0.0                            False   \n",
8004
       "101618                              1.0                            False   \n",
8005
       "54912                               0.0                            False   \n",
8006
       "29363                               0.0                            False   \n",
8007
       "92120                               0.0                            False   \n",
8008
       "\n",
8009
       "        ImagePositionPatient_0_f  ImagePositionPatient_0_enc_0  \\\n",
8010
       "100029                     -0.72                           1.0   \n",
8011
       "101618                     -0.72                           0.0   \n",
8012
       "54912                      -0.72                           1.0   \n",
8013
       "29363                       1.08                           0.0   \n",
8014
       "92120                       1.08                           0.0   \n",
8015
       "\n",
8016
       "        ImagePositionPatient_0_enc_1  ImagePositionPatient_0_f_r1  \\\n",
8017
       "100029                           0.0                          1.0   \n",
8018
       "101618                           1.0                          1.0   \n",
8019
       "54912                            0.0                          1.0   \n",
8020
       "29363                            0.0                          0.0   \n",
8021
       "92120                            0.0                          0.0   \n",
8022
       "\n",
8023
       "        ImagePositionPatient_0_f_r05  ImagePositionPatient_1_f  \\\n",
8024
       "100029                           1.0                  0.796162   \n",
8025
       "101618                           1.0                 -0.818785   \n",
8026
       "54912                            1.0                  1.838391   \n",
8027
       "29363                            1.0                  1.040948   \n",
8028
       "92120                            1.0                  1.040948   \n",
8029
       "\n",
8030
       "        ImagePositionPatient_1_enc_0  ImagePositionPatient_2_f  \\\n",
8031
       "100029                           1.0                  0.098592   \n",
8032
       "101618                           0.0                 -0.106806   \n",
8033
       "54912                            1.0                  0.037795   \n",
8034
       "29363                            1.0                  0.032396   \n",
8035
       "92120                            1.0                  0.025055   \n",
8036
       "\n",
8037
       "        ImagePositionPatient_2_f_r05  PixelSpacing_1_f  PixelSpacing_1_enc_0  \\\n",
8038
       "100029                           0.0         -0.480000                   1.0   \n",
8039
       "101618                           0.0         -0.480000                   0.0   \n",
8040
       "54912                            0.0         -0.480000                   1.0   \n",
8041
       "29363                            0.0          2.060625                   0.0   \n",
8042
       "92120                            0.0          2.060625                   0.0   \n",
8043
       "\n",
8044
       "        PixelSpacing_1_enc_1  WindowCenter_0_le   pos_max   pos_min  pos_size  \\\n",
8045
       "100029                 False                  2  1.202425  0.502892      -0.7   \n",
8046
       "101618                  True                  3  0.609797 -0.010203      -0.3   \n",
8047
       "54912                  False                  2  1.078723  0.490115      -0.7   \n",
8048
       "29363                  False                  2  1.026354  0.428849      -0.5   \n",
8049
       "92120                  False                  2  1.026354  0.428849      -0.5   \n",
8050
       "\n",
8051
       "        pos_idx1  pos_idx  pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  \\\n",
8052
       "100029  0.000000       17 -0.474576  2.247192  2.247192                3   \n",
8053
       "101618  0.135593       19 -0.338983 -1.500000 -1.500000                3   \n",
8054
       "54912  -0.271186       13 -0.203390  1.726074  1.723938                3   \n",
8055
       "29363  -0.067797       16 -0.271186  1.561829  1.588135                3   \n",
8056
       "92120  -0.135593       15 -0.203390  1.588134  1.561829                3   \n",
8057
       "\n",
8058
       "        pos_inc2_grp_le  pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  \\\n",
8059
       "100029                3          0.0             0.0             0.0   \n",
8060
       "101618                3          1.0             1.0             0.0   \n",
8061
       "54912                 3          0.0             0.0             0.0   \n",
8062
       "29363                 3          0.0             0.0             0.0   \n",
8063
       "92120                 3          0.0             0.0             0.0   \n",
8064
       "\n",
8065
       "        pos_inc2_enc_0  pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  \\\n",
8066
       "100029             0.0             0.0             0.0            3   \n",
8067
       "101618             0.0             1.0             1.0            0   \n",
8068
       "54912              0.0             0.0             0.0            3   \n",
8069
       "29363              0.0             0.0             0.0            5   \n",
8070
       "92120              0.0             0.0             0.0            5   \n",
8071
       "\n",
8072
       "        pos_range   pos_rel  pos_zeros  pos_inc_rng  pos_zeros_le  \\\n",
8073
       "100029   0.263550  0.518123        0.0    -0.575802             0   \n",
8074
       "101618  -0.266667  0.451613        0.0    -0.600000             0   \n",
8075
       "54912   -0.475944 -0.074042        0.0    -0.598386             0   \n",
8076
       "29363   -0.416634  0.206507        0.0    -0.579463             0   \n",
8077
       "92120   -0.416634  0.069305        0.0    -0.579463             0   \n",
8078
       "\n",
8079
       "        PxlMin_grp_le  PxlMin_zero  any  epidural  intraparenchymal  \\\n",
8080
       "100029              2        False  NaN       NaN               NaN   \n",
8081
       "101618              2        False  NaN       NaN               NaN   \n",
8082
       "54912               2        False  NaN       NaN               NaN   \n",
8083
       "29363               2        False  NaN       NaN               NaN   \n",
8084
       "92120               2        False  NaN       NaN               NaN   \n",
8085
       "\n",
8086
       "        intraventricular  subarachnoid  subdural  any_series  SeriesPP  \\\n",
8087
       "100029               NaN           NaN       NaN       False      -0.5   \n",
8088
       "101618               NaN           NaN       NaN       False      -0.5   \n",
8089
       "54912                NaN           NaN       NaN       False      -0.5   \n",
8090
       "29363                NaN           NaN       NaN       False      -0.5   \n",
8091
       "92120                NaN           NaN       NaN       False      -0.5   \n",
8092
       "\n",
8093
       "        yuval_idx  pred_any  \n",
8094
       "100029      52190  0.998915  \n",
8095
       "101618      93649  0.998922  \n",
8096
       "54912      115357  0.998966  \n",
8097
       "29363      111444  0.999020  \n",
8098
       "92120      111443  0.999043  \n",
8099
       "\n",
8100
       "[5 rows x 101 columns]"
8101
      ]
8102
     },
8103
     "execution_count": 63,
8104
     "metadata": {},
8105
     "output_type": "execute_result"
8106
    }
8107
   ],
8108
   "source": [
8109
    "test_md.sort_values('pred_any').tail()"
8110
   ]
8111
  },
8112
  {
8113
   "cell_type": "code",
8114
   "execution_count": 64,
8115
   "metadata": {},
8116
   "outputs": [
8117
    {
8118
     "data": {
8119
      "text/html": [
8120
       "<div>\n",
8121
       "<style scoped>\n",
8122
       "    .dataframe tbody tr th:only-of-type {\n",
8123
       "        vertical-align: middle;\n",
8124
       "    }\n",
8125
       "\n",
8126
       "    .dataframe tbody tr th {\n",
8127
       "        vertical-align: top;\n",
8128
       "    }\n",
8129
       "\n",
8130
       "    .dataframe thead th {\n",
8131
       "        text-align: right;\n",
8132
       "    }\n",
8133
       "</style>\n",
8134
       "<table border=\"1\" class=\"dataframe\">\n",
8135
       "  <thead>\n",
8136
       "    <tr style=\"text-align: right;\">\n",
8137
       "      <th></th>\n",
8138
       "      <th>img_id</th>\n",
8139
       "      <th>SOPInstanceUID</th>\n",
8140
       "      <th>Modality</th>\n",
8141
       "      <th>PatientID</th>\n",
8142
       "      <th>StudyInstanceUID</th>\n",
8143
       "      <th>SeriesInstanceUID</th>\n",
8144
       "      <th>StudyID</th>\n",
8145
       "      <th>ImagePositionPatient</th>\n",
8146
       "      <th>ImageOrientationPatient</th>\n",
8147
       "      <th>SamplesPerPixel</th>\n",
8148
       "      <th>PhotometricInterpretation</th>\n",
8149
       "      <th>Rows</th>\n",
8150
       "      <th>Columns</th>\n",
8151
       "      <th>PixelSpacing</th>\n",
8152
       "      <th>BitsAllocated</th>\n",
8153
       "      <th>BitsStored</th>\n",
8154
       "      <th>HighBit</th>\n",
8155
       "      <th>PixelRepresentation</th>\n",
8156
       "      <th>WindowCenter</th>\n",
8157
       "      <th>WindowWidth</th>\n",
8158
       "      <th>RescaleIntercept</th>\n",
8159
       "      <th>RescaleSlope</th>\n",
8160
       "      <th>PxlMin</th>\n",
8161
       "      <th>PxlMax</th>\n",
8162
       "      <th>PxlStd</th>\n",
8163
       "      <th>PxlMean</th>\n",
8164
       "      <th>test</th>\n",
8165
       "      <th>test2</th>\n",
8166
       "      <th>ImageOrientationPatient_0</th>\n",
8167
       "      <th>ImageOrientationPatient_1</th>\n",
8168
       "      <th>ImageOrientationPatient_2</th>\n",
8169
       "      <th>ImageOrientationPatient_3</th>\n",
8170
       "      <th>ImageOrientationPatient_4</th>\n",
8171
       "      <th>ImageOrientationPatient_5</th>\n",
8172
       "      <th>ImagePositionPatient_0</th>\n",
8173
       "      <th>ImagePositionPatient_1</th>\n",
8174
       "      <th>ImagePositionPatient_2</th>\n",
8175
       "      <th>PixelSpacing_0</th>\n",
8176
       "      <th>PixelSpacing_1</th>\n",
8177
       "      <th>WindowCenter_0</th>\n",
8178
       "      <th>WindowCenter_1</th>\n",
8179
       "      <th>WindowCenter_1_NAN</th>\n",
8180
       "      <th>WindowWidth_0</th>\n",
8181
       "      <th>WindowWidth_1</th>\n",
8182
       "      <th>WindowWidth_0_le</th>\n",
8183
       "      <th>WindowWidth_1_le</th>\n",
8184
       "      <th>WindowCenter_1_le</th>\n",
8185
       "      <th>BitType_le</th>\n",
8186
       "      <th>ImageOrientationPatient_4_f</th>\n",
8187
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
8188
       "      <th>...</th>\n",
8189
       "      <th>ImageOrientationPatient_5_f</th>\n",
8190
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
8191
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
8192
       "      <th>ImagePositionPatient_0_f</th>\n",
8193
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
8194
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
8195
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
8196
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
8197
       "      <th>ImagePositionPatient_1_f</th>\n",
8198
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
8199
       "      <th>ImagePositionPatient_2_f</th>\n",
8200
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
8201
       "      <th>PixelSpacing_1_f</th>\n",
8202
       "      <th>PixelSpacing_1_enc_0</th>\n",
8203
       "      <th>PixelSpacing_1_enc_1</th>\n",
8204
       "      <th>WindowCenter_0_le</th>\n",
8205
       "      <th>pos_max</th>\n",
8206
       "      <th>pos_min</th>\n",
8207
       "      <th>pos_size</th>\n",
8208
       "      <th>pos_idx1</th>\n",
8209
       "      <th>pos_idx</th>\n",
8210
       "      <th>pos_idx2</th>\n",
8211
       "      <th>pos_inc1</th>\n",
8212
       "      <th>pos_inc2</th>\n",
8213
       "      <th>pos_inc1_grp_le</th>\n",
8214
       "      <th>pos_inc2_grp_le</th>\n",
8215
       "      <th>pos_inc1_r1</th>\n",
8216
       "      <th>pos_inc1_r0001</th>\n",
8217
       "      <th>pos_inc1_enc_0</th>\n",
8218
       "      <th>pos_inc2_enc_0</th>\n",
8219
       "      <th>pos_inc1_enc_1</th>\n",
8220
       "      <th>pos_inc2_enc_1</th>\n",
8221
       "      <th>pos_size_le</th>\n",
8222
       "      <th>pos_range</th>\n",
8223
       "      <th>pos_rel</th>\n",
8224
       "      <th>pos_zeros</th>\n",
8225
       "      <th>pos_inc_rng</th>\n",
8226
       "      <th>pos_zeros_le</th>\n",
8227
       "      <th>PxlMin_grp_le</th>\n",
8228
       "      <th>PxlMin_zero</th>\n",
8229
       "      <th>any</th>\n",
8230
       "      <th>epidural</th>\n",
8231
       "      <th>intraparenchymal</th>\n",
8232
       "      <th>intraventricular</th>\n",
8233
       "      <th>subarachnoid</th>\n",
8234
       "      <th>subdural</th>\n",
8235
       "      <th>any_series</th>\n",
8236
       "      <th>SeriesPP</th>\n",
8237
       "      <th>yuval_idx</th>\n",
8238
       "      <th>pred_any</th>\n",
8239
       "    </tr>\n",
8240
       "  </thead>\n",
8241
       "  <tbody>\n",
8242
       "    <tr>\n",
8243
       "      <td>85421</td>\n",
8244
       "      <td>ba1a7894c</td>\n",
8245
       "      <td>ID_ba1a7894c</td>\n",
8246
       "      <td>CT</td>\n",
8247
       "      <td>ID_6f87831a</td>\n",
8248
       "      <td>ID_a6ca244172</td>\n",
8249
       "      <td>ID_d00cee7f0c</td>\n",
8250
       "      <td>NaN</td>\n",
8251
       "      <td>['-125.000000', '-119.997978', '127.192337']</td>\n",
8252
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
8253
       "      <td>1</td>\n",
8254
       "      <td>MONOCHROME2</td>\n",
8255
       "      <td>512</td>\n",
8256
       "      <td>512</td>\n",
8257
       "      <td>['0.488281', '0.488281']</td>\n",
8258
       "      <td>16</td>\n",
8259
       "      <td>16</td>\n",
8260
       "      <td>15</td>\n",
8261
       "      <td>1</td>\n",
8262
       "      <td>30</td>\n",
8263
       "      <td>80</td>\n",
8264
       "      <td>-1024.0</td>\n",
8265
       "      <td>1.0</td>\n",
8266
       "      <td>-1.365333</td>\n",
8267
       "      <td>0.310667</td>\n",
8268
       "      <td>1.642553</td>\n",
8269
       "      <td>-0.881730</td>\n",
8270
       "      <td>False</td>\n",
8271
       "      <td>True</td>\n",
8272
       "      <td>1.0</td>\n",
8273
       "      <td>0.0</td>\n",
8274
       "      <td>0.0</td>\n",
8275
       "      <td>0.0</td>\n",
8276
       "      <td>0.927184</td>\n",
8277
       "      <td>-0.374607</td>\n",
8278
       "      <td>-125.0</td>\n",
8279
       "      <td>-119.997978</td>\n",
8280
       "      <td>127.192337</td>\n",
8281
       "      <td>0.488281</td>\n",
8282
       "      <td>0.488281</td>\n",
8283
       "      <td>30.0</td>\n",
8284
       "      <td>NaN</td>\n",
8285
       "      <td>True</td>\n",
8286
       "      <td>80.0</td>\n",
8287
       "      <td>NaN</td>\n",
8288
       "      <td>0</td>\n",
8289
       "      <td>1</td>\n",
8290
       "      <td>3</td>\n",
8291
       "      <td>0</td>\n",
8292
       "      <td>-1.333333</td>\n",
8293
       "      <td>0.0</td>\n",
8294
       "      <td>...</td>\n",
8295
       "      <td>-0.666667</td>\n",
8296
       "      <td>0.0</td>\n",
8297
       "      <td>True</td>\n",
8298
       "      <td>-0.72</td>\n",
8299
       "      <td>1.0</td>\n",
8300
       "      <td>0.0</td>\n",
8301
       "      <td>1.0</td>\n",
8302
       "      <td>1.0</td>\n",
8303
       "      <td>-0.733306</td>\n",
8304
       "      <td>0.0</td>\n",
8305
       "      <td>-0.057031</td>\n",
8306
       "      <td>0.0</td>\n",
8307
       "      <td>-0.48</td>\n",
8308
       "      <td>1.0</td>\n",
8309
       "      <td>False</td>\n",
8310
       "      <td>0</td>\n",
8311
       "      <td>0.918601</td>\n",
8312
       "      <td>0.249929</td>\n",
8313
       "      <td>-0.3</td>\n",
8314
       "      <td>-0.338983</td>\n",
8315
       "      <td>12</td>\n",
8316
       "      <td>0.135593</td>\n",
8317
       "      <td>1.695991</td>\n",
8318
       "      <td>1.696335</td>\n",
8319
       "      <td>3</td>\n",
8320
       "      <td>3</td>\n",
8321
       "      <td>0.0</td>\n",
8322
       "      <td>0.0</td>\n",
8323
       "      <td>0.0</td>\n",
8324
       "      <td>0.0</td>\n",
8325
       "      <td>0.0</td>\n",
8326
       "      <td>0.0</td>\n",
8327
       "      <td>0</td>\n",
8328
       "      <td>0.057814</td>\n",
8329
       "      <td>-0.451618</td>\n",
8330
       "      <td>0.0</td>\n",
8331
       "      <td>-0.599737</td>\n",
8332
       "      <td>0</td>\n",
8333
       "      <td>0</td>\n",
8334
       "      <td>True</td>\n",
8335
       "      <td>NaN</td>\n",
8336
       "      <td>NaN</td>\n",
8337
       "      <td>NaN</td>\n",
8338
       "      <td>NaN</td>\n",
8339
       "      <td>NaN</td>\n",
8340
       "      <td>NaN</td>\n",
8341
       "      <td>False</td>\n",
8342
       "      <td>-0.5</td>\n",
8343
       "      <td>111470</td>\n",
8344
       "      <td>0.902253</td>\n",
8345
       "    </tr>\n",
8346
       "    <tr>\n",
8347
       "      <td>46168</td>\n",
8348
       "      <td>7f9480ae5</td>\n",
8349
       "      <td>ID_7f9480ae5</td>\n",
8350
       "      <td>CT</td>\n",
8351
       "      <td>ID_61101bd3</td>\n",
8352
       "      <td>ID_6aca8f9834</td>\n",
8353
       "      <td>ID_1cb45bbcea</td>\n",
8354
       "      <td>NaN</td>\n",
8355
       "      <td>['-125', '42.4079503', '222.344198']</td>\n",
8356
       "      <td>['1', '0', '0', '0', '0.939692621', '-0.342020...</td>\n",
8357
       "      <td>1</td>\n",
8358
       "      <td>MONOCHROME2</td>\n",
8359
       "      <td>512</td>\n",
8360
       "      <td>512</td>\n",
8361
       "      <td>['0.48828125', '0.48828125']</td>\n",
8362
       "      <td>16</td>\n",
8363
       "      <td>12</td>\n",
8364
       "      <td>11</td>\n",
8365
       "      <td>0</td>\n",
8366
       "      <td>['00040', '00040']</td>\n",
8367
       "      <td>['00080', '00080']</td>\n",
8368
       "      <td>-1024.0</td>\n",
8369
       "      <td>1.0</td>\n",
8370
       "      <td>1.301333</td>\n",
8371
       "      <td>0.006667</td>\n",
8372
       "      <td>-0.700807</td>\n",
8373
       "      <td>1.484853</td>\n",
8374
       "      <td>False</td>\n",
8375
       "      <td>True</td>\n",
8376
       "      <td>1.0</td>\n",
8377
       "      <td>0.0</td>\n",
8378
       "      <td>0.0</td>\n",
8379
       "      <td>0.0</td>\n",
8380
       "      <td>0.939693</td>\n",
8381
       "      <td>-0.342020</td>\n",
8382
       "      <td>-125.0</td>\n",
8383
       "      <td>42.407950</td>\n",
8384
       "      <td>222.344198</td>\n",
8385
       "      <td>0.488281</td>\n",
8386
       "      <td>0.488281</td>\n",
8387
       "      <td>40.0</td>\n",
8388
       "      <td>40.0</td>\n",
8389
       "      <td>False</td>\n",
8390
       "      <td>80.0</td>\n",
8391
       "      <td>80.0</td>\n",
8392
       "      <td>0</td>\n",
8393
       "      <td>0</td>\n",
8394
       "      <td>1</td>\n",
8395
       "      <td>1</td>\n",
8396
       "      <td>1.862568</td>\n",
8397
       "      <td>0.0</td>\n",
8398
       "      <td>...</td>\n",
8399
       "      <td>1.053199</td>\n",
8400
       "      <td>0.0</td>\n",
8401
       "      <td>False</td>\n",
8402
       "      <td>-0.72</td>\n",
8403
       "      <td>1.0</td>\n",
8404
       "      <td>0.0</td>\n",
8405
       "      <td>1.0</td>\n",
8406
       "      <td>1.0</td>\n",
8407
       "      <td>1.432106</td>\n",
8408
       "      <td>1.0</td>\n",
8409
       "      <td>0.079290</td>\n",
8410
       "      <td>0.0</td>\n",
8411
       "      <td>-0.48</td>\n",
8412
       "      <td>1.0</td>\n",
8413
       "      <td>False</td>\n",
8414
       "      <td>2</td>\n",
8415
       "      <td>1.249860</td>\n",
8416
       "      <td>0.592577</td>\n",
8417
       "      <td>-0.3</td>\n",
8418
       "      <td>-0.203390</td>\n",
8419
       "      <td>14</td>\n",
8420
       "      <td>0.000000</td>\n",
8421
       "      <td>1.639649</td>\n",
8422
       "      <td>1.660400</td>\n",
8423
       "      <td>3</td>\n",
8424
       "      <td>3</td>\n",
8425
       "      <td>0.0</td>\n",
8426
       "      <td>0.0</td>\n",
8427
       "      <td>0.0</td>\n",
8428
       "      <td>0.0</td>\n",
8429
       "      <td>0.0</td>\n",
8430
       "      <td>0.0</td>\n",
8431
       "      <td>0</td>\n",
8432
       "      <td>-0.018112</td>\n",
8433
       "      <td>-0.193775</td>\n",
8434
       "      <td>0.0</td>\n",
8435
       "      <td>-0.584230</td>\n",
8436
       "      <td>0</td>\n",
8437
       "      <td>2</td>\n",
8438
       "      <td>False</td>\n",
8439
       "      <td>NaN</td>\n",
8440
       "      <td>NaN</td>\n",
8441
       "      <td>NaN</td>\n",
8442
       "      <td>NaN</td>\n",
8443
       "      <td>NaN</td>\n",
8444
       "      <td>NaN</td>\n",
8445
       "      <td>False</td>\n",
8446
       "      <td>-0.5</td>\n",
8447
       "      <td>35441</td>\n",
8448
       "      <td>0.911084</td>\n",
8449
       "    </tr>\n",
8450
       "    <tr>\n",
8451
       "      <td>90394</td>\n",
8452
       "      <td>5d403bd8a</td>\n",
8453
       "      <td>ID_5d403bd8a</td>\n",
8454
       "      <td>CT</td>\n",
8455
       "      <td>ID_61101bd3</td>\n",
8456
       "      <td>ID_6aca8f9834</td>\n",
8457
       "      <td>ID_1cb45bbcea</td>\n",
8458
       "      <td>NaN</td>\n",
8459
       "      <td>['-125', '42.4079503', '206.464926']</td>\n",
8460
       "      <td>['1', '0', '0', '0', '0.939692621', '-0.342020...</td>\n",
8461
       "      <td>1</td>\n",
8462
       "      <td>MONOCHROME2</td>\n",
8463
       "      <td>512</td>\n",
8464
       "      <td>512</td>\n",
8465
       "      <td>['0.48828125', '0.48828125']</td>\n",
8466
       "      <td>16</td>\n",
8467
       "      <td>12</td>\n",
8468
       "      <td>11</td>\n",
8469
       "      <td>0</td>\n",
8470
       "      <td>['00040', '00040']</td>\n",
8471
       "      <td>['00080', '00080']</td>\n",
8472
       "      <td>-1024.0</td>\n",
8473
       "      <td>1.0</td>\n",
8474
       "      <td>1.301333</td>\n",
8475
       "      <td>0.010667</td>\n",
8476
       "      <td>-0.780873</td>\n",
8477
       "      <td>1.496547</td>\n",
8478
       "      <td>False</td>\n",
8479
       "      <td>True</td>\n",
8480
       "      <td>1.0</td>\n",
8481
       "      <td>0.0</td>\n",
8482
       "      <td>0.0</td>\n",
8483
       "      <td>0.0</td>\n",
8484
       "      <td>0.939693</td>\n",
8485
       "      <td>-0.342020</td>\n",
8486
       "      <td>-125.0</td>\n",
8487
       "      <td>42.407950</td>\n",
8488
       "      <td>206.464926</td>\n",
8489
       "      <td>0.488281</td>\n",
8490
       "      <td>0.488281</td>\n",
8491
       "      <td>40.0</td>\n",
8492
       "      <td>40.0</td>\n",
8493
       "      <td>False</td>\n",
8494
       "      <td>80.0</td>\n",
8495
       "      <td>80.0</td>\n",
8496
       "      <td>0</td>\n",
8497
       "      <td>0</td>\n",
8498
       "      <td>1</td>\n",
8499
       "      <td>1</td>\n",
8500
       "      <td>1.862568</td>\n",
8501
       "      <td>0.0</td>\n",
8502
       "      <td>...</td>\n",
8503
       "      <td>1.053199</td>\n",
8504
       "      <td>0.0</td>\n",
8505
       "      <td>False</td>\n",
8506
       "      <td>-0.72</td>\n",
8507
       "      <td>1.0</td>\n",
8508
       "      <td>0.0</td>\n",
8509
       "      <td>1.0</td>\n",
8510
       "      <td>1.0</td>\n",
8511
       "      <td>1.432106</td>\n",
8512
       "      <td>1.0</td>\n",
8513
       "      <td>0.056540</td>\n",
8514
       "      <td>0.0</td>\n",
8515
       "      <td>-0.48</td>\n",
8516
       "      <td>1.0</td>\n",
8517
       "      <td>False</td>\n",
8518
       "      <td>2</td>\n",
8519
       "      <td>1.249860</td>\n",
8520
       "      <td>0.592577</td>\n",
8521
       "      <td>-0.3</td>\n",
8522
       "      <td>-0.406780</td>\n",
8523
       "      <td>11</td>\n",
8524
       "      <td>0.203390</td>\n",
8525
       "      <td>1.660400</td>\n",
8526
       "      <td>1.639587</td>\n",
8527
       "      <td>3</td>\n",
8528
       "      <td>3</td>\n",
8529
       "      <td>0.0</td>\n",
8530
       "      <td>0.0</td>\n",
8531
       "      <td>0.0</td>\n",
8532
       "      <td>0.0</td>\n",
8533
       "      <td>0.0</td>\n",
8534
       "      <td>0.0</td>\n",
8535
       "      <td>0</td>\n",
8536
       "      <td>-0.018112</td>\n",
8537
       "      <td>-0.580318</td>\n",
8538
       "      <td>0.0</td>\n",
8539
       "      <td>-0.584230</td>\n",
8540
       "      <td>0</td>\n",
8541
       "      <td>2</td>\n",
8542
       "      <td>False</td>\n",
8543
       "      <td>NaN</td>\n",
8544
       "      <td>NaN</td>\n",
8545
       "      <td>NaN</td>\n",
8546
       "      <td>NaN</td>\n",
8547
       "      <td>NaN</td>\n",
8548
       "      <td>NaN</td>\n",
8549
       "      <td>False</td>\n",
8550
       "      <td>-0.5</td>\n",
8551
       "      <td>35438</td>\n",
8552
       "      <td>0.911625</td>\n",
8553
       "    </tr>\n",
8554
       "    <tr>\n",
8555
       "      <td>23519</td>\n",
8556
       "      <td>645917b86</td>\n",
8557
       "      <td>ID_645917b86</td>\n",
8558
       "      <td>CT</td>\n",
8559
       "      <td>ID_61101bd3</td>\n",
8560
       "      <td>ID_6aca8f9834</td>\n",
8561
       "      <td>ID_1cb45bbcea</td>\n",
8562
       "      <td>NaN</td>\n",
8563
       "      <td>['-125', '42.4079503', '211.7441']</td>\n",
8564
       "      <td>['1', '0', '0', '0', '0.939692621', '-0.342020...</td>\n",
8565
       "      <td>1</td>\n",
8566
       "      <td>MONOCHROME2</td>\n",
8567
       "      <td>512</td>\n",
8568
       "      <td>512</td>\n",
8569
       "      <td>['0.48828125', '0.48828125']</td>\n",
8570
       "      <td>16</td>\n",
8571
       "      <td>12</td>\n",
8572
       "      <td>11</td>\n",
8573
       "      <td>0</td>\n",
8574
       "      <td>['00040', '00040']</td>\n",
8575
       "      <td>['00080', '00080']</td>\n",
8576
       "      <td>-1024.0</td>\n",
8577
       "      <td>1.0</td>\n",
8578
       "      <td>1.301333</td>\n",
8579
       "      <td>0.037333</td>\n",
8580
       "      <td>-0.754200</td>\n",
8581
       "      <td>1.500107</td>\n",
8582
       "      <td>False</td>\n",
8583
       "      <td>True</td>\n",
8584
       "      <td>1.0</td>\n",
8585
       "      <td>0.0</td>\n",
8586
       "      <td>0.0</td>\n",
8587
       "      <td>0.0</td>\n",
8588
       "      <td>0.939693</td>\n",
8589
       "      <td>-0.342020</td>\n",
8590
       "      <td>-125.0</td>\n",
8591
       "      <td>42.407950</td>\n",
8592
       "      <td>211.744100</td>\n",
8593
       "      <td>0.488281</td>\n",
8594
       "      <td>0.488281</td>\n",
8595
       "      <td>40.0</td>\n",
8596
       "      <td>40.0</td>\n",
8597
       "      <td>False</td>\n",
8598
       "      <td>80.0</td>\n",
8599
       "      <td>80.0</td>\n",
8600
       "      <td>0</td>\n",
8601
       "      <td>0</td>\n",
8602
       "      <td>1</td>\n",
8603
       "      <td>1</td>\n",
8604
       "      <td>1.862568</td>\n",
8605
       "      <td>0.0</td>\n",
8606
       "      <td>...</td>\n",
8607
       "      <td>1.053199</td>\n",
8608
       "      <td>0.0</td>\n",
8609
       "      <td>False</td>\n",
8610
       "      <td>-0.72</td>\n",
8611
       "      <td>1.0</td>\n",
8612
       "      <td>0.0</td>\n",
8613
       "      <td>1.0</td>\n",
8614
       "      <td>1.0</td>\n",
8615
       "      <td>1.432106</td>\n",
8616
       "      <td>1.0</td>\n",
8617
       "      <td>0.064103</td>\n",
8618
       "      <td>0.0</td>\n",
8619
       "      <td>-0.48</td>\n",
8620
       "      <td>1.0</td>\n",
8621
       "      <td>False</td>\n",
8622
       "      <td>2</td>\n",
8623
       "      <td>1.249860</td>\n",
8624
       "      <td>0.592577</td>\n",
8625
       "      <td>-0.3</td>\n",
8626
       "      <td>-0.338983</td>\n",
8627
       "      <td>12</td>\n",
8628
       "      <td>0.135593</td>\n",
8629
       "      <td>1.639587</td>\n",
8630
       "      <td>1.660400</td>\n",
8631
       "      <td>3</td>\n",
8632
       "      <td>3</td>\n",
8633
       "      <td>0.0</td>\n",
8634
       "      <td>0.0</td>\n",
8635
       "      <td>0.0</td>\n",
8636
       "      <td>0.0</td>\n",
8637
       "      <td>0.0</td>\n",
8638
       "      <td>0.0</td>\n",
8639
       "      <td>0</td>\n",
8640
       "      <td>-0.018112</td>\n",
8641
       "      <td>-0.451810</td>\n",
8642
       "      <td>0.0</td>\n",
8643
       "      <td>-0.584230</td>\n",
8644
       "      <td>0</td>\n",
8645
       "      <td>2</td>\n",
8646
       "      <td>False</td>\n",
8647
       "      <td>NaN</td>\n",
8648
       "      <td>NaN</td>\n",
8649
       "      <td>NaN</td>\n",
8650
       "      <td>NaN</td>\n",
8651
       "      <td>NaN</td>\n",
8652
       "      <td>NaN</td>\n",
8653
       "      <td>False</td>\n",
8654
       "      <td>-0.5</td>\n",
8655
       "      <td>35439</td>\n",
8656
       "      <td>0.916977</td>\n",
8657
       "    </tr>\n",
8658
       "    <tr>\n",
8659
       "      <td>41043</td>\n",
8660
       "      <td>0f43a379c</td>\n",
8661
       "      <td>ID_0f43a379c</td>\n",
8662
       "      <td>CT</td>\n",
8663
       "      <td>ID_61101bd3</td>\n",
8664
       "      <td>ID_6aca8f9834</td>\n",
8665
       "      <td>ID_1cb45bbcea</td>\n",
8666
       "      <td>NaN</td>\n",
8667
       "      <td>['-125', '42.4079503', '217.064901']</td>\n",
8668
       "      <td>['1', '0', '0', '0', '0.939692621', '-0.342020...</td>\n",
8669
       "      <td>1</td>\n",
8670
       "      <td>MONOCHROME2</td>\n",
8671
       "      <td>512</td>\n",
8672
       "      <td>512</td>\n",
8673
       "      <td>['0.48828125', '0.48828125']</td>\n",
8674
       "      <td>16</td>\n",
8675
       "      <td>12</td>\n",
8676
       "      <td>11</td>\n",
8677
       "      <td>0</td>\n",
8678
       "      <td>['00040', '00040']</td>\n",
8679
       "      <td>['00080', '00080']</td>\n",
8680
       "      <td>-1024.0</td>\n",
8681
       "      <td>1.0</td>\n",
8682
       "      <td>1.301333</td>\n",
8683
       "      <td>0.000000</td>\n",
8684
       "      <td>-0.718398</td>\n",
8685
       "      <td>1.505203</td>\n",
8686
       "      <td>False</td>\n",
8687
       "      <td>True</td>\n",
8688
       "      <td>1.0</td>\n",
8689
       "      <td>0.0</td>\n",
8690
       "      <td>0.0</td>\n",
8691
       "      <td>0.0</td>\n",
8692
       "      <td>0.939693</td>\n",
8693
       "      <td>-0.342020</td>\n",
8694
       "      <td>-125.0</td>\n",
8695
       "      <td>42.407950</td>\n",
8696
       "      <td>217.064901</td>\n",
8697
       "      <td>0.488281</td>\n",
8698
       "      <td>0.488281</td>\n",
8699
       "      <td>40.0</td>\n",
8700
       "      <td>40.0</td>\n",
8701
       "      <td>False</td>\n",
8702
       "      <td>80.0</td>\n",
8703
       "      <td>80.0</td>\n",
8704
       "      <td>0</td>\n",
8705
       "      <td>0</td>\n",
8706
       "      <td>1</td>\n",
8707
       "      <td>1</td>\n",
8708
       "      <td>1.862568</td>\n",
8709
       "      <td>0.0</td>\n",
8710
       "      <td>...</td>\n",
8711
       "      <td>1.053199</td>\n",
8712
       "      <td>0.0</td>\n",
8713
       "      <td>False</td>\n",
8714
       "      <td>-0.72</td>\n",
8715
       "      <td>1.0</td>\n",
8716
       "      <td>0.0</td>\n",
8717
       "      <td>1.0</td>\n",
8718
       "      <td>1.0</td>\n",
8719
       "      <td>1.432106</td>\n",
8720
       "      <td>1.0</td>\n",
8721
       "      <td>0.071726</td>\n",
8722
       "      <td>0.0</td>\n",
8723
       "      <td>-0.48</td>\n",
8724
       "      <td>1.0</td>\n",
8725
       "      <td>False</td>\n",
8726
       "      <td>2</td>\n",
8727
       "      <td>1.249860</td>\n",
8728
       "      <td>0.592577</td>\n",
8729
       "      <td>-0.3</td>\n",
8730
       "      <td>-0.271186</td>\n",
8731
       "      <td>13</td>\n",
8732
       "      <td>0.067797</td>\n",
8733
       "      <td>1.660400</td>\n",
8734
       "      <td>1.639649</td>\n",
8735
       "      <td>3</td>\n",
8736
       "      <td>3</td>\n",
8737
       "      <td>0.0</td>\n",
8738
       "      <td>0.0</td>\n",
8739
       "      <td>0.0</td>\n",
8740
       "      <td>0.0</td>\n",
8741
       "      <td>0.0</td>\n",
8742
       "      <td>0.0</td>\n",
8743
       "      <td>0</td>\n",
8744
       "      <td>-0.018112</td>\n",
8745
       "      <td>-0.322287</td>\n",
8746
       "      <td>0.0</td>\n",
8747
       "      <td>-0.584230</td>\n",
8748
       "      <td>0</td>\n",
8749
       "      <td>2</td>\n",
8750
       "      <td>False</td>\n",
8751
       "      <td>NaN</td>\n",
8752
       "      <td>NaN</td>\n",
8753
       "      <td>NaN</td>\n",
8754
       "      <td>NaN</td>\n",
8755
       "      <td>NaN</td>\n",
8756
       "      <td>NaN</td>\n",
8757
       "      <td>False</td>\n",
8758
       "      <td>-0.5</td>\n",
8759
       "      <td>35440</td>\n",
8760
       "      <td>0.917555</td>\n",
8761
       "    </tr>\n",
8762
       "  </tbody>\n",
8763
       "</table>\n",
8764
       "<p>5 rows × 101 columns</p>\n",
8765
       "</div>"
8766
      ],
8767
      "text/plain": [
8768
       "          img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
8769
       "85421  ba1a7894c   ID_ba1a7894c       CT  ID_6f87831a    ID_a6ca244172   \n",
8770
       "46168  7f9480ae5   ID_7f9480ae5       CT  ID_61101bd3    ID_6aca8f9834   \n",
8771
       "90394  5d403bd8a   ID_5d403bd8a       CT  ID_61101bd3    ID_6aca8f9834   \n",
8772
       "23519  645917b86   ID_645917b86       CT  ID_61101bd3    ID_6aca8f9834   \n",
8773
       "41043  0f43a379c   ID_0f43a379c       CT  ID_61101bd3    ID_6aca8f9834   \n",
8774
       "\n",
8775
       "      SeriesInstanceUID  StudyID  \\\n",
8776
       "85421     ID_d00cee7f0c      NaN   \n",
8777
       "46168     ID_1cb45bbcea      NaN   \n",
8778
       "90394     ID_1cb45bbcea      NaN   \n",
8779
       "23519     ID_1cb45bbcea      NaN   \n",
8780
       "41043     ID_1cb45bbcea      NaN   \n",
8781
       "\n",
8782
       "                               ImagePositionPatient  \\\n",
8783
       "85421  ['-125.000000', '-119.997978', '127.192337']   \n",
8784
       "46168          ['-125', '42.4079503', '222.344198']   \n",
8785
       "90394          ['-125', '42.4079503', '206.464926']   \n",
8786
       "23519            ['-125', '42.4079503', '211.7441']   \n",
8787
       "41043          ['-125', '42.4079503', '217.064901']   \n",
8788
       "\n",
8789
       "                                 ImageOrientationPatient  SamplesPerPixel  \\\n",
8790
       "85421  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
8791
       "46168  ['1', '0', '0', '0', '0.939692621', '-0.342020...                1   \n",
8792
       "90394  ['1', '0', '0', '0', '0.939692621', '-0.342020...                1   \n",
8793
       "23519  ['1', '0', '0', '0', '0.939692621', '-0.342020...                1   \n",
8794
       "41043  ['1', '0', '0', '0', '0.939692621', '-0.342020...                1   \n",
8795
       "\n",
8796
       "      PhotometricInterpretation  Rows  Columns                  PixelSpacing  \\\n",
8797
       "85421               MONOCHROME2   512      512      ['0.488281', '0.488281']   \n",
8798
       "46168               MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
8799
       "90394               MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
8800
       "23519               MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
8801
       "41043               MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
8802
       "\n",
8803
       "       BitsAllocated  BitsStored  HighBit  PixelRepresentation  \\\n",
8804
       "85421             16          16       15                    1   \n",
8805
       "46168             16          12       11                    0   \n",
8806
       "90394             16          12       11                    0   \n",
8807
       "23519             16          12       11                    0   \n",
8808
       "41043             16          12       11                    0   \n",
8809
       "\n",
8810
       "             WindowCenter         WindowWidth  RescaleIntercept  RescaleSlope  \\\n",
8811
       "85421                  30                  80           -1024.0           1.0   \n",
8812
       "46168  ['00040', '00040']  ['00080', '00080']           -1024.0           1.0   \n",
8813
       "90394  ['00040', '00040']  ['00080', '00080']           -1024.0           1.0   \n",
8814
       "23519  ['00040', '00040']  ['00080', '00080']           -1024.0           1.0   \n",
8815
       "41043  ['00040', '00040']  ['00080', '00080']           -1024.0           1.0   \n",
8816
       "\n",
8817
       "         PxlMin    PxlMax    PxlStd   PxlMean   test  test2  \\\n",
8818
       "85421 -1.365333  0.310667  1.642553 -0.881730  False   True   \n",
8819
       "46168  1.301333  0.006667 -0.700807  1.484853  False   True   \n",
8820
       "90394  1.301333  0.010667 -0.780873  1.496547  False   True   \n",
8821
       "23519  1.301333  0.037333 -0.754200  1.500107  False   True   \n",
8822
       "41043  1.301333  0.000000 -0.718398  1.505203  False   True   \n",
8823
       "\n",
8824
       "       ImageOrientationPatient_0  ImageOrientationPatient_1  \\\n",
8825
       "85421                        1.0                        0.0   \n",
8826
       "46168                        1.0                        0.0   \n",
8827
       "90394                        1.0                        0.0   \n",
8828
       "23519                        1.0                        0.0   \n",
8829
       "41043                        1.0                        0.0   \n",
8830
       "\n",
8831
       "       ImageOrientationPatient_2  ImageOrientationPatient_3  \\\n",
8832
       "85421                        0.0                        0.0   \n",
8833
       "46168                        0.0                        0.0   \n",
8834
       "90394                        0.0                        0.0   \n",
8835
       "23519                        0.0                        0.0   \n",
8836
       "41043                        0.0                        0.0   \n",
8837
       "\n",
8838
       "       ImageOrientationPatient_4  ImageOrientationPatient_5  \\\n",
8839
       "85421                   0.927184                  -0.374607   \n",
8840
       "46168                   0.939693                  -0.342020   \n",
8841
       "90394                   0.939693                  -0.342020   \n",
8842
       "23519                   0.939693                  -0.342020   \n",
8843
       "41043                   0.939693                  -0.342020   \n",
8844
       "\n",
8845
       "       ImagePositionPatient_0  ImagePositionPatient_1  ImagePositionPatient_2  \\\n",
8846
       "85421                  -125.0             -119.997978              127.192337   \n",
8847
       "46168                  -125.0               42.407950              222.344198   \n",
8848
       "90394                  -125.0               42.407950              206.464926   \n",
8849
       "23519                  -125.0               42.407950              211.744100   \n",
8850
       "41043                  -125.0               42.407950              217.064901   \n",
8851
       "\n",
8852
       "       PixelSpacing_0  PixelSpacing_1  WindowCenter_0  WindowCenter_1  \\\n",
8853
       "85421        0.488281        0.488281            30.0             NaN   \n",
8854
       "46168        0.488281        0.488281            40.0            40.0   \n",
8855
       "90394        0.488281        0.488281            40.0            40.0   \n",
8856
       "23519        0.488281        0.488281            40.0            40.0   \n",
8857
       "41043        0.488281        0.488281            40.0            40.0   \n",
8858
       "\n",
8859
       "       WindowCenter_1_NAN  WindowWidth_0  WindowWidth_1  WindowWidth_0_le  \\\n",
8860
       "85421                True           80.0            NaN                 0   \n",
8861
       "46168               False           80.0           80.0                 0   \n",
8862
       "90394               False           80.0           80.0                 0   \n",
8863
       "23519               False           80.0           80.0                 0   \n",
8864
       "41043               False           80.0           80.0                 0   \n",
8865
       "\n",
8866
       "       WindowWidth_1_le  WindowCenter_1_le  BitType_le  \\\n",
8867
       "85421                 1                  3           0   \n",
8868
       "46168                 0                  1           1   \n",
8869
       "90394                 0                  1           1   \n",
8870
       "23519                 0                  1           1   \n",
8871
       "41043                 0                  1           1   \n",
8872
       "\n",
8873
       "       ImageOrientationPatient_4_f  ImageOrientationPatient_4_enc_0  ...  \\\n",
8874
       "85421                    -1.333333                              0.0  ...   \n",
8875
       "46168                     1.862568                              0.0  ...   \n",
8876
       "90394                     1.862568                              0.0  ...   \n",
8877
       "23519                     1.862568                              0.0  ...   \n",
8878
       "41043                     1.862568                              0.0  ...   \n",
8879
       "\n",
8880
       "       ImageOrientationPatient_5_f  ImageOrientationPatient_5_enc_0  \\\n",
8881
       "85421                    -0.666667                              0.0   \n",
8882
       "46168                     1.053199                              0.0   \n",
8883
       "90394                     1.053199                              0.0   \n",
8884
       "23519                     1.053199                              0.0   \n",
8885
       "41043                     1.053199                              0.0   \n",
8886
       "\n",
8887
       "       ImageOrientationPatient_5_enc_1  ImagePositionPatient_0_f  \\\n",
8888
       "85421                             True                     -0.72   \n",
8889
       "46168                            False                     -0.72   \n",
8890
       "90394                            False                     -0.72   \n",
8891
       "23519                            False                     -0.72   \n",
8892
       "41043                            False                     -0.72   \n",
8893
       "\n",
8894
       "       ImagePositionPatient_0_enc_0  ImagePositionPatient_0_enc_1  \\\n",
8895
       "85421                           1.0                           0.0   \n",
8896
       "46168                           1.0                           0.0   \n",
8897
       "90394                           1.0                           0.0   \n",
8898
       "23519                           1.0                           0.0   \n",
8899
       "41043                           1.0                           0.0   \n",
8900
       "\n",
8901
       "       ImagePositionPatient_0_f_r1  ImagePositionPatient_0_f_r05  \\\n",
8902
       "85421                          1.0                           1.0   \n",
8903
       "46168                          1.0                           1.0   \n",
8904
       "90394                          1.0                           1.0   \n",
8905
       "23519                          1.0                           1.0   \n",
8906
       "41043                          1.0                           1.0   \n",
8907
       "\n",
8908
       "       ImagePositionPatient_1_f  ImagePositionPatient_1_enc_0  \\\n",
8909
       "85421                 -0.733306                           0.0   \n",
8910
       "46168                  1.432106                           1.0   \n",
8911
       "90394                  1.432106                           1.0   \n",
8912
       "23519                  1.432106                           1.0   \n",
8913
       "41043                  1.432106                           1.0   \n",
8914
       "\n",
8915
       "       ImagePositionPatient_2_f  ImagePositionPatient_2_f_r05  \\\n",
8916
       "85421                 -0.057031                           0.0   \n",
8917
       "46168                  0.079290                           0.0   \n",
8918
       "90394                  0.056540                           0.0   \n",
8919
       "23519                  0.064103                           0.0   \n",
8920
       "41043                  0.071726                           0.0   \n",
8921
       "\n",
8922
       "       PixelSpacing_1_f  PixelSpacing_1_enc_0  PixelSpacing_1_enc_1  \\\n",
8923
       "85421             -0.48                   1.0                 False   \n",
8924
       "46168             -0.48                   1.0                 False   \n",
8925
       "90394             -0.48                   1.0                 False   \n",
8926
       "23519             -0.48                   1.0                 False   \n",
8927
       "41043             -0.48                   1.0                 False   \n",
8928
       "\n",
8929
       "       WindowCenter_0_le   pos_max   pos_min  pos_size  pos_idx1  pos_idx  \\\n",
8930
       "85421                  0  0.918601  0.249929      -0.3 -0.338983       12   \n",
8931
       "46168                  2  1.249860  0.592577      -0.3 -0.203390       14   \n",
8932
       "90394                  2  1.249860  0.592577      -0.3 -0.406780       11   \n",
8933
       "23519                  2  1.249860  0.592577      -0.3 -0.338983       12   \n",
8934
       "41043                  2  1.249860  0.592577      -0.3 -0.271186       13   \n",
8935
       "\n",
8936
       "       pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  pos_inc2_grp_le  \\\n",
8937
       "85421  0.135593  1.695991  1.696335                3                3   \n",
8938
       "46168  0.000000  1.639649  1.660400                3                3   \n",
8939
       "90394  0.203390  1.660400  1.639587                3                3   \n",
8940
       "23519  0.135593  1.639587  1.660400                3                3   \n",
8941
       "41043  0.067797  1.660400  1.639649                3                3   \n",
8942
       "\n",
8943
       "       pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  pos_inc2_enc_0  \\\n",
8944
       "85421          0.0             0.0             0.0             0.0   \n",
8945
       "46168          0.0             0.0             0.0             0.0   \n",
8946
       "90394          0.0             0.0             0.0             0.0   \n",
8947
       "23519          0.0             0.0             0.0             0.0   \n",
8948
       "41043          0.0             0.0             0.0             0.0   \n",
8949
       "\n",
8950
       "       pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  pos_range   pos_rel  \\\n",
8951
       "85421             0.0             0.0            0   0.057814 -0.451618   \n",
8952
       "46168             0.0             0.0            0  -0.018112 -0.193775   \n",
8953
       "90394             0.0             0.0            0  -0.018112 -0.580318   \n",
8954
       "23519             0.0             0.0            0  -0.018112 -0.451810   \n",
8955
       "41043             0.0             0.0            0  -0.018112 -0.322287   \n",
8956
       "\n",
8957
       "       pos_zeros  pos_inc_rng  pos_zeros_le  PxlMin_grp_le  PxlMin_zero  any  \\\n",
8958
       "85421        0.0    -0.599737             0              0         True  NaN   \n",
8959
       "46168        0.0    -0.584230             0              2        False  NaN   \n",
8960
       "90394        0.0    -0.584230             0              2        False  NaN   \n",
8961
       "23519        0.0    -0.584230             0              2        False  NaN   \n",
8962
       "41043        0.0    -0.584230             0              2        False  NaN   \n",
8963
       "\n",
8964
       "       epidural  intraparenchymal  intraventricular  subarachnoid  subdural  \\\n",
8965
       "85421       NaN               NaN               NaN           NaN       NaN   \n",
8966
       "46168       NaN               NaN               NaN           NaN       NaN   \n",
8967
       "90394       NaN               NaN               NaN           NaN       NaN   \n",
8968
       "23519       NaN               NaN               NaN           NaN       NaN   \n",
8969
       "41043       NaN               NaN               NaN           NaN       NaN   \n",
8970
       "\n",
8971
       "       any_series  SeriesPP  yuval_idx  pred_any  \n",
8972
       "85421       False      -0.5     111470  0.902253  \n",
8973
       "46168       False      -0.5      35441  0.911084  \n",
8974
       "90394       False      -0.5      35438  0.911625  \n",
8975
       "23519       False      -0.5      35439  0.916977  \n",
8976
       "41043       False      -0.5      35440  0.917555  \n",
8977
       "\n",
8978
       "[5 rows x 101 columns]"
8979
      ]
8980
     },
8981
     "execution_count": 64,
8982
     "metadata": {},
8983
     "output_type": "execute_result"
8984
    }
8985
   ],
8986
   "source": [
8987
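    "# store the second column of predictions as 'pred_any', then show the five test slices with the highest scores\n",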
    "test_md['pred_any'] = predictions[:,1]\n",
8988
    "test_md.sort_values('pred_any').tail()"
8989
   ]
8990
  },
8991
  {
8992
   "cell_type": "code",
8993
   "execution_count": 65,
8994
   "metadata": {},
8995
   "outputs": [
8996
    {
8997
     "data": {
8998
      "text/html": [
8999
       "<div>\n",
9000
       "<style scoped>\n",
9001
       "    .dataframe tbody tr th:only-of-type {\n",
9002
       "        vertical-align: middle;\n",
9003
       "    }\n",
9004
       "\n",
9005
       "    .dataframe tbody tr th {\n",
9006
       "        vertical-align: top;\n",
9007
       "    }\n",
9008
       "\n",
9009
       "    .dataframe thead th {\n",
9010
       "        text-align: right;\n",
9011
       "    }\n",
9012
       "</style>\n",
9013
       "<table border=\"1\" class=\"dataframe\">\n",
9014
       "  <thead>\n",
9015
       "    <tr style=\"text-align: right;\">\n",
9016
       "      <th></th>\n",
9017
       "      <th>img_id</th>\n",
9018
       "      <th>SOPInstanceUID</th>\n",
9019
       "      <th>Modality</th>\n",
9020
       "      <th>PatientID</th>\n",
9021
       "      <th>StudyInstanceUID</th>\n",
9022
       "      <th>SeriesInstanceUID</th>\n",
9023
       "      <th>StudyID</th>\n",
9024
       "      <th>ImagePositionPatient</th>\n",
9025
       "      <th>ImageOrientationPatient</th>\n",
9026
       "      <th>SamplesPerPixel</th>\n",
9027
       "      <th>PhotometricInterpretation</th>\n",
9028
       "      <th>Rows</th>\n",
9029
       "      <th>Columns</th>\n",
9030
       "      <th>PixelSpacing</th>\n",
9031
       "      <th>BitsAllocated</th>\n",
9032
       "      <th>BitsStored</th>\n",
9033
       "      <th>HighBit</th>\n",
9034
       "      <th>PixelRepresentation</th>\n",
9035
       "      <th>WindowCenter</th>\n",
9036
       "      <th>WindowWidth</th>\n",
9037
       "      <th>RescaleIntercept</th>\n",
9038
       "      <th>RescaleSlope</th>\n",
9039
       "      <th>PxlMin</th>\n",
9040
       "      <th>PxlMax</th>\n",
9041
       "      <th>PxlStd</th>\n",
9042
       "      <th>PxlMean</th>\n",
9043
       "      <th>test</th>\n",
9044
       "      <th>test2</th>\n",
9045
       "      <th>ImageOrientationPatient_0</th>\n",
9046
       "      <th>ImageOrientationPatient_1</th>\n",
9047
       "      <th>ImageOrientationPatient_2</th>\n",
9048
       "      <th>ImageOrientationPatient_3</th>\n",
9049
       "      <th>ImageOrientationPatient_4</th>\n",
9050
       "      <th>ImageOrientationPatient_5</th>\n",
9051
       "      <th>ImagePositionPatient_0</th>\n",
9052
       "      <th>ImagePositionPatient_1</th>\n",
9053
       "      <th>ImagePositionPatient_2</th>\n",
9054
       "      <th>PixelSpacing_0</th>\n",
9055
       "      <th>PixelSpacing_1</th>\n",
9056
       "      <th>WindowCenter_0</th>\n",
9057
       "      <th>WindowCenter_1</th>\n",
9058
       "      <th>WindowCenter_1_NAN</th>\n",
9059
       "      <th>WindowWidth_0</th>\n",
9060
       "      <th>WindowWidth_1</th>\n",
9061
       "      <th>WindowWidth_0_le</th>\n",
9062
       "      <th>WindowWidth_1_le</th>\n",
9063
       "      <th>WindowCenter_1_le</th>\n",
9064
       "      <th>BitType_le</th>\n",
9065
       "      <th>ImageOrientationPatient_4_f</th>\n",
9066
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
9067
       "      <th>...</th>\n",
9068
       "      <th>ImageOrientationPatient_5_f</th>\n",
9069
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
9070
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
9071
       "      <th>ImagePositionPatient_0_f</th>\n",
9072
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
9073
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
9074
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
9075
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
9076
       "      <th>ImagePositionPatient_1_f</th>\n",
9077
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
9078
       "      <th>ImagePositionPatient_2_f</th>\n",
9079
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
9080
       "      <th>PixelSpacing_1_f</th>\n",
9081
       "      <th>PixelSpacing_1_enc_0</th>\n",
9082
       "      <th>PixelSpacing_1_enc_1</th>\n",
9083
       "      <th>WindowCenter_0_le</th>\n",
9084
       "      <th>pos_max</th>\n",
9085
       "      <th>pos_min</th>\n",
9086
       "      <th>pos_size</th>\n",
9087
       "      <th>pos_idx1</th>\n",
9088
       "      <th>pos_idx</th>\n",
9089
       "      <th>pos_idx2</th>\n",
9090
       "      <th>pos_inc1</th>\n",
9091
       "      <th>pos_inc2</th>\n",
9092
       "      <th>pos_inc1_grp_le</th>\n",
9093
       "      <th>pos_inc2_grp_le</th>\n",
9094
       "      <th>pos_inc1_r1</th>\n",
9095
       "      <th>pos_inc1_r0001</th>\n",
9096
       "      <th>pos_inc1_enc_0</th>\n",
9097
       "      <th>pos_inc2_enc_0</th>\n",
9098
       "      <th>pos_inc1_enc_1</th>\n",
9099
       "      <th>pos_inc2_enc_1</th>\n",
9100
       "      <th>pos_size_le</th>\n",
9101
       "      <th>pos_range</th>\n",
9102
       "      <th>pos_rel</th>\n",
9103
       "      <th>pos_zeros</th>\n",
9104
       "      <th>pos_inc_rng</th>\n",
9105
       "      <th>pos_zeros_le</th>\n",
9106
       "      <th>PxlMin_grp_le</th>\n",
9107
       "      <th>PxlMin_zero</th>\n",
9108
       "      <th>any</th>\n",
9109
       "      <th>epidural</th>\n",
9110
       "      <th>intraparenchymal</th>\n",
9111
       "      <th>intraventricular</th>\n",
9112
       "      <th>subarachnoid</th>\n",
9113
       "      <th>subdural</th>\n",
9114
       "      <th>any_series</th>\n",
9115
       "      <th>SeriesPP</th>\n",
9116
       "      <th>yuval_idx</th>\n",
9117
       "      <th>pred_any</th>\n",
9118
       "    </tr>\n",
9119
       "  </thead>\n",
9120
       "  <tbody>\n",
9121
       "    <tr>\n",
9122
       "      <td>56318</td>\n",
9123
       "      <td>aaea1517d</td>\n",
9124
       "      <td>ID_aaea1517d</td>\n",
9125
       "      <td>CT</td>\n",
9126
       "      <td>ID_e875aaac</td>\n",
9127
       "      <td>ID_ace87fc419</td>\n",
9128
       "      <td>ID_c2050c1b62</td>\n",
9129
       "      <td>NaN</td>\n",
9130
       "      <td>['-126.408875', '-126.408875', '157.507935']</td>\n",
9131
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
9132
       "      <td>1</td>\n",
9133
       "      <td>MONOCHROME2</td>\n",
9134
       "      <td>512</td>\n",
9135
       "      <td>512</td>\n",
9136
       "      <td>['0.494750976563', '0.494750976563']</td>\n",
9137
       "      <td>16</td>\n",
9138
       "      <td>16</td>\n",
9139
       "      <td>15</td>\n",
9140
       "      <td>1</td>\n",
9141
       "      <td>35.000000</td>\n",
9142
       "      <td>135.000000</td>\n",
9143
       "      <td>-1024.0</td>\n",
9144
       "      <td>1.0</td>\n",
9145
       "      <td>1.301333</td>\n",
9146
       "      <td>0.309333</td>\n",
9147
       "      <td>-0.835506</td>\n",
9148
       "      <td>1.294749</td>\n",
9149
       "      <td>False</td>\n",
9150
       "      <td>True</td>\n",
9151
       "      <td>1.0</td>\n",
9152
       "      <td>0.0</td>\n",
9153
       "      <td>0.0</td>\n",
9154
       "      <td>0.0</td>\n",
9155
       "      <td>1.000000</td>\n",
9156
       "      <td>0.000000</td>\n",
9157
       "      <td>-126.408875</td>\n",
9158
       "      <td>-126.408875</td>\n",
9159
       "      <td>157.507935</td>\n",
9160
       "      <td>0.494751</td>\n",
9161
       "      <td>0.494751</td>\n",
9162
       "      <td>35.0</td>\n",
9163
       "      <td>NaN</td>\n",
9164
       "      <td>True</td>\n",
9165
       "      <td>135.0</td>\n",
9166
       "      <td>NaN</td>\n",
9167
       "      <td>3</td>\n",
9168
       "      <td>1</td>\n",
9169
       "      <td>3</td>\n",
9170
       "      <td>0</td>\n",
9171
       "      <td>-1.333333</td>\n",
9172
       "      <td>1.0</td>\n",
9173
       "      <td>...</td>\n",
9174
       "      <td>-0.666667</td>\n",
9175
       "      <td>1.0</td>\n",
9176
       "      <td>False</td>\n",
9177
       "      <td>-0.720000</td>\n",
9178
       "      <td>0.0</td>\n",
9179
       "      <td>1.0</td>\n",
9180
       "      <td>1.0</td>\n",
9181
       "      <td>1.0</td>\n",
9182
       "      <td>-0.818785</td>\n",
9183
       "      <td>0.0</td>\n",
9184
       "      <td>-0.013599</td>\n",
9185
       "      <td>0.0</td>\n",
9186
       "      <td>-0.48</td>\n",
9187
       "      <td>0.0</td>\n",
9188
       "      <td>True</td>\n",
9189
       "      <td>3</td>\n",
9190
       "      <td>0.850032</td>\n",
9191
       "      <td>-0.009968</td>\n",
9192
       "      <td>0.9</td>\n",
9193
       "      <td>1.016949</td>\n",
9194
       "      <td>32</td>\n",
9195
       "      <td>-0.406780</td>\n",
9196
       "      <td>-1.500000</td>\n",
9197
       "      <td>-1.500000</td>\n",
9198
       "      <td>3</td>\n",
9199
       "      <td>3</td>\n",
9200
       "      <td>1.0</td>\n",
9201
       "      <td>1.0</td>\n",
9202
       "      <td>0.0</td>\n",
9203
       "      <td>0.0</td>\n",
9204
       "      <td>1.0</td>\n",
9205
       "      <td>1.0</td>\n",
9206
       "      <td>9</td>\n",
9207
       "      <td>1.333334</td>\n",
9208
       "      <td>0.976744</td>\n",
9209
       "      <td>0.0</td>\n",
9210
       "      <td>-0.599997</td>\n",
9211
       "      <td>0</td>\n",
9212
       "      <td>2</td>\n",
9213
       "      <td>False</td>\n",
9214
       "      <td>NaN</td>\n",
9215
       "      <td>NaN</td>\n",
9216
       "      <td>NaN</td>\n",
9217
       "      <td>NaN</td>\n",
9218
       "      <td>NaN</td>\n",
9219
       "      <td>NaN</td>\n",
9220
       "      <td>False</td>\n",
9221
       "      <td>-0.5</td>\n",
9222
       "      <td>20605</td>\n",
9223
       "      <td>0.995869</td>\n",
9224
       "    </tr>\n",
9225
       "    <tr>\n",
9226
       "      <td>11838</td>\n",
9227
       "      <td>8a3a7113f</td>\n",
9228
       "      <td>ID_8a3a7113f</td>\n",
9229
       "      <td>CT</td>\n",
9230
       "      <td>ID_1f7020f7</td>\n",
9231
       "      <td>ID_ffd91b71d1</td>\n",
9232
       "      <td>ID_a0997b616a</td>\n",
9233
       "      <td>NaN</td>\n",
9234
       "      <td>['-118', '20.437079', '167.587618']</td>\n",
9235
       "      <td>['1', '0', '0', '0', '0.978147601', '-0.207911...</td>\n",
9236
       "      <td>1</td>\n",
9237
       "      <td>MONOCHROME2</td>\n",
9238
       "      <td>512</td>\n",
9239
       "      <td>512</td>\n",
9240
       "      <td>['0.48828125', '0.48828125']</td>\n",
9241
       "      <td>16</td>\n",
9242
       "      <td>12</td>\n",
9243
       "      <td>11</td>\n",
9244
       "      <td>0</td>\n",
9245
       "      <td>['00040', '00040']</td>\n",
9246
       "      <td>['00080', '00080']</td>\n",
9247
       "      <td>-1024.0</td>\n",
9248
       "      <td>1.0</td>\n",
9249
       "      <td>1.301333</td>\n",
9250
       "      <td>0.126667</td>\n",
9251
       "      <td>-0.975261</td>\n",
9252
       "      <td>0.972069</td>\n",
9253
       "      <td>False</td>\n",
9254
       "      <td>True</td>\n",
9255
       "      <td>1.0</td>\n",
9256
       "      <td>0.0</td>\n",
9257
       "      <td>0.0</td>\n",
9258
       "      <td>0.0</td>\n",
9259
       "      <td>0.978148</td>\n",
9260
       "      <td>-0.207912</td>\n",
9261
       "      <td>-118.000000</td>\n",
9262
       "      <td>20.437079</td>\n",
9263
       "      <td>167.587618</td>\n",
9264
       "      <td>0.488281</td>\n",
9265
       "      <td>0.488281</td>\n",
9266
       "      <td>40.0</td>\n",
9267
       "      <td>40.0</td>\n",
9268
       "      <td>False</td>\n",
9269
       "      <td>80.0</td>\n",
9270
       "      <td>80.0</td>\n",
9271
       "      <td>0</td>\n",
9272
       "      <td>0</td>\n",
9273
       "      <td>1</td>\n",
9274
       "      <td>1</td>\n",
9275
       "      <td>2.375301</td>\n",
9276
       "      <td>0.0</td>\n",
9277
       "      <td>...</td>\n",
9278
       "      <td>1.947255</td>\n",
9279
       "      <td>0.0</td>\n",
9280
       "      <td>False</td>\n",
9281
       "      <td>1.466667</td>\n",
9282
       "      <td>0.0</td>\n",
9283
       "      <td>0.0</td>\n",
9284
       "      <td>1.0</td>\n",
9285
       "      <td>1.0</td>\n",
9286
       "      <td>1.139161</td>\n",
9287
       "      <td>1.0</td>\n",
9288
       "      <td>0.000842</td>\n",
9289
       "      <td>0.0</td>\n",
9290
       "      <td>-0.48</td>\n",
9291
       "      <td>1.0</td>\n",
9292
       "      <td>False</td>\n",
9293
       "      <td>2</td>\n",
9294
       "      <td>1.078350</td>\n",
9295
       "      <td>0.445865</td>\n",
9296
       "      <td>-0.3</td>\n",
9297
       "      <td>-0.406780</td>\n",
9298
       "      <td>11</td>\n",
9299
       "      <td>0.203390</td>\n",
9300
       "      <td>1.560669</td>\n",
9301
       "      <td>1.539367</td>\n",
9302
       "      <td>3</td>\n",
9303
       "      <td>3</td>\n",
9304
       "      <td>0.0</td>\n",
9305
       "      <td>0.0</td>\n",
9306
       "      <td>0.0</td>\n",
9307
       "      <td>0.0</td>\n",
9308
       "      <td>0.0</td>\n",
9309
       "      <td>0.0</td>\n",
9310
       "      <td>0</td>\n",
9311
       "      <td>-0.183431</td>\n",
9312
       "      <td>-0.580297</td>\n",
9313
       "      <td>0.0</td>\n",
9314
       "      <td>-0.583175</td>\n",
9315
       "      <td>0</td>\n",
9316
       "      <td>2</td>\n",
9317
       "      <td>False</td>\n",
9318
       "      <td>NaN</td>\n",
9319
       "      <td>NaN</td>\n",
9320
       "      <td>NaN</td>\n",
9321
       "      <td>NaN</td>\n",
9322
       "      <td>NaN</td>\n",
9323
       "      <td>NaN</td>\n",
9324
       "      <td>False</td>\n",
9325
       "      <td>-0.5</td>\n",
9326
       "      <td>49264</td>\n",
9327
       "      <td>0.995902</td>\n",
9328
       "    </tr>\n",
9329
       "    <tr>\n",
9330
       "      <td>60633</td>\n",
9331
       "      <td>fd5080c37</td>\n",
9332
       "      <td>ID_fd5080c37</td>\n",
9333
       "      <td>CT</td>\n",
9334
       "      <td>ID_16b922cc</td>\n",
9335
       "      <td>ID_b48b0482e3</td>\n",
9336
       "      <td>ID_653f493476</td>\n",
9337
       "      <td>NaN</td>\n",
9338
       "      <td>['-125', '32.528565', '161.22819']</td>\n",
9339
       "      <td>['1', '0', '0', '0', '0.939692621', '-0.342020...</td>\n",
9340
       "      <td>1</td>\n",
9341
       "      <td>MONOCHROME2</td>\n",
9342
       "      <td>512</td>\n",
9343
       "      <td>512</td>\n",
9344
       "      <td>['0.48828125', '0.48828125']</td>\n",
9345
       "      <td>16</td>\n",
9346
       "      <td>12</td>\n",
9347
       "      <td>11</td>\n",
9348
       "      <td>0</td>\n",
9349
       "      <td>['00040', '00040']</td>\n",
9350
       "      <td>['00080', '00080']</td>\n",
9351
       "      <td>-1024.0</td>\n",
9352
       "      <td>1.0</td>\n",
9353
       "      <td>1.301333</td>\n",
9354
       "      <td>-0.090667</td>\n",
9355
       "      <td>-0.796026</td>\n",
9356
       "      <td>1.330736</td>\n",
9357
       "      <td>False</td>\n",
9358
       "      <td>True</td>\n",
9359
       "      <td>1.0</td>\n",
9360
       "      <td>0.0</td>\n",
9361
       "      <td>0.0</td>\n",
9362
       "      <td>0.0</td>\n",
9363
       "      <td>0.939693</td>\n",
9364
       "      <td>-0.342020</td>\n",
9365
       "      <td>-125.000000</td>\n",
9366
       "      <td>32.528565</td>\n",
9367
       "      <td>161.228190</td>\n",
9368
       "      <td>0.488281</td>\n",
9369
       "      <td>0.488281</td>\n",
9370
       "      <td>40.0</td>\n",
9371
       "      <td>40.0</td>\n",
9372
       "      <td>False</td>\n",
9373
       "      <td>80.0</td>\n",
9374
       "      <td>80.0</td>\n",
9375
       "      <td>0</td>\n",
9376
       "      <td>0</td>\n",
9377
       "      <td>1</td>\n",
9378
       "      <td>1</td>\n",
9379
       "      <td>1.862568</td>\n",
9380
       "      <td>0.0</td>\n",
9381
       "      <td>...</td>\n",
9382
       "      <td>1.053199</td>\n",
9383
       "      <td>0.0</td>\n",
9384
       "      <td>False</td>\n",
9385
       "      <td>-0.720000</td>\n",
9386
       "      <td>1.0</td>\n",
9387
       "      <td>0.0</td>\n",
9388
       "      <td>1.0</td>\n",
9389
       "      <td>1.0</td>\n",
9390
       "      <td>1.300381</td>\n",
9391
       "      <td>1.0</td>\n",
9392
       "      <td>-0.008269</td>\n",
9393
       "      <td>0.0</td>\n",
9394
       "      <td>-0.48</td>\n",
9395
       "      <td>1.0</td>\n",
9396
       "      <td>False</td>\n",
9397
       "      <td>2</td>\n",
9398
       "      <td>1.132596</td>\n",
9399
       "      <td>0.432912</td>\n",
9400
       "      <td>-0.1</td>\n",
9401
       "      <td>-0.474576</td>\n",
9402
       "      <td>10</td>\n",
9403
       "      <td>0.406780</td>\n",
9404
       "      <td>1.639588</td>\n",
9405
       "      <td>1.660400</td>\n",
9406
       "      <td>3</td>\n",
9407
       "      <td>3</td>\n",
9408
       "      <td>0.0</td>\n",
9409
       "      <td>0.0</td>\n",
9410
       "      <td>0.0</td>\n",
9411
       "      <td>0.0</td>\n",
9412
       "      <td>0.0</td>\n",
9413
       "      <td>0.0</td>\n",
9414
       "      <td>4</td>\n",
9415
       "      <td>0.264557</td>\n",
9416
       "      <td>-0.788021</td>\n",
9417
       "      <td>0.0</td>\n",
9418
       "      <td>-0.584137</td>\n",
9419
       "      <td>0</td>\n",
9420
       "      <td>2</td>\n",
9421
       "      <td>False</td>\n",
9422
       "      <td>NaN</td>\n",
9423
       "      <td>NaN</td>\n",
9424
       "      <td>NaN</td>\n",
9425
       "      <td>NaN</td>\n",
9426
       "      <td>NaN</td>\n",
9427
       "      <td>NaN</td>\n",
9428
       "      <td>False</td>\n",
9429
       "      <td>-0.5</td>\n",
9430
       "      <td>59637</td>\n",
9431
       "      <td>0.995940</td>\n",
9432
       "    </tr>\n",
9433
       "    <tr>\n",
9434
       "      <td>30971</td>\n",
9435
       "      <td>8dbff5245</td>\n",
9436
       "      <td>ID_8dbff5245</td>\n",
9437
       "      <td>CT</td>\n",
9438
       "      <td>ID_f0ef989c</td>\n",
9439
       "      <td>ID_fcdfd2db4e</td>\n",
9440
       "      <td>ID_b9627ee31c</td>\n",
9441
       "      <td>NaN</td>\n",
9442
       "      <td>['-125', '19.0514449', '123.026026']</td>\n",
9443
       "      <td>['1', '0', '0', '0', '0.981627183', '-0.190808...</td>\n",
9444
       "      <td>1</td>\n",
9445
       "      <td>MONOCHROME2</td>\n",
9446
       "      <td>512</td>\n",
9447
       "      <td>512</td>\n",
9448
       "      <td>['0.48828125', '0.48828125']</td>\n",
9449
       "      <td>16</td>\n",
9450
       "      <td>12</td>\n",
9451
       "      <td>11</td>\n",
9452
       "      <td>0</td>\n",
9453
       "      <td>['00040', '00040']</td>\n",
9454
       "      <td>['00080', '00080']</td>\n",
9455
       "      <td>-1024.0</td>\n",
9456
       "      <td>1.0</td>\n",
9457
       "      <td>1.301333</td>\n",
9458
       "      <td>0.164000</td>\n",
9459
       "      <td>-0.675198</td>\n",
9460
       "      <td>1.380203</td>\n",
9461
       "      <td>False</td>\n",
9462
       "      <td>True</td>\n",
9463
       "      <td>1.0</td>\n",
9464
       "      <td>0.0</td>\n",
9465
       "      <td>0.0</td>\n",
9466
       "      <td>0.0</td>\n",
9467
       "      <td>0.981627</td>\n",
9468
       "      <td>-0.190809</td>\n",
9469
       "      <td>-125.000000</td>\n",
9470
       "      <td>19.051445</td>\n",
9471
       "      <td>123.026026</td>\n",
9472
       "      <td>0.488281</td>\n",
9473
       "      <td>0.488281</td>\n",
9474
       "      <td>40.0</td>\n",
9475
       "      <td>40.0</td>\n",
9476
       "      <td>False</td>\n",
9477
       "      <td>80.0</td>\n",
9478
       "      <td>80.0</td>\n",
9479
       "      <td>0</td>\n",
9480
       "      <td>0</td>\n",
9481
       "      <td>1</td>\n",
9482
       "      <td>1</td>\n",
9483
       "      <td>2.421696</td>\n",
9484
       "      <td>0.0</td>\n",
9485
       "      <td>...</td>\n",
9486
       "      <td>2.061273</td>\n",
9487
       "      <td>0.0</td>\n",
9488
       "      <td>False</td>\n",
9489
       "      <td>-0.720000</td>\n",
9490
       "      <td>1.0</td>\n",
9491
       "      <td>0.0</td>\n",
9492
       "      <td>1.0</td>\n",
9493
       "      <td>1.0</td>\n",
9494
       "      <td>1.120686</td>\n",
9495
       "      <td>1.0</td>\n",
9496
       "      <td>-0.063000</td>\n",
9497
       "      <td>0.0</td>\n",
9498
       "      <td>-0.48</td>\n",
9499
       "      <td>1.0</td>\n",
9500
       "      <td>False</td>\n",
9501
       "      <td>2</td>\n",
9502
       "      <td>0.940904</td>\n",
9503
       "      <td>0.267694</td>\n",
9504
       "      <td>-0.1</td>\n",
9505
       "      <td>-0.406780</td>\n",
9506
       "      <td>11</td>\n",
9507
       "      <td>0.338983</td>\n",
9508
       "      <td>1.551270</td>\n",
9509
       "      <td>1.548767</td>\n",
9510
       "      <td>3</td>\n",
9511
       "      <td>3</td>\n",
9512
       "      <td>0.0</td>\n",
9513
       "      <td>0.0</td>\n",
9514
       "      <td>0.0</td>\n",
9515
       "      <td>0.0</td>\n",
9516
       "      <td>0.0</td>\n",
9517
       "      <td>0.0</td>\n",
9518
       "      <td>4</td>\n",
9519
       "      <td>0.088070</td>\n",
9520
       "      <td>-0.666627</td>\n",
9521
       "      <td>0.0</td>\n",
9522
       "      <td>-0.597988</td>\n",
9523
       "      <td>0</td>\n",
9524
       "      <td>2</td>\n",
9525
       "      <td>False</td>\n",
9526
       "      <td>NaN</td>\n",
9527
       "      <td>NaN</td>\n",
9528
       "      <td>NaN</td>\n",
9529
       "      <td>NaN</td>\n",
9530
       "      <td>NaN</td>\n",
9531
       "      <td>NaN</td>\n",
9532
       "      <td>False</td>\n",
9533
       "      <td>-0.5</td>\n",
9534
       "      <td>59366</td>\n",
9535
       "      <td>0.995958</td>\n",
9536
       "    </tr>\n",
9537
       "    <tr>\n",
9538
       "      <td>23816</td>\n",
9539
       "      <td>423cee314</td>\n",
9540
       "      <td>ID_423cee314</td>\n",
9541
       "      <td>CT</td>\n",
9542
       "      <td>ID_e875aaac</td>\n",
9543
       "      <td>ID_ace87fc419</td>\n",
9544
       "      <td>ID_c2050c1b62</td>\n",
9545
       "      <td>NaN</td>\n",
9546
       "      <td>['-126.408875', '-126.408875', '167.507935']</td>\n",
9547
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
9548
       "      <td>1</td>\n",
9549
       "      <td>MONOCHROME2</td>\n",
9550
       "      <td>512</td>\n",
9551
       "      <td>512</td>\n",
9552
       "      <td>['0.494750976563', '0.494750976563']</td>\n",
9553
       "      <td>16</td>\n",
9554
       "      <td>16</td>\n",
9555
       "      <td>15</td>\n",
9556
       "      <td>1</td>\n",
9557
       "      <td>35.000000</td>\n",
9558
       "      <td>135.000000</td>\n",
9559
       "      <td>-1024.0</td>\n",
9560
       "      <td>1.0</td>\n",
9561
       "      <td>1.301333</td>\n",
9562
       "      <td>0.265333</td>\n",
9563
       "      <td>-0.826401</td>\n",
9564
       "      <td>1.185491</td>\n",
9565
       "      <td>False</td>\n",
9566
       "      <td>True</td>\n",
9567
       "      <td>1.0</td>\n",
9568
       "      <td>0.0</td>\n",
9569
       "      <td>0.0</td>\n",
9570
       "      <td>0.0</td>\n",
9571
       "      <td>1.000000</td>\n",
9572
       "      <td>0.000000</td>\n",
9573
       "      <td>-126.408875</td>\n",
9574
       "      <td>-126.408875</td>\n",
9575
       "      <td>167.507935</td>\n",
9576
       "      <td>0.494751</td>\n",
9577
       "      <td>0.494751</td>\n",
9578
       "      <td>35.0</td>\n",
9579
       "      <td>NaN</td>\n",
9580
       "      <td>True</td>\n",
9581
       "      <td>135.0</td>\n",
9582
       "      <td>NaN</td>\n",
9583
       "      <td>3</td>\n",
9584
       "      <td>1</td>\n",
9585
       "      <td>3</td>\n",
9586
       "      <td>0</td>\n",
9587
       "      <td>-1.333333</td>\n",
9588
       "      <td>1.0</td>\n",
9589
       "      <td>...</td>\n",
9590
       "      <td>-0.666667</td>\n",
9591
       "      <td>1.0</td>\n",
9592
       "      <td>False</td>\n",
9593
       "      <td>-0.720000</td>\n",
9594
       "      <td>0.0</td>\n",
9595
       "      <td>1.0</td>\n",
9596
       "      <td>1.0</td>\n",
9597
       "      <td>1.0</td>\n",
9598
       "      <td>-0.818785</td>\n",
9599
       "      <td>0.0</td>\n",
9600
       "      <td>0.000728</td>\n",
9601
       "      <td>0.0</td>\n",
9602
       "      <td>-0.48</td>\n",
9603
       "      <td>0.0</td>\n",
9604
       "      <td>True</td>\n",
9605
       "      <td>3</td>\n",
9606
       "      <td>0.850032</td>\n",
9607
       "      <td>-0.009968</td>\n",
9608
       "      <td>0.9</td>\n",
9609
       "      <td>1.152542</td>\n",
9610
       "      <td>34</td>\n",
9611
       "      <td>-0.542373</td>\n",
9612
       "      <td>-1.500000</td>\n",
9613
       "      <td>-1.500000</td>\n",
9614
       "      <td>3</td>\n",
9615
       "      <td>3</td>\n",
9616
       "      <td>1.0</td>\n",
9617
       "      <td>1.0</td>\n",
9618
       "      <td>0.0</td>\n",
9619
       "      <td>0.0</td>\n",
9620
       "      <td>1.0</td>\n",
9621
       "      <td>1.0</td>\n",
9622
       "      <td>9</td>\n",
9623
       "      <td>1.333334</td>\n",
9624
       "      <td>1.162791</td>\n",
9625
       "      <td>0.0</td>\n",
9626
       "      <td>-0.599997</td>\n",
9627
       "      <td>0</td>\n",
9628
       "      <td>2</td>\n",
9629
       "      <td>False</td>\n",
9630
       "      <td>NaN</td>\n",
9631
       "      <td>NaN</td>\n",
9632
       "      <td>NaN</td>\n",
9633
       "      <td>NaN</td>\n",
9634
       "      <td>NaN</td>\n",
9635
       "      <td>NaN</td>\n",
9636
       "      <td>False</td>\n",
9637
       "      <td>-0.5</td>\n",
9638
       "      <td>20607</td>\n",
9639
       "      <td>0.996068</td>\n",
9640
       "    </tr>\n",
9641
       "  </tbody>\n",
9642
       "</table>\n",
9643
       "<p>5 rows × 101 columns</p>\n",
9644
       "</div>"
9645
      ],
9646
      "text/plain": [
9647
       "          img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
9648
       "56318  aaea1517d   ID_aaea1517d       CT  ID_e875aaac    ID_ace87fc419   \n",
9649
       "11838  8a3a7113f   ID_8a3a7113f       CT  ID_1f7020f7    ID_ffd91b71d1   \n",
9650
       "60633  fd5080c37   ID_fd5080c37       CT  ID_16b922cc    ID_b48b0482e3   \n",
9651
       "30971  8dbff5245   ID_8dbff5245       CT  ID_f0ef989c    ID_fcdfd2db4e   \n",
9652
       "23816  423cee314   ID_423cee314       CT  ID_e875aaac    ID_ace87fc419   \n",
9653
       "\n",
9654
       "      SeriesInstanceUID  StudyID  \\\n",
9655
       "56318     ID_c2050c1b62      NaN   \n",
9656
       "11838     ID_a0997b616a      NaN   \n",
9657
       "60633     ID_653f493476      NaN   \n",
9658
       "30971     ID_b9627ee31c      NaN   \n",
9659
       "23816     ID_c2050c1b62      NaN   \n",
9660
       "\n",
9661
       "                               ImagePositionPatient  \\\n",
9662
       "56318  ['-126.408875', '-126.408875', '157.507935']   \n",
9663
       "11838           ['-118', '20.437079', '167.587618']   \n",
9664
       "60633            ['-125', '32.528565', '161.22819']   \n",
9665
       "30971          ['-125', '19.0514449', '123.026026']   \n",
9666
       "23816  ['-126.408875', '-126.408875', '167.507935']   \n",
9667
       "\n",
9668
       "                                 ImageOrientationPatient  SamplesPerPixel  \\\n",
9669
       "56318  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
9670
       "11838  ['1', '0', '0', '0', '0.978147601', '-0.207911...                1   \n",
9671
       "60633  ['1', '0', '0', '0', '0.939692621', '-0.342020...                1   \n",
9672
       "30971  ['1', '0', '0', '0', '0.981627183', '-0.190808...                1   \n",
9673
       "23816  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
9674
       "\n",
9675
       "      PhotometricInterpretation  Rows  Columns  \\\n",
9676
       "56318               MONOCHROME2   512      512   \n",
9677
       "11838               MONOCHROME2   512      512   \n",
9678
       "60633               MONOCHROME2   512      512   \n",
9679
       "30971               MONOCHROME2   512      512   \n",
9680
       "23816               MONOCHROME2   512      512   \n",
9681
       "\n",
9682
       "                               PixelSpacing  BitsAllocated  BitsStored  \\\n",
9683
       "56318  ['0.494750976563', '0.494750976563']             16          16   \n",
9684
       "11838          ['0.48828125', '0.48828125']             16          12   \n",
9685
       "60633          ['0.48828125', '0.48828125']             16          12   \n",
9686
       "30971          ['0.48828125', '0.48828125']             16          12   \n",
9687
       "23816  ['0.494750976563', '0.494750976563']             16          16   \n",
9688
       "\n",
9689
       "       HighBit  PixelRepresentation        WindowCenter         WindowWidth  \\\n",
9690
       "56318       15                    1           35.000000          135.000000   \n",
9691
       "11838       11                    0  ['00040', '00040']  ['00080', '00080']   \n",
9692
       "60633       11                    0  ['00040', '00040']  ['00080', '00080']   \n",
9693
       "30971       11                    0  ['00040', '00040']  ['00080', '00080']   \n",
9694
       "23816       15                    1           35.000000          135.000000   \n",
9695
       "\n",
9696
       "       RescaleIntercept  RescaleSlope    PxlMin    PxlMax    PxlStd   PxlMean  \\\n",
9697
       "56318           -1024.0           1.0  1.301333  0.309333 -0.835506  1.294749   \n",
9698
       "11838           -1024.0           1.0  1.301333  0.126667 -0.975261  0.972069   \n",
9699
       "60633           -1024.0           1.0  1.301333 -0.090667 -0.796026  1.330736   \n",
9700
       "30971           -1024.0           1.0  1.301333  0.164000 -0.675198  1.380203   \n",
9701
       "23816           -1024.0           1.0  1.301333  0.265333 -0.826401  1.185491   \n",
9702
       "\n",
9703
       "        test  test2  ImageOrientationPatient_0  ImageOrientationPatient_1  \\\n",
9704
       "56318  False   True                        1.0                        0.0   \n",
9705
       "11838  False   True                        1.0                        0.0   \n",
9706
       "60633  False   True                        1.0                        0.0   \n",
9707
       "30971  False   True                        1.0                        0.0   \n",
9708
       "23816  False   True                        1.0                        0.0   \n",
9709
       "\n",
9710
       "       ImageOrientationPatient_2  ImageOrientationPatient_3  \\\n",
9711
       "56318                        0.0                        0.0   \n",
9712
       "11838                        0.0                        0.0   \n",
9713
       "60633                        0.0                        0.0   \n",
9714
       "30971                        0.0                        0.0   \n",
9715
       "23816                        0.0                        0.0   \n",
9716
       "\n",
9717
       "       ImageOrientationPatient_4  ImageOrientationPatient_5  \\\n",
9718
       "56318                   1.000000                   0.000000   \n",
9719
       "11838                   0.978148                  -0.207912   \n",
9720
       "60633                   0.939693                  -0.342020   \n",
9721
       "30971                   0.981627                  -0.190809   \n",
9722
       "23816                   1.000000                   0.000000   \n",
9723
       "\n",
9724
       "       ImagePositionPatient_0  ImagePositionPatient_1  ImagePositionPatient_2  \\\n",
9725
       "56318             -126.408875             -126.408875              157.507935   \n",
9726
       "11838             -118.000000               20.437079              167.587618   \n",
9727
       "60633             -125.000000               32.528565              161.228190   \n",
9728
       "30971             -125.000000               19.051445              123.026026   \n",
9729
       "23816             -126.408875             -126.408875              167.507935   \n",
9730
       "\n",
9731
       "       PixelSpacing_0  PixelSpacing_1  WindowCenter_0  WindowCenter_1  \\\n",
9732
       "56318        0.494751        0.494751            35.0             NaN   \n",
9733
       "11838        0.488281        0.488281            40.0            40.0   \n",
9734
       "60633        0.488281        0.488281            40.0            40.0   \n",
9735
       "30971        0.488281        0.488281            40.0            40.0   \n",
9736
       "23816        0.494751        0.494751            35.0             NaN   \n",
9737
       "\n",
9738
       "       WindowCenter_1_NAN  WindowWidth_0  WindowWidth_1  WindowWidth_0_le  \\\n",
9739
       "56318                True          135.0            NaN                 3   \n",
9740
       "11838               False           80.0           80.0                 0   \n",
9741
       "60633               False           80.0           80.0                 0   \n",
9742
       "30971               False           80.0           80.0                 0   \n",
9743
       "23816                True          135.0            NaN                 3   \n",
9744
       "\n",
9745
       "       WindowWidth_1_le  WindowCenter_1_le  BitType_le  \\\n",
9746
       "56318                 1                  3           0   \n",
9747
       "11838                 0                  1           1   \n",
9748
       "60633                 0                  1           1   \n",
9749
       "30971                 0                  1           1   \n",
9750
       "23816                 1                  3           0   \n",
9751
       "\n",
9752
       "       ImageOrientationPatient_4_f  ImageOrientationPatient_4_enc_0  ...  \\\n",
9753
       "56318                    -1.333333                              1.0  ...   \n",
9754
       "11838                     2.375301                              0.0  ...   \n",
9755
       "60633                     1.862568                              0.0  ...   \n",
9756
       "30971                     2.421696                              0.0  ...   \n",
9757
       "23816                    -1.333333                              1.0  ...   \n",
9758
       "\n",
9759
       "       ImageOrientationPatient_5_f  ImageOrientationPatient_5_enc_0  \\\n",
9760
       "56318                    -0.666667                              1.0   \n",
9761
       "11838                     1.947255                              0.0   \n",
9762
       "60633                     1.053199                              0.0   \n",
9763
       "30971                     2.061273                              0.0   \n",
9764
       "23816                    -0.666667                              1.0   \n",
9765
       "\n",
9766
       "       ImageOrientationPatient_5_enc_1  ImagePositionPatient_0_f  \\\n",
9767
       "56318                            False                 -0.720000   \n",
9768
       "11838                            False                  1.466667   \n",
9769
       "60633                            False                 -0.720000   \n",
9770
       "30971                            False                 -0.720000   \n",
9771
       "23816                            False                 -0.720000   \n",
9772
       "\n",
9773
       "       ImagePositionPatient_0_enc_0  ImagePositionPatient_0_enc_1  \\\n",
9774
       "56318                           0.0                           1.0   \n",
9775
       "11838                           0.0                           0.0   \n",
9776
       "60633                           1.0                           0.0   \n",
9777
       "30971                           1.0                           0.0   \n",
9778
       "23816                           0.0                           1.0   \n",
9779
       "\n",
9780
       "       ImagePositionPatient_0_f_r1  ImagePositionPatient_0_f_r05  \\\n",
9781
       "56318                          1.0                           1.0   \n",
9782
       "11838                          1.0                           1.0   \n",
9783
       "60633                          1.0                           1.0   \n",
9784
       "30971                          1.0                           1.0   \n",
9785
       "23816                          1.0                           1.0   \n",
9786
       "\n",
9787
       "       ImagePositionPatient_1_f  ImagePositionPatient_1_enc_0  \\\n",
9788
       "56318                 -0.818785                           0.0   \n",
9789
       "11838                  1.139161                           1.0   \n",
9790
       "60633                  1.300381                           1.0   \n",
9791
       "30971                  1.120686                           1.0   \n",
9792
       "23816                 -0.818785                           0.0   \n",
9793
       "\n",
9794
       "       ImagePositionPatient_2_f  ImagePositionPatient_2_f_r05  \\\n",
9795
       "56318                 -0.013599                           0.0   \n",
9796
       "11838                  0.000842                           0.0   \n",
9797
       "60633                 -0.008269                           0.0   \n",
9798
       "30971                 -0.063000                           0.0   \n",
9799
       "23816                  0.000728                           0.0   \n",
9800
       "\n",
9801
       "       PixelSpacing_1_f  PixelSpacing_1_enc_0  PixelSpacing_1_enc_1  \\\n",
9802
       "56318             -0.48                   0.0                  True   \n",
9803
       "11838             -0.48                   1.0                 False   \n",
9804
       "60633             -0.48                   1.0                 False   \n",
9805
       "30971             -0.48                   1.0                 False   \n",
9806
       "23816             -0.48                   0.0                  True   \n",
9807
       "\n",
9808
       "       WindowCenter_0_le   pos_max   pos_min  pos_size  pos_idx1  pos_idx  \\\n",
9809
       "56318                  3  0.850032 -0.009968       0.9  1.016949       32   \n",
9810
       "11838                  2  1.078350  0.445865      -0.3 -0.406780       11   \n",
9811
       "60633                  2  1.132596  0.432912      -0.1 -0.474576       10   \n",
9812
       "30971                  2  0.940904  0.267694      -0.1 -0.406780       11   \n",
9813
       "23816                  3  0.850032 -0.009968       0.9  1.152542       34   \n",
9814
       "\n",
9815
       "       pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  pos_inc2_grp_le  \\\n",
9816
       "56318 -0.406780 -1.500000 -1.500000                3                3   \n",
9817
       "11838  0.203390  1.560669  1.539367                3                3   \n",
9818
       "60633  0.406780  1.639588  1.660400                3                3   \n",
9819
       "30971  0.338983  1.551270  1.548767                3                3   \n",
9820
       "23816 -0.542373 -1.500000 -1.500000                3                3   \n",
9821
       "\n",
9822
       "       pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  pos_inc2_enc_0  \\\n",
9823
       "56318          1.0             1.0             0.0             0.0   \n",
9824
       "11838          0.0             0.0             0.0             0.0   \n",
9825
       "60633          0.0             0.0             0.0             0.0   \n",
9826
       "30971          0.0             0.0             0.0             0.0   \n",
9827
       "23816          1.0             1.0             0.0             0.0   \n",
9828
       "\n",
9829
       "       pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  pos_range   pos_rel  \\\n",
9830
       "56318             1.0             1.0            9   1.333334  0.976744   \n",
9831
       "11838             0.0             0.0            0  -0.183431 -0.580297   \n",
9832
       "60633             0.0             0.0            4   0.264557 -0.788021   \n",
9833
       "30971             0.0             0.0            4   0.088070 -0.666627   \n",
9834
       "23816             1.0             1.0            9   1.333334  1.162791   \n",
9835
       "\n",
9836
       "       pos_zeros  pos_inc_rng  pos_zeros_le  PxlMin_grp_le  PxlMin_zero  any  \\\n",
9837
       "56318        0.0    -0.599997             0              2        False  NaN   \n",
9838
       "11838        0.0    -0.583175             0              2        False  NaN   \n",
9839
       "60633        0.0    -0.584137             0              2        False  NaN   \n",
9840
       "30971        0.0    -0.597988             0              2        False  NaN   \n",
9841
       "23816        0.0    -0.599997             0              2        False  NaN   \n",
9842
       "\n",
9843
       "       epidural  intraparenchymal  intraventricular  subarachnoid  subdural  \\\n",
9844
       "56318       NaN               NaN               NaN           NaN       NaN   \n",
9845
       "11838       NaN               NaN               NaN           NaN       NaN   \n",
9846
       "60633       NaN               NaN               NaN           NaN       NaN   \n",
9847
       "30971       NaN               NaN               NaN           NaN       NaN   \n",
9848
       "23816       NaN               NaN               NaN           NaN       NaN   \n",
9849
       "\n",
9850
       "       any_series  SeriesPP  yuval_idx  pred_any  \n",
9851
       "56318       False      -0.5      20605  0.995869  \n",
9852
       "11838       False      -0.5      49264  0.995902  \n",
9853
       "60633       False      -0.5      59637  0.995940  \n",
9854
       "30971       False      -0.5      59366  0.995958  \n",
9855
       "23816       False      -0.5      20607  0.996068  \n",
9856
       "\n",
9857
       "[5 rows x 101 columns]"
9858
      ]
9859
     },
9860
     "execution_count": 65,
9861
     "metadata": {},
9862
     "output_type": "execute_result"
9863
    }
9864
   ],
9865
   "source": [
9866
    "test_md['pred_any'] = predictions[:,2]\n",
9867
    "test_md.sort_values('pred_any').tail()"
9868
   ]
9869
  },
9870
  {
9871
   "cell_type": "code",
9872
   "execution_count": 66,
9873
   "metadata": {},
9874
   "outputs": [
9875
    {
9876
     "data": {
9877
      "text/html": [
9878
       "<div>\n",
9879
       "<style scoped>\n",
9880
       "    .dataframe tbody tr th:only-of-type {\n",
9881
       "        vertical-align: middle;\n",
9882
       "    }\n",
9883
       "\n",
9884
       "    .dataframe tbody tr th {\n",
9885
       "        vertical-align: top;\n",
9886
       "    }\n",
9887
       "\n",
9888
       "    .dataframe thead th {\n",
9889
       "        text-align: right;\n",
9890
       "    }\n",
9891
       "</style>\n",
9892
       "<table border=\"1\" class=\"dataframe\">\n",
9893
       "  <thead>\n",
9894
       "    <tr style=\"text-align: right;\">\n",
9895
       "      <th></th>\n",
9896
       "      <th>img_id</th>\n",
9897
       "      <th>SOPInstanceUID</th>\n",
9898
       "      <th>Modality</th>\n",
9899
       "      <th>PatientID</th>\n",
9900
       "      <th>StudyInstanceUID</th>\n",
9901
       "      <th>SeriesInstanceUID</th>\n",
9902
       "      <th>StudyID</th>\n",
9903
       "      <th>ImagePositionPatient</th>\n",
9904
       "      <th>ImageOrientationPatient</th>\n",
9905
       "      <th>SamplesPerPixel</th>\n",
9906
       "      <th>PhotometricInterpretation</th>\n",
9907
       "      <th>Rows</th>\n",
9908
       "      <th>Columns</th>\n",
9909
       "      <th>PixelSpacing</th>\n",
9910
       "      <th>BitsAllocated</th>\n",
9911
       "      <th>BitsStored</th>\n",
9912
       "      <th>HighBit</th>\n",
9913
       "      <th>PixelRepresentation</th>\n",
9914
       "      <th>WindowCenter</th>\n",
9915
       "      <th>WindowWidth</th>\n",
9916
       "      <th>RescaleIntercept</th>\n",
9917
       "      <th>RescaleSlope</th>\n",
9918
       "      <th>PxlMin</th>\n",
9919
       "      <th>PxlMax</th>\n",
9920
       "      <th>PxlStd</th>\n",
9921
       "      <th>PxlMean</th>\n",
9922
       "      <th>test</th>\n",
9923
       "      <th>test2</th>\n",
9924
       "      <th>ImageOrientationPatient_0</th>\n",
9925
       "      <th>ImageOrientationPatient_1</th>\n",
9926
       "      <th>ImageOrientationPatient_2</th>\n",
9927
       "      <th>ImageOrientationPatient_3</th>\n",
9928
       "      <th>ImageOrientationPatient_4</th>\n",
9929
       "      <th>ImageOrientationPatient_5</th>\n",
9930
       "      <th>ImagePositionPatient_0</th>\n",
9931
       "      <th>ImagePositionPatient_1</th>\n",
9932
       "      <th>ImagePositionPatient_2</th>\n",
9933
       "      <th>PixelSpacing_0</th>\n",
9934
       "      <th>PixelSpacing_1</th>\n",
9935
       "      <th>WindowCenter_0</th>\n",
9936
       "      <th>WindowCenter_1</th>\n",
9937
       "      <th>WindowCenter_1_NAN</th>\n",
9938
       "      <th>WindowWidth_0</th>\n",
9939
       "      <th>WindowWidth_1</th>\n",
9940
       "      <th>WindowWidth_0_le</th>\n",
9941
       "      <th>WindowWidth_1_le</th>\n",
9942
       "      <th>WindowCenter_1_le</th>\n",
9943
       "      <th>BitType_le</th>\n",
9944
       "      <th>ImageOrientationPatient_4_f</th>\n",
9945
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
9946
       "      <th>...</th>\n",
9947
       "      <th>ImageOrientationPatient_5_f</th>\n",
9948
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
9949
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
9950
       "      <th>ImagePositionPatient_0_f</th>\n",
9951
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
9952
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
9953
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
9954
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
9955
       "      <th>ImagePositionPatient_1_f</th>\n",
9956
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
9957
       "      <th>ImagePositionPatient_2_f</th>\n",
9958
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
9959
       "      <th>PixelSpacing_1_f</th>\n",
9960
       "      <th>PixelSpacing_1_enc_0</th>\n",
9961
       "      <th>PixelSpacing_1_enc_1</th>\n",
9962
       "      <th>WindowCenter_0_le</th>\n",
9963
       "      <th>pos_max</th>\n",
9964
       "      <th>pos_min</th>\n",
9965
       "      <th>pos_size</th>\n",
9966
       "      <th>pos_idx1</th>\n",
9967
       "      <th>pos_idx</th>\n",
9968
       "      <th>pos_idx2</th>\n",
9969
       "      <th>pos_inc1</th>\n",
9970
       "      <th>pos_inc2</th>\n",
9971
       "      <th>pos_inc1_grp_le</th>\n",
9972
       "      <th>pos_inc2_grp_le</th>\n",
9973
       "      <th>pos_inc1_r1</th>\n",
9974
       "      <th>pos_inc1_r0001</th>\n",
9975
       "      <th>pos_inc1_enc_0</th>\n",
9976
       "      <th>pos_inc2_enc_0</th>\n",
9977
       "      <th>pos_inc1_enc_1</th>\n",
9978
       "      <th>pos_inc2_enc_1</th>\n",
9979
       "      <th>pos_size_le</th>\n",
9980
       "      <th>pos_range</th>\n",
9981
       "      <th>pos_rel</th>\n",
9982
       "      <th>pos_zeros</th>\n",
9983
       "      <th>pos_inc_rng</th>\n",
9984
       "      <th>pos_zeros_le</th>\n",
9985
       "      <th>PxlMin_grp_le</th>\n",
9986
       "      <th>PxlMin_zero</th>\n",
9987
       "      <th>any</th>\n",
9988
       "      <th>epidural</th>\n",
9989
       "      <th>intraparenchymal</th>\n",
9990
       "      <th>intraventricular</th>\n",
9991
       "      <th>subarachnoid</th>\n",
9992
       "      <th>subdural</th>\n",
9993
       "      <th>any_series</th>\n",
9994
       "      <th>SeriesPP</th>\n",
9995
       "      <th>yuval_idx</th>\n",
9996
       "      <th>pred_any</th>\n",
9997
       "    </tr>\n",
9998
       "  </thead>\n",
9999
       "  <tbody>\n",
10000
       "    <tr>\n",
10001
       "      <td>8592</td>\n",
10002
       "      <td>2b3878103</td>\n",
10003
       "      <td>ID_2b3878103</td>\n",
10004
       "      <td>CT</td>\n",
10005
       "      <td>ID_b81caf1c</td>\n",
10006
       "      <td>ID_3d31a06240</td>\n",
10007
       "      <td>ID_25c620d29b</td>\n",
10008
       "      <td>NaN</td>\n",
10009
       "      <td>['-126.408875', '-126.408875', '77.500000']</td>\n",
10010
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
10011
       "      <td>1</td>\n",
10012
       "      <td>MONOCHROME2</td>\n",
10013
       "      <td>512</td>\n",
10014
       "      <td>512</td>\n",
10015
       "      <td>['0.494750976563', '0.494750976563']</td>\n",
10016
       "      <td>16</td>\n",
10017
       "      <td>16</td>\n",
10018
       "      <td>15</td>\n",
10019
       "      <td>1</td>\n",
10020
       "      <td>35.000000</td>\n",
10021
       "      <td>135.000000</td>\n",
10022
       "      <td>-1024.0</td>\n",
10023
       "      <td>1.0</td>\n",
10024
       "      <td>1.301333</td>\n",
10025
       "      <td>0.394667</td>\n",
10026
       "      <td>-0.776436</td>\n",
10027
       "      <td>1.134899</td>\n",
10028
       "      <td>False</td>\n",
10029
       "      <td>True</td>\n",
10030
       "      <td>1.0</td>\n",
10031
       "      <td>0.0</td>\n",
10032
       "      <td>0.0</td>\n",
10033
       "      <td>0.0</td>\n",
10034
       "      <td>1.0</td>\n",
10035
       "      <td>0.0</td>\n",
10036
       "      <td>-126.408875</td>\n",
10037
       "      <td>-126.408875</td>\n",
10038
       "      <td>77.5</td>\n",
10039
       "      <td>0.494751</td>\n",
10040
       "      <td>0.494751</td>\n",
10041
       "      <td>35.0</td>\n",
10042
       "      <td>NaN</td>\n",
10043
       "      <td>True</td>\n",
10044
       "      <td>135.0</td>\n",
10045
       "      <td>NaN</td>\n",
10046
       "      <td>3</td>\n",
10047
       "      <td>1</td>\n",
10048
       "      <td>3</td>\n",
10049
       "      <td>0</td>\n",
10050
       "      <td>-1.333333</td>\n",
10051
       "      <td>1.0</td>\n",
10052
       "      <td>...</td>\n",
10053
       "      <td>-0.666667</td>\n",
10054
       "      <td>1.0</td>\n",
10055
       "      <td>False</td>\n",
10056
       "      <td>-0.720000</td>\n",
10057
       "      <td>0.0</td>\n",
10058
       "      <td>1.0</td>\n",
10059
       "      <td>1.0</td>\n",
10060
       "      <td>1.0</td>\n",
10061
       "      <td>-0.818785</td>\n",
10062
       "      <td>0.0</td>\n",
10063
       "      <td>-0.128223</td>\n",
10064
       "      <td>1.0</td>\n",
10065
       "      <td>-0.4800</td>\n",
10066
       "      <td>0.0</td>\n",
10067
       "      <td>True</td>\n",
10068
       "      <td>3</td>\n",
10069
       "      <td>0.61</td>\n",
10070
       "      <td>-0.01</td>\n",
10071
       "      <td>-0.3</td>\n",
10072
       "      <td>-0.067797</td>\n",
10073
       "      <td>16</td>\n",
10074
       "      <td>-0.135593</td>\n",
10075
       "      <td>-1.5</td>\n",
10076
       "      <td>-1.5</td>\n",
10077
       "      <td>3</td>\n",
10078
       "      <td>3</td>\n",
10079
       "      <td>1.0</td>\n",
10080
       "      <td>1.0</td>\n",
10081
       "      <td>0.0</td>\n",
10082
       "      <td>0.0</td>\n",
10083
       "      <td>1.0</td>\n",
10084
       "      <td>1.0</td>\n",
10085
       "      <td>0</td>\n",
10086
       "      <td>-0.266667</td>\n",
10087
       "      <td>0.064516</td>\n",
10088
       "      <td>0.0</td>\n",
10089
       "      <td>-0.6</td>\n",
10090
       "      <td>0</td>\n",
10091
       "      <td>2</td>\n",
10092
       "      <td>False</td>\n",
10093
       "      <td>NaN</td>\n",
10094
       "      <td>NaN</td>\n",
10095
       "      <td>NaN</td>\n",
10096
       "      <td>NaN</td>\n",
10097
       "      <td>NaN</td>\n",
10098
       "      <td>NaN</td>\n",
10099
       "      <td>False</td>\n",
10100
       "      <td>-0.5</td>\n",
10101
       "      <td>95036</td>\n",
10102
       "      <td>0.995983</td>\n",
10103
       "    </tr>\n",
10104
       "    <tr>\n",
10105
       "      <td>101233</td>\n",
10106
       "      <td>734856256</td>\n",
10107
       "      <td>ID_734856256</td>\n",
10108
       "      <td>CT</td>\n",
10109
       "      <td>ID_4482f018</td>\n",
10110
       "      <td>ID_5ccd14e6b7</td>\n",
10111
       "      <td>ID_b75da817b2</td>\n",
10112
       "      <td>NaN</td>\n",
10113
       "      <td>['-126.438', '-126.438', '97.500']</td>\n",
10114
       "      <td>['1.0', '0.0', '0.0', '0.0', '1.0', '0.0']</td>\n",
10115
       "      <td>1</td>\n",
10116
       "      <td>MONOCHROME2</td>\n",
10117
       "      <td>512</td>\n",
10118
       "      <td>512</td>\n",
10119
       "      <td>['0.4949', '0.4949']</td>\n",
10120
       "      <td>16</td>\n",
10121
       "      <td>16</td>\n",
10122
       "      <td>15</td>\n",
10123
       "      <td>1</td>\n",
10124
       "      <td>35.0</td>\n",
10125
       "      <td>135.0</td>\n",
10126
       "      <td>-1024.0</td>\n",
10127
       "      <td>1.0</td>\n",
10128
       "      <td>1.301333</td>\n",
10129
       "      <td>0.378667</td>\n",
10130
       "      <td>-0.862722</td>\n",
10131
       "      <td>1.089866</td>\n",
10132
       "      <td>False</td>\n",
10133
       "      <td>True</td>\n",
10134
       "      <td>1.0</td>\n",
10135
       "      <td>0.0</td>\n",
10136
       "      <td>0.0</td>\n",
10137
       "      <td>0.0</td>\n",
10138
       "      <td>1.0</td>\n",
10139
       "      <td>0.0</td>\n",
10140
       "      <td>-126.438000</td>\n",
10141
       "      <td>-126.438000</td>\n",
10142
       "      <td>97.5</td>\n",
10143
       "      <td>0.494900</td>\n",
10144
       "      <td>0.494900</td>\n",
10145
       "      <td>35.0</td>\n",
10146
       "      <td>NaN</td>\n",
10147
       "      <td>True</td>\n",
10148
       "      <td>135.0</td>\n",
10149
       "      <td>NaN</td>\n",
10150
       "      <td>3</td>\n",
10151
       "      <td>1</td>\n",
10152
       "      <td>3</td>\n",
10153
       "      <td>0</td>\n",
10154
       "      <td>-1.333333</td>\n",
10155
       "      <td>1.0</td>\n",
10156
       "      <td>...</td>\n",
10157
       "      <td>-0.666667</td>\n",
10158
       "      <td>1.0</td>\n",
10159
       "      <td>False</td>\n",
10160
       "      <td>1.241653</td>\n",
10161
       "      <td>0.0</td>\n",
10162
       "      <td>0.0</td>\n",
10163
       "      <td>0.0</td>\n",
10164
       "      <td>0.0</td>\n",
10165
       "      <td>-0.819173</td>\n",
10166
       "      <td>0.0</td>\n",
10167
       "      <td>-0.099570</td>\n",
10168
       "      <td>1.0</td>\n",
10169
       "      <td>1.8792</td>\n",
10170
       "      <td>0.0</td>\n",
10171
       "      <td>False</td>\n",
10172
       "      <td>3</td>\n",
10173
       "      <td>0.67</td>\n",
10174
       "      <td>0.01</td>\n",
10175
       "      <td>-0.1</td>\n",
10176
       "      <td>0.135593</td>\n",
10177
       "      <td>19</td>\n",
10178
       "      <td>-0.203390</td>\n",
10179
       "      <td>-1.5</td>\n",
10180
       "      <td>-1.5</td>\n",
10181
       "      <td>3</td>\n",
10182
       "      <td>3</td>\n",
10183
       "      <td>1.0</td>\n",
10184
       "      <td>1.0</td>\n",
10185
       "      <td>0.0</td>\n",
10186
       "      <td>0.0</td>\n",
10187
       "      <td>1.0</td>\n",
10188
       "      <td>1.0</td>\n",
10189
       "      <td>4</td>\n",
10190
       "      <td>0.000000</td>\n",
10191
       "      <td>0.303030</td>\n",
10192
       "      <td>0.0</td>\n",
10193
       "      <td>-0.6</td>\n",
10194
       "      <td>0</td>\n",
10195
       "      <td>2</td>\n",
10196
       "      <td>False</td>\n",
10197
       "      <td>NaN</td>\n",
10198
       "      <td>NaN</td>\n",
10199
       "      <td>NaN</td>\n",
10200
       "      <td>NaN</td>\n",
10201
       "      <td>NaN</td>\n",
10202
       "      <td>NaN</td>\n",
10203
       "      <td>False</td>\n",
10204
       "      <td>-0.5</td>\n",
10205
       "      <td>23495</td>\n",
10206
       "      <td>0.996200</td>\n",
10207
       "    </tr>\n",
10208
       "    <tr>\n",
10209
       "      <td>38580</td>\n",
10210
       "      <td>1bb3fe555</td>\n",
10211
       "      <td>ID_1bb3fe555</td>\n",
10212
       "      <td>CT</td>\n",
10213
       "      <td>ID_ca92b4e6</td>\n",
10214
       "      <td>ID_e14681614d</td>\n",
10215
       "      <td>ID_23f8022c7d</td>\n",
10216
       "      <td>NaN</td>\n",
10217
       "      <td>['-126.408875', '-126.408875', '72.500000']</td>\n",
10218
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
10219
       "      <td>1</td>\n",
10220
       "      <td>MONOCHROME2</td>\n",
10221
       "      <td>512</td>\n",
10222
       "      <td>512</td>\n",
10223
       "      <td>['0.494750976563', '0.494750976563']</td>\n",
10224
       "      <td>16</td>\n",
10225
       "      <td>16</td>\n",
10226
       "      <td>15</td>\n",
10227
       "      <td>1</td>\n",
10228
       "      <td>35.000000</td>\n",
10229
       "      <td>135.000000</td>\n",
10230
       "      <td>-1024.0</td>\n",
10231
       "      <td>1.0</td>\n",
10232
       "      <td>1.301333</td>\n",
10233
       "      <td>0.472000</td>\n",
10234
       "      <td>-0.733947</td>\n",
10235
       "      <td>1.184966</td>\n",
10236
       "      <td>False</td>\n",
10237
       "      <td>True</td>\n",
10238
       "      <td>1.0</td>\n",
10239
       "      <td>0.0</td>\n",
10240
       "      <td>0.0</td>\n",
10241
       "      <td>0.0</td>\n",
10242
       "      <td>1.0</td>\n",
10243
       "      <td>0.0</td>\n",
10244
       "      <td>-126.408875</td>\n",
10245
       "      <td>-126.408875</td>\n",
10246
       "      <td>72.5</td>\n",
10247
       "      <td>0.494751</td>\n",
10248
       "      <td>0.494751</td>\n",
10249
       "      <td>35.0</td>\n",
10250
       "      <td>NaN</td>\n",
10251
       "      <td>True</td>\n",
10252
       "      <td>135.0</td>\n",
10253
       "      <td>NaN</td>\n",
10254
       "      <td>3</td>\n",
10255
       "      <td>1</td>\n",
10256
       "      <td>3</td>\n",
10257
       "      <td>0</td>\n",
10258
       "      <td>-1.333333</td>\n",
10259
       "      <td>1.0</td>\n",
10260
       "      <td>...</td>\n",
10261
       "      <td>-0.666667</td>\n",
10262
       "      <td>1.0</td>\n",
10263
       "      <td>False</td>\n",
10264
       "      <td>-0.720000</td>\n",
10265
       "      <td>0.0</td>\n",
10266
       "      <td>1.0</td>\n",
10267
       "      <td>1.0</td>\n",
10268
       "      <td>1.0</td>\n",
10269
       "      <td>-0.818785</td>\n",
10270
       "      <td>0.0</td>\n",
10271
       "      <td>-0.135387</td>\n",
10272
       "      <td>1.0</td>\n",
10273
       "      <td>-0.4800</td>\n",
10274
       "      <td>0.0</td>\n",
10275
       "      <td>True</td>\n",
10276
       "      <td>3</td>\n",
10277
       "      <td>0.61</td>\n",
10278
       "      <td>-0.01</td>\n",
10279
       "      <td>-0.3</td>\n",
10280
       "      <td>-0.135593</td>\n",
10281
       "      <td>15</td>\n",
10282
       "      <td>-0.067797</td>\n",
10283
       "      <td>-1.5</td>\n",
10284
       "      <td>-1.5</td>\n",
10285
       "      <td>3</td>\n",
10286
       "      <td>3</td>\n",
10287
       "      <td>1.0</td>\n",
10288
       "      <td>1.0</td>\n",
10289
       "      <td>0.0</td>\n",
10290
       "      <td>0.0</td>\n",
10291
       "      <td>1.0</td>\n",
10292
       "      <td>1.0</td>\n",
10293
       "      <td>0</td>\n",
10294
       "      <td>-0.266667</td>\n",
10295
       "      <td>-0.064516</td>\n",
10296
       "      <td>0.0</td>\n",
10297
       "      <td>-0.6</td>\n",
10298
       "      <td>0</td>\n",
10299
       "      <td>2</td>\n",
10300
       "      <td>False</td>\n",
10301
       "      <td>NaN</td>\n",
10302
       "      <td>NaN</td>\n",
10303
       "      <td>NaN</td>\n",
10304
       "      <td>NaN</td>\n",
10305
       "      <td>NaN</td>\n",
10306
       "      <td>NaN</td>\n",
10307
       "      <td>False</td>\n",
10308
       "      <td>-0.5</td>\n",
10309
       "      <td>40054</td>\n",
10310
       "      <td>0.996345</td>\n",
10311
       "    </tr>\n",
10312
       "    <tr>\n",
10313
       "      <td>29351</td>\n",
10314
       "      <td>5bd2084d9</td>\n",
10315
       "      <td>ID_5bd2084d9</td>\n",
10316
       "      <td>CT</td>\n",
10317
       "      <td>ID_4482f018</td>\n",
10318
       "      <td>ID_5ccd14e6b7</td>\n",
10319
       "      <td>ID_b75da817b2</td>\n",
10320
       "      <td>NaN</td>\n",
10321
       "      <td>['-126.438', '-126.438', '102.500']</td>\n",
10322
       "      <td>['1.0', '0.0', '0.0', '0.0', '1.0', '0.0']</td>\n",
10323
       "      <td>1</td>\n",
10324
       "      <td>MONOCHROME2</td>\n",
10325
       "      <td>512</td>\n",
10326
       "      <td>512</td>\n",
10327
       "      <td>['0.4949', '0.4949']</td>\n",
10328
       "      <td>16</td>\n",
10329
       "      <td>16</td>\n",
10330
       "      <td>15</td>\n",
10331
       "      <td>1</td>\n",
10332
       "      <td>35.0</td>\n",
10333
       "      <td>135.0</td>\n",
10334
       "      <td>-1024.0</td>\n",
10335
       "      <td>1.0</td>\n",
10336
       "      <td>1.301333</td>\n",
10337
       "      <td>0.442667</td>\n",
10338
       "      <td>-0.849438</td>\n",
10339
       "      <td>1.059636</td>\n",
10340
       "      <td>False</td>\n",
10341
       "      <td>True</td>\n",
10342
       "      <td>1.0</td>\n",
10343
       "      <td>0.0</td>\n",
10344
       "      <td>0.0</td>\n",
10345
       "      <td>0.0</td>\n",
10346
       "      <td>1.0</td>\n",
10347
       "      <td>0.0</td>\n",
10348
       "      <td>-126.438000</td>\n",
10349
       "      <td>-126.438000</td>\n",
10350
       "      <td>102.5</td>\n",
10351
       "      <td>0.494900</td>\n",
10352
       "      <td>0.494900</td>\n",
10353
       "      <td>35.0</td>\n",
10354
       "      <td>NaN</td>\n",
10355
       "      <td>True</td>\n",
10356
       "      <td>135.0</td>\n",
10357
       "      <td>NaN</td>\n",
10358
       "      <td>3</td>\n",
10359
       "      <td>1</td>\n",
10360
       "      <td>3</td>\n",
10361
       "      <td>0</td>\n",
10362
       "      <td>-1.333333</td>\n",
10363
       "      <td>1.0</td>\n",
10364
       "      <td>...</td>\n",
10365
       "      <td>-0.666667</td>\n",
10366
       "      <td>1.0</td>\n",
10367
       "      <td>False</td>\n",
10368
       "      <td>1.241653</td>\n",
10369
       "      <td>0.0</td>\n",
10370
       "      <td>0.0</td>\n",
10371
       "      <td>0.0</td>\n",
10372
       "      <td>0.0</td>\n",
10373
       "      <td>-0.819173</td>\n",
10374
       "      <td>0.0</td>\n",
10375
       "      <td>-0.092407</td>\n",
10376
       "      <td>1.0</td>\n",
10377
       "      <td>1.8792</td>\n",
10378
       "      <td>0.0</td>\n",
10379
       "      <td>False</td>\n",
10380
       "      <td>3</td>\n",
10381
       "      <td>0.67</td>\n",
10382
       "      <td>0.01</td>\n",
10383
       "      <td>-0.1</td>\n",
10384
       "      <td>0.203390</td>\n",
10385
       "      <td>20</td>\n",
10386
       "      <td>-0.271186</td>\n",
10387
       "      <td>-1.5</td>\n",
10388
       "      <td>-1.5</td>\n",
10389
       "      <td>3</td>\n",
10390
       "      <td>3</td>\n",
10391
       "      <td>1.0</td>\n",
10392
       "      <td>1.0</td>\n",
10393
       "      <td>0.0</td>\n",
10394
       "      <td>0.0</td>\n",
10395
       "      <td>1.0</td>\n",
10396
       "      <td>1.0</td>\n",
10397
       "      <td>4</td>\n",
10398
       "      <td>0.000000</td>\n",
10399
       "      <td>0.424242</td>\n",
10400
       "      <td>0.0</td>\n",
10401
       "      <td>-0.6</td>\n",
10402
       "      <td>0</td>\n",
10403
       "      <td>2</td>\n",
10404
       "      <td>False</td>\n",
10405
       "      <td>NaN</td>\n",
10406
       "      <td>NaN</td>\n",
10407
       "      <td>NaN</td>\n",
10408
       "      <td>NaN</td>\n",
10409
       "      <td>NaN</td>\n",
10410
       "      <td>NaN</td>\n",
10411
       "      <td>False</td>\n",
10412
       "      <td>-0.5</td>\n",
10413
       "      <td>23496</td>\n",
10414
       "      <td>0.996412</td>\n",
10415
       "    </tr>\n",
10416
       "    <tr>\n",
10417
       "      <td>90564</td>\n",
10418
       "      <td>2941d6eba</td>\n",
10419
       "      <td>ID_2941d6eba</td>\n",
10420
       "      <td>CT</td>\n",
10421
       "      <td>ID_ca92b4e6</td>\n",
10422
       "      <td>ID_e14681614d</td>\n",
10423
       "      <td>ID_23f8022c7d</td>\n",
10424
       "      <td>NaN</td>\n",
10425
       "      <td>['-126.408875', '-126.408875', '77.500000']</td>\n",
10426
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
10427
       "      <td>1</td>\n",
10428
       "      <td>MONOCHROME2</td>\n",
10429
       "      <td>512</td>\n",
10430
       "      <td>512</td>\n",
10431
       "      <td>['0.494750976563', '0.494750976563']</td>\n",
10432
       "      <td>16</td>\n",
10433
       "      <td>16</td>\n",
10434
       "      <td>15</td>\n",
10435
       "      <td>1</td>\n",
10436
       "      <td>35.000000</td>\n",
10437
       "      <td>135.000000</td>\n",
10438
       "      <td>-1024.0</td>\n",
10439
       "      <td>1.0</td>\n",
10440
       "      <td>1.301333</td>\n",
10441
       "      <td>0.482667</td>\n",
10442
       "      <td>-0.703788</td>\n",
10443
       "      <td>1.179639</td>\n",
10444
       "      <td>False</td>\n",
10445
       "      <td>True</td>\n",
10446
       "      <td>1.0</td>\n",
10447
       "      <td>0.0</td>\n",
10448
       "      <td>0.0</td>\n",
10449
       "      <td>0.0</td>\n",
10450
       "      <td>1.0</td>\n",
10451
       "      <td>0.0</td>\n",
10452
       "      <td>-126.408875</td>\n",
10453
       "      <td>-126.408875</td>\n",
10454
       "      <td>77.5</td>\n",
10455
       "      <td>0.494751</td>\n",
10456
       "      <td>0.494751</td>\n",
10457
       "      <td>35.0</td>\n",
10458
       "      <td>NaN</td>\n",
10459
       "      <td>True</td>\n",
10460
       "      <td>135.0</td>\n",
10461
       "      <td>NaN</td>\n",
10462
       "      <td>3</td>\n",
10463
       "      <td>1</td>\n",
10464
       "      <td>3</td>\n",
10465
       "      <td>0</td>\n",
10466
       "      <td>-1.333333</td>\n",
10467
       "      <td>1.0</td>\n",
10468
       "      <td>...</td>\n",
10469
       "      <td>-0.666667</td>\n",
10470
       "      <td>1.0</td>\n",
10471
       "      <td>False</td>\n",
10472
       "      <td>-0.720000</td>\n",
10473
       "      <td>0.0</td>\n",
10474
       "      <td>1.0</td>\n",
10475
       "      <td>1.0</td>\n",
10476
       "      <td>1.0</td>\n",
10477
       "      <td>-0.818785</td>\n",
10478
       "      <td>0.0</td>\n",
10479
       "      <td>-0.128223</td>\n",
10480
       "      <td>1.0</td>\n",
10481
       "      <td>-0.4800</td>\n",
10482
       "      <td>0.0</td>\n",
10483
       "      <td>True</td>\n",
10484
       "      <td>3</td>\n",
10485
       "      <td>0.61</td>\n",
10486
       "      <td>-0.01</td>\n",
10487
       "      <td>-0.3</td>\n",
10488
       "      <td>-0.067797</td>\n",
10489
       "      <td>16</td>\n",
10490
       "      <td>-0.135593</td>\n",
10491
       "      <td>-1.5</td>\n",
10492
       "      <td>-1.5</td>\n",
10493
       "      <td>3</td>\n",
10494
       "      <td>3</td>\n",
10495
       "      <td>1.0</td>\n",
10496
       "      <td>1.0</td>\n",
10497
       "      <td>0.0</td>\n",
10498
       "      <td>0.0</td>\n",
10499
       "      <td>1.0</td>\n",
10500
       "      <td>1.0</td>\n",
10501
       "      <td>0</td>\n",
10502
       "      <td>-0.266667</td>\n",
10503
       "      <td>0.064516</td>\n",
10504
       "      <td>0.0</td>\n",
10505
       "      <td>-0.6</td>\n",
10506
       "      <td>0</td>\n",
10507
       "      <td>2</td>\n",
10508
       "      <td>False</td>\n",
10509
       "      <td>NaN</td>\n",
10510
       "      <td>NaN</td>\n",
10511
       "      <td>NaN</td>\n",
10512
       "      <td>NaN</td>\n",
10513
       "      <td>NaN</td>\n",
10514
       "      <td>NaN</td>\n",
10515
       "      <td>False</td>\n",
10516
       "      <td>-0.5</td>\n",
10517
       "      <td>40055</td>\n",
10518
       "      <td>0.996628</td>\n",
10519
       "    </tr>\n",
10520
       "  </tbody>\n",
10521
       "</table>\n",
10522
       "<p>5 rows × 101 columns</p>\n",
10523
       "</div>"
10524
      ],
10525
      "text/plain": [
10526
       "           img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
10527
       "8592    2b3878103   ID_2b3878103       CT  ID_b81caf1c    ID_3d31a06240   \n",
10528
       "101233  734856256   ID_734856256       CT  ID_4482f018    ID_5ccd14e6b7   \n",
10529
       "38580   1bb3fe555   ID_1bb3fe555       CT  ID_ca92b4e6    ID_e14681614d   \n",
10530
       "29351   5bd2084d9   ID_5bd2084d9       CT  ID_4482f018    ID_5ccd14e6b7   \n",
10531
       "90564   2941d6eba   ID_2941d6eba       CT  ID_ca92b4e6    ID_e14681614d   \n",
10532
       "\n",
10533
       "       SeriesInstanceUID  StudyID  \\\n",
10534
       "8592       ID_25c620d29b      NaN   \n",
10535
       "101233     ID_b75da817b2      NaN   \n",
10536
       "38580      ID_23f8022c7d      NaN   \n",
10537
       "29351      ID_b75da817b2      NaN   \n",
10538
       "90564      ID_23f8022c7d      NaN   \n",
10539
       "\n",
10540
       "                               ImagePositionPatient  \\\n",
10541
       "8592    ['-126.408875', '-126.408875', '77.500000']   \n",
10542
       "101233           ['-126.438', '-126.438', '97.500']   \n",
10543
       "38580   ['-126.408875', '-126.408875', '72.500000']   \n",
10544
       "29351           ['-126.438', '-126.438', '102.500']   \n",
10545
       "90564   ['-126.408875', '-126.408875', '77.500000']   \n",
10546
       "\n",
10547
       "                                  ImageOrientationPatient  SamplesPerPixel  \\\n",
10548
       "8592    ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
10549
       "101233         ['1.0', '0.0', '0.0', '0.0', '1.0', '0.0']                1   \n",
10550
       "38580   ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
10551
       "29351          ['1.0', '0.0', '0.0', '0.0', '1.0', '0.0']                1   \n",
10552
       "90564   ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
10553
       "\n",
10554
       "       PhotometricInterpretation  Rows  Columns  \\\n",
10555
       "8592                 MONOCHROME2   512      512   \n",
10556
       "101233               MONOCHROME2   512      512   \n",
10557
       "38580                MONOCHROME2   512      512   \n",
10558
       "29351                MONOCHROME2   512      512   \n",
10559
       "90564                MONOCHROME2   512      512   \n",
10560
       "\n",
10561
       "                                PixelSpacing  BitsAllocated  BitsStored  \\\n",
10562
       "8592    ['0.494750976563', '0.494750976563']             16          16   \n",
10563
       "101233                  ['0.4949', '0.4949']             16          16   \n",
10564
       "38580   ['0.494750976563', '0.494750976563']             16          16   \n",
10565
       "29351                   ['0.4949', '0.4949']             16          16   \n",
10566
       "90564   ['0.494750976563', '0.494750976563']             16          16   \n",
10567
       "\n",
10568
       "        HighBit  PixelRepresentation WindowCenter WindowWidth  \\\n",
10569
       "8592         15                    1    35.000000  135.000000   \n",
10570
       "101233       15                    1         35.0       135.0   \n",
10571
       "38580        15                    1    35.000000  135.000000   \n",
10572
       "29351        15                    1         35.0       135.0   \n",
10573
       "90564        15                    1    35.000000  135.000000   \n",
10574
       "\n",
10575
       "        RescaleIntercept  RescaleSlope    PxlMin    PxlMax    PxlStd  \\\n",
10576
       "8592             -1024.0           1.0  1.301333  0.394667 -0.776436   \n",
10577
       "101233           -1024.0           1.0  1.301333  0.378667 -0.862722   \n",
10578
       "38580            -1024.0           1.0  1.301333  0.472000 -0.733947   \n",
10579
       "29351            -1024.0           1.0  1.301333  0.442667 -0.849438   \n",
10580
       "90564            -1024.0           1.0  1.301333  0.482667 -0.703788   \n",
10581
       "\n",
10582
       "         PxlMean   test  test2  ImageOrientationPatient_0  \\\n",
10583
       "8592    1.134899  False   True                        1.0   \n",
10584
       "101233  1.089866  False   True                        1.0   \n",
10585
       "38580   1.184966  False   True                        1.0   \n",
10586
       "29351   1.059636  False   True                        1.0   \n",
10587
       "90564   1.179639  False   True                        1.0   \n",
10588
       "\n",
10589
       "        ImageOrientationPatient_1  ImageOrientationPatient_2  \\\n",
10590
       "8592                          0.0                        0.0   \n",
10591
       "101233                        0.0                        0.0   \n",
10592
       "38580                         0.0                        0.0   \n",
10593
       "29351                         0.0                        0.0   \n",
10594
       "90564                         0.0                        0.0   \n",
10595
       "\n",
10596
       "        ImageOrientationPatient_3  ImageOrientationPatient_4  \\\n",
10597
       "8592                          0.0                        1.0   \n",
10598
       "101233                        0.0                        1.0   \n",
10599
       "38580                         0.0                        1.0   \n",
10600
       "29351                         0.0                        1.0   \n",
10601
       "90564                         0.0                        1.0   \n",
10602
       "\n",
10603
       "        ImageOrientationPatient_5  ImagePositionPatient_0  \\\n",
10604
       "8592                          0.0             -126.408875   \n",
10605
       "101233                        0.0             -126.438000   \n",
10606
       "38580                         0.0             -126.408875   \n",
10607
       "29351                         0.0             -126.438000   \n",
10608
       "90564                         0.0             -126.408875   \n",
10609
       "\n",
10610
       "        ImagePositionPatient_1  ImagePositionPatient_2  PixelSpacing_0  \\\n",
10611
       "8592               -126.408875                    77.5        0.494751   \n",
10612
       "101233             -126.438000                    97.5        0.494900   \n",
10613
       "38580              -126.408875                    72.5        0.494751   \n",
10614
       "29351              -126.438000                   102.5        0.494900   \n",
10615
       "90564              -126.408875                    77.5        0.494751   \n",
10616
       "\n",
10617
       "        PixelSpacing_1  WindowCenter_0  WindowCenter_1  WindowCenter_1_NAN  \\\n",
10618
       "8592          0.494751            35.0             NaN                True   \n",
10619
       "101233        0.494900            35.0             NaN                True   \n",
10620
       "38580         0.494751            35.0             NaN                True   \n",
10621
       "29351         0.494900            35.0             NaN                True   \n",
10622
       "90564         0.494751            35.0             NaN                True   \n",
10623
       "\n",
10624
       "        WindowWidth_0  WindowWidth_1  WindowWidth_0_le  WindowWidth_1_le  \\\n",
10625
       "8592            135.0            NaN                 3                 1   \n",
10626
       "101233          135.0            NaN                 3                 1   \n",
10627
       "38580           135.0            NaN                 3                 1   \n",
10628
       "29351           135.0            NaN                 3                 1   \n",
10629
       "90564           135.0            NaN                 3                 1   \n",
10630
       "\n",
10631
       "        WindowCenter_1_le  BitType_le  ImageOrientationPatient_4_f  \\\n",
10632
       "8592                    3           0                    -1.333333   \n",
10633
       "101233                  3           0                    -1.333333   \n",
10634
       "38580                   3           0                    -1.333333   \n",
10635
       "29351                   3           0                    -1.333333   \n",
10636
       "90564                   3           0                    -1.333333   \n",
10637
       "\n",
10638
       "        ImageOrientationPatient_4_enc_0  ...  ImageOrientationPatient_5_f  \\\n",
10639
       "8592                                1.0  ...                    -0.666667   \n",
10640
       "101233                              1.0  ...                    -0.666667   \n",
10641
       "38580                               1.0  ...                    -0.666667   \n",
10642
       "29351                               1.0  ...                    -0.666667   \n",
10643
       "90564                               1.0  ...                    -0.666667   \n",
10644
       "\n",
10645
       "        ImageOrientationPatient_5_enc_0  ImageOrientationPatient_5_enc_1  \\\n",
10646
       "8592                                1.0                            False   \n",
10647
       "101233                              1.0                            False   \n",
10648
       "38580                               1.0                            False   \n",
10649
       "29351                               1.0                            False   \n",
10650
       "90564                               1.0                            False   \n",
10651
       "\n",
10652
       "        ImagePositionPatient_0_f  ImagePositionPatient_0_enc_0  \\\n",
10653
       "8592                   -0.720000                           0.0   \n",
10654
       "101233                  1.241653                           0.0   \n",
10655
       "38580                  -0.720000                           0.0   \n",
10656
       "29351                   1.241653                           0.0   \n",
10657
       "90564                  -0.720000                           0.0   \n",
10658
       "\n",
10659
       "        ImagePositionPatient_0_enc_1  ImagePositionPatient_0_f_r1  \\\n",
10660
       "8592                             1.0                          1.0   \n",
10661
       "101233                           0.0                          0.0   \n",
10662
       "38580                            1.0                          1.0   \n",
10663
       "29351                            0.0                          0.0   \n",
10664
       "90564                            1.0                          1.0   \n",
10665
       "\n",
10666
       "        ImagePositionPatient_0_f_r05  ImagePositionPatient_1_f  \\\n",
10667
       "8592                             1.0                 -0.818785   \n",
10668
       "101233                           0.0                 -0.819173   \n",
10669
       "38580                            1.0                 -0.818785   \n",
10670
       "29351                            0.0                 -0.819173   \n",
10671
       "90564                            1.0                 -0.818785   \n",
10672
       "\n",
10673
       "        ImagePositionPatient_1_enc_0  ImagePositionPatient_2_f  \\\n",
10674
       "8592                             0.0                 -0.128223   \n",
10675
       "101233                           0.0                 -0.099570   \n",
10676
       "38580                            0.0                 -0.135387   \n",
10677
       "29351                            0.0                 -0.092407   \n",
10678
       "90564                            0.0                 -0.128223   \n",
10679
       "\n",
10680
       "        ImagePositionPatient_2_f_r05  PixelSpacing_1_f  PixelSpacing_1_enc_0  \\\n",
10681
       "8592                             1.0           -0.4800                   0.0   \n",
10682
       "101233                           1.0            1.8792                   0.0   \n",
10683
       "38580                            1.0           -0.4800                   0.0   \n",
10684
       "29351                            1.0            1.8792                   0.0   \n",
10685
       "90564                            1.0           -0.4800                   0.0   \n",
10686
       "\n",
10687
       "        PixelSpacing_1_enc_1  WindowCenter_0_le  pos_max  pos_min  pos_size  \\\n",
10688
       "8592                    True                  3     0.61    -0.01      -0.3   \n",
10689
       "101233                 False                  3     0.67     0.01      -0.1   \n",
10690
       "38580                   True                  3     0.61    -0.01      -0.3   \n",
10691
       "29351                  False                  3     0.67     0.01      -0.1   \n",
10692
       "90564                   True                  3     0.61    -0.01      -0.3   \n",
10693
       "\n",
10694
       "        pos_idx1  pos_idx  pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  \\\n",
10695
       "8592   -0.067797       16 -0.135593      -1.5      -1.5                3   \n",
10696
       "101233  0.135593       19 -0.203390      -1.5      -1.5                3   \n",
10697
       "38580  -0.135593       15 -0.067797      -1.5      -1.5                3   \n",
10698
       "29351   0.203390       20 -0.271186      -1.5      -1.5                3   \n",
10699
       "90564  -0.067797       16 -0.135593      -1.5      -1.5                3   \n",
10700
       "\n",
10701
       "        pos_inc2_grp_le  pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  \\\n",
10702
       "8592                  3          1.0             1.0             0.0   \n",
10703
       "101233                3          1.0             1.0             0.0   \n",
10704
       "38580                 3          1.0             1.0             0.0   \n",
10705
       "29351                 3          1.0             1.0             0.0   \n",
10706
       "90564                 3          1.0             1.0             0.0   \n",
10707
       "\n",
10708
       "        pos_inc2_enc_0  pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  \\\n",
10709
       "8592               0.0             1.0             1.0            0   \n",
10710
       "101233             0.0             1.0             1.0            4   \n",
10711
       "38580              0.0             1.0             1.0            0   \n",
10712
       "29351              0.0             1.0             1.0            4   \n",
10713
       "90564              0.0             1.0             1.0            0   \n",
10714
       "\n",
10715
       "        pos_range   pos_rel  pos_zeros  pos_inc_rng  pos_zeros_le  \\\n",
10716
       "8592    -0.266667  0.064516        0.0         -0.6             0   \n",
10717
       "101233   0.000000  0.303030        0.0         -0.6             0   \n",
10718
       "38580   -0.266667 -0.064516        0.0         -0.6             0   \n",
10719
       "29351    0.000000  0.424242        0.0         -0.6             0   \n",
10720
       "90564   -0.266667  0.064516        0.0         -0.6             0   \n",
10721
       "\n",
10722
       "        PxlMin_grp_le  PxlMin_zero  any  epidural  intraparenchymal  \\\n",
10723
       "8592                2        False  NaN       NaN               NaN   \n",
10724
       "101233              2        False  NaN       NaN               NaN   \n",
10725
       "38580               2        False  NaN       NaN               NaN   \n",
10726
       "29351               2        False  NaN       NaN               NaN   \n",
10727
       "90564               2        False  NaN       NaN               NaN   \n",
10728
       "\n",
10729
       "        intraventricular  subarachnoid  subdural  any_series  SeriesPP  \\\n",
10730
       "8592                 NaN           NaN       NaN       False      -0.5   \n",
10731
       "101233               NaN           NaN       NaN       False      -0.5   \n",
10732
       "38580                NaN           NaN       NaN       False      -0.5   \n",
10733
       "29351                NaN           NaN       NaN       False      -0.5   \n",
10734
       "90564                NaN           NaN       NaN       False      -0.5   \n",
10735
       "\n",
10736
       "        yuval_idx  pred_any  \n",
10737
       "8592        95036  0.995983  \n",
10738
       "101233      23495  0.996200  \n",
10739
       "38580       40054  0.996345  \n",
10740
       "29351       23496  0.996412  \n",
10741
       "90564       40055  0.996628  \n",
10742
       "\n",
10743
       "[5 rows x 101 columns]"
10744
      ]
10745
     },
10746
     "execution_count": 66,
10747
     "metadata": {},
10748
     "output_type": "execute_result"
10749
    }
10750
   ],
10751
   "source": [
10752
    "test_md['pred_any'] = predictions[:,3]\n",
10753
    "test_md.sort_values('pred_any').tail()"
10754
   ]
10755
  },
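  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch, assuming test_md['pred_any'] has just been set as in the cells above:\n",
    "# summarise the ensembled 'any' probability on the test set. The 0.99 threshold is an\n",
    "# arbitrary illustrative cut-off, not a value used elsewhere in this notebook.\n",
    "import numpy as np\n",
    "\n",
    "pred_any = test_md['pred_any'].values\n",
    "print('test rows:', len(pred_any))\n",
    "print('mean / max pred_any:', float(np.mean(pred_any)), float(np.max(pred_any)))\n",
    "print('rows with pred_any > 0.99:', int((pred_any > 0.99).sum()))"
   ]
  },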
10756
  {
10757
   "cell_type": "code",
10758
   "execution_count": 67,
10759
   "metadata": {},
10760
   "outputs": [
10761
    {
10762
     "data": {
10763
      "text/html": [
10764
       "<div>\n",
10765
       "<style scoped>\n",
10766
       "    .dataframe tbody tr th:only-of-type {\n",
10767
       "        vertical-align: middle;\n",
10768
       "    }\n",
10769
       "\n",
10770
       "    .dataframe tbody tr th {\n",
10771
       "        vertical-align: top;\n",
10772
       "    }\n",
10773
       "\n",
10774
       "    .dataframe thead th {\n",
10775
       "        text-align: right;\n",
10776
       "    }\n",
10777
       "</style>\n",
10778
       "<table border=\"1\" class=\"dataframe\">\n",
10779
       "  <thead>\n",
10780
       "    <tr style=\"text-align: right;\">\n",
10781
       "      <th></th>\n",
10782
       "      <th>img_id</th>\n",
10783
       "      <th>SOPInstanceUID</th>\n",
10784
       "      <th>Modality</th>\n",
10785
       "      <th>PatientID</th>\n",
10786
       "      <th>StudyInstanceUID</th>\n",
10787
       "      <th>SeriesInstanceUID</th>\n",
10788
       "      <th>StudyID</th>\n",
10789
       "      <th>ImagePositionPatient</th>\n",
10790
       "      <th>ImageOrientationPatient</th>\n",
10791
       "      <th>SamplesPerPixel</th>\n",
10792
       "      <th>PhotometricInterpretation</th>\n",
10793
       "      <th>Rows</th>\n",
10794
       "      <th>Columns</th>\n",
10795
       "      <th>PixelSpacing</th>\n",
10796
       "      <th>BitsAllocated</th>\n",
10797
       "      <th>BitsStored</th>\n",
10798
       "      <th>HighBit</th>\n",
10799
       "      <th>PixelRepresentation</th>\n",
10800
       "      <th>WindowCenter</th>\n",
10801
       "      <th>WindowWidth</th>\n",
10802
       "      <th>RescaleIntercept</th>\n",
10803
       "      <th>RescaleSlope</th>\n",
10804
       "      <th>PxlMin</th>\n",
10805
       "      <th>PxlMax</th>\n",
10806
       "      <th>PxlStd</th>\n",
10807
       "      <th>PxlMean</th>\n",
10808
       "      <th>test</th>\n",
10809
       "      <th>test2</th>\n",
10810
       "      <th>ImageOrientationPatient_0</th>\n",
10811
       "      <th>ImageOrientationPatient_1</th>\n",
10812
       "      <th>ImageOrientationPatient_2</th>\n",
10813
       "      <th>ImageOrientationPatient_3</th>\n",
10814
       "      <th>ImageOrientationPatient_4</th>\n",
10815
       "      <th>ImageOrientationPatient_5</th>\n",
10816
       "      <th>ImagePositionPatient_0</th>\n",
10817
       "      <th>ImagePositionPatient_1</th>\n",
10818
       "      <th>ImagePositionPatient_2</th>\n",
10819
       "      <th>PixelSpacing_0</th>\n",
10820
       "      <th>PixelSpacing_1</th>\n",
10821
       "      <th>WindowCenter_0</th>\n",
10822
       "      <th>WindowCenter_1</th>\n",
10823
       "      <th>WindowCenter_1_NAN</th>\n",
10824
       "      <th>WindowWidth_0</th>\n",
10825
       "      <th>WindowWidth_1</th>\n",
10826
       "      <th>WindowWidth_0_le</th>\n",
10827
       "      <th>WindowWidth_1_le</th>\n",
10828
       "      <th>WindowCenter_1_le</th>\n",
10829
       "      <th>BitType_le</th>\n",
10830
       "      <th>ImageOrientationPatient_4_f</th>\n",
10831
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
10832
       "      <th>...</th>\n",
10833
       "      <th>ImageOrientationPatient_5_f</th>\n",
10834
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
10835
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
10836
       "      <th>ImagePositionPatient_0_f</th>\n",
10837
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
10838
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
10839
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
10840
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
10841
       "      <th>ImagePositionPatient_1_f</th>\n",
10842
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
10843
       "      <th>ImagePositionPatient_2_f</th>\n",
10844
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
10845
       "      <th>PixelSpacing_1_f</th>\n",
10846
       "      <th>PixelSpacing_1_enc_0</th>\n",
10847
       "      <th>PixelSpacing_1_enc_1</th>\n",
10848
       "      <th>WindowCenter_0_le</th>\n",
10849
       "      <th>pos_max</th>\n",
10850
       "      <th>pos_min</th>\n",
10851
       "      <th>pos_size</th>\n",
10852
       "      <th>pos_idx1</th>\n",
10853
       "      <th>pos_idx</th>\n",
10854
       "      <th>pos_idx2</th>\n",
10855
       "      <th>pos_inc1</th>\n",
10856
       "      <th>pos_inc2</th>\n",
10857
       "      <th>pos_inc1_grp_le</th>\n",
10858
       "      <th>pos_inc2_grp_le</th>\n",
10859
       "      <th>pos_inc1_r1</th>\n",
10860
       "      <th>pos_inc1_r0001</th>\n",
10861
       "      <th>pos_inc1_enc_0</th>\n",
10862
       "      <th>pos_inc2_enc_0</th>\n",
10863
       "      <th>pos_inc1_enc_1</th>\n",
10864
       "      <th>pos_inc2_enc_1</th>\n",
10865
       "      <th>pos_size_le</th>\n",
10866
       "      <th>pos_range</th>\n",
10867
       "      <th>pos_rel</th>\n",
10868
       "      <th>pos_zeros</th>\n",
10869
       "      <th>pos_inc_rng</th>\n",
10870
       "      <th>pos_zeros_le</th>\n",
10871
       "      <th>PxlMin_grp_le</th>\n",
10872
       "      <th>PxlMin_zero</th>\n",
10873
       "      <th>any</th>\n",
10874
       "      <th>epidural</th>\n",
10875
       "      <th>intraparenchymal</th>\n",
10876
       "      <th>intraventricular</th>\n",
10877
       "      <th>subarachnoid</th>\n",
10878
       "      <th>subdural</th>\n",
10879
       "      <th>any_series</th>\n",
10880
       "      <th>SeriesPP</th>\n",
10881
       "      <th>yuval_idx</th>\n",
10882
       "      <th>pred_any</th>\n",
10883
       "    </tr>\n",
10884
       "  </thead>\n",
10885
       "  <tbody>\n",
10886
       "    <tr>\n",
10887
       "      <td>46433</td>\n",
10888
       "      <td>f3a75309f</td>\n",
10889
       "      <td>ID_f3a75309f</td>\n",
10890
       "      <td>CT</td>\n",
10891
       "      <td>ID_f6723c35</td>\n",
10892
       "      <td>ID_fc07fac521</td>\n",
10893
       "      <td>ID_cfd350c878</td>\n",
10894
       "      <td>NaN</td>\n",
10895
       "      <td>['-125.000', '-118.558', '151.625']</td>\n",
10896
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
10897
       "      <td>1</td>\n",
10898
       "      <td>MONOCHROME2</td>\n",
10899
       "      <td>512</td>\n",
10900
       "      <td>512</td>\n",
10901
       "      <td>['0.488281', '0.488281']</td>\n",
10902
       "      <td>16</td>\n",
10903
       "      <td>16</td>\n",
10904
       "      <td>15</td>\n",
10905
       "      <td>1</td>\n",
10906
       "      <td>40</td>\n",
10907
       "      <td>150</td>\n",
10908
       "      <td>-1024.0</td>\n",
10909
       "      <td>1.0</td>\n",
10910
       "      <td>-0.064</td>\n",
10911
       "      <td>0.292000</td>\n",
10912
       "      <td>0.113617</td>\n",
10913
       "      <td>-0.426832</td>\n",
10914
       "      <td>False</td>\n",
10915
       "      <td>True</td>\n",
10916
       "      <td>1.0</td>\n",
10917
       "      <td>0.0</td>\n",
10918
       "      <td>0.0</td>\n",
10919
       "      <td>0.0</td>\n",
10920
       "      <td>0.961262</td>\n",
10921
       "      <td>-0.275637</td>\n",
10922
       "      <td>-125.0</td>\n",
10923
       "      <td>-118.558</td>\n",
10924
       "      <td>151.625</td>\n",
10925
       "      <td>0.488281</td>\n",
10926
       "      <td>0.488281</td>\n",
10927
       "      <td>40.0</td>\n",
10928
       "      <td>NaN</td>\n",
10929
       "      <td>True</td>\n",
10930
       "      <td>150.0</td>\n",
10931
       "      <td>NaN</td>\n",
10932
       "      <td>1</td>\n",
10933
       "      <td>1</td>\n",
10934
       "      <td>3</td>\n",
10935
       "      <td>0</td>\n",
10936
       "      <td>2.15016</td>\n",
10937
       "      <td>0.0</td>\n",
10938
       "      <td>...</td>\n",
10939
       "      <td>1.495753</td>\n",
10940
       "      <td>0.0</td>\n",
10941
       "      <td>False</td>\n",
10942
       "      <td>-0.72</td>\n",
10943
       "      <td>1.0</td>\n",
10944
       "      <td>0.0</td>\n",
10945
       "      <td>1.0</td>\n",
10946
       "      <td>1.0</td>\n",
10947
       "      <td>-0.714107</td>\n",
10948
       "      <td>0.0</td>\n",
10949
       "      <td>-0.022027</td>\n",
10950
       "      <td>0.0</td>\n",
10951
       "      <td>-0.48</td>\n",
10952
       "      <td>1.0</td>\n",
10953
       "      <td>False</td>\n",
10954
       "      <td>2</td>\n",
10955
       "      <td>0.68972</td>\n",
10956
       "      <td>0.127984</td>\n",
10957
       "      <td>-0.7</td>\n",
10958
       "      <td>0.406780</td>\n",
10959
       "      <td>23</td>\n",
10960
       "      <td>-0.881356</td>\n",
10961
       "      <td>1.6005</td>\n",
10962
       "      <td>1.6005</td>\n",
10963
       "      <td>3</td>\n",
10964
       "      <td>3</td>\n",
10965
       "      <td>0.0</td>\n",
10966
       "      <td>1.0</td>\n",
10967
       "      <td>0.0</td>\n",
10968
       "      <td>0.0</td>\n",
10969
       "      <td>0.0</td>\n",
10970
       "      <td>0.0</td>\n",
10971
       "      <td>3</td>\n",
10972
       "      <td>-0.655093</td>\n",
10973
       "      <td>1.407408</td>\n",
10974
       "      <td>0.0</td>\n",
10975
       "      <td>-0.599615</td>\n",
10976
       "      <td>0</td>\n",
10977
       "      <td>1</td>\n",
10978
       "      <td>False</td>\n",
10979
       "      <td>NaN</td>\n",
10980
       "      <td>NaN</td>\n",
10981
       "      <td>NaN</td>\n",
10982
       "      <td>NaN</td>\n",
10983
       "      <td>NaN</td>\n",
10984
       "      <td>NaN</td>\n",
10985
       "      <td>False</td>\n",
10986
       "      <td>-0.5</td>\n",
10987
       "      <td>55354</td>\n",
10988
       "      <td>0.993246</td>\n",
10989
       "    </tr>\n",
10990
       "    <tr>\n",
10991
       "      <td>59590</td>\n",
10992
       "      <td>dc7e09cbd</td>\n",
10993
       "      <td>ID_dc7e09cbd</td>\n",
10994
       "      <td>CT</td>\n",
10995
       "      <td>ID_f6723c35</td>\n",
10996
       "      <td>ID_fc07fac521</td>\n",
10997
       "      <td>ID_cfd350c878</td>\n",
10998
       "      <td>NaN</td>\n",
10999
       "      <td>['-125.000', '-118.558', '130.820']</td>\n",
11000
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
11001
       "      <td>1</td>\n",
11002
       "      <td>MONOCHROME2</td>\n",
11003
       "      <td>512</td>\n",
11004
       "      <td>512</td>\n",
11005
       "      <td>['0.488281', '0.488281']</td>\n",
11006
       "      <td>16</td>\n",
11007
       "      <td>16</td>\n",
11008
       "      <td>15</td>\n",
11009
       "      <td>1</td>\n",
11010
       "      <td>40</td>\n",
11011
       "      <td>150</td>\n",
11012
       "      <td>-1024.0</td>\n",
11013
       "      <td>1.0</td>\n",
11014
       "      <td>-0.064</td>\n",
11015
       "      <td>0.180000</td>\n",
11016
       "      <td>0.216861</td>\n",
11017
       "      <td>-0.102710</td>\n",
11018
       "      <td>False</td>\n",
11019
       "      <td>True</td>\n",
11020
       "      <td>1.0</td>\n",
11021
       "      <td>0.0</td>\n",
11022
       "      <td>0.0</td>\n",
11023
       "      <td>0.0</td>\n",
11024
       "      <td>0.961262</td>\n",
11025
       "      <td>-0.275637</td>\n",
11026
       "      <td>-125.0</td>\n",
11027
       "      <td>-118.558</td>\n",
11028
       "      <td>130.820</td>\n",
11029
       "      <td>0.488281</td>\n",
11030
       "      <td>0.488281</td>\n",
11031
       "      <td>40.0</td>\n",
11032
       "      <td>NaN</td>\n",
11033
       "      <td>True</td>\n",
11034
       "      <td>150.0</td>\n",
11035
       "      <td>NaN</td>\n",
11036
       "      <td>1</td>\n",
11037
       "      <td>1</td>\n",
11038
       "      <td>3</td>\n",
11039
       "      <td>0</td>\n",
11040
       "      <td>2.15016</td>\n",
11041
       "      <td>0.0</td>\n",
11042
       "      <td>...</td>\n",
11043
       "      <td>1.495753</td>\n",
11044
       "      <td>0.0</td>\n",
11045
       "      <td>False</td>\n",
11046
       "      <td>-0.72</td>\n",
11047
       "      <td>1.0</td>\n",
11048
       "      <td>0.0</td>\n",
11049
       "      <td>1.0</td>\n",
11050
       "      <td>1.0</td>\n",
11051
       "      <td>-0.714107</td>\n",
11052
       "      <td>0.0</td>\n",
11053
       "      <td>-0.051834</td>\n",
11054
       "      <td>0.0</td>\n",
11055
       "      <td>-0.48</td>\n",
11056
       "      <td>1.0</td>\n",
11057
       "      <td>False</td>\n",
11058
       "      <td>2</td>\n",
11059
       "      <td>0.68972</td>\n",
11060
       "      <td>0.127984</td>\n",
11061
       "      <td>-0.7</td>\n",
11062
       "      <td>0.135593</td>\n",
11063
       "      <td>19</td>\n",
11064
       "      <td>-0.610169</td>\n",
11065
       "      <td>1.6005</td>\n",
11066
       "      <td>1.6005</td>\n",
11067
       "      <td>3</td>\n",
11068
       "      <td>3</td>\n",
11069
       "      <td>0.0</td>\n",
11070
       "      <td>1.0</td>\n",
11071
       "      <td>0.0</td>\n",
11072
       "      <td>0.0</td>\n",
11073
       "      <td>0.0</td>\n",
11074
       "      <td>0.0</td>\n",
11075
       "      <td>3</td>\n",
11076
       "      <td>-0.655093</td>\n",
11077
       "      <td>0.814817</td>\n",
11078
       "      <td>0.0</td>\n",
11079
       "      <td>-0.599615</td>\n",
11080
       "      <td>0</td>\n",
11081
       "      <td>1</td>\n",
11082
       "      <td>False</td>\n",
11083
       "      <td>NaN</td>\n",
11084
       "      <td>NaN</td>\n",
11085
       "      <td>NaN</td>\n",
11086
       "      <td>NaN</td>\n",
11087
       "      <td>NaN</td>\n",
11088
       "      <td>NaN</td>\n",
11089
       "      <td>False</td>\n",
11090
       "      <td>-0.5</td>\n",
11091
       "      <td>55350</td>\n",
11092
       "      <td>0.993314</td>\n",
11093
       "    </tr>\n",
11094
       "    <tr>\n",
11095
       "      <td>33815</td>\n",
11096
       "      <td>397e899f6</td>\n",
11097
       "      <td>ID_397e899f6</td>\n",
11098
       "      <td>CT</td>\n",
11099
       "      <td>ID_f6723c35</td>\n",
11100
       "      <td>ID_fc07fac521</td>\n",
11101
       "      <td>ID_cfd350c878</td>\n",
11102
       "      <td>NaN</td>\n",
11103
       "      <td>['-125.000', '-118.558', '146.424']</td>\n",
11104
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
11105
       "      <td>1</td>\n",
11106
       "      <td>MONOCHROME2</td>\n",
11107
       "      <td>512</td>\n",
11108
       "      <td>512</td>\n",
11109
       "      <td>['0.488281', '0.488281']</td>\n",
11110
       "      <td>16</td>\n",
11111
       "      <td>16</td>\n",
11112
       "      <td>15</td>\n",
11113
       "      <td>1</td>\n",
11114
       "      <td>40</td>\n",
11115
       "      <td>150</td>\n",
11116
       "      <td>-1024.0</td>\n",
11117
       "      <td>1.0</td>\n",
11118
       "      <td>-0.064</td>\n",
11119
       "      <td>0.218667</td>\n",
11120
       "      <td>0.157382</td>\n",
11121
       "      <td>-0.319805</td>\n",
11122
       "      <td>False</td>\n",
11123
       "      <td>True</td>\n",
11124
       "      <td>1.0</td>\n",
11125
       "      <td>0.0</td>\n",
11126
       "      <td>0.0</td>\n",
11127
       "      <td>0.0</td>\n",
11128
       "      <td>0.961262</td>\n",
11129
       "      <td>-0.275637</td>\n",
11130
       "      <td>-125.0</td>\n",
11131
       "      <td>-118.558</td>\n",
11132
       "      <td>146.424</td>\n",
11133
       "      <td>0.488281</td>\n",
11134
       "      <td>0.488281</td>\n",
11135
       "      <td>40.0</td>\n",
11136
       "      <td>NaN</td>\n",
11137
       "      <td>True</td>\n",
11138
       "      <td>150.0</td>\n",
11139
       "      <td>NaN</td>\n",
11140
       "      <td>1</td>\n",
11141
       "      <td>1</td>\n",
11142
       "      <td>3</td>\n",
11143
       "      <td>0</td>\n",
11144
       "      <td>2.15016</td>\n",
11145
       "      <td>0.0</td>\n",
11146
       "      <td>...</td>\n",
11147
       "      <td>1.495753</td>\n",
11148
       "      <td>0.0</td>\n",
11149
       "      <td>False</td>\n",
11150
       "      <td>-0.72</td>\n",
11151
       "      <td>1.0</td>\n",
11152
       "      <td>0.0</td>\n",
11153
       "      <td>1.0</td>\n",
11154
       "      <td>1.0</td>\n",
11155
       "      <td>-0.714107</td>\n",
11156
       "      <td>0.0</td>\n",
11157
       "      <td>-0.029479</td>\n",
11158
       "      <td>0.0</td>\n",
11159
       "      <td>-0.48</td>\n",
11160
       "      <td>1.0</td>\n",
11161
       "      <td>False</td>\n",
11162
       "      <td>2</td>\n",
11163
       "      <td>0.68972</td>\n",
11164
       "      <td>0.127984</td>\n",
11165
       "      <td>-0.7</td>\n",
11166
       "      <td>0.338983</td>\n",
11167
       "      <td>22</td>\n",
11168
       "      <td>-0.813559</td>\n",
11169
       "      <td>1.6010</td>\n",
11170
       "      <td>1.6005</td>\n",
11171
       "      <td>3</td>\n",
11172
       "      <td>3</td>\n",
11173
       "      <td>0.0</td>\n",
11174
       "      <td>1.0</td>\n",
11175
       "      <td>0.0</td>\n",
11176
       "      <td>0.0</td>\n",
11177
       "      <td>0.0</td>\n",
11178
       "      <td>0.0</td>\n",
11179
       "      <td>3</td>\n",
11180
       "      <td>-0.655093</td>\n",
11181
       "      <td>1.259268</td>\n",
11182
       "      <td>0.0</td>\n",
11183
       "      <td>-0.599615</td>\n",
11184
       "      <td>0</td>\n",
11185
       "      <td>1</td>\n",
11186
       "      <td>False</td>\n",
11187
       "      <td>NaN</td>\n",
11188
       "      <td>NaN</td>\n",
11189
       "      <td>NaN</td>\n",
11190
       "      <td>NaN</td>\n",
11191
       "      <td>NaN</td>\n",
11192
       "      <td>NaN</td>\n",
11193
       "      <td>False</td>\n",
11194
       "      <td>-0.5</td>\n",
11195
       "      <td>55353</td>\n",
11196
       "      <td>0.993623</td>\n",
11197
       "    </tr>\n",
11198
       "    <tr>\n",
11199
       "      <td>2113</td>\n",
11200
       "      <td>ed2a8477b</td>\n",
11201
       "      <td>ID_ed2a8477b</td>\n",
11202
       "      <td>CT</td>\n",
11203
       "      <td>ID_f6723c35</td>\n",
11204
       "      <td>ID_fc07fac521</td>\n",
11205
       "      <td>ID_cfd350c878</td>\n",
11206
       "      <td>NaN</td>\n",
11207
       "      <td>['-125.000', '-118.558', '141.222']</td>\n",
11208
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
11209
       "      <td>1</td>\n",
11210
       "      <td>MONOCHROME2</td>\n",
11211
       "      <td>512</td>\n",
11212
       "      <td>512</td>\n",
11213
       "      <td>['0.488281', '0.488281']</td>\n",
11214
       "      <td>16</td>\n",
11215
       "      <td>16</td>\n",
11216
       "      <td>15</td>\n",
11217
       "      <td>1</td>\n",
11218
       "      <td>40</td>\n",
11219
       "      <td>150</td>\n",
11220
       "      <td>-1024.0</td>\n",
11221
       "      <td>1.0</td>\n",
11222
       "      <td>-0.064</td>\n",
11223
       "      <td>0.169333</td>\n",
11224
       "      <td>0.193354</td>\n",
11225
       "      <td>-0.226621</td>\n",
11226
       "      <td>False</td>\n",
11227
       "      <td>True</td>\n",
11228
       "      <td>1.0</td>\n",
11229
       "      <td>0.0</td>\n",
11230
       "      <td>0.0</td>\n",
11231
       "      <td>0.0</td>\n",
11232
       "      <td>0.961262</td>\n",
11233
       "      <td>-0.275637</td>\n",
11234
       "      <td>-125.0</td>\n",
11235
       "      <td>-118.558</td>\n",
11236
       "      <td>141.222</td>\n",
11237
       "      <td>0.488281</td>\n",
11238
       "      <td>0.488281</td>\n",
11239
       "      <td>40.0</td>\n",
11240
       "      <td>NaN</td>\n",
11241
       "      <td>True</td>\n",
11242
       "      <td>150.0</td>\n",
11243
       "      <td>NaN</td>\n",
11244
       "      <td>1</td>\n",
11245
       "      <td>1</td>\n",
11246
       "      <td>3</td>\n",
11247
       "      <td>0</td>\n",
11248
       "      <td>2.15016</td>\n",
11249
       "      <td>0.0</td>\n",
11250
       "      <td>...</td>\n",
11251
       "      <td>1.495753</td>\n",
11252
       "      <td>0.0</td>\n",
11253
       "      <td>False</td>\n",
11254
       "      <td>-0.72</td>\n",
11255
       "      <td>1.0</td>\n",
11256
       "      <td>0.0</td>\n",
11257
       "      <td>1.0</td>\n",
11258
       "      <td>1.0</td>\n",
11259
       "      <td>-0.714107</td>\n",
11260
       "      <td>0.0</td>\n",
11261
       "      <td>-0.036931</td>\n",
11262
       "      <td>0.0</td>\n",
11263
       "      <td>-0.48</td>\n",
11264
       "      <td>1.0</td>\n",
11265
       "      <td>False</td>\n",
11266
       "      <td>2</td>\n",
11267
       "      <td>0.68972</td>\n",
11268
       "      <td>0.127984</td>\n",
11269
       "      <td>-0.7</td>\n",
11270
       "      <td>0.271186</td>\n",
11271
       "      <td>21</td>\n",
11272
       "      <td>-0.745763</td>\n",
11273
       "      <td>1.6005</td>\n",
11274
       "      <td>1.6010</td>\n",
11275
       "      <td>3</td>\n",
11276
       "      <td>3</td>\n",
11277
       "      <td>0.0</td>\n",
11278
       "      <td>1.0</td>\n",
11279
       "      <td>0.0</td>\n",
11280
       "      <td>0.0</td>\n",
11281
       "      <td>0.0</td>\n",
11282
       "      <td>0.0</td>\n",
11283
       "      <td>3</td>\n",
11284
       "      <td>-0.655093</td>\n",
11285
       "      <td>1.111098</td>\n",
11286
       "      <td>0.0</td>\n",
11287
       "      <td>-0.599615</td>\n",
11288
       "      <td>0</td>\n",
11289
       "      <td>1</td>\n",
11290
       "      <td>False</td>\n",
11291
       "      <td>NaN</td>\n",
11292
       "      <td>NaN</td>\n",
11293
       "      <td>NaN</td>\n",
11294
       "      <td>NaN</td>\n",
11295
       "      <td>NaN</td>\n",
11296
       "      <td>NaN</td>\n",
11297
       "      <td>False</td>\n",
11298
       "      <td>-0.5</td>\n",
11299
       "      <td>55352</td>\n",
11300
       "      <td>0.993728</td>\n",
11301
       "    </tr>\n",
11302
       "    <tr>\n",
11303
       "      <td>61829</td>\n",
11304
       "      <td>b2d64d052</td>\n",
11305
       "      <td>ID_b2d64d052</td>\n",
11306
       "      <td>CT</td>\n",
11307
       "      <td>ID_f6723c35</td>\n",
11308
       "      <td>ID_fc07fac521</td>\n",
11309
       "      <td>ID_cfd350c878</td>\n",
11310
       "      <td>NaN</td>\n",
11311
       "      <td>['-125.000', '-118.558', '136.021']</td>\n",
11312
       "      <td>['1.000000', '0.000000', '0.000000', '0.000000...</td>\n",
11313
       "      <td>1</td>\n",
11314
       "      <td>MONOCHROME2</td>\n",
11315
       "      <td>512</td>\n",
11316
       "      <td>512</td>\n",
11317
       "      <td>['0.488281', '0.488281']</td>\n",
11318
       "      <td>16</td>\n",
11319
       "      <td>16</td>\n",
11320
       "      <td>15</td>\n",
11321
       "      <td>1</td>\n",
11322
       "      <td>40</td>\n",
11323
       "      <td>150</td>\n",
11324
       "      <td>-1024.0</td>\n",
11325
       "      <td>1.0</td>\n",
11326
       "      <td>-0.064</td>\n",
11327
       "      <td>0.148000</td>\n",
11328
       "      <td>0.208909</td>\n",
11329
       "      <td>-0.156506</td>\n",
11330
       "      <td>False</td>\n",
11331
       "      <td>True</td>\n",
11332
       "      <td>1.0</td>\n",
11333
       "      <td>0.0</td>\n",
11334
       "      <td>0.0</td>\n",
11335
       "      <td>0.0</td>\n",
11336
       "      <td>0.961262</td>\n",
11337
       "      <td>-0.275637</td>\n",
11338
       "      <td>-125.0</td>\n",
11339
       "      <td>-118.558</td>\n",
11340
       "      <td>136.021</td>\n",
11341
       "      <td>0.488281</td>\n",
11342
       "      <td>0.488281</td>\n",
11343
       "      <td>40.0</td>\n",
11344
       "      <td>NaN</td>\n",
11345
       "      <td>True</td>\n",
11346
       "      <td>150.0</td>\n",
11347
       "      <td>NaN</td>\n",
11348
       "      <td>1</td>\n",
11349
       "      <td>1</td>\n",
11350
       "      <td>3</td>\n",
11351
       "      <td>0</td>\n",
11352
       "      <td>2.15016</td>\n",
11353
       "      <td>0.0</td>\n",
11354
       "      <td>...</td>\n",
11355
       "      <td>1.495753</td>\n",
11356
       "      <td>0.0</td>\n",
11357
       "      <td>False</td>\n",
11358
       "      <td>-0.72</td>\n",
11359
       "      <td>1.0</td>\n",
11360
       "      <td>0.0</td>\n",
11361
       "      <td>1.0</td>\n",
11362
       "      <td>1.0</td>\n",
11363
       "      <td>-0.714107</td>\n",
11364
       "      <td>0.0</td>\n",
11365
       "      <td>-0.044383</td>\n",
11366
       "      <td>0.0</td>\n",
11367
       "      <td>-0.48</td>\n",
11368
       "      <td>1.0</td>\n",
11369
       "      <td>False</td>\n",
11370
       "      <td>2</td>\n",
11371
       "      <td>0.68972</td>\n",
11372
       "      <td>0.127984</td>\n",
11373
       "      <td>-0.7</td>\n",
11374
       "      <td>0.203390</td>\n",
11375
       "      <td>20</td>\n",
11376
       "      <td>-0.677966</td>\n",
11377
       "      <td>1.6005</td>\n",
11378
       "      <td>1.6005</td>\n",
11379
       "      <td>3</td>\n",
11380
       "      <td>3</td>\n",
11381
       "      <td>0.0</td>\n",
11382
       "      <td>1.0</td>\n",
11383
       "      <td>0.0</td>\n",
11384
       "      <td>0.0</td>\n",
11385
       "      <td>0.0</td>\n",
11386
       "      <td>0.0</td>\n",
11387
       "      <td>3</td>\n",
11388
       "      <td>-0.655093</td>\n",
11389
       "      <td>0.962958</td>\n",
11390
       "      <td>0.0</td>\n",
11391
       "      <td>-0.599615</td>\n",
11392
       "      <td>0</td>\n",
11393
       "      <td>1</td>\n",
11394
       "      <td>False</td>\n",
11395
       "      <td>NaN</td>\n",
11396
       "      <td>NaN</td>\n",
11397
       "      <td>NaN</td>\n",
11398
       "      <td>NaN</td>\n",
11399
       "      <td>NaN</td>\n",
11400
       "      <td>NaN</td>\n",
11401
       "      <td>False</td>\n",
11402
       "      <td>-0.5</td>\n",
11403
       "      <td>55351</td>\n",
11404
       "      <td>0.993775</td>\n",
11405
       "    </tr>\n",
11406
       "  </tbody>\n",
11407
       "</table>\n",
11408
       "<p>5 rows × 101 columns</p>\n",
11409
       "</div>"
11410
      ],
11411
      "text/plain": [
11412
       "          img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
11413
       "46433  f3a75309f   ID_f3a75309f       CT  ID_f6723c35    ID_fc07fac521   \n",
11414
       "59590  dc7e09cbd   ID_dc7e09cbd       CT  ID_f6723c35    ID_fc07fac521   \n",
11415
       "33815  397e899f6   ID_397e899f6       CT  ID_f6723c35    ID_fc07fac521   \n",
11416
       "2113   ed2a8477b   ID_ed2a8477b       CT  ID_f6723c35    ID_fc07fac521   \n",
11417
       "61829  b2d64d052   ID_b2d64d052       CT  ID_f6723c35    ID_fc07fac521   \n",
11418
       "\n",
11419
       "      SeriesInstanceUID  StudyID                 ImagePositionPatient  \\\n",
11420
       "46433     ID_cfd350c878      NaN  ['-125.000', '-118.558', '151.625']   \n",
11421
       "59590     ID_cfd350c878      NaN  ['-125.000', '-118.558', '130.820']   \n",
11422
       "33815     ID_cfd350c878      NaN  ['-125.000', '-118.558', '146.424']   \n",
11423
       "2113      ID_cfd350c878      NaN  ['-125.000', '-118.558', '141.222']   \n",
11424
       "61829     ID_cfd350c878      NaN  ['-125.000', '-118.558', '136.021']   \n",
11425
       "\n",
11426
       "                                 ImageOrientationPatient  SamplesPerPixel  \\\n",
11427
       "46433  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
11428
       "59590  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
11429
       "33815  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
11430
       "2113   ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
11431
       "61829  ['1.000000', '0.000000', '0.000000', '0.000000...                1   \n",
11432
       "\n",
11433
       "      PhotometricInterpretation  Rows  Columns              PixelSpacing  \\\n",
11434
       "46433               MONOCHROME2   512      512  ['0.488281', '0.488281']   \n",
11435
       "59590               MONOCHROME2   512      512  ['0.488281', '0.488281']   \n",
11436
       "33815               MONOCHROME2   512      512  ['0.488281', '0.488281']   \n",
11437
       "2113                MONOCHROME2   512      512  ['0.488281', '0.488281']   \n",
11438
       "61829               MONOCHROME2   512      512  ['0.488281', '0.488281']   \n",
11439
       "\n",
11440
       "       BitsAllocated  BitsStored  HighBit  PixelRepresentation WindowCenter  \\\n",
11441
       "46433             16          16       15                    1           40   \n",
11442
       "59590             16          16       15                    1           40   \n",
11443
       "33815             16          16       15                    1           40   \n",
11444
       "2113              16          16       15                    1           40   \n",
11445
       "61829             16          16       15                    1           40   \n",
11446
       "\n",
11447
       "      WindowWidth  RescaleIntercept  RescaleSlope  PxlMin    PxlMax    PxlStd  \\\n",
11448
       "46433         150           -1024.0           1.0  -0.064  0.292000  0.113617   \n",
11449
       "59590         150           -1024.0           1.0  -0.064  0.180000  0.216861   \n",
11450
       "33815         150           -1024.0           1.0  -0.064  0.218667  0.157382   \n",
11451
       "2113          150           -1024.0           1.0  -0.064  0.169333  0.193354   \n",
11452
       "61829         150           -1024.0           1.0  -0.064  0.148000  0.208909   \n",
11453
       "\n",
11454
       "        PxlMean   test  test2  ImageOrientationPatient_0  \\\n",
11455
       "46433 -0.426832  False   True                        1.0   \n",
11456
       "59590 -0.102710  False   True                        1.0   \n",
11457
       "33815 -0.319805  False   True                        1.0   \n",
11458
       "2113  -0.226621  False   True                        1.0   \n",
11459
       "61829 -0.156506  False   True                        1.0   \n",
11460
       "\n",
11461
       "       ImageOrientationPatient_1  ImageOrientationPatient_2  \\\n",
11462
       "46433                        0.0                        0.0   \n",
11463
       "59590                        0.0                        0.0   \n",
11464
       "33815                        0.0                        0.0   \n",
11465
       "2113                         0.0                        0.0   \n",
11466
       "61829                        0.0                        0.0   \n",
11467
       "\n",
11468
       "       ImageOrientationPatient_3  ImageOrientationPatient_4  \\\n",
11469
       "46433                        0.0                   0.961262   \n",
11470
       "59590                        0.0                   0.961262   \n",
11471
       "33815                        0.0                   0.961262   \n",
11472
       "2113                         0.0                   0.961262   \n",
11473
       "61829                        0.0                   0.961262   \n",
11474
       "\n",
11475
       "       ImageOrientationPatient_5  ImagePositionPatient_0  \\\n",
11476
       "46433                  -0.275637                  -125.0   \n",
11477
       "59590                  -0.275637                  -125.0   \n",
11478
       "33815                  -0.275637                  -125.0   \n",
11479
       "2113                   -0.275637                  -125.0   \n",
11480
       "61829                  -0.275637                  -125.0   \n",
11481
       "\n",
11482
       "       ImagePositionPatient_1  ImagePositionPatient_2  PixelSpacing_0  \\\n",
11483
       "46433                -118.558                 151.625        0.488281   \n",
11484
       "59590                -118.558                 130.820        0.488281   \n",
11485
       "33815                -118.558                 146.424        0.488281   \n",
11486
       "2113                 -118.558                 141.222        0.488281   \n",
11487
       "61829                -118.558                 136.021        0.488281   \n",
11488
       "\n",
11489
       "       PixelSpacing_1  WindowCenter_0  WindowCenter_1  WindowCenter_1_NAN  \\\n",
11490
       "46433        0.488281            40.0             NaN                True   \n",
11491
       "59590        0.488281            40.0             NaN                True   \n",
11492
       "33815        0.488281            40.0             NaN                True   \n",
11493
       "2113         0.488281            40.0             NaN                True   \n",
11494
       "61829        0.488281            40.0             NaN                True   \n",
11495
       "\n",
11496
       "       WindowWidth_0  WindowWidth_1  WindowWidth_0_le  WindowWidth_1_le  \\\n",
11497
       "46433          150.0            NaN                 1                 1   \n",
11498
       "59590          150.0            NaN                 1                 1   \n",
11499
       "33815          150.0            NaN                 1                 1   \n",
11500
       "2113           150.0            NaN                 1                 1   \n",
11501
       "61829          150.0            NaN                 1                 1   \n",
11502
       "\n",
11503
       "       WindowCenter_1_le  BitType_le  ImageOrientationPatient_4_f  \\\n",
11504
       "46433                  3           0                      2.15016   \n",
11505
       "59590                  3           0                      2.15016   \n",
11506
       "33815                  3           0                      2.15016   \n",
11507
       "2113                   3           0                      2.15016   \n",
11508
       "61829                  3           0                      2.15016   \n",
11509
       "\n",
11510
       "       ImageOrientationPatient_4_enc_0  ...  ImageOrientationPatient_5_f  \\\n",
11511
       "46433                              0.0  ...                     1.495753   \n",
11512
       "59590                              0.0  ...                     1.495753   \n",
11513
       "33815                              0.0  ...                     1.495753   \n",
11514
       "2113                               0.0  ...                     1.495753   \n",
11515
       "61829                              0.0  ...                     1.495753   \n",
11516
       "\n",
11517
       "       ImageOrientationPatient_5_enc_0  ImageOrientationPatient_5_enc_1  \\\n",
11518
       "46433                              0.0                            False   \n",
11519
       "59590                              0.0                            False   \n",
11520
       "33815                              0.0                            False   \n",
11521
       "2113                               0.0                            False   \n",
11522
       "61829                              0.0                            False   \n",
11523
       "\n",
11524
       "       ImagePositionPatient_0_f  ImagePositionPatient_0_enc_0  \\\n",
11525
       "46433                     -0.72                           1.0   \n",
11526
       "59590                     -0.72                           1.0   \n",
11527
       "33815                     -0.72                           1.0   \n",
11528
       "2113                      -0.72                           1.0   \n",
11529
       "61829                     -0.72                           1.0   \n",
11530
       "\n",
11531
       "       ImagePositionPatient_0_enc_1  ImagePositionPatient_0_f_r1  \\\n",
11532
       "46433                           0.0                          1.0   \n",
11533
       "59590                           0.0                          1.0   \n",
11534
       "33815                           0.0                          1.0   \n",
11535
       "2113                            0.0                          1.0   \n",
11536
       "61829                           0.0                          1.0   \n",
11537
       "\n",
11538
       "       ImagePositionPatient_0_f_r05  ImagePositionPatient_1_f  \\\n",
11539
       "46433                           1.0                 -0.714107   \n",
11540
       "59590                           1.0                 -0.714107   \n",
11541
       "33815                           1.0                 -0.714107   \n",
11542
       "2113                            1.0                 -0.714107   \n",
11543
       "61829                           1.0                 -0.714107   \n",
11544
       "\n",
11545
       "       ImagePositionPatient_1_enc_0  ImagePositionPatient_2_f  \\\n",
11546
       "46433                           0.0                 -0.022027   \n",
11547
       "59590                           0.0                 -0.051834   \n",
11548
       "33815                           0.0                 -0.029479   \n",
11549
       "2113                            0.0                 -0.036931   \n",
11550
       "61829                           0.0                 -0.044383   \n",
11551
       "\n",
11552
       "       ImagePositionPatient_2_f_r05  PixelSpacing_1_f  PixelSpacing_1_enc_0  \\\n",
11553
       "46433                           0.0             -0.48                   1.0   \n",
11554
       "59590                           0.0             -0.48                   1.0   \n",
11555
       "33815                           0.0             -0.48                   1.0   \n",
11556
       "2113                            0.0             -0.48                   1.0   \n",
11557
       "61829                           0.0             -0.48                   1.0   \n",
11558
       "\n",
11559
       "       PixelSpacing_1_enc_1  WindowCenter_0_le  pos_max   pos_min  pos_size  \\\n",
11560
       "46433                 False                  2  0.68972  0.127984      -0.7   \n",
11561
       "59590                 False                  2  0.68972  0.127984      -0.7   \n",
11562
       "33815                 False                  2  0.68972  0.127984      -0.7   \n",
11563
       "2113                  False                  2  0.68972  0.127984      -0.7   \n",
11564
       "61829                 False                  2  0.68972  0.127984      -0.7   \n",
11565
       "\n",
11566
       "       pos_idx1  pos_idx  pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  \\\n",
11567
       "46433  0.406780       23 -0.881356    1.6005    1.6005                3   \n",
11568
       "59590  0.135593       19 -0.610169    1.6005    1.6005                3   \n",
11569
       "33815  0.338983       22 -0.813559    1.6010    1.6005                3   \n",
11570
       "2113   0.271186       21 -0.745763    1.6005    1.6010                3   \n",
11571
       "61829  0.203390       20 -0.677966    1.6005    1.6005                3   \n",
11572
       "\n",
11573
       "       pos_inc2_grp_le  pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  \\\n",
11574
       "46433                3          0.0             1.0             0.0   \n",
11575
       "59590                3          0.0             1.0             0.0   \n",
11576
       "33815                3          0.0             1.0             0.0   \n",
11577
       "2113                 3          0.0             1.0             0.0   \n",
11578
       "61829                3          0.0             1.0             0.0   \n",
11579
       "\n",
11580
       "       pos_inc2_enc_0  pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  pos_range  \\\n",
11581
       "46433             0.0             0.0             0.0            3  -0.655093   \n",
11582
       "59590             0.0             0.0             0.0            3  -0.655093   \n",
11583
       "33815             0.0             0.0             0.0            3  -0.655093   \n",
11584
       "2113              0.0             0.0             0.0            3  -0.655093   \n",
11585
       "61829             0.0             0.0             0.0            3  -0.655093   \n",
11586
       "\n",
11587
       "        pos_rel  pos_zeros  pos_inc_rng  pos_zeros_le  PxlMin_grp_le  \\\n",
11588
       "46433  1.407408        0.0    -0.599615             0              1   \n",
11589
       "59590  0.814817        0.0    -0.599615             0              1   \n",
11590
       "33815  1.259268        0.0    -0.599615             0              1   \n",
11591
       "2113   1.111098        0.0    -0.599615             0              1   \n",
11592
       "61829  0.962958        0.0    -0.599615             0              1   \n",
11593
       "\n",
11594
       "       PxlMin_zero  any  epidural  intraparenchymal  intraventricular  \\\n",
11595
       "46433        False  NaN       NaN               NaN               NaN   \n",
11596
       "59590        False  NaN       NaN               NaN               NaN   \n",
11597
       "33815        False  NaN       NaN               NaN               NaN   \n",
11598
       "2113         False  NaN       NaN               NaN               NaN   \n",
11599
       "61829        False  NaN       NaN               NaN               NaN   \n",
11600
       "\n",
11601
       "       subarachnoid  subdural  any_series  SeriesPP  yuval_idx  pred_any  \n",
11602
       "46433           NaN       NaN       False      -0.5      55354  0.993246  \n",
11603
       "59590           NaN       NaN       False      -0.5      55350  0.993314  \n",
11604
       "33815           NaN       NaN       False      -0.5      55353  0.993623  \n",
11605
       "2113            NaN       NaN       False      -0.5      55352  0.993728  \n",
11606
       "61829           NaN       NaN       False      -0.5      55351  0.993775  \n",
11607
       "\n",
11608
       "[5 rows x 101 columns]"
11609
      ]
11610
     },
11611
     "execution_count": 67,
11612
     "metadata": {},
11613
     "output_type": "execute_result"
11614
    }
11615
   ],
11616
   "source": [
11617
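    "# take column 4 of predictions as pred_any and list the five test rows with the highest values\n",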
    "test_md['pred_any'] = predictions[:,4]\n",
11618
    "test_md.sort_values('pred_any').tail()"
11619
   ]
11620
  },
11621
  {
11622
   "cell_type": "code",
11623
   "execution_count": 68,
11624
   "metadata": {},
11625
   "outputs": [
11626
    {
11627
     "data": {
11628
      "text/html": [
11629
       "<div>\n",
11630
       "<style scoped>\n",
11631
       "    .dataframe tbody tr th:only-of-type {\n",
11632
       "        vertical-align: middle;\n",
11633
       "    }\n",
11634
       "\n",
11635
       "    .dataframe tbody tr th {\n",
11636
       "        vertical-align: top;\n",
11637
       "    }\n",
11638
       "\n",
11639
       "    .dataframe thead th {\n",
11640
       "        text-align: right;\n",
11641
       "    }\n",
11642
       "</style>\n",
11643
       "<table border=\"1\" class=\"dataframe\">\n",
11644
       "  <thead>\n",
11645
       "    <tr style=\"text-align: right;\">\n",
11646
       "      <th></th>\n",
11647
       "      <th>img_id</th>\n",
11648
       "      <th>SOPInstanceUID</th>\n",
11649
       "      <th>Modality</th>\n",
11650
       "      <th>PatientID</th>\n",
11651
       "      <th>StudyInstanceUID</th>\n",
11652
       "      <th>SeriesInstanceUID</th>\n",
11653
       "      <th>StudyID</th>\n",
11654
       "      <th>ImagePositionPatient</th>\n",
11655
       "      <th>ImageOrientationPatient</th>\n",
11656
       "      <th>SamplesPerPixel</th>\n",
11657
       "      <th>PhotometricInterpretation</th>\n",
11658
       "      <th>Rows</th>\n",
11659
       "      <th>Columns</th>\n",
11660
       "      <th>PixelSpacing</th>\n",
11661
       "      <th>BitsAllocated</th>\n",
11662
       "      <th>BitsStored</th>\n",
11663
       "      <th>HighBit</th>\n",
11664
       "      <th>PixelRepresentation</th>\n",
11665
       "      <th>WindowCenter</th>\n",
11666
       "      <th>WindowWidth</th>\n",
11667
       "      <th>RescaleIntercept</th>\n",
11668
       "      <th>RescaleSlope</th>\n",
11669
       "      <th>PxlMin</th>\n",
11670
       "      <th>PxlMax</th>\n",
11671
       "      <th>PxlStd</th>\n",
11672
       "      <th>PxlMean</th>\n",
11673
       "      <th>test</th>\n",
11674
       "      <th>test2</th>\n",
11675
       "      <th>ImageOrientationPatient_0</th>\n",
11676
       "      <th>ImageOrientationPatient_1</th>\n",
11677
       "      <th>ImageOrientationPatient_2</th>\n",
11678
       "      <th>ImageOrientationPatient_3</th>\n",
11679
       "      <th>ImageOrientationPatient_4</th>\n",
11680
       "      <th>ImageOrientationPatient_5</th>\n",
11681
       "      <th>ImagePositionPatient_0</th>\n",
11682
       "      <th>ImagePositionPatient_1</th>\n",
11683
       "      <th>ImagePositionPatient_2</th>\n",
11684
       "      <th>PixelSpacing_0</th>\n",
11685
       "      <th>PixelSpacing_1</th>\n",
11686
       "      <th>WindowCenter_0</th>\n",
11687
       "      <th>WindowCenter_1</th>\n",
11688
       "      <th>WindowCenter_1_NAN</th>\n",
11689
       "      <th>WindowWidth_0</th>\n",
11690
       "      <th>WindowWidth_1</th>\n",
11691
       "      <th>WindowWidth_0_le</th>\n",
11692
       "      <th>WindowWidth_1_le</th>\n",
11693
       "      <th>WindowCenter_1_le</th>\n",
11694
       "      <th>BitType_le</th>\n",
11695
       "      <th>ImageOrientationPatient_4_f</th>\n",
11696
       "      <th>ImageOrientationPatient_4_enc_0</th>\n",
11697
       "      <th>...</th>\n",
11698
       "      <th>ImageOrientationPatient_5_f</th>\n",
11699
       "      <th>ImageOrientationPatient_5_enc_0</th>\n",
11700
       "      <th>ImageOrientationPatient_5_enc_1</th>\n",
11701
       "      <th>ImagePositionPatient_0_f</th>\n",
11702
       "      <th>ImagePositionPatient_0_enc_0</th>\n",
11703
       "      <th>ImagePositionPatient_0_enc_1</th>\n",
11704
       "      <th>ImagePositionPatient_0_f_r1</th>\n",
11705
       "      <th>ImagePositionPatient_0_f_r05</th>\n",
11706
       "      <th>ImagePositionPatient_1_f</th>\n",
11707
       "      <th>ImagePositionPatient_1_enc_0</th>\n",
11708
       "      <th>ImagePositionPatient_2_f</th>\n",
11709
       "      <th>ImagePositionPatient_2_f_r05</th>\n",
11710
       "      <th>PixelSpacing_1_f</th>\n",
11711
       "      <th>PixelSpacing_1_enc_0</th>\n",
11712
       "      <th>PixelSpacing_1_enc_1</th>\n",
11713
       "      <th>WindowCenter_0_le</th>\n",
11714
       "      <th>pos_max</th>\n",
11715
       "      <th>pos_min</th>\n",
11716
       "      <th>pos_size</th>\n",
11717
       "      <th>pos_idx1</th>\n",
11718
       "      <th>pos_idx</th>\n",
11719
       "      <th>pos_idx2</th>\n",
11720
       "      <th>pos_inc1</th>\n",
11721
       "      <th>pos_inc2</th>\n",
11722
       "      <th>pos_inc1_grp_le</th>\n",
11723
       "      <th>pos_inc2_grp_le</th>\n",
11724
       "      <th>pos_inc1_r1</th>\n",
11725
       "      <th>pos_inc1_r0001</th>\n",
11726
       "      <th>pos_inc1_enc_0</th>\n",
11727
       "      <th>pos_inc2_enc_0</th>\n",
11728
       "      <th>pos_inc1_enc_1</th>\n",
11729
       "      <th>pos_inc2_enc_1</th>\n",
11730
       "      <th>pos_size_le</th>\n",
11731
       "      <th>pos_range</th>\n",
11732
       "      <th>pos_rel</th>\n",
11733
       "      <th>pos_zeros</th>\n",
11734
       "      <th>pos_inc_rng</th>\n",
11735
       "      <th>pos_zeros_le</th>\n",
11736
       "      <th>PxlMin_grp_le</th>\n",
11737
       "      <th>PxlMin_zero</th>\n",
11738
       "      <th>any</th>\n",
11739
       "      <th>epidural</th>\n",
11740
       "      <th>intraparenchymal</th>\n",
11741
       "      <th>intraventricular</th>\n",
11742
       "      <th>subarachnoid</th>\n",
11743
       "      <th>subdural</th>\n",
11744
       "      <th>any_series</th>\n",
11745
       "      <th>SeriesPP</th>\n",
11746
       "      <th>yuval_idx</th>\n",
11747
       "      <th>pred_any</th>\n",
11748
       "    </tr>\n",
11749
       "  </thead>\n",
11750
       "  <tbody>\n",
11751
       "    <tr>\n",
11752
       "      <td>9661</td>\n",
11753
       "      <td>feb0a9076</td>\n",
11754
       "      <td>ID_feb0a9076</td>\n",
11755
       "      <td>CT</td>\n",
11756
       "      <td>ID_252323d1</td>\n",
11757
       "      <td>ID_0257ad04c2</td>\n",
11758
       "      <td>ID_58b46714eb</td>\n",
11759
       "      <td>NaN</td>\n",
11760
       "      <td>['-125', '24.4169536', '204.228683']</td>\n",
11761
       "      <td>['1', '0', '0', '0', '0.990268069', '-0.139173...</td>\n",
11762
       "      <td>1</td>\n",
11763
       "      <td>MONOCHROME2</td>\n",
11764
       "      <td>512</td>\n",
11765
       "      <td>512</td>\n",
11766
       "      <td>['0.48828125', '0.48828125']</td>\n",
11767
       "      <td>16</td>\n",
11768
       "      <td>12</td>\n",
11769
       "      <td>11</td>\n",
11770
       "      <td>0</td>\n",
11771
       "      <td>['00040', '00040']</td>\n",
11772
       "      <td>['00080', '00080']</td>\n",
11773
       "      <td>-1024.0</td>\n",
11774
       "      <td>1.0</td>\n",
11775
       "      <td>1.301333</td>\n",
11776
       "      <td>0.126667</td>\n",
11777
       "      <td>-0.761993</td>\n",
11778
       "      <td>1.166719</td>\n",
11779
       "      <td>False</td>\n",
11780
       "      <td>True</td>\n",
11781
       "      <td>1.0</td>\n",
11782
       "      <td>0.0</td>\n",
11783
       "      <td>0.0</td>\n",
11784
       "      <td>0.0</td>\n",
11785
       "      <td>0.990268</td>\n",
11786
       "      <td>-0.139173</td>\n",
11787
       "      <td>-125.0</td>\n",
11788
       "      <td>24.416954</td>\n",
11789
       "      <td>204.228683</td>\n",
11790
       "      <td>0.488281</td>\n",
11791
       "      <td>0.488281</td>\n",
11792
       "      <td>40.0</td>\n",
11793
       "      <td>40.0</td>\n",
11794
       "      <td>False</td>\n",
11795
       "      <td>80.0</td>\n",
11796
       "      <td>80.0</td>\n",
11797
       "      <td>0</td>\n",
11798
       "      <td>0</td>\n",
11799
       "      <td>1</td>\n",
11800
       "      <td>1</td>\n",
11801
       "      <td>2.536908</td>\n",
11802
       "      <td>0.0</td>\n",
11803
       "      <td>...</td>\n",
11804
       "      <td>2.405513</td>\n",
11805
       "      <td>0.0</td>\n",
11806
       "      <td>False</td>\n",
11807
       "      <td>-0.72</td>\n",
11808
       "      <td>1.0</td>\n",
11809
       "      <td>0.0</td>\n",
11810
       "      <td>1.0</td>\n",
11811
       "      <td>1.0</td>\n",
11812
       "      <td>1.192226</td>\n",
11813
       "      <td>1.0</td>\n",
11814
       "      <td>0.053336</td>\n",
11815
       "      <td>0.0</td>\n",
11816
       "      <td>-0.48</td>\n",
11817
       "      <td>1.0</td>\n",
11818
       "      <td>False</td>\n",
11819
       "      <td>2</td>\n",
11820
       "      <td>1.099715</td>\n",
11821
       "      <td>0.473493</td>\n",
11822
       "      <td>-0.3</td>\n",
11823
       "      <td>0.000000</td>\n",
11824
       "      <td>17</td>\n",
11825
       "      <td>-0.203390</td>\n",
11826
       "      <td>1.527710</td>\n",
11827
       "      <td>1.522339</td>\n",
11828
       "      <td>3</td>\n",
11829
       "      <td>3</td>\n",
11830
       "      <td>0.0</td>\n",
11831
       "      <td>0.0</td>\n",
11832
       "      <td>0.0</td>\n",
11833
       "      <td>0.0</td>\n",
11834
       "      <td>0.0</td>\n",
11835
       "      <td>0.0</td>\n",
11836
       "      <td>0</td>\n",
11837
       "      <td>-0.225189</td>\n",
11838
       "      <td>0.193609</td>\n",
11839
       "      <td>0.0</td>\n",
11840
       "      <td>-0.595692</td>\n",
11841
       "      <td>0</td>\n",
11842
       "      <td>2</td>\n",
11843
       "      <td>False</td>\n",
11844
       "      <td>NaN</td>\n",
11845
       "      <td>NaN</td>\n",
11846
       "      <td>NaN</td>\n",
11847
       "      <td>NaN</td>\n",
11848
       "      <td>NaN</td>\n",
11849
       "      <td>NaN</td>\n",
11850
       "      <td>False</td>\n",
11851
       "      <td>-0.5</td>\n",
11852
       "      <td>49823</td>\n",
11853
       "      <td>0.992338</td>\n",
11854
       "    </tr>\n",
11855
       "    <tr>\n",
11856
       "      <td>107285</td>\n",
11857
       "      <td>cfbf38afe</td>\n",
11858
       "      <td>ID_cfbf38afe</td>\n",
11859
       "      <td>CT</td>\n",
11860
       "      <td>ID_6e75b42a</td>\n",
11861
       "      <td>ID_b94674c76f</td>\n",
11862
       "      <td>ID_93d835d9f3</td>\n",
11863
       "      <td>NaN</td>\n",
11864
       "      <td>['-125', '17.5586391', '177.279488']</td>\n",
11865
       "      <td>['1', '0', '0', '0', '0.933580426', '-0.358367...</td>\n",
11866
       "      <td>1</td>\n",
11867
       "      <td>MONOCHROME2</td>\n",
11868
       "      <td>512</td>\n",
11869
       "      <td>512</td>\n",
11870
       "      <td>['0.48828125', '0.48828125']</td>\n",
11871
       "      <td>16</td>\n",
11872
       "      <td>12</td>\n",
11873
       "      <td>11</td>\n",
11874
       "      <td>0</td>\n",
11875
       "      <td>['00040', '00040']</td>\n",
11876
       "      <td>['00080', '00080']</td>\n",
11877
       "      <td>-1024.0</td>\n",
11878
       "      <td>1.0</td>\n",
11879
       "      <td>1.301333</td>\n",
11880
       "      <td>0.098667</td>\n",
11881
       "      <td>-0.807341</td>\n",
11882
       "      <td>1.049002</td>\n",
11883
       "      <td>False</td>\n",
11884
       "      <td>True</td>\n",
11885
       "      <td>1.0</td>\n",
11886
       "      <td>0.0</td>\n",
11887
       "      <td>0.0</td>\n",
11888
       "      <td>0.0</td>\n",
11889
       "      <td>0.933580</td>\n",
11890
       "      <td>-0.358368</td>\n",
11891
       "      <td>-125.0</td>\n",
11892
       "      <td>17.558639</td>\n",
11893
       "      <td>177.279488</td>\n",
11894
       "      <td>0.488281</td>\n",
11895
       "      <td>0.488281</td>\n",
11896
       "      <td>40.0</td>\n",
11897
       "      <td>40.0</td>\n",
11898
       "      <td>False</td>\n",
11899
       "      <td>80.0</td>\n",
11900
       "      <td>80.0</td>\n",
11901
       "      <td>0</td>\n",
11902
       "      <td>0</td>\n",
11903
       "      <td>1</td>\n",
11904
       "      <td>1</td>\n",
11905
       "      <td>1.781072</td>\n",
11906
       "      <td>0.0</td>\n",
11907
       "      <td>...</td>\n",
11908
       "      <td>0.944214</td>\n",
11909
       "      <td>0.0</td>\n",
11910
       "      <td>False</td>\n",
11911
       "      <td>-0.72</td>\n",
11912
       "      <td>1.0</td>\n",
11913
       "      <td>0.0</td>\n",
11914
       "      <td>1.0</td>\n",
11915
       "      <td>1.0</td>\n",
11916
       "      <td>1.100782</td>\n",
11917
       "      <td>1.0</td>\n",
11918
       "      <td>0.014727</td>\n",
11919
       "      <td>0.0</td>\n",
11920
       "      <td>-0.48</td>\n",
11921
       "      <td>1.0</td>\n",
11922
       "      <td>False</td>\n",
11923
       "      <td>2</td>\n",
11924
       "      <td>1.051518</td>\n",
11925
       "      <td>0.388022</td>\n",
11926
       "      <td>-0.3</td>\n",
11927
       "      <td>-0.135593</td>\n",
11928
       "      <td>15</td>\n",
11929
       "      <td>-0.067797</td>\n",
11930
       "      <td>1.687011</td>\n",
11931
       "      <td>1.663025</td>\n",
11932
       "      <td>3</td>\n",
11933
       "      <td>3</td>\n",
11934
       "      <td>0.0</td>\n",
11935
       "      <td>0.0</td>\n",
11936
       "      <td>0.0</td>\n",
11937
       "      <td>0.0</td>\n",
11938
       "      <td>0.0</td>\n",
11939
       "      <td>0.0</td>\n",
11940
       "      <td>0</td>\n",
11941
       "      <td>0.023307</td>\n",
11942
       "      <td>-0.064219</td>\n",
11943
       "      <td>0.0</td>\n",
11944
       "      <td>-0.581939</td>\n",
11945
       "      <td>0</td>\n",
11946
       "      <td>2</td>\n",
11947
       "      <td>False</td>\n",
11948
       "      <td>NaN</td>\n",
11949
       "      <td>NaN</td>\n",
11950
       "      <td>NaN</td>\n",
11951
       "      <td>NaN</td>\n",
11952
       "      <td>NaN</td>\n",
11953
       "      <td>NaN</td>\n",
11954
       "      <td>False</td>\n",
11955
       "      <td>-0.5</td>\n",
11956
       "      <td>74776</td>\n",
11957
       "      <td>0.992465</td>\n",
11958
       "    </tr>\n",
11959
       "    <tr>\n",
11960
       "      <td>81787</td>\n",
11961
       "      <td>95aace9ba</td>\n",
11962
       "      <td>ID_95aace9ba</td>\n",
11963
       "      <td>CT</td>\n",
11964
       "      <td>ID_6e75b42a</td>\n",
11965
       "      <td>ID_b94674c76f</td>\n",
11966
       "      <td>ID_93d835d9f3</td>\n",
11967
       "      <td>NaN</td>\n",
11968
       "      <td>['-125', '17.5586391', '193.305489']</td>\n",
11969
       "      <td>['1', '0', '0', '0', '0.933580426', '-0.358367...</td>\n",
11970
       "      <td>1</td>\n",
11971
       "      <td>MONOCHROME2</td>\n",
11972
       "      <td>512</td>\n",
11973
       "      <td>512</td>\n",
11974
       "      <td>['0.48828125', '0.48828125']</td>\n",
11975
       "      <td>16</td>\n",
11976
       "      <td>12</td>\n",
11977
       "      <td>11</td>\n",
11978
       "      <td>0</td>\n",
11979
       "      <td>['00040', '00040']</td>\n",
11980
       "      <td>['00080', '00080']</td>\n",
11981
       "      <td>-1024.0</td>\n",
11982
       "      <td>1.0</td>\n",
11983
       "      <td>1.301333</td>\n",
11984
       "      <td>0.086667</td>\n",
11985
       "      <td>-0.789830</td>\n",
11986
       "      <td>0.920645</td>\n",
11987
       "      <td>False</td>\n",
11988
       "      <td>True</td>\n",
11989
       "      <td>1.0</td>\n",
11990
       "      <td>0.0</td>\n",
11991
       "      <td>0.0</td>\n",
11992
       "      <td>0.0</td>\n",
11993
       "      <td>0.933580</td>\n",
11994
       "      <td>-0.358368</td>\n",
11995
       "      <td>-125.0</td>\n",
11996
       "      <td>17.558639</td>\n",
11997
       "      <td>193.305489</td>\n",
11998
       "      <td>0.488281</td>\n",
11999
       "      <td>0.488281</td>\n",
12000
       "      <td>40.0</td>\n",
12001
       "      <td>40.0</td>\n",
12002
       "      <td>False</td>\n",
12003
       "      <td>80.0</td>\n",
12004
       "      <td>80.0</td>\n",
12005
       "      <td>0</td>\n",
12006
       "      <td>0</td>\n",
12007
       "      <td>1</td>\n",
12008
       "      <td>1</td>\n",
12009
       "      <td>1.781072</td>\n",
12010
       "      <td>0.0</td>\n",
12011
       "      <td>...</td>\n",
12012
       "      <td>0.944214</td>\n",
12013
       "      <td>0.0</td>\n",
12014
       "      <td>False</td>\n",
12015
       "      <td>-0.72</td>\n",
12016
       "      <td>1.0</td>\n",
12017
       "      <td>0.0</td>\n",
12018
       "      <td>1.0</td>\n",
12019
       "      <td>1.0</td>\n",
12020
       "      <td>1.100782</td>\n",
12021
       "      <td>1.0</td>\n",
12022
       "      <td>0.037687</td>\n",
12023
       "      <td>0.0</td>\n",
12024
       "      <td>-0.48</td>\n",
12025
       "      <td>1.0</td>\n",
12026
       "      <td>False</td>\n",
12027
       "      <td>2</td>\n",
12028
       "      <td>1.051518</td>\n",
12029
       "      <td>0.388022</td>\n",
12030
       "      <td>-0.3</td>\n",
12031
       "      <td>0.067797</td>\n",
12032
       "      <td>18</td>\n",
12033
       "      <td>-0.271186</td>\n",
12034
       "      <td>1.662964</td>\n",
12035
       "      <td>1.687012</td>\n",
12036
       "      <td>3</td>\n",
12037
       "      <td>3</td>\n",
12038
       "      <td>0.0</td>\n",
12039
       "      <td>0.0</td>\n",
12040
       "      <td>0.0</td>\n",
12041
       "      <td>0.0</td>\n",
12042
       "      <td>0.0</td>\n",
12043
       "      <td>0.0</td>\n",
12044
       "      <td>0</td>\n",
12045
       "      <td>0.023307</td>\n",
12046
       "      <td>0.322242</td>\n",
12047
       "      <td>0.0</td>\n",
12048
       "      <td>-0.581939</td>\n",
12049
       "      <td>0</td>\n",
12050
       "      <td>2</td>\n",
12051
       "      <td>False</td>\n",
12052
       "      <td>NaN</td>\n",
12053
       "      <td>NaN</td>\n",
12054
       "      <td>NaN</td>\n",
12055
       "      <td>NaN</td>\n",
12056
       "      <td>NaN</td>\n",
12057
       "      <td>NaN</td>\n",
12058
       "      <td>False</td>\n",
12059
       "      <td>-0.5</td>\n",
12060
       "      <td>74779</td>\n",
12061
       "      <td>0.992568</td>\n",
12062
       "    </tr>\n",
12063
       "    <tr>\n",
12064
       "      <td>87951</td>\n",
12065
       "      <td>5092c392f</td>\n",
12066
       "      <td>ID_5092c392f</td>\n",
12067
       "      <td>CT</td>\n",
12068
       "      <td>ID_6e75b42a</td>\n",
12069
       "      <td>ID_b94674c76f</td>\n",
12070
       "      <td>ID_93d835d9f3</td>\n",
12071
       "      <td>NaN</td>\n",
12072
       "      <td>['-125', '17.5586391', '187.979561']</td>\n",
12073
       "      <td>['1', '0', '0', '0', '0.933580426', '-0.358367...</td>\n",
12074
       "      <td>1</td>\n",
12075
       "      <td>MONOCHROME2</td>\n",
12076
       "      <td>512</td>\n",
12077
       "      <td>512</td>\n",
12078
       "      <td>['0.48828125', '0.48828125']</td>\n",
12079
       "      <td>16</td>\n",
12080
       "      <td>12</td>\n",
12081
       "      <td>11</td>\n",
12082
       "      <td>0</td>\n",
12083
       "      <td>['00040', '00040']</td>\n",
12084
       "      <td>['00080', '00080']</td>\n",
12085
       "      <td>-1024.0</td>\n",
12086
       "      <td>1.0</td>\n",
12087
       "      <td>1.301333</td>\n",
12088
       "      <td>0.130667</td>\n",
12089
       "      <td>-0.782852</td>\n",
12090
       "      <td>0.978311</td>\n",
12091
       "      <td>False</td>\n",
12092
       "      <td>True</td>\n",
12093
       "      <td>1.0</td>\n",
12094
       "      <td>0.0</td>\n",
12095
       "      <td>0.0</td>\n",
12096
       "      <td>0.0</td>\n",
12097
       "      <td>0.933580</td>\n",
12098
       "      <td>-0.358368</td>\n",
12099
       "      <td>-125.0</td>\n",
12100
       "      <td>17.558639</td>\n",
12101
       "      <td>187.979561</td>\n",
12102
       "      <td>0.488281</td>\n",
12103
       "      <td>0.488281</td>\n",
12104
       "      <td>40.0</td>\n",
12105
       "      <td>40.0</td>\n",
12106
       "      <td>False</td>\n",
12107
       "      <td>80.0</td>\n",
12108
       "      <td>80.0</td>\n",
12109
       "      <td>0</td>\n",
12110
       "      <td>0</td>\n",
12111
       "      <td>1</td>\n",
12112
       "      <td>1</td>\n",
12113
       "      <td>1.781072</td>\n",
12114
       "      <td>0.0</td>\n",
12115
       "      <td>...</td>\n",
12116
       "      <td>0.944214</td>\n",
12117
       "      <td>0.0</td>\n",
12118
       "      <td>False</td>\n",
12119
       "      <td>-0.72</td>\n",
12120
       "      <td>1.0</td>\n",
12121
       "      <td>0.0</td>\n",
12122
       "      <td>1.0</td>\n",
12123
       "      <td>1.0</td>\n",
12124
       "      <td>1.100782</td>\n",
12125
       "      <td>1.0</td>\n",
12126
       "      <td>0.030057</td>\n",
12127
       "      <td>0.0</td>\n",
12128
       "      <td>-0.48</td>\n",
12129
       "      <td>1.0</td>\n",
12130
       "      <td>False</td>\n",
12131
       "      <td>2</td>\n",
12132
       "      <td>1.051518</td>\n",
12133
       "      <td>0.388022</td>\n",
12134
       "      <td>-0.3</td>\n",
12135
       "      <td>0.000000</td>\n",
12136
       "      <td>17</td>\n",
12137
       "      <td>-0.203390</td>\n",
12138
       "      <td>1.687011</td>\n",
12139
       "      <td>1.662964</td>\n",
12140
       "      <td>3</td>\n",
12141
       "      <td>3</td>\n",
12142
       "      <td>0.0</td>\n",
12143
       "      <td>0.0</td>\n",
12144
       "      <td>0.0</td>\n",
12145
       "      <td>0.0</td>\n",
12146
       "      <td>0.0</td>\n",
12147
       "      <td>0.0</td>\n",
12148
       "      <td>0</td>\n",
12149
       "      <td>0.023307</td>\n",
12150
       "      <td>0.193809</td>\n",
12151
       "      <td>0.0</td>\n",
12152
       "      <td>-0.581939</td>\n",
12153
       "      <td>0</td>\n",
12154
       "      <td>2</td>\n",
12155
       "      <td>False</td>\n",
12156
       "      <td>NaN</td>\n",
12157
       "      <td>NaN</td>\n",
12158
       "      <td>NaN</td>\n",
12159
       "      <td>NaN</td>\n",
12160
       "      <td>NaN</td>\n",
12161
       "      <td>NaN</td>\n",
12162
       "      <td>False</td>\n",
12163
       "      <td>-0.5</td>\n",
12164
       "      <td>74778</td>\n",
12165
       "      <td>0.992820</td>\n",
12166
       "    </tr>\n",
12167
       "    <tr>\n",
12168
       "      <td>86533</td>\n",
12169
       "      <td>3a20374d3</td>\n",
12170
       "      <td>ID_3a20374d3</td>\n",
12171
       "      <td>CT</td>\n",
12172
       "      <td>ID_6e75b42a</td>\n",
12173
       "      <td>ID_b94674c76f</td>\n",
12174
       "      <td>ID_93d835d9f3</td>\n",
12175
       "      <td>NaN</td>\n",
12176
       "      <td>['-125', '17.5586391', '182.605538']</td>\n",
12177
       "      <td>['1', '0', '0', '0', '0.933580426', '-0.358367...</td>\n",
12178
       "      <td>1</td>\n",
12179
       "      <td>MONOCHROME2</td>\n",
12180
       "      <td>512</td>\n",
12181
       "      <td>512</td>\n",
12182
       "      <td>['0.48828125', '0.48828125']</td>\n",
12183
       "      <td>16</td>\n",
12184
       "      <td>12</td>\n",
12185
       "      <td>11</td>\n",
12186
       "      <td>0</td>\n",
12187
       "      <td>['00040', '00040']</td>\n",
12188
       "      <td>['00080', '00080']</td>\n",
12189
       "      <td>-1024.0</td>\n",
12190
       "      <td>1.0</td>\n",
12191
       "      <td>1.301333</td>\n",
12192
       "      <td>0.124000</td>\n",
12193
       "      <td>-0.788199</td>\n",
12194
       "      <td>1.016346</td>\n",
12195
       "      <td>False</td>\n",
12196
       "      <td>True</td>\n",
12197
       "      <td>1.0</td>\n",
12198
       "      <td>0.0</td>\n",
12199
       "      <td>0.0</td>\n",
12200
       "      <td>0.0</td>\n",
12201
       "      <td>0.933580</td>\n",
12202
       "      <td>-0.358368</td>\n",
12203
       "      <td>-125.0</td>\n",
12204
       "      <td>17.558639</td>\n",
12205
       "      <td>182.605538</td>\n",
12206
       "      <td>0.488281</td>\n",
12207
       "      <td>0.488281</td>\n",
12208
       "      <td>40.0</td>\n",
12209
       "      <td>40.0</td>\n",
12210
       "      <td>False</td>\n",
12211
       "      <td>80.0</td>\n",
12212
       "      <td>80.0</td>\n",
12213
       "      <td>0</td>\n",
12214
       "      <td>0</td>\n",
12215
       "      <td>1</td>\n",
12216
       "      <td>1</td>\n",
12217
       "      <td>1.781072</td>\n",
12218
       "      <td>0.0</td>\n",
12219
       "      <td>...</td>\n",
12220
       "      <td>0.944214</td>\n",
12221
       "      <td>0.0</td>\n",
12222
       "      <td>False</td>\n",
12223
       "      <td>-0.72</td>\n",
12224
       "      <td>1.0</td>\n",
12225
       "      <td>0.0</td>\n",
12226
       "      <td>1.0</td>\n",
12227
       "      <td>1.0</td>\n",
12228
       "      <td>1.100782</td>\n",
12229
       "      <td>1.0</td>\n",
12230
       "      <td>0.022358</td>\n",
12231
       "      <td>0.0</td>\n",
12232
       "      <td>-0.48</td>\n",
12233
       "      <td>1.0</td>\n",
12234
       "      <td>False</td>\n",
12235
       "      <td>2</td>\n",
12236
       "      <td>1.051518</td>\n",
12237
       "      <td>0.388022</td>\n",
12238
       "      <td>-0.3</td>\n",
12239
       "      <td>-0.067797</td>\n",
12240
       "      <td>16</td>\n",
12241
       "      <td>-0.135593</td>\n",
12242
       "      <td>1.663025</td>\n",
12243
       "      <td>1.687011</td>\n",
12244
       "      <td>3</td>\n",
12245
       "      <td>3</td>\n",
12246
       "      <td>0.0</td>\n",
12247
       "      <td>0.0</td>\n",
12248
       "      <td>0.0</td>\n",
12249
       "      <td>0.0</td>\n",
12250
       "      <td>0.0</td>\n",
12251
       "      <td>0.0</td>\n",
12252
       "      <td>0</td>\n",
12253
       "      <td>0.023307</td>\n",
12254
       "      <td>0.064217</td>\n",
12255
       "      <td>0.0</td>\n",
12256
       "      <td>-0.581939</td>\n",
12257
       "      <td>0</td>\n",
12258
       "      <td>2</td>\n",
12259
       "      <td>False</td>\n",
12260
       "      <td>NaN</td>\n",
12261
       "      <td>NaN</td>\n",
12262
       "      <td>NaN</td>\n",
12263
       "      <td>NaN</td>\n",
12264
       "      <td>NaN</td>\n",
12265
       "      <td>NaN</td>\n",
12266
       "      <td>False</td>\n",
12267
       "      <td>-0.5</td>\n",
12268
       "      <td>74777</td>\n",
12269
       "      <td>0.992826</td>\n",
12270
       "    </tr>\n",
12271
       "  </tbody>\n",
12272
       "</table>\n",
12273
       "<p>5 rows × 101 columns</p>\n",
12274
       "</div>"
12275
      ],
12276
      "text/plain": [
12277
       "           img_id SOPInstanceUID Modality    PatientID StudyInstanceUID  \\\n",
12278
       "9661    feb0a9076   ID_feb0a9076       CT  ID_252323d1    ID_0257ad04c2   \n",
12279
       "107285  cfbf38afe   ID_cfbf38afe       CT  ID_6e75b42a    ID_b94674c76f   \n",
12280
       "81787   95aace9ba   ID_95aace9ba       CT  ID_6e75b42a    ID_b94674c76f   \n",
12281
       "87951   5092c392f   ID_5092c392f       CT  ID_6e75b42a    ID_b94674c76f   \n",
12282
       "86533   3a20374d3   ID_3a20374d3       CT  ID_6e75b42a    ID_b94674c76f   \n",
12283
       "\n",
12284
       "       SeriesInstanceUID  StudyID                  ImagePositionPatient  \\\n",
12285
       "9661       ID_58b46714eb      NaN  ['-125', '24.4169536', '204.228683']   \n",
12286
       "107285     ID_93d835d9f3      NaN  ['-125', '17.5586391', '177.279488']   \n",
12287
       "81787      ID_93d835d9f3      NaN  ['-125', '17.5586391', '193.305489']   \n",
12288
       "87951      ID_93d835d9f3      NaN  ['-125', '17.5586391', '187.979561']   \n",
12289
       "86533      ID_93d835d9f3      NaN  ['-125', '17.5586391', '182.605538']   \n",
12290
       "\n",
12291
       "                                  ImageOrientationPatient  SamplesPerPixel  \\\n",
12292
       "9661    ['1', '0', '0', '0', '0.990268069', '-0.139173...                1   \n",
12293
       "107285  ['1', '0', '0', '0', '0.933580426', '-0.358367...                1   \n",
12294
       "81787   ['1', '0', '0', '0', '0.933580426', '-0.358367...                1   \n",
12295
       "87951   ['1', '0', '0', '0', '0.933580426', '-0.358367...                1   \n",
12296
       "86533   ['1', '0', '0', '0', '0.933580426', '-0.358367...                1   \n",
12297
       "\n",
12298
       "       PhotometricInterpretation  Rows  Columns                  PixelSpacing  \\\n",
12299
       "9661                 MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
12300
       "107285               MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
12301
       "81787                MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
12302
       "87951                MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
12303
       "86533                MONOCHROME2   512      512  ['0.48828125', '0.48828125']   \n",
12304
       "\n",
12305
       "        BitsAllocated  BitsStored  HighBit  PixelRepresentation  \\\n",
12306
       "9661               16          12       11                    0   \n",
12307
       "107285             16          12       11                    0   \n",
12308
       "81787              16          12       11                    0   \n",
12309
       "87951              16          12       11                    0   \n",
12310
       "86533              16          12       11                    0   \n",
12311
       "\n",
12312
       "              WindowCenter         WindowWidth  RescaleIntercept  \\\n",
12313
       "9661    ['00040', '00040']  ['00080', '00080']           -1024.0   \n",
12314
       "107285  ['00040', '00040']  ['00080', '00080']           -1024.0   \n",
12315
       "81787   ['00040', '00040']  ['00080', '00080']           -1024.0   \n",
12316
       "87951   ['00040', '00040']  ['00080', '00080']           -1024.0   \n",
12317
       "86533   ['00040', '00040']  ['00080', '00080']           -1024.0   \n",
12318
       "\n",
12319
       "        RescaleSlope    PxlMin    PxlMax    PxlStd   PxlMean   test  test2  \\\n",
12320
       "9661             1.0  1.301333  0.126667 -0.761993  1.166719  False   True   \n",
12321
       "107285           1.0  1.301333  0.098667 -0.807341  1.049002  False   True   \n",
12322
       "81787            1.0  1.301333  0.086667 -0.789830  0.920645  False   True   \n",
12323
       "87951            1.0  1.301333  0.130667 -0.782852  0.978311  False   True   \n",
12324
       "86533            1.0  1.301333  0.124000 -0.788199  1.016346  False   True   \n",
12325
       "\n",
12326
       "        ImageOrientationPatient_0  ImageOrientationPatient_1  \\\n",
12327
       "9661                          1.0                        0.0   \n",
12328
       "107285                        1.0                        0.0   \n",
12329
       "81787                         1.0                        0.0   \n",
12330
       "87951                         1.0                        0.0   \n",
12331
       "86533                         1.0                        0.0   \n",
12332
       "\n",
12333
       "        ImageOrientationPatient_2  ImageOrientationPatient_3  \\\n",
12334
       "9661                          0.0                        0.0   \n",
12335
       "107285                        0.0                        0.0   \n",
12336
       "81787                         0.0                        0.0   \n",
12337
       "87951                         0.0                        0.0   \n",
12338
       "86533                         0.0                        0.0   \n",
12339
       "\n",
12340
       "        ImageOrientationPatient_4  ImageOrientationPatient_5  \\\n",
12341
       "9661                     0.990268                  -0.139173   \n",
12342
       "107285                   0.933580                  -0.358368   \n",
12343
       "81787                    0.933580                  -0.358368   \n",
12344
       "87951                    0.933580                  -0.358368   \n",
12345
       "86533                    0.933580                  -0.358368   \n",
12346
       "\n",
12347
       "        ImagePositionPatient_0  ImagePositionPatient_1  \\\n",
12348
       "9661                    -125.0               24.416954   \n",
12349
       "107285                  -125.0               17.558639   \n",
12350
       "81787                   -125.0               17.558639   \n",
12351
       "87951                   -125.0               17.558639   \n",
12352
       "86533                   -125.0               17.558639   \n",
12353
       "\n",
12354
       "        ImagePositionPatient_2  PixelSpacing_0  PixelSpacing_1  \\\n",
12355
       "9661                204.228683        0.488281        0.488281   \n",
12356
       "107285              177.279488        0.488281        0.488281   \n",
12357
       "81787               193.305489        0.488281        0.488281   \n",
12358
       "87951               187.979561        0.488281        0.488281   \n",
12359
       "86533               182.605538        0.488281        0.488281   \n",
12360
       "\n",
12361
       "        WindowCenter_0  WindowCenter_1  WindowCenter_1_NAN  WindowWidth_0  \\\n",
12362
       "9661              40.0            40.0               False           80.0   \n",
12363
       "107285            40.0            40.0               False           80.0   \n",
12364
       "81787             40.0            40.0               False           80.0   \n",
12365
       "87951             40.0            40.0               False           80.0   \n",
12366
       "86533             40.0            40.0               False           80.0   \n",
12367
       "\n",
12368
       "        WindowWidth_1  WindowWidth_0_le  WindowWidth_1_le  WindowCenter_1_le  \\\n",
12369
       "9661             80.0                 0                 0                  1   \n",
12370
       "107285           80.0                 0                 0                  1   \n",
12371
       "81787            80.0                 0                 0                  1   \n",
12372
       "87951            80.0                 0                 0                  1   \n",
12373
       "86533            80.0                 0                 0                  1   \n",
12374
       "\n",
12375
       "        BitType_le  ImageOrientationPatient_4_f  \\\n",
12376
       "9661             1                     2.536908   \n",
12377
       "107285           1                     1.781072   \n",
12378
       "81787            1                     1.781072   \n",
12379
       "87951            1                     1.781072   \n",
12380
       "86533            1                     1.781072   \n",
12381
       "\n",
12382
       "        ImageOrientationPatient_4_enc_0  ...  ImageOrientationPatient_5_f  \\\n",
12383
       "9661                                0.0  ...                     2.405513   \n",
12384
       "107285                              0.0  ...                     0.944214   \n",
12385
       "81787                               0.0  ...                     0.944214   \n",
12386
       "87951                               0.0  ...                     0.944214   \n",
12387
       "86533                               0.0  ...                     0.944214   \n",
12388
       "\n",
12389
       "        ImageOrientationPatient_5_enc_0  ImageOrientationPatient_5_enc_1  \\\n",
12390
       "9661                                0.0                            False   \n",
12391
       "107285                              0.0                            False   \n",
12392
       "81787                               0.0                            False   \n",
12393
       "87951                               0.0                            False   \n",
12394
       "86533                               0.0                            False   \n",
12395
       "\n",
12396
       "        ImagePositionPatient_0_f  ImagePositionPatient_0_enc_0  \\\n",
12397
       "9661                       -0.72                           1.0   \n",
12398
       "107285                     -0.72                           1.0   \n",
12399
       "81787                      -0.72                           1.0   \n",
12400
       "87951                      -0.72                           1.0   \n",
12401
       "86533                      -0.72                           1.0   \n",
12402
       "\n",
12403
       "        ImagePositionPatient_0_enc_1  ImagePositionPatient_0_f_r1  \\\n",
12404
       "9661                             0.0                          1.0   \n",
12405
       "107285                           0.0                          1.0   \n",
12406
       "81787                            0.0                          1.0   \n",
12407
       "87951                            0.0                          1.0   \n",
12408
       "86533                            0.0                          1.0   \n",
12409
       "\n",
12410
       "        ImagePositionPatient_0_f_r05  ImagePositionPatient_1_f  \\\n",
12411
       "9661                             1.0                  1.192226   \n",
12412
       "107285                           1.0                  1.100782   \n",
12413
       "81787                            1.0                  1.100782   \n",
12414
       "87951                            1.0                  1.100782   \n",
12415
       "86533                            1.0                  1.100782   \n",
12416
       "\n",
12417
       "        ImagePositionPatient_1_enc_0  ImagePositionPatient_2_f  \\\n",
12418
       "9661                             1.0                  0.053336   \n",
12419
       "107285                           1.0                  0.014727   \n",
12420
       "81787                            1.0                  0.037687   \n",
12421
       "87951                            1.0                  0.030057   \n",
12422
       "86533                            1.0                  0.022358   \n",
12423
       "\n",
12424
       "        ImagePositionPatient_2_f_r05  PixelSpacing_1_f  PixelSpacing_1_enc_0  \\\n",
12425
       "9661                             0.0             -0.48                   1.0   \n",
12426
       "107285                           0.0             -0.48                   1.0   \n",
12427
       "81787                            0.0             -0.48                   1.0   \n",
12428
       "87951                            0.0             -0.48                   1.0   \n",
12429
       "86533                            0.0             -0.48                   1.0   \n",
12430
       "\n",
12431
       "        PixelSpacing_1_enc_1  WindowCenter_0_le   pos_max   pos_min  pos_size  \\\n",
12432
       "9661                   False                  2  1.099715  0.473493      -0.3   \n",
12433
       "107285                 False                  2  1.051518  0.388022      -0.3   \n",
12434
       "81787                  False                  2  1.051518  0.388022      -0.3   \n",
12435
       "87951                  False                  2  1.051518  0.388022      -0.3   \n",
12436
       "86533                  False                  2  1.051518  0.388022      -0.3   \n",
12437
       "\n",
12438
       "        pos_idx1  pos_idx  pos_idx2  pos_inc1  pos_inc2  pos_inc1_grp_le  \\\n",
12439
       "9661    0.000000       17 -0.203390  1.527710  1.522339                3   \n",
12440
       "107285 -0.135593       15 -0.067797  1.687011  1.663025                3   \n",
12441
       "81787   0.067797       18 -0.271186  1.662964  1.687012                3   \n",
12442
       "87951   0.000000       17 -0.203390  1.687011  1.662964                3   \n",
12443
       "86533  -0.067797       16 -0.135593  1.663025  1.687011                3   \n",
12444
       "\n",
12445
       "        pos_inc2_grp_le  pos_inc1_r1  pos_inc1_r0001  pos_inc1_enc_0  \\\n",
12446
       "9661                  3          0.0             0.0             0.0   \n",
12447
       "107285                3          0.0             0.0             0.0   \n",
12448
       "81787                 3          0.0             0.0             0.0   \n",
12449
       "87951                 3          0.0             0.0             0.0   \n",
12450
       "86533                 3          0.0             0.0             0.0   \n",
12451
       "\n",
12452
       "        pos_inc2_enc_0  pos_inc1_enc_1  pos_inc2_enc_1  pos_size_le  \\\n",
12453
       "9661               0.0             0.0             0.0            0   \n",
12454
       "107285             0.0             0.0             0.0            0   \n",
12455
       "81787              0.0             0.0             0.0            0   \n",
12456
       "87951              0.0             0.0             0.0            0   \n",
12457
       "86533              0.0             0.0             0.0            0   \n",
12458
       "\n",
12459
       "        pos_range   pos_rel  pos_zeros  pos_inc_rng  pos_zeros_le  \\\n",
12460
       "9661    -0.225189  0.193609        0.0    -0.595692             0   \n",
12461
       "107285   0.023307 -0.064219        0.0    -0.581939             0   \n",
12462
       "81787    0.023307  0.322242        0.0    -0.581939             0   \n",
12463
       "87951    0.023307  0.193809        0.0    -0.581939             0   \n",
12464
       "86533    0.023307  0.064217        0.0    -0.581939             0   \n",
12465
       "\n",
12466
       "        PxlMin_grp_le  PxlMin_zero  any  epidural  intraparenchymal  \\\n",
12467
       "9661                2        False  NaN       NaN               NaN   \n",
12468
       "107285              2        False  NaN       NaN               NaN   \n",
12469
       "81787               2        False  NaN       NaN               NaN   \n",
12470
       "87951               2        False  NaN       NaN               NaN   \n",
12471
       "86533               2        False  NaN       NaN               NaN   \n",
12472
       "\n",
12473
       "        intraventricular  subarachnoid  subdural  any_series  SeriesPP  \\\n",
12474
       "9661                 NaN           NaN       NaN       False      -0.5   \n",
12475
       "107285               NaN           NaN       NaN       False      -0.5   \n",
12476
       "81787                NaN           NaN       NaN       False      -0.5   \n",
12477
       "87951                NaN           NaN       NaN       False      -0.5   \n",
12478
       "86533                NaN           NaN       NaN       False      -0.5   \n",
12479
       "\n",
12480
       "        yuval_idx  pred_any  \n",
12481
       "9661        49823  0.992338  \n",
12482
       "107285      74776  0.992465  \n",
12483
       "81787       74779  0.992568  \n",
12484
       "87951       74778  0.992820  \n",
12485
       "86533       74777  0.992826  \n",
12486
       "\n",
12487
       "[5 rows x 101 columns]"
12488
      ]
12489
     },
12490
     "execution_count": 68,
12491
     "metadata": {},
12492
     "output_type": "execute_result"
12493
    }
12494
   ],
12495
   "source": [
12496
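    "# attach the 'any' prediction to the test metadata and inspect the highest-scoring slices\n",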
    "test_md['pred_any'] = predictions[:,5]\n",
12497
    "test_md.sort_values('pred_any').tail()"
12498
   ]
12499
  },
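  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Minimal sanity-check sketch for the cell above: it attaches the ensembled score as `pred_any` and lists the highest-scoring test slices. The snippet below summarizes the score distribution and smooths it along each series with a centered rolling mean; the window size of 3 is an arbitrary illustrative choice."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch: distribution of the 'any' scores and a per-series rolling mean\n",
    "print(test_md['pred_any'].describe())\n",
    "smoothed = (test_md.sort_values(['SeriesInstanceUID', 'pos_idx'])\n",
    "                   .groupby('SeriesInstanceUID')['pred_any']\n",
    "                   .transform(lambda s: s.rolling(3, center=True, min_periods=1).mean()))\n",
    "smoothed.head()"
   ]
  },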
  {
12543
   "cell_type": "code",
12544
   "execution_count": 66,
12545
   "metadata": {},
12546
   "outputs": [
12547
    {
12548
     "data": {
12549
      "text/plain": [
12550
       "(10, 121232, 6)"
12551
      ]
12552
     },
12553
     "execution_count": 66,
12554
     "metadata": {},
12555
     "output_type": "execute_result"
12556
    }
12557
   ],
12558
   "source": [
12559
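    "# stacked test predictions, presumably (prediction sets, test rows, target columns)\n",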
    "preds.shape"
12560
   ]
12561
  },
12562
  {
12563
   "cell_type": "code",
12564
   "execution_count": 69,
12565
   "metadata": {},
12566
   "outputs": [],
12567
   "source": [
12568
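    "# use a single prediction set (index 9), column 1, as pred_any for the plot below\n",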
    "test_md['pred_any'] = preds[9,:,1]"
12569
   ]
12570
  },
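  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The cell above takes a single prediction set (index 9). A minimal alternative sketch, assuming the first axis of `preds` indexes independent prediction sets: average over that axis instead of picking one set, stored under a separate name so the plots below are unchanged."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch: simple ensemble by averaging column 1 over all prediction sets\n",
    "pred_any_mean = preds[:, :, 1].mean(axis=0)\n",
    "pred_any_mean.shape"
   ]
  },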
12571
  {
12572
   "cell_type": "code",
12573
   "execution_count": 70,
12574
   "metadata": {
12575
    "scrolled": true
12576
   },
12577
   "outputs": [
12578
    {
12579
     "data": {
12580
      "text/plain": [
12581
       "[<matplotlib.lines.Line2D at 0x7f8297793f50>]"
12582
      ]
12583
     },
12584
     "execution_count": 70,
12585
     "metadata": {},
12586
     "output_type": "execute_result"
12587
    },
12588
    {
12589
     "data": {
12590
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD4CAYAAADlwTGnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXxU9b3/8ddnJjvZQ4AskIU97BABV3BHpaBWLVStt9dqbfHW3t7eVu/Sxce1y/3dbrdFe6321motKmpFBCwVEFd2CHsIa1aSEJJA9sx8f3/khBtilgmZ5MzyeT7MIzNnzjnz+cpk3jPf7znnK8YYlFJKBR+H3QUopZSyhwaAUkoFKQ0ApZQKUhoASikVpDQAlFIqSIXYXUBfDB061GRmZtpdhlJK+Y0dO3ZUGmOSu3rMrwIgMzOT7du3212GUkr5DRE52d1j2gWklFJBSgNAKaWClAaAUkoFKQ0ApZQKUhoASikVpDQAlFIqSGkAKKVUkPKr8wCUvYwxVNU1U1zdQNHZBorPNjBtZDyzsxLtLk0pdQk0AFSv3G7Dd17P4528UhpaXBc9FuoUXn5oLpdlaggo5W80AFSvnt5UwModRdw5I43JaXGkJ0SSlhBJXGQo9z+/lUde3MGqf7iKtPhIu0tVSvWBjgGoHn1wpIKfrc9n8fRUfnbPNP7+qixumjSCSalxpCdE8bsv5dLc6uahF7ZT39xqd7lKqT7QAFDdKqlu4LEVuxk7LJof3zkFEfnMOmOGRfPfS2dwsKyWf34tD51iVCn/oQGgutTc6mbZyztpanHxzH2ziArrvrfw2gnDeHzBBN7ZW8pvNhQMYpVKqf7QMQDVpR+tOciuU9U8fe9MRidH97r+w9dkc6jsHD9bn8/Y4TEsmDxiEKpUSvWHfgNQn7FqTwl/+PgED16Vxa1TUjzaRkT48Z1TmJYex+Nv5FHb2DLAVSql+ksDQF2kvrmV7721j1kZCTx+y4Q+bRsR6uSpO6ZQXd/C7zYfG6AKlVLeogGgLvLa9iKq61v4l1snEOrs+8tjcloct01N4fkPj1NxrmkAKlRKeYsGgLrA5TY89+ExZmUkMCvj0k/s+qcbx9HU6mb5Rh0QVsqXaQCoC9btK6OwqoGHrs7u136yk6O5e1Y6L285RdHZei9Vp5TyNo8CQEQWiMhhESkQkce7eDxcRF6xHt8iIpkdHnvCWn5YRG7usPyEiOwVkd0iohP92swYw7Obj5KZFMWNOcP7vb/HbhgLAr/82xEvVKeUGgi9BoCIOIHlwC1ADrBURHI6rfYgcNYYMwb4BfBTa9scYAkwCVgAPG3tr921xpjpxpjcfrdE9cu2E2fZU1TDV67Oxun47AlffZUSF8mX5mbwxs4ijpw+54UKlVLe5sk3gNlAgTHmmDGmGVgBLO60zmLgBev2SuB6aTttdDGwwhjTZIw5DhRY+1M+5tnNx0gcEsbnZ6Z7bZ9fv3YMUWEh/Oyv+V7bp1LKezwJgDSgsMP9ImtZl+sYY1qBGiCpl20N8FcR2SEiD3f35CLysIhsF5HtFRUVHpSr+qqg/Dx/O3ia++dmEBnm7H0DDyUOCeOhq7NZt7+MPYXVXtuvUso7PAmArvoDOl/wpbt1etr2SmPMTNq6lpaJyDVdPbkx5lljTK4xJjc5OdmDclVfPf/hMcJDHHzp8gyv7/vBq7NIGhLGj9YcxO3W6wQp5Us8CYAiYGSH++lASXfriEgIEAdU9bStMab9dznwJto1ZIuKc028vrOYu2alkxQd7vX9R4eH8O2bx7PleBU/W3/Y6/tXSl06TwJgGzBWRLJEJIy2Qd1VndZZBTxg3b4L2GDaLgu5ClhiHSWUBYwFtorIEBGJARCRIcBNwL7+N0f11YufnKDF5ebBq7IG7DmWXDaSpbNHsXzjUd7aXTxgz6OU6pteLwZnjGkVkUeBdwEn8HtjzH4ReRLYboxZBTwPvCgiBbR98l9ibbtfRF4FDgCtwDJjjEtEhgNvWpcXDgFeNsasG4D2qR40tbp48dOT3DBxONkeXPDtUokIP1w0iWMV5/nnlXmMSoxixqiEAXs+pZRnxJ+u356bm2u2b9dTBrxl3b4yHnlpB3/48mXMHz9swJ+vqq6Zxcs/pLHFzapHryQlTmcQU2qgiciO7g611zOBg9gbO4tIjgnnqjFDB+X5EoeE8fwDl9HQ7OKhP26nodnV+0ZKqQGjARCkztY1s/FwObdPTyXkEi76dqnGDY/hv5dOZ39JLY+t2MX5Jp1GUim7aAAEqdV5JbS4DHd68cQvT103YTj/flsO6w+e5safv8+6fWU6laRSNtAACFKv7yxmYkosE1NibXn+v78qi9e/dgVxkaE88tIOvvLCdr1wnFKDTAMgCB2tOM/uwmo+P7PzCd2Da+aoBFb/w1X8660T+eTYGW78+Wb+5/2jtLrcttalVLDQAAhCb+4sxiGwaFqq3aUQ4nTw0DXZrP/WPK4aO5Qfrz3Enc98zOEyvYCcUgNNAyDIuN2GN3cVc/XYZIbFRthdzgVp8ZH87ku5LP/iTIrPNrDw1x/w3+8doUW/DSg1YDQAgsyW41UUVzdwp83dP925bWoKf/3Ha1gwOYWfr8/n9uUfcaCk1u6ylApIGgBB5o2dRUSHh3BTzgi7S+lWUnQ4v146g9/eN4vTtU3cvvwjCsq1S0gpb9MACCINzS7W7C3l1ikjvHrZ54GyYPII1nzjKpwO4TcbdH5hpbxNAyCI/PVAGXXNLluO/b9Uw2IjuP/yDFbtKeFYxXm7y1EqoGgABJHXdxaTFh/J7MxEu0vpk4euziYsxMFvNuq3AKW8SQMgSFSca+LDIxXcMSMNhxfm/B1MyTHh3Dsng7d2l3Ciss7ucpQKGBoAQWLdvlLcBhZNt//Y/0vx1WuyCXEIT2/SbwFKeYsGQJB4O6+UscOiGTc8xu5SLsmw2AiWzh7FGzuLKazSS0Yo5Q0aAEHgdG0j205UsXCqf376b/fIvNE4RL8FKOUtGgBBYM3eUoxpO8nKn42Ii2DJ7JGs3FGkF45Tygs0AILA6rxSJoyIYcywgZv2cbA8Mm80AM9sOmpzJUr5Pw2AAFdS3cCOk2dZ6Oef/tulxkdyd+5IXt1eSFlNo93lKOXXNAAC3Jq9pQB+3//f0dfmjabVbXh56ym7S1HKr2kABLjVeaVMToslc+gQu0vxmpGJUcwbl8yKraf0aqFK9YMGQAArrKpnd2E1t00JnE//7e6fm0H5uSb+duC03aUo5bc0AALYOxe6fwKj/7+j+eOHkRYfyYufnrS7FKX8lgZAAHsnr5Rp6XGMTIyyuxSvczqEL84ZxcdHz3BULxKn1CXRAAhQJyrr2FtcE1CDv53dkzuSUKfwp091MFipS6EBEKDau39uDcDun3bJMeHcPGkEK3cU0tDssrscpfyOBkCAWp1XysxR8aTFR9pdyoC6b24GtY2tvJ1XYncpSvkdDYAAdOpMPQdLa7l1SuB++m83JyuRscOi+ZMOBivVZx4FgIg
sEJHDIlIgIo938Xi4iLxiPb5FRDI7PPaEtfywiNzcaTuniOwSkdX9bYj6P+8fqQDgugnDbK5k4IkI984ZxZ6iGvKKqu0uRym/0msAiIgTWA7cAuQAS0Ukp9NqDwJnjTFjgF8AP7W2zQGWAJOABcDT1v7aPQYc7G8j1MU251eQnhBJVgCd/NWTO2elExnq5CX9FqBUn3jyDWA2UGCMOWaMaQZWAIs7rbMYeMG6vRK4XkTEWr7CGNNkjDkOFFj7Q0TSgduA5/rfDNWuudXNxwWVXDMumbZ/gsAXGxHK4umprNpTQk19i93lKOU3PAmANKCww/0ia1mX6xhjWoEaIKmXbX8JfAfo8Vx+EXlYRLaLyPaKigoPyg1uO0+dpa7ZxTVjk+0uZVDdOyeDxhb3haOflFK98yQAuvoYaTxcp8vlIrIQKDfG7OjtyY0xzxpjco0xucnJwfWmdik251fgdAhXjEmyu5RBNTktluyhQ1itRwMp5TFPAqAIGNnhfjrQ+a/swjoiEgLEAVU9bHslsEhETtDWpXSdiLx0CfWrTjYfqWDmqHhiI0LtLmVQiQi3TU3h02NnqDjXZHc5SvkFTwJgGzBWRLJEJIy2Qd1VndZZBTxg3b4L2GCMMdbyJdZRQlnAWGCrMeYJY0y6MSbT2t8GY8x9XmhPUKs838S+4lrmjQvOb0oLp6biNrB2n3YDKeWJXgPA6tN/FHiXtiN2XjXG7BeRJ0VkkbXa80CSiBQA3wIet7bdD7wKHADWAcuMMXrK5gD58EglANcEaQCMHxHD2GHRrN6jAaCUJ0I8WckYswZY02nZ9zrcbgTu7mbbp4Cnetj3JmCTJ3Wonm3OryBxSBiTU+PsLsU2C6em8sv38imraWREXITd5Sjl0/RM4ADhdhs2H6nkqjFDcTiC4/DPriycloIx6NFASnlAAyBAHCyrpfJ8U9B2/7QbnRzNxJRYPRpIKQ9oAASI9/PbzpG4ZuxQmyux38KpKew6VU3R2Xq7S1HKp2kABIjN+RVMGBHDsFjt9/6cNQfCO3naDaRUTzQAAkBdUys7Tp4N2sM/OxuVFMXU9DgdB1CqFxoAAeCTo2docZmg7//vaOHUFPKKajh5ps7uUpTyWRoAAWDzkQoiQ53kZibYXYrPuM3qBlqt3UBKdUsDIABszq9gbnYi4SHO3lcOEmnxkcwcFa8BoFQPNAD83Kkz9Zw4U6/dP11YODWVg6W1HK04b3cpSvkkDQA/tym/HID54wN/9q++umXKCAA2HCy3uRKlfJMGgJ/bdLiCjKSooJn9qy9S4iLJTIpiy/Equ0tRyidpAPixxhYXHx+tZL52/3RrTlYS205U4XZ3nsJCKaUB4Me2Hq+iscWt3T89mJOdSE1DC4fKztldilI+RwPAj206XEFYiIO52cE1+1dfzLH+32w5fsbmSpTyPRoAfmxTfjlzs5OIDNPDP7uTFh9JWnwkW3UcQKnP0ADwU4VV9RyrqNP+fw/MyU5k6/Eq2iapU0q10wDwU5sOtx/+qQHQm7lZSZypa6agXM8HUKojDQA/telwBaMS9fBPT8zJTgTgU+0GUuoiGgB+qO3wzzPMH5+MSPDO/uWpUYlRjIiNYMsxHQhWqiMNAD+07UQVDS0u7f7xkIgwO0vHAZTqTAPAD7Uf/nl5ts7+5ak52YmUn2vixBmdJUypdhoAfmjT4XLmZCXq4Z99MCfLOh9Au4GUukADwM8UVtVztKJOz/7to9HJQxgaHabXBVKqAw0AP7PJmvxdp3/sm/ZxgC3Hzug4gFIWDQA/8/7hctITIhmdrId/9tWcrCRKahopOttgdylK+QQNAD/S1KqHf/ZH+/kA2g2kVBsNAD+y7fhZ6ptdzB+n/f+XYtywGOKjQnUgWCmLBoAf2XConLAQB1eM0at/XgqHQ5idmajfAJSyeBQAIrJARA6LSIGIPN7F4+Ei8or1+BYRyezw2BPW8sMicrO1LEJEtorIHhHZLyI/9FaDAtmmw+Vcnp1EVFiI3aX4rdlZiZyqqqe0RscBlOo1AETECSwHbgFygKUiktNptQeBs8aYMcAvgJ9a2+YAS4BJwALgaWt/TcB1xphpwHRggYjM9U6TAtOJyjqOVdZxrZ792y/tcyfo5aGV8uwbwGygwBhzzBjTDKwAFndaZzHwgnV7JXC9tI1SLgZWGGOajDHHgQJgtmnTfmnGUOtHj83rwUbr6p/XTRhucyX+bWJKLJGhTnadqra7FKVs50kApAGFHe4XWcu6XMcY0wrUAEk9bSsiThHZDZQD640xW7p6chF5WES2i8j2iooKD8oNTBsOlZOdPIRRSVF2l+LXnA5hUmos+4pr7C5FKdt5EgBdHW/Y+dN6d+t0u60xxmWMmQ6kA7NFZHJXT26MedYYk2uMyU1ODs7uj/rmVrYcq+I6PfvXKyanxbG/pBaXThSvgpwnAVAEjOxwPx0o6W4dEQkB4oAqT7Y1xlQDm2gbI1Bd+KjgDM0uN9dO0ADwhqnpcTS0uDhaoRPEqODmSQBsA8aKSJaIhNE2qLuq0zqrgAes23cBG0zb+fargCXWUUJZwFhgq4gki0g8gIhEAjcAh/rfnMC08XA50eEhXJaZaHcpAWFKWhwAe4u0G0gFt14DwOrTfxR4FzgIvGqM2S8iT4rIImu154EkESkAvgU8bm27H3gVOACsA5YZY1xACrBRRPJoC5j1xpjV3m1aYDDGsPFQOVeNGUpYiJ624Q3ZydFEhTnZq+MAKsh5dEC5MWYNsKbTsu91uN0I3N3Ntk8BT3ValgfM6GuxwehQ2TlKaxr55g3BOf4xENoHgjUAVLDTj5Q+buOFyd+1/9+bpqTFs7+khlaX2+5SlLKNBoCP23ionEmpsQyPjbC7lIAyJT2WxhY3Ryvq7C5FKdtoAPiwmvoWdpw8y3V69I/XTUmLByCvSE8IU8FLA8CHvX+kArfR7p+BkD10CEPCnHpCmApqGgA+bNOhchKHhDF9ZLzdpQQch0OYlBZHngaACmIaAD7K5TZsyq9g3rhknA6d/GUgTEmL42BprQ4Eq6ClAeCj8oqqqaprZr5e/XPATE2Po7HFTYGeEayClAaAj3o/vwIRuGasBsBAmWydEZynZwSrIKUB4KPez69gWno8CUPC7C4lYGUlDSE6PEQHglXQ0gDwQTX1LewprOaacfrpfyA5rDOC9RuAClYaAD7ow4JK3AbmjRtqdykBb2p620Bwiw4EqyCkAeCDNudXEBMRwrR0PfxzoE1Oi6Op1c2R0zoQrIKPBoCPMcbwfn4FV48dSohT/3kG2lQrZHUcQAUjfYfxMUfKz1NW26hH/wySjMQoYsJDyCvWS0Ko4KMB4GM257fNe6wDwIPD4RAmp8Wxt7jW7lKUGnQaAD7m/fwKxgyLJjU+0u5SgsYUHQhWQUoDwIc0trjYeryKefrpf1BNSYujudVN/ulzdpei1KDSAPAhnx47Q1OrW7t/BpnOEayClQaAD9mcX0l4iIM5WTr5+2DKSIpieGw46/aX2V2KUoNKA8CHbD5SweysRCJCnXaXElREhCWXjeL9/A
pOntEZwlTw0ADwEcXVDRSUn9f+f5ssnT0KhwgvbzlldylKDRoNAB/RfvinBoA9RsRFcFPOcF7ZXkhji8vucpQaFBoAPmJzfgUpcRGMGRZtdylB6/65GVTXt/BOXqndpSg1KDQAfECry82HBZVcMzYZEZ39yy6Xj05idPIQXvz0pN2lKDUoNAB8wJ6ias41turhnzYTEe6fm8Huwmo9JFQFBQ0AH/DewXKcDuGqMXr5Z7vdOSudyFAnL356wu5SlBpwGgA+YP2B08zJSiQuKtTuUoJebEQot89I463dJdTUt9hdjlIDyqMAEJEFInJYRApE5PEuHg8XkVesx7eISGaHx56wlh8WkZutZSNFZKOIHBSR/SLymLca5G9OVNZxpPw8N+YMt7sUZblv7iiaWt28tqPQ7lKUGlC9BoCIOIHlwC1ADrBURHI6rfYgcNYYMwb4BfBTa9scYAkwCVgAPG3trxX4J2PMRGAusKyLfQaF9QdOA2gA+JBJqXHMykjgT1tO4XYbu8tRasB48g1gNlBgjDlmjGkGVgCLO62zGHjBur0SuF7aDmdZDKwwxjQZY44DBcBsY0ypMWYngDHmHHAQSOt/c/zP+gOnmZgSS3pClN2lqA7un5vB8co6PjpaaXcpSg0YTwIgDej4XbiIz75ZX1jHGNMK1ABJnmxrdRfNALZ4XnZgqKprZvvJKv3074NumTKCxCFhrNim3UAqcHkSAF0dmN75e3F36/S4rYhEA68D3zTGdDkjh4g8LCLbRWR7RUWFB+X6j/cOnsZt4CYNAJ8THuLktikpvHfwNHVNrXaXo9SA8CQAioCRHe6nAyXdrSMiIUAcUNXTtiISStub/5+MMW909+TGmGeNMbnGmNzk5MA6Tv6vB06TGhfBpNRYu0tRXVg0PZXGFveFcRqlAo0nAbANGCsiWSISRtug7qpO66wCHrBu3wVsMMYYa/kS6yihLGAssNUaH3geOGiM+bk3GuJvGppdfHCkghtyhuvZvz5q1qgEUuIiWLWn8+cdpQJDrwFg9ek/CrxL22Dtq8aY/SLypIgsslZ7HkgSkQLgW8Dj1rb7gVeBA8A6YJkxxgVcCdwPXCciu62fW73cNp/2YUEljS1u7f/3YQ6H8LlpqWzOr+BsXbPd5SjldSGerGSMWQOs6bTsex1uNwJ3d7PtU8BTnZZ9SNfjA0Fj/YEyYsJDmJOVZHcpqgeLpqXy7OZjrN1XxhfnjLK7HKW8Ss8EtoHLbXjvYDnzJwwjLET/CXzZpNRYsocO4W3tBlIBSN99bLDr1FnO1DVr948fEGnrBvr0+BlO1zbaXY5SXqUBYIP1B04T6hTmjw+so5oC1aLpqRgDq3WeABVgNABssP7AaeZmJxEboRd/8wejk6OZlBqrRwOpgKMBMMgKys9zrLJOu3/8zKJpqewprNZJ41VA0QAYZO1XmLxhogaAP1k4LRXApwaDjTGcqNRAUpdOA2AQFVbV878fneDOGWmkxkfaXY7qg7T4SHIzEvrVDbS7sJo3dhZ5raYXPz3J/P/axE/WHqLtvEul+kYDYBD9eO1BnCL884LxdpeiLsGi6anknz7P4bJzfd7WGMN3V+bxrVf3sGZv/weTm1pdPL3xKDHhIfz2/aN89/U8Wl3ufu9XBRcNgEGy5dgZ1uwt45F5o0mJ00///ujWKSk4HcJLlzBp/K7Cag6fPkdMRAjfWZnX766b17YXUVbbyDP3zeIb14/l1e1FPPLSThpbXP3arwouGgCDwOU2PLn6AKlxETx8Tbbd5ahLNDQ6nHtyR/Lipyf73O3yytZCosKcvP61K3A6hK//6dLfrJtb3Tyz6SgzR8Vz5ZgkvnXjOJ5cPIn3Dp3m/ue36FSWymMaAIPg9R1F7C+p5bu3TCAyzGl3Oaof/uP2ydw7ZxS/ff8o//LmPlwezBh2vqmVt/NK+NzUVMYNj+Hn90zjQGktT64+cEk1vLmriOLqBv7h+rEXLiT4pcsz+fXSGewurOYLz35CbaOGgOqdBsAAO9/Uyn++e5iZo+JZZB1JovyX0yH8x+2T+fr80fx56ym+sWIXza09972/vaeE+mYXS2a3XRn9+onD+eq8bF7ecoq3dhf36flbXW6WbzzK1PQ45o+7+ETChVNTee6ByzhUdo4VW0/1rWEqKGkADLDlGwuoPN/E9z83SS/7HCBEhO8smMATt0zgnbxSHvrjdhqau+/OWbH1FOOHxzB9ZPyFZd++aTyXZSbwxBt7KSg/7/Fzr9pTwqmqeh69dkyXr6d545KZnZXIS5/qfMaqdxoAA6iwqp7nPzjOnTPTmNbhj18Fhq/OG81P7pzCB0cq+Moft3V5FM6Bklr2FNXwhctGXvSGHep08OulM4kIdXLfc1v4ydpDbDtR1WOXkstt+M2GAiaMiOnxRML752Zwqqqe9/MDawY95X0aAAPEGMP3V+3H6RC+c/MEu8tRA2TJ7FH85M6pfFRwhp+sPfSZx1/ZdoqwEAd3zuw8jTaMiIvgd1/KJTt5CM99cIy7f/sJuf+xnn98ZTer80qoabi4H/+dvaUcq6zjGx36/rty86QRJMeE8+IlHK2kgotH8wGovlu1p4QNh8r594U5jIiLsLscNYDuuWwk+0tqeO7D40xJj2Px9LY3+8YWF2/uKmbBpBHER4V1ue2sjARefmguNQ0tfHCkgvcOlrPxcDlv7irG6RByMxK4bsIwrp0wjN9sOMLYYdEsmDSix3rCQhwsvWwkv95YQGFVPSMTo7zeZhUYNAAGQFVdMz98+wDTR8bzd1dk2l2OGgT/tjCHg6Xn+O7reYxOjmZyWhzr9pVR29h6YfC3J3GRoSycmsrCqam43IZdp86y4VA5Gw6V8+O1h/ix9e3iV0um43D0Ppa0dM4olm86yktbTvLELRP73T4VmLQLaAA8+fZ+zjW28NPPT8XpwR+r8n+hTgfL751JQlQYX31xB1V1zfx56ykykqKY28dZ35wOITczke8smMC6b17Dx49fx1N3TOax68eycKpnR5KlxEVy48ThvLqtUE8OU93SAPCyjYfL+cvuEr42fwzjR8TYXY4aRMkx4fz2vllUnG/igd9vZcvxKu7JHenRJ/aepMZHcu+cDP7xxnF9+kDxpcszOFvfwjs6j4HqhgaAF51vauVf39jLmGHRLLt2tN3lKBtMGxnPU7dPZm9xDU6HcPesdNtquXx0EqOTh/BHHQxW3dAxAC/6f+sOUVrbyMpHriA8RM/4DVZ3546k4nwTLa2GYbH2HQAgItw/N4MfvH2AvKJqpqbrocjqYvoNwEt2nKzij5+e5IHLM5mVkWB3OcpmX58/hsduGGt3Gdw5K52oMCcvfqLfAtRnaQB4gTGGp945yIjYCP75Zr3Us/IdsRGh3D4jjVV7Sjhb12x3OcrHaAB4wSfHzrDzVDVfnz+aIeHaq6Z8yxdyR9LU6tYzg9VnaAB4wfKNBSTHhHN3bu/Heys12HJSYwlzOjhYWmt3KcrHaAD0065TZ/mo4AwPXZ1FRKgO/CrfE+p0MGZYNAc0AFQnGgD9tHxjAfFRodw7J
8PuUpTqVk5qLAdL+z6VpQpsGgD9cKCklr8dLOfLV2Rp37/yaRNTYqk830TFuSa7S1E+xKMAEJEFInJYRApE5PEuHg8XkVesx7eISGaHx56wlh8WkZs7LP+9iJSLyD5vNMQOyzcVEB0eotf7UT5vYkrbWek6DqA66jUARMQJLAduAXKApSKS02m1B4GzxpgxwC+An1rb5gBLgEnAAuBpa38Af7CW+aWjFedZs7eU++ZmEBcVanc5SvUoJyUW0ABQF/PkG8BsoMAYc8wY0wysABZ3Wmcx8IJ1eyVwvbRdsHwxsMIY02SMOQ4UWPvDGLMZqPJCG2zxzKajhIc4+MrVWXaXolSv4qPCSImL0IFgdRFPAiANKOxwv8ha1uU6xphWoAZI8nDbHonIwyKyXUS2V1T4xnHMRWfr+cuuYpZcNoqh0eF2l6OURyamxOo3AHURTwKgq8sPdp63rrt1PNm2R8aYZ0Kze7cAAA9iSURBVI0xucaY3OTk5N43GATPfXAcEfjqvGy7S1HKYxNTYjhaUaeXh1YXeBIARUDHM5zSgZLu1hGRECCOtu4dT7b1Ky63YXVeCTfljCAlLtLucpTy2MSUWFxu06dJ6FVg8yQAtgFjRSRLRMJoG9Rd1WmdVcAD1u27gA3GGGMtX2IdJZQFjAW2eqd0e2w9XkXl+WZunZJidylK9Un7QLCOA6h2vQaA1af/KPAucBB41RizX0SeFJFF1mrPA0kiUgB8C3jc2nY/8CpwAFgHLDPGuABE5M/AJ8B4ESkSkQe927SBsXZfKRGhDuaP943uKKU8lZE0hMhQp44DqAs8OnvJGLMGWNNp2fc63G4E7u5m26eAp7pYvrRPlfoAt9uwdl8Z88cN0xO/lN9xOoTxI2I0ANQFeiZwH+w4dZaKc03cMmWE3aUodUkmpsRyoKSWth5aFew0APpgzd5SwkIcXD9xuN2lKHVJclJiqG1spaSm0e5SlA/QAPCQ221Yt6+MeeOSidbuH+WnJrafEVyi3UBKA8Bju4uqKa1p5Fbt/lF+bIJeEkJ1oAHgobV7Swl1inb/KL8WHR5CRlIUB8s0AJSHRwEFO2MMa/aWcfXYZGIj9MJvyr9NHGHP3AANzS62nqji44JKRiZGce+cUbRdMkzZRQPAA3lFNRRXN/DNG8baXYpS/TYxJZZ3D5RR39xKVNjAvgUUna3nrd0lfHikkh0nz9LscuMQcBuorm/m0ev0b8pOGgAeWLOvlBCHcGOOdv8o/zcxJQZj4FDZOWaOShiw56lpaOGuZz6hrLaRnJRY/u7KTK4cM5TcjAT+7S/7+K+/5hMTEcoDOp+GbTQAemGMYe3eMq4YM5T4qDC7y1Gq39qPBDpQUjugAfDk2weoON/EX5ZdyfSR8Rc99p93TeVcYyvfX7Wf2MgQ7piRPmB1qO7pIHAv9pfUcqqqntv06B8VINITIomJCBnQI4H+duA0r+8sYtn80Z9584e2iep/88UZXJ6dxLdfy2P9gdMDVovqngZAL9buK8XpEG7M0QBQgUFErIHggQmA6vpmnnhzLxNGxPTYxx8R6uR3D+QyOTWWZS/v5OOjlQNSj+qeBkAPGltcrNxRxBWjk0gcot0/KnBMTInhUNk53G7vXxLiB6v2c7aumZ/dM42wkJ7fYqLDQ/jDl2eTkRjFN/68ixaX2+v1qO5pAPRg5Y4iTtc28ci80XaXopRX5aTGUt/s4lRVvVf3u25fGX/ZXcKj141hUmqcR9skDAnjOwsmUHm+mc35vjHrX7DQQeButLjcPLPpKNNHxnPF6CS7y1HKq9oHgu97fgsjYiOIiwwlLjKU+Kgw5mYnMn/8sF4/vXdWVdfMv/1lLzkpsSy7dkyftp03LpmEqFDe2FWsJ1sOIg2Abry1u4Ti6gaeXDxJT1ZRAWdSahxfnZfNqTP11DS0UFrTyKGyc5ypa+L3Hx0nPiqUhVNTuGNGGjNHJXT5N9DQ7KKg/Dz5p8+RX36OD/IrqWlo4cUH5xDq7Ft4hIU4WDQtlT9vK6S2sUVPuBwkGgBdcLkNT28sYGJKLNdNGGZ3OUp5ndMhPHHLxM8sb3G5+fBIJW/sKua17UW89OkpRiVGkRIXQWOrm6YWFw0tLhqaXVScb6L9qtJhTgfZyUP40R1TLny76Ks7ZqbzwicnWbu3lC9cNqo/zVMe0gDowtp9pRyrrGP5F2fqp38VVEKdDq6dMIxrJwzjXGML7+4/zTt5JdQ1u4iLDCUyNpyIUCcRIU5S4yMZNzyascNjyEyKIqSPn/o7m5YeR/bQIby+s1gDYJBoAHRijOE3GwoYnTyEBZP10E8VvGIiQrlrVjp3zRqck7REhDtmpPGz9fkUVtUzMjFqUJ43mOlRQJ28d7CcQ2XnWHbtGJwO/fSv1GC6fUYaAG/tLra5kuCgAdCBMYZfbyxgZGIki6al2l2OUkFnZGIUs7MSeWNXsU5bOQiCIgBO1zbS6sEJJh8WVLKnsJqvzRvT7/5MpdSluXNGGscq6sgrqrG7lIAX8O9yZ+uaWfjrD/n3t/b1+ImiodnFf647zIjYCD4/K20QK1RKdXTLlBTCQhy8uSv4uoE+Kqjk3f1lg/Z8AR8ACUPCuCc3nT9vLeSXfzvS5TotLjfLXt7JvpIafrh4EuEhzkGuUinVLi4ylBsnDmfVnpKgujTEucYWvv3aHv7r3cOD1u6ADwCAb980nnty0/nVe0d46dOTFz3mdhu++3oeGw6V8x+3T+bmSXrkj1J2u3NmGlV1zbx/OHguDfHjtYc4XdvIf941tc8n0l2qoAgAEeFHd0zhugnD+N5b+1i37/++Yv1k3SHe2FnMP904jnvnZNhYpVKq3TXjkkkaEsZrOwoH5IJ1vubjgkpe3nKKr1ydzYwBnKOhM/Gnkfbc3Fyzffv2S96+vrmVL/5uCwdKa3nx72ezu7CaH689xN9dkcn3P5ejJ30p5UN+tOYgz24+RnpCJJ+fmc7nZ6YzKsmzcwPcbkNNQwutboPBYP2H2xiaW900t7ppanXT1OrCbWBaenyfr33kLXVNrSz41WZCHA7WPnY1EaHe7YIWkR3GmNwuHwumAIC2C1bd9duPKatppL7ZxaJpqfzyC9Nx6DH/SvmU5lY3a/aWsnJHER8drcQYmJ2ZyI05wwl1Ci7Tdui2y21obHFTUt1AcYef5lbP+9GHx4bz5SuzWDp7FHGRg3sdoh+s2s8Ln5zg1a9ezmWZiV7fvwZAJ0Vn67nnt58wbkQMz96fa1vyK6U8U1LdwJu7inl9ZxHHKuq6XCc5Jpy0+EjSEiJJT4hkeEwEoU4BEQQQAUEID3EQHuogPMRJeIiDuqZWXtpyko8KzhAdHsKSy0by5auySIuPHPB2bT1exT3/8wl/d0UmP1g0aUCeo98BICILgF8BTuA5Y8xPOj0eDvwRmAWcAb5gjDlhPfYE8CDgAr5hjHnXk312xVsBAG2fLkKdot0+SvkRYwxn61sQwOEQHNJ2YbsQh6PfH+T2Fdfwuw+OsTqvFGMMqfGRjIiNYHhcBCNiIxgWE05Tq5sz55s4U9dMlfUzLDaC
yamxTE6LY3JqHCMTIz16X2lodnHrf39Aq9vNu9+8hqiwgbkyT78CQEScQD5wI1AEbAOWGmMOdFjn68BUY8wjIrIEuMMY8wURyQH+DMwGUoG/AeOszXrcZ1e8GQBKKdWV4uoGXt1WyMkzdZTVNnK6tomymkYaWlwAxEaEkBQdTuKQMBKiQimpbiT/9DlarcHqmIgQspOjSbe+jaTGRZCWEIUA5eeaqDjXRMX5Rg6U1LLzVDUvf2UOV4wZOmDt6SkAPImc2UCBMeaYtbMVwGKg45v1YuAH1u2VwG+kLQIXAyuMMU3AcREpsPaHB/v0nrWPQ9neAdm1UiqwpAH/2H4nFEgCk9Q21uAQwdHx070biAV3jKG+2UVdcyt1Ta001bhpOuOmyeWi42fsaCAbCHEIdzsdDB0eTtoHkfBBL0WNmAK39NpJ0meeBEAaUNjhfhEwp7t1jDGtIlIDJFnLP+20bftptr3tEwAReRh4GGDUKL1ErFJq8AlCSA8HijhEiA4PITo8BGL+b7nB0OIyFwakQ51CqNNxcYjYyJMA6KrSzv1G3a3T3fKuOuu67IsyxjwLPAttXUDdl9mDAUhOpZTqjQBh1o8v8mTUpAgY2eF+OlDS3ToiEgLEAVU9bOvJPpVSSg0gTwJgGzBWRLJEJAxYAqzqtM4q4AHr9l3ABtM2urwKWCIi4SKSBYwFtnq4T6WUUgOo1y4gq0//UeBd2g7Z/L0xZr+IPAlsN8asAp4HXrQGeatoe0PHWu9V2gZ3W4FlxhgXQFf79H7zlFJKdScoTwRTSqlg0dNhoHoKrFJKBSkNAKWUClIaAEopFaQ0AJRSKkj51SCwiFQAJ3tdsWtDgUovlmOnQGlLoLQDtC2+KFDaAf1rS4YxJrmrB/wqAPpDRLZ3NxLubwKlLYHSDtC2+KJAaQcMXFu0C0gppYKUBoBSSgWpYAqAZ+0uwIsCpS2B0g7QtviiQGkHDFBbgmYMQCml1MWC6RuAUkqpDjQAlFIqSAV8AIjIAhE5LCIFIvK43fX0hYj8XkTKRWRfh2WJIrJeRI5YvxPsrNFTIjJSRDaKyEER2S8ij1nL/ao9IhIhIltFZI/Vjh9ay7NEZIvVjlesy5z7BRFxisguEVlt3ffLtojICRHZKyK7RWS7tcyvXl/tRCReRFaKyCHrb+bygWhLQAeANaH9cuAWIAdYak1U7y/+ACzotOxx4D1jzFjgPeu+P2gF/skYMxGYCyyz/i38rT1NwHXGmGnAdGCBiMwFfgr8wmrHWeBBG2vsq8eAgx3u+3NbrjXGTO9wzLy/vb7a/QpYZ4yZAEyj7d/H+20xxgTsD3A58G6H+08AT9hdVx/bkAns63D/MJBi3U4BDttd4yW26y3gRn9uDxAF7KRtPutKIMRaftHrzpd/aJuN7z3gOmA1bbMY+mtbTgBDOy3zu9cXEAscxzpIZyDbEtDfAOh6Qvu0btb1F8ONMaUA1u9hNtfTZyKSCcwAtuCH7bG6THYD5cB64ChQbYxptVbxp9fZL4HvAG7rfhL+2xYD/FVEdojIw9Yyv3t9AdlABfC/VtfccyIyhAFoS6AHgCcT2qtBJCLRwOvAN40xtXbXcymMMS5jzHTaPj3PBiZ2tdrgVtV3IrIQKDfG7Oi4uItVfb4tliuNMTNp6/JdJiLX2F3QJQoBZgLPGGNmAHUMUNdVoAdAIE4+f1pEUgCs3+U21+MxEQml7c3/T8aYN6zFftseY0w1sIm2MY14EWmfYtVfXmdXAotE5ASwgrZuoF/in23BGFNi/S4H3qQtnP3x9VUEFBljtlj3V9IWCF5vS6AHQCBOPr8KeMC6/QBtfek+T0SEtrmjDxpjft7hIb9qj4gki0i8dTsSuIG2AbqNwF3Waj7fDgBjzBPGmHRjTCZtfxsbjDH34odtEZEhIhLTfhu4CdiHn72+AIwxZUChiIy3Fl1P27zq3m+L3QMegzCgciuQT1s/7b/aXU8fa/8zUAq00Pap4EHa+mjfA45YvxPtrtPDtlxFW1dCHrDb+rnV39oDTAV2We3YB3zPWp4NbAUKgNeAcLtr7WO75gOr/bUtVs17rJ/97X/r/vb66tCe6cB263X2FyBhINqil4JQSqkgFehdQEoppbqhAaCUUkFKA0AppYKUBoBSSgUpDQCllApSGgBKKRWkNACUUipI/X+prQdaZtcg6wAAAABJRU5ErkJggg==\n",
12591
      "text/plain": [
12592
       "<Figure size 432x288 with 1 Axes>"
12593
      ]
12594
     },
12595
     "metadata": {
12596
      "needs_background": "light"
12597
     },
12598
     "output_type": "display_data"
12599
    }
12600
   ],
12601
   "source": [
12602
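    "# mean prediction by slice position index, plus a flat zero reference line\n",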
    "plt.plot(test_md[['pos_idx','pred_any']].groupby('pos_idx').mean())\n",
12603
    "plt.plot([0,60],[0,0])"
12604
   ]
12605
  },
12606
  {
12607
   "cell_type": "code",
12608
   "execution_count": 71,
12609
   "metadata": {
12610
    "scrolled": true
12611
   },
12612
   "outputs": [
12613
    {
12614
     "data": {
12615
      "text/plain": [
12616
       "[<matplotlib.lines.Line2D at 0x7f8255ef9250>]"
12617
      ]
12618
     },
12619
     "execution_count": 71,
12620
     "metadata": {},
12621
     "output_type": "execute_result"
12622
    },
12623
    {
12624
     "data": {
12625
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dd3wU17n/8c+zq15QFwKBGgiE6CDAdOMKbtiO47gRHJM4zddJfHMTpzmJfdPs302c5Po6xsY1wQ07NnGwHRd6F70KVYRoaiCBhOqe3x9aiACBVnW2PO/Xa1/anZndfQYt3x2dOXOOGGNQSinlvWxWF6CUUqpnadArpZSX06BXSikvp0GvlFJeToNeKaW8nJ/VBVwoNjbWpKSkWF2GUkp5lC1btpQbY+LaWud2QZ+SkkJ2drbVZSillEcRkYOXWqdNN0op5eU06JVSystp0CullJfToFdKKS+nQa+UUl5Og14ppbycBr1SSnk5t+tHr6xTUHaaNXnlJMeEMiM9FhGxuiSlVDfQoPdhDU0ONhdV8tm+UpbnlFJYXnNuXXp8GF+dnsrcMYkE+dstrFIp1VUa9D5q9+EqFryymePV9QT42ZicFsNXpqYwPT2ObcUneH51IT98ZxdPfpTDvMnJzJ+cQlRogNVlK6U6QYPeB63LL+fBV7fQJ8iP5+aNZ3p6LCEB//4opMaGctvYRNbnV/DCmkKe/jSXJVtKeG3BJFJjQy2sXCnVGXoy1scs23WU+1/cTL+IIN751hSuH55wXsifJSJMGRzLi/dP4N1vTaG2oZk7nl3HzpKTFlStlOoKDXof8tqGg3x78VZGDojg7W9Mpl9EsEvPG5cUxZJvTCbI387dCzewOreshytVSnUnDXof8fSnB/jZe7u5amg8f10wiciQjrW3p8WF8e63pjAwOoQHXt7M0h1HeqhSpVR306D3AduKT/D0p7ncPi6R5+aNJzigc71o+vYJ4s2vT2ZsUhQPv76Nv2645KioSik3okHvAxauKqBPkB+Pzx2Bn71rv/KIYH9efWAiV2fE89j7u1l5QJtxlHJ3GvRerqi8ho/2HOO+K5IJC+yeTlZB/nb+fM9Yhib04aHFWykoO90tr6uU6hka9F7u+dUF+Nts3D8lpVtfNyTAj+e/PB5/u42vvppNdV1jt76+Uqr7aNB7sfLT9SzZUsLt4xKJ7xPU7a8/ICqEZ+8dR3FFLQ+/vo1mh+n291BKdZ0GvRd7df1B6pscfHV6Wo+9x6S0GH45dzgrcsp48uP9PfY+SqnO0ytjvVRtQxOvri/immF9GRwf1qPvde+kZPYdrea5lQVkJIRz29gBPfp+SqmO0SN6L/V2dgknaxv5xsyeO5pv7ec3D+eKtGh+sGQna/PKe+U9lVKucSnoRWS2iOSISJ6IPNrG+kdEZK+I7BSRz0QkudW6ZhHZ7rwt7c7iVduamh28sKaAcUmRZKVE98p7+tttPHdfFmmxYXz9tS3sPlzVK++rlGpfu0EvInbgGWAOkAncLSKZF2y2DcgyxowClgBPtlp3xhgzxnm7pZvqVpfx4e5jHKo8w4MzBvXq+0aE+PPKAxOJCPbn/pc2c7Cipv0nKaV6nCtH9BOBPGNMgTGmAXgDmNt6A2PMcmNMrfPhBkAbaS1ijGHhqgLSYkO5NrNvr79/QkQQrzwwkSaHgy+/uImyU/W9XoNS6nyuBH0icKjV4xLnsktZAHzY6nGQiGSLyAYRubWtJ4jIg85tssvK9ErLrsg+eIJdh6tYMD0Vu82aGaIGx4fx4v0TOF5dx1de3sTp+iZL6lBKtXAl6NtKizY7TIvIfUAW8FSrxUnGmCzgHuBpEbmoPcEYs9AYk2WMyYqLi3OhJHUpizcWEx7ox21jL/dd3PPGJUXx7L3j2Xf0FP+xeCvGaB97paziStCXAANbPR4AXDR0oYhcA/wEuMUYc+7vdWPMEefPAmAFMLYL9arLOFHTwD93HeXWsYltjjHf22ZlxPPjG4axPKeMz/eXWl2OUj7LlaDfDKSLSKqIBAB3Aef1nhGRscBztIR8aavlUSIS6LwfC0wF9nZX8ep872wtoaHJwT2Tkqwu5ZwvT04mNTaU3364n6Zmh9XlKOWT2g16Y0wT8BDwMbAPeMsYs0dEHheRs71ongLCgLcv6EY5DMgWkR3AcuC3xhgN+h5gjGHxpmLGJkUyrF8fq8s5x99u44ezh5JbepolW0qsLkcpn+TS3/fGmGXAsguWPdbq/jWXeN46YGRXClSu2VhYSUFZDU/dMcrqUi5y/fAExidH8ftPDnDLmP5u0ayklC/RK2O9xOKNxfQJ8uOmUf2tLuUiIsKPb8ig9FQ9i1YXWl2OUj5Hg94LVJyu58PdR7l93IBOzx7V08YnRzN7eAJ/WZlP+WntW69Ub9Kg9wJLtpTQ2Gy4141OwrblB7OHUtfk4I+f5lpdilI+RYPewzkchtc3FTMhJYr0vuFWl3NZaXFh3DMxicWbisnXWamU6jUa9B5ufUEFRRW1btWl8nIevjqdID8bT36kY9cr1Vs06D3c4o3FRIb4M2dEP6tLcUlceCAPzhjEx3uO6wiXSvUSDXoPVn66no/3HOML4wYQ5O+eJ2Hb8pVpKfQJ8uPPn2tbvVK9QYPegy3fX0qTw3D7OGvHtemoPkH+PDAtlY/3HGff0Wqry1HK62nQe7AVB8qIDw8k042uhHXVV6akEh6oR/VK9QYNeg/V1OxgTW45M4fEIWLNcMRdERHiz/1TU1i26xg5x05ZXY5SXk2D3kPtKKmi6kwjVw6Nt7qUTlswLZXQALse1SvVwzToPdTKA2XYBKYNjrW6lE6LDAlg/pQU/rnrKHmlelSvVE/RoPdQKw+UMTYpiogQf6tL6ZKvTk8j2N/Onz/Ps7oUpbyWBr0HqqxpYGfJSWYO8fzZuKJDA5g3OZl/7DiiV8sq1UM06D3Q6twyjMErgh7ga9PTCPCz8cxyPapXqido0HuglTllRIcGMDIxwupSukVsWCD3TUrmvW2HKa6otbocpbyOBr2HcTgMq3LLmJ4ei83med0qL+VrM9Lws9l4fnWB1aUo5XU06D3M3qPVlJ9u8Jpmm7P69gni9nGJvJV9iLJTOl69Ut1Jg97DrDxQBsD0dO8KeoAHZ6TR0Ozg5XU6C5VS3UmD3sOszCljZGIEceGBVpfS7dLiwpgzIoFX1x/kVF2j1eUo5TU06D1IdV0jW4pPeF2zTWvfmDmIU3VNLN5YbHUpSnkNDXoPsja3nGaHYeZQ7w36UQMimTY4lhfWFFLX2Gx1OUp5BQ16D7LyQBnhQX6MHRhpdSk96ptXDqLsVD3vbj1sdSlKeQUNeg9hjGHlgTKmDY7Fz+7dv7Ypg2IYNSCC51bl0+wwVpejlMfz7sTwIrmlpzlaVefV7fNniQjfnDmIgxW1fLj7qNXlKOXxNOg9xCpnt8oZPhD0ANcNTyAtNpRnV+RjjB7VK9UVGvQeYlVuOYPjw+gfGWx1Kb3CbhO+Oj2NPUeq2X7
opNXlKOXRXAp6EZktIjkikicij7ax/hER2SsiO0XkMxFJbrVuvojkOm/zu7N4X1HX2MzGggqmp3vu2POdcdPofgT42Vi644jVpSjl0doNehGxA88Ac4BM4G4Rybxgs21AljFmFLAEeNL53Gjg58AkYCLwcxGJ6r7yfUN20QnqmxzM8MKrYS+nT5A/Vw2N5x87jupJWaW6wJUj+olAnjGmwBjTALwBzG29gTFmuTHm7LCDG4ABzvvXA58YYyqNMSeAT4DZ3VO671idW4a/XZiUFm11Kb1u7pj+lJ+uZ31+hdWlKOWxXAn6ROBQq8clzmWXsgD4sCPPFZEHRSRbRLLLyspcKMm3rMotZ3xyFCEBflaX0utmZcQTHujH+9u1T71SneVK0Lc1Fm6bf0eLyH1AFvBUR55rjFlojMkyxmTFxflW80R7yk7Vs+9otVcOYuaKIH87149I4KPdx/RKWaU6yZWgLwEGtno8ALjo7JiIXAP8BLjFGFPfkeeqS1ubVw7gc+3zrc0d059T9U2syCm1uhSlPJIrQb8ZSBeRVBEJAO4ClrbeQETGAs/REvKt/zd+DFwnIlHOk7DXOZcpF63KLSMqxJ/h/ftYXYplJqfFEBsWyHvb9BhBqc5oN+iNMU3AQ7QE9D7gLWPMHhF5XERucW72FBAGvC0i20VkqfO5lcATtHxZbAYedy5TLjDGsDq3nGnpcV41m1RH+dlt3Dy6H5/nlFJ1RocvVqqjXDq7Z4xZBiy7YNljre5fc5nnvgi82NkCfVnO8VOUnar3uf7zbZk7JpGX1hbx8Z5j3Jk1sP0nKKXO0Stj3djqAy3t8xr0MHpABMkxISzdrs03SnWUBr0bW5VbRnp8GP0ifGPYg8sREeaO7s+6/HJKq+usLkcpj6JB76bqGpvZVFjps90q23LLmP44DHywU0e0VKojNOjd1OaiSuqbHEwfos02Zw2OD2d4/z68r2PfKNUhGvRuanVuOQF2G5NSfW/Yg8uZO6Y/Ow6d5FBlbfsbK6UADXq3tepAGVkpvjnsweVcPawv0HL+QinlGg16N1RaXcf+Y6e0fb4NabGh9I8IYk1uudWlKOUxNOjd0Opc7VZ5KSLC1MGxrMuv0KGLlXKRBr0bWp5TSlx4IJn9fHfYg8uZlh5L1ZlGdh+usroUpTyCBr2baWp2sOpAGTOH+PawB5czdXDLXzpr8rT5RilXaNC7mW2HTlJd18SsofFWl+K2YsMCGdavj7bTK+UiDXo3syKnFLtNmKbt85c1PT2WLQdPcKZBx6hXqj0a9G5m+f4yxidHERHsb3Upbm3q4Fgamh1sKtLBUJVqjwa9GzlWVcfeo9XabOOCiSnRBNht5yZmUUpdmga9G1l5oGXOllkZ2n++PcEBdsYnR53riqqUujQNejeyfH8Z/SKCGNo33OpSPMK09Fj2Ha2m/HR9+xsr5cM06N1EQ5ODNXnlXDk0HhHtVumKac5ultp8o9TladC7ieyDlZyub2LWUG22cdWIxAgigv21m6VS7dCgdxMrcsrwt8u5i4FU++w2YcqgGNbmlWOMDoeg1KVo0LuJ5ftLmZQaQ2igjlbZEdPSYzlSVUdBeY3VpSjltjTo3UDJiVpyS09zpTbbdJi20yvVPg16N7Aip2Vs9Su1/3yHJceEMjA6WLtZKnUZGvRuYEVOKQOjgxkUF2p1KR5p2uBYNuRX0NTssLoUpdySBr3F6hqbWZtXwSztVtlp0wbHcaq+iR0lOmyxUm3RoLfYpsJKzjQ267AHXTB5UAwA6/O1+UaptmjQW2x5TimBfjauSIuxuhSPFR0aQGa/PqzNq7C6FKXckga9xVbklDF5UAzBAXarS/FoUwbFsKX4BHWNOmyxUhdyKehFZLaI5IhInog82sb6GSKyVUSaROSOC9Y1i8h2521pdxXuDQrLaygsr9Fmm24wdXAsDU0Othw8YXUpSrmddoNeROzAM8AcIBO4W0QyL9isGLgfWNzGS5wxxoxx3m7pYr1eZUWOc7RKDfoum5AajZ9NtD+9Um1w5TLMiUCeMaYAQETeAOYCe89uYIwpcq7T/m0dsDynjLS4UJJiQqwuxeOFBfoxemAk6/K1nV6pC7nSdJMIHGr1uMS5zFVBIpItIhtE5Na2NhCRB53bZJeVlXXgpT1XbUMTGwoq9Gi+G00dFMPOkpNU1zVaXYpSbsWVoG+rc3dHRpBKMsZkAfcAT4vIoItezJiFxpgsY0xWXJxvDAOwPr+ChiaHBn03mjwoFoeBTQU6vaBSrbkS9CXAwFaPBwBHXH0DY8wR588CYAUwtgP1ea0VOWWEBNiZkBpldSleY1xyJIF+NtZqf3qlzuNK0G8G0kUkVUQCgLsAl3rPiEiUiAQ678cCU2nVtu+rjDEszyll6uBYAv20W2V3CfSzMyElmvXaTq/UedoNemNME/AQ8DGwD3jLGLNHRB4XkVsARGSCiJQAXwSeE5E9zqcPA7JFZAewHPitMcbngz6/7DQlJ85os00PmDI4hv3HTun0gkq14tLg58aYZcCyC5Y91ur+ZlqadC583jpgZBdr9DrL958drdI3zkf0pimDYoEc1uVXcMvo/laXo5Rb0CtjLbA8p5SMhHD6RwZbXYrXGdG/D+FBfjrujVKtaND3slN1jWwuqmSmHs33CD+7jUmpMTrujVKtaND3srV5FTQ2G22f70FTB8dQXFnLocpaq0tRyi1o0PeyFTmlhAf6MT5Zu1X2lLMTrGvvG6VaaND3ImMMK3LKmD4kFn+7/tP3lPT4MGLDAlmn7fRKARr0vWr/sVMcq67TuWF7mIgwZVAMa/MrMKYjF3Er5Z006HvRuUnAh+iJ2J42ZVAMZafqySs9bXUpSllOg74XrcsvJyMhnPg+QVaX4vXOttOvPOAbg+QpdTka9L2kvqmZzUWVOmVgLxkYHUJ6fBjLnWP+K+XLNOh7yY5DVdQ1OpgySIO+t8zKiGdTYSWn65usLkUpS2nQ95J1+eWIwKRUDfreMmtoPI3NhjW52vtG+TYN+l6yPr+CEf0jiAjxt7oUn5GVEkV4kB/L92vzjfJtGvS9oK6xmW3FJ5mszTa9yt9uY0Z6HMtzSrWbpfJpGvS9YMvBEzQ0OzToLTArI57SU/XsOVJtdSlKWUaDvhesz6/AbhMmpERbXYrPmem8ZkGbb5Qv06DvBevyyxk9IIKwQJeG/1fdKC48kNEDIvhcu1kqH6ZB38NO1zexs6RKm20sNCsjnu2HTlKhs04pH6VB38M2F1XS5DBMTou1uhSfdVVGPMboVbLKd2nQ97AN+RUE2G06LLGFRvSPIDYskM+1nV75KA36HrYuv4IxSZEEB9itLsVn2WzClUPjWHWgjKZmh9XlKNXrNOh7UFVtI3uOVOmwB27gqox4quua2Fp80upSlOp1GvQ9aGNhBQ4Dk3UgM8tNS4/FzybafKN8kgZ9D1pfUEGQv40xSZFWl+Lz+gT5MyElWvvTK5+kQd+D1udXkJUcTaCfts+7g1kZceQcP8Xhk2esLkWpXqVB30MqTtez/9gp7T/vRq7K6AvAh7uOWl
yJUr1Lg76HbCioBNCgdyOD48MYnxzFXzccxOHQQc6U79Cg7yHrC8oJDbAzMjHC6lJUK1+enExRRS2rcvXiKeU7XAp6EZktIjkikicij7axfoaIbBWRJhG544J180Uk13mb312Fu7u1eRVckRaDv12/S93JnBH9iA0L5LX1B60uRale024KiYgdeAaYA2QCd4tI5gWbFQP3A4sveG408HNgEjAR+LmIeP0loiUnaiksrzk3QbVyHwF+Nu6ZOJDPc0oprqi1uhyleoUrh5sTgTxjTIExpgF4A5jbegNjTJExZidw4WWH1wOfGGMqjTEngE+A2d1Qt1tbl1cBtPTdVu7nnknJ2ET460Y9qle+wZWgTwQOtXpc4lzmCpeeKyIPiki2iGSXlXl+2+mavHLiwgNJjw+zuhTVhoSIIGYPT+DNzYc409BsdTlK9ThXgl7aWOZqlwWXnmuMWWiMyTLGZMXFxbn40u7J4TCszStn2uBYRNrafeUOvjw5maozjSzdcdjqUpTqca4EfQkwsNXjAcARF1+/K8/1SDnHT1FR06Dt825uYmo0GQnhvLLuoM4nq7yeK0G/GUgXkVQRCQDuApa6+PofA9eJSJTzJOx1zmVea21eOQBTB2v/eXcmIsybnMzeo9VsOXjC6nKU6lHtBr0xpgl4iJaA3ge8ZYzZIyKPi8gtACIyQURKgC8Cz4nIHudzK4EnaPmy2Aw87lzmtdbklTMoLpR+EcFWl6LaceuYRMKD/HhVu1oqL+fSJKbGmGXAsguWPdbq/mZammXaeu6LwItdqNFjNDQ52FhQyZ1Zbf5TKDcTGujHF8cP5NX1Rfz0xmHE9wmyuiSleoRezdONthWf4Exjs7bPe5B5k5Npchje3Hyo/Y2V8lAa9N1obV45NoErdHwbj5EaG8rktBje3lKi498or6VB343W5JUzemAkfYL8rS5FdcCdEwZQXFnLxkKvPn2kfJgGfTeprmtkR0kV07TZxuPMHt6P8EA/3s7W5hvlnTTou8nGgkqaHUbb5z1QcICdm8f0Z9nuo1TXNVpdjlLdToO+m6zNKyfY385YnTbQI92ZNZC6Rgcf7NBJSZT30aDvJmvyypmYqtMGeqrRAyIY0jeMt7T5RnkhDfpucKyqjrzS09o+78FEhDuzBrL90EkOHD9ldTlKdSsN+m7w72EPNOg92a1jE/GziZ6UVV5Hg74brMkrJyY0gIyEcKtLUV0QGxbI1cPieXfrYRqbL5xaQSnPpUHfRQ6HYdWBMqanx2Kz6bDEnu7OrIFU1DTw+f5Sq0tRqtto0HfRrsNVVNQ0cOXQeKtLUd1g5pA44sIDtflGeRUN+i5anlOKCMwY4tkTpqgWfnYbXxg3gOU5ZZRW11ldjlLdQoO+i1bklDF6QCTRoQFWl6K6yRezBtDsMLyhA50pL6FB3wWVNQ3sKDnJLG228SqD4sK4NrMvf1mZz3E9qldeQIO+C1YdKMMYuHKoNtt4m5/dmEmTw/CbZfusLkWpLtOg74IVOaXEhAYwMjHC6lJUN0uKCeHrM9J4b/sRNhfpqJbKs2nQd1Kzw7DyQBkzh8Rpt0ov9c0rB9E/Ioifv7+HZh2rXnkwDfpO2llykhO1jczUZhuvFRLgx49vHMbeo9Us3lRsdTlKdZoGfSetyCnDJjAjXYPem904sh+T02L4n3/lcKKmwepylOoUDfpOWpFTypiBkURpt0qvJiL8/JZMTtU18T+f5FhdjlKdokHfCeWn69l5uEqvhvURGQl9mHdFMos3FrPnSJXV5SjVYRr0nXC2W6X2n/cd37tmCFEhAfzo3V00tTPgmTEGY/TkrXIfGvSdsCKnjNiwAIb372N1KaqXRIT488u5w9lZUsVzqwouuV19UzP3vrCRe1/YSF1jcy9WqNSladB3ULPDsCq3jJlD4rVbpY+5aVR/bhiZwB8/zSXn2MWTkxhjeOy9PazLr2B9QQXfe3M7DjfsltnsMLy4ppCqWp0f11do0HfQ9kMnOVnbqFfD+qgn5o4gPMiP77+946Ix6/+64SBvZh/ioVmD+ckNw/hw9zF+7YZX1q7Pr+DxD/by3//ca3Upl2SM0TkBupEGfQetzCnFJjA9XWeT8kUxYYE8cesIdh2u4rmV+eeWbyyo4Jf/2MtVGfF879ohLJiWyv1TUnhhTSEvry20sOKLnb3S9+0tJWw/dNLiatr2q3/uY+aTyzlUWWt1KV5Bg74DjDF8uPsY45OjiAzRbpW+6oaR/bhxVD/++Fku+45Wc+TkGb71t60kRYfw9F1jsNsEEeFnN2VyXWZffvnBXv6155jVZZ+TfbCSQXGhxIUH8vOle7qleamusbnbTkCXnarn1Q0HOVJVx/yXNnGyVq9f6CqXgl5EZotIjojkicijbawPFJE3nes3ikiKc3mKiJwRke3O21+6t/zetf3QSXJLT/OFcQOsLkVZ7Im5I4gI9uf7b+/g669tob7JwcIvZ9EnyP/cNnab8Me7xjJqQCQPv7GNTYXWj5nT2OxgW/FJpqfH8aM5Gew4dJJ3tpZ0+TVv+NNq7lvUPSegX11fRGOzg999YSQllWf42qvZemK7i9oNehGxA88Ac4BM4G4RybxgswXACWPMYOAPwO9arcs3xoxx3r7RTXVb4q3sEoL97dw4qp/VpSiLRYcG8N+3jmDPkWp2H6ni6S+NYXB82EXbBQfYWTQ/i/jwIO58bj2z/t8KfrF0DytySi0Jr71HqqltaCYrJYrbxiYyLimS3320n+q6zp+YXbbrKAVlNazNq+Dh17e12/30cmobmnhtw0GuHdaXL01I4vdfGs3mohM88pZ7ntj2FK4c0U8E8owxBcaYBuANYO4F28wFXnHeXwJcLSJe1SXlTEMz/9hxhBtG9iO81VGb8l2zR/TjkWuH8JvbRnJNZt9LbhcbFsi735rCL27OJDkmhNc3FXP/S5sZ8/i/+M+3dvRq4J9tn89KjkZE+OUtI6ioaeBPn+Z26vWMMTy/uoC0uFAeuymTf+09zk/+vrvTzThvbT7EydpGvj4zDWjp6fTTG4exbNcxfuWGJ7Y9hZ8L2yQCrafaKQEmXWobY0yTiFQBMc51qSKyDagGfmqMWX3hG4jIg8CDAElJSR3agd7y4e6jnK5v4s4sbbZR//bw1ekubRcbFsj9U1O5f2oqdY3NbCio4JO9x/nbxmJKT9WxcF4WwQH2Hq4WsotOMDA6mISIIABGDojgrgkDeXldEXdNHMjg+PAOvd7Gwkp2H67m17eN5J5JSZyobeDPn+cRHRbAD2dndOi1mpodLFpbyLikSMYnR59bvmBaKiUnzrBoTSH9I4NZMC21Q6+rXDuib+vI/MKv60ttcxRIMsaMBR4BFovIRVcZGWMWGmOyjDFZcXHu2W3xrexDpMSEMDE1uv2NlbqMIH87Vw6N51e3jeTJO0axJq+cB17eTE19U5vb7z1SzbtbS7p8stMYw+aiSiaknP8Z/v51QwkJsPPLf+zt8Hu8sLqA6NAAbh+XCMAj1w7hnklJPLsinxdWX/rCsrZ8tOcYhyrP8OCMQectP3tie
/bwBP77n3tZvr+0Q6+rXDuiLwEGtno8ADhyiW1KRMQPiAAqTcunph7AGLNFRPKBIUB2VwvvTQcrathQUMl/XT8UL2uRUha7M2sgAXYbj7y1nfkvbuKlr0w41zSYX3aaP3xygA92HgWgqKKWR64d0un3KiyvoaKm4aKgjwkL5JFrh/CLf+zlhj+tYWBUMP0jg0mMDGZgdDAzh8S3+ddGQdlpPt1XysNXpxPk37JeRHhi7ghO1jbw3//cx6HKWjL79yE1NoyU2BDiwgLb/D9kjGHhqgJSY0O5to1mMLtN+MOXxnDHX9bx8OvbeO+hqQyKu/iciGqbK0G/GUgXkVTgMHAXcM8F2ywF5gPrgTuAz40xRkTiaAn8ZhFJA9KBjn3Nu4G3s0uwCdrbRvWIW8cm4m+38Z03tjFv0SZ+c/tIXlpbyJItJQT523lo1mCOVdfxp89y6dsnkHsnJXfqfbKLTgAwIRMommUAAA7kSURBVCXqonX3XZFMZU0Duw5XUVRRw9q8cmoaWs4dTEyN5tUHJp4L87MWrSkkwM/GvCvOr+dsKBuznb9tLKap1UnUsEA/pgyK4YdzMs4L6o2FlewsqeJXt43AfokrzoMD7Cz8cha3/HkNX3slm79/eyoRwZ0/X7Y6t4yX1hbxxK0jSIwM7vTreIJ2g97Z5v4Q8DFgB140xuwRkceBbGPMUmAR8JqI5AGVtHwZAMwAHheRJqAZ+IYxxvo+Zh3Q7DAs2VLCzCFx59o1lepuN47qh79d+Pbircz542oC7Dbun5LKt2YNIjYskKZmB5U1Dfzsvd3EhgVy/fCEDr/H5qJKokL82zwS9rPbeOS6oeceG2OoPtPER3uO8ui7u3ho8Tb+ct84/Owtrb2VNQ28s7WE28YkEhceeNHrBfrZefa+8TQ1Ozh88gyF5TUUldeQX1bD37cd5vo/rGLe5GS+c3U6kSEBLFxVQExoQLsHU4mRwfzfveO494WNfPeNbbwwf8Ilvxgupa6xmd9+uJ+X1xUBMGzjQf7r+o6dT/A04m6j7GVlZZnsbPdp2VmRU8r9L23m2XvHMWekdqtUPWtNbjkrckr5yrTUi44yaxuauPv5jew/Ws3fvjqJrJSOnS+68qnlpPcN5/kvZ3Xoea+tL+Jn7+/hC+MG8NQdo7DZhD9/lsv/fHKAf31vBkP6duwEbtmpen7/yQHe3FxMeJA/865I5n+X5/G9a4bwnWtcO7n91w0H+el7u/nmlYM6dNJ3z5EqvvvGdnJLT3P/lBRyjp3i0IlaVv9glsc3y4rIFmNMm79cvTK2HW9nlxAdGsDVwy7dfU6p7jItPZaf3pTZZlNCSIAfL90/gcTIYBa8kk3u8YsHVruU0lN1FFXUttls0555k1N45NohvLO1hF8t20ddYzOvrD/IzCFxHQ55gLjwQH5z+0iWfWc6IxL78L/L8wjytzFvsutNUvddkXzupO/72w+3u33F6Xr+b0Uetz6zlqozjbz6wER+cctw7hg/gJITZ9hafKLD++FJXGmj91mVNQ38a+8x5l2RQoCfficq60WHBvDKAxO5/dl13PvCRr4yNZW5Y/rTv5025i3n2uc712vsP64aTGVNA4vWFLL7cBXlp+v56vSudXPMSOjDXxdMYuWBMqBl3zriFzcPJ/f4Kb7zxnae+GAfw/qFM6xfH4b1C6dveBB7j1azo6SKHYdOUuwcM+eGkQn86taR52aGu35EAj/++y7e23bkvC6d3kaD/jLe336YxmbDnRP0JKxyHwOjQ3j1gYn85O+7+N1H+3ny4/1ckRrDbeMSmTMioc0L+jYVVRLkb2N4/4hOvaeI8NhNmVSdaeTv2w6TkRDOtMFdH9hPRDo9U1uAn40X5k9gyZYS9h2tZt/Ral5eW0RDqytzEyODGT0wgnsnJTE+OYrxyVHnNdGEBfpxTWZf/rnrKI/dnIm/3TsP6DToL6Gx2cFr6w8yakAEGQk6wYhyL8P69eHdb03lYEXLyc33th3mB0t28sule3jlgYkXtd9nF51gzMDILv1larMJT94xin4RQVyVEe8WbdoRwf7nXUDV2OygqLyGY9V1DE0IJz68/Q4Ut45J5J87j7I6t4yrMryzidY7v766wavrD1JQXsN3XLzyUSkrJMeE8t1rhrD8+1fy7remEN8niAdf20Jxxb+H9z1d38SeI1VM7GSzTWv+dhs/mJ3R4RPBvcXfbiO9bzjT0+NcCnmAmUPiiAzx5/3tF14e5D006NtQWdPAHz89wPT0WK7K0HlhlfsTEcYlRbFofhbNDsMDr2ym6kzLQGXbi0/iMLhtOFstwM/GDSP78a89xy95dbKn06Bvw+8/yaGmoZnHbsp0iz9PlXJVWlwYz943jqLyGh5avJXGZgebiiqxCYxNirS6PLd165hEzjQ288ne41aX0iM06C+w/1g1izcWc9+kJNI70XVMKatNGRTLr28byerccn6xdA+bCyvJ7N9HR129jKzkKBIjg3nPha6ankhPxrZijOGJD/YSHuTPd6/p/JgiSlntzgkDyS8/zXMrCxCB+ZNTrC7Jrdlsws2j+/P86gLKT9cTG3bx1b6eTI/oW/lk73HW5lXwvWvSz/WzVcpT/fD6DK4f3hdjOt9/3pfcOrY/zQ7Dsl1HrS6l22nQO9U3NfOrZfsYHB/GvVd0btAopdyJzSY8/aWx/Pq2kW2OCKnOl5HQh4yEcN7b5n3NNxr0Ti+vLeJgRS0/u8l7L5pQvic4wM49k5L0ym4XzR2TyNbik+d1T/UG+tunZdzvpz/N5eqMeGYOcc+JT5RSPW/umP7YBBZvKra6lG7l80Ff39TMw69vI8jfxq9uG2l1OUopC/WPDGbOiH4s3njQq/rU+3zQP/lRDnuOVPPUHaN1vHmlFAump1Jd18SSLSVWl9JtfDrol+eUsmhNIfMnJ3ONnqxSSgHjkqIYlxTJojWFNDvca76OzvLZoC89Vcf339pBRkI4P7phmNXlKKXcyFenp1FcWes1V8r6ZNA7HIb/fGsHp+ub+PPdYy+aC1Mp5duuH57AwOhgFq3xuCmu2+RzQW+M4dmV+azOLeexmzN1mAOl1EXsNuErU1LZXHSC7YdOWl1Ol/lM0Dc7DB/sPMLN/7uGpz7OYc6IBO6ZmGR1WUopN3XnhIGEB/qxaE2h1aV0mdePdVPX2Mw7W0tYuKqAgxW1pMWG8tvbR3L7uAE6MqVS6pLCAv24e1ISi9YU8uicjDbn8fUUXhf0Tc0O9h6tZmNBJRsLK9lUWEF1XROjB0Two/vGcW1mAnabBrxSqn3zp6SwaE0hr6wr4sce3GnDa4L+eHUd/7VkJ1uKKqlpaAYgJSaE2SMSuHVsIpPTYvQIXinVIYmRwdwwsh+vbyzmP64a7LFDPXtN0EeG+FNZU89t4xKZlBrDxNRo+vbRC6CUUl3ztemp/GPHEf70WS4/uTHT6nI6xWuCPtDPzgf/Md3qMpRSXmbUgEjmXZHM86sLGZEYwdwxiVaX1GE+0+tGKaU662c3ZTIxJZofvrOT3YerrC6nwzTolVKqHQF+Np65dxxRIQF8
/bUtVJyut7qkDnEp6EVktojkiEieiDzaxvpAEXnTuX6jiKS0Wvcj5/IcEbm++0pXSqneExceyHPzxlN2up6HFm+jsdlhdUkuazfoRcQOPAPMATKBu0XkwjMSC4ATxpjBwB+A3zmfmwncBQwHZgP/53w9pZTyOKMGRPKb20ayvqCCXy/bZ3U5LnPlZOxEIM8YUwAgIm8Ac4G9rbaZC/zCeX8J8L/S0pdxLvCGMaYeKBSRPOfrre+e8i/w4aNwbFePvLRSSgF8ARgfW8OxzXVs326ns522w4P8SIsNO39hwkiY89uulngRV4I+ETjU6nEJMOlS2xhjmkSkCohxLt9wwXMvOmUtIg8CDwIkJemwBEop95YcE4JNhLqm5k6/RqBf7zVuuBL0bX1hXThI86W2ceW5GGMWAgsBsrKyOj8AdA98Eyql1IUE8KRDUldOxpYAA1s9HgAcudQ2IuIHRACVLj5XKaVUD3Il6DcD6SKSKiIBtJxcXXrBNkuB+c77dwCfG2OMc/ldzl45qUA6sKl7SldKKeWKdptunG3uDwEfA3bgRWPMHhF5HMg2xiwFFgGvOU+2VtLyZYBzu7doOXHbBHzbGNP5Ri2llFIdJi0H3u4jKyvLZGdnW12GUkp5FBHZYozJamudXhmrlFJeToNeKaW8nAa9Ukp5OQ16pZTycm53MlZEyoCDXXiJWKC8m8qxkrfsB+i+uCtv2Rdv2Q/o2r4kG2Pi2lrhdkHfVSKSfakzz57EW/YDdF/clbfsi7fsB/TcvmjTjVJKeTkNeqWU8nLeGPQLrS6gm3jLfoDui7vyln3xlv2AHtoXr2ujV0opdT5vPKJXSinViga9Ukp5Oa8J+vYmMHdnIvKiiJSKyO5Wy6JF5BMRyXX+jLKyRleJyEARWS4i+0Rkj4h8x7nco/ZHRIJEZJOI7HDuxy+dy1NFZKNzP950Dt3tEUTELiLbROQD52OP3BcRKRKRXSKyXUSyncs86vMFICKRIrJERPY7/79M7qn98Iqgd3ECc3f2Mi2Tp7f2KPCZMSYd+Mz52BM0Af9pjBkGXAF82/m78LT9qQeuMsaMBsYAs0XkClomvv+Dcz9OAAssrLGjvgO0ntHak/dlljFmTKs+5572+QL4I/CRMSYDGE3L76Zn9sMY4/E3YDLwcavHPwJ+ZHVdHdyHFGB3q8c5QD/n/X5AjtU1dnK/3geu9eT9AUKArbTMlVwO+DmXn/e5c+cbLbO7fQZcBXxAy2x4nrovRUDsBcs86vMF9AEKcXaI6en98IojetqewPyiScg9TF9jzFEA5894i+vpMBFJAcYCG/HA/XE2dWwHSoFPgHzgpDGmybmJJ33OngZ+ADicj2Pw3H0xwL9EZIuIPOhc5mmfrzSgDHjJ2Zz2goiE0kP74S1B79Ik5Kr3iEgY8A7wXWNMtdX1dIYxptkYM4aWo+GJwLC2NuvdqjpORG4CSo0xW1ovbmNTt98Xp6nGmHG0NNV+W0RmWF1QJ/gB44BnjTFjgRp6sLnJW4LeGychPy4i/QCcP0strsdlIuJPS8j/zRjzrnOxx+6PMeYksIKWcw6RInJ2Ck5P+ZxNBW4RkSLgDVqab57GM/cFY8wR589S4O+0fAl72uerBCgxxmx0Pl5CS/D3yH54S9C7MoG5p2k94fp8Wtq63Z6ICC1zCO8zxvy+1SqP2h8RiRORSOf9YOAaWk6WLQfucG7m9vsBYIz5kTFmgDEmhZb/G58bY+7FA/dFREJFJPzsfeA6YDce9vkyxhwDDonIUOeiq2mZW7tn9sPqkxLdeHLjBuAALe2oP7G6ng7W/jpwFGik5Zt+AS1tqJ8Buc6f0VbX6eK+TKOlCWAnsN15u8HT9gcYBWxz7sdu4DHn8jRgE5AHvA0EWl1rB/frSuADT90XZ807nLc9Z/+ve9rny1nzGCDb+Rl7D4jqqf3QIRCUUsrLeUvTjVJKqUvQoFdKKS+nQa+UUl5Og14ppbycBr1SSnk5DXqllPJyGvRKKeXl/j/1rBSumeyCYAAAAABJRU5ErkJggg==\n",
12626
      "text/plain": [
12627
       "<Figure size 432x288 with 1 Axes>"
12628
      ]
12629
     },
12630
     "metadata": {
12631
      "needs_background": "light"
12632
     },
12633
     "output_type": "display_data"
12634
    }
12635
   ],
12636
   "source": [
12637
    "plt.plot(train_md[['pos_idx',all_ich[0]]].groupby('pos_idx').mean())\n",
12638
    "plt.plot([0,60],[0,0])"
12639
   ]
12640
  },
12641
  {
12642
   "cell_type": "code",
12643
   "execution_count": 72,
12644
   "metadata": {},
12645
   "outputs": [],
12646
   "source": [
12647
    "test_md['pred_any'] = predictions[:,1]"
12648
   ]
12649
  },
12650
  {
12651
   "cell_type": "code",
12652
   "execution_count": 73,
12653
   "metadata": {
12654
    "scrolled": true
12655
   },
12656
   "outputs": [
12657
    {
12658
     "data": {
12659
      "text/plain": [
12660
       "[<matplotlib.lines.Line2D at 0x7f8255e69ad0>]"
12661
      ]
12662
     },
12663
     "execution_count": 73,
12664
     "metadata": {},
12665
     "output_type": "execute_result"
12666
    },
12667
    {
12668
     "data": {
12669
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD4CAYAAADlwTGnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXiU5bn48e+dyZ6QlUBYEhJIQBJ2IiCIIFhAq1Jb2oq1arVuVWvr6c+j9hxP6zme03ra2kVtS7VWqy1aF6SK4oIsCgTCToCEkABJIGQPkJBt5vn9MYMnxoRMYJJ3lvtzXVzMvPO879yPjHPP+6xijEEppVTgCbI6AKWUUtbQBKCUUgFKE4BSSgUoTQBKKRWgNAEopVSACrY6gN4YOHCgSUtLszoMpZTyGdu2bas2xiR19ZpPJYC0tDTy8vKsDkMppXyGiBzp7jVtAlJKqQClCUAppQKUJgCllApQmgCUUipAaQJQSqkApQlAKaUClCYApZQKUD41D0B5h4amNvZXnOTA8ZNkDY1lWnqC1SEppc6DJgDlln/uOsaKHeUcqDhFef2Zz44HBwkv3jaNmaMGWhidUup8aBOQ6tFH+0/w/eU7KKw8xZQR8Ty4aAzPf+di1vzLHNIHRnH3S9sprjptdZhKqV4SX9oRLCcnx+hSEP2rqPIUX3l6IyMSI3ntrplEhNo+93ppbROLn/6UmPBg3vzeLOKjQi2KVCnVFRHZZozJ6eo1vQNQ3WpoauP2F7cRHhLEsptyvvDlD5CSEMmyb0/lWH0zd720jdZ2hwWRKqXOhyYA1SW7w3Df8h2U1TXx+xunMiwuotuyOWkJPLFkArkltTzy5h586a5SqUCmncCqSz9/7wDrC6v47+vGc3Faz6N8vjJ5GMXVjfz2o4NkDIrmrjmj+iFKpdSF0DsA9QVv7ihj2fpivj1jBDdMT3X7vB9ekcmi7GSe/KCQ6tMtfRihUsoTNAGoz6k53cK/r8hnWloCj16T1atzRYQHF42h1e7gL58e7psAlVIeowlAfc7v1hTR1NrOf391PCG23n88RiZFszArmRc3HeZ0S7vnA1RKeYwmAPWZ0tomXs49wjcvTiFjUPR5X+euuaM42dzO8i1HPRidUsrTNAGoz/zy/QJsQcL980df0HUmpcQxY2QCz24o0WGhSnkxTQAKgL3lDazYeYxbZ6WTHBt+wde7a84oKk4289bOcg9Ep5TqC5oAFABPrC4gLjKEOz00fHPO6CQuSh7AH9cX43DovAClvJEmAMWnRdWsL6zi3ssziI0I8cg1RYS75oyiqPI0aw5UeuSaSinP0gQQ4BwOw8/ePcCwuAhunDHCo9e+esIQhsVF8Id1hzx6XaWUZ7iVAERkkYgUiEiRiDzUxethIvKK6/VcEUnr8NrDruMFIrKww/HDIrJHRHaKiK7wZpFVe4+zp7yBB740mvCQL671cyGCbUHcPjudvCN15B2u9ei1lVIXrscEICI24GngSiALWCoinWcI3QbUGWMygCeBn7vOzQKuB7KBRcAzruuddbkxZlJ3K9WpvuVwGH75fiEXJQ/gK5OH9cl7fOPiFOIjQ/j9Wr0LUMrbuHMHMA0oMsYUG2NageXA4k5lFgMvuB6/BswXEXEdX26MaTHGlABFruspL7DxUA0l1Y3cPXcUtiDpk/eIDA3mu7NH8tGBStYWaF+AUt7EnQQwDCjt8LzMdazLMsaYdqABSOzhXAO8LyLbROSO7t5cRO4QkTwRyauqqnIjXOWu5VuPEhsRwsLs5D59n+/OTmdUUhT/tmIvTa06O1gpb+FOAujqp2HncX3dlTnXubOMMVNwNi3dIyKXdfXmxphlxpgcY0xOUlKSG+Eqd9Q1tvJ+/gmumzzM423/nYUF2/ifr06grO4MT35Q2KfvpZRynzsJoAxI6fB8OHCsuzIiEgzEArXnOtcYc/bvSuBNtGmoX725o5xWu4NvXpzSc2EPmJaewNJpqTz3SQl7yxv65T2VUufmTgLYCmSKSLqIhOLs1F3ZqcxK4GbX4yXAGuPcFWQlcL1rlFA6kAlsEZEoERkAICJRwAJg74VXR7nDGMMrW0uZmBLH2CEx/fa+D115EYnRYfzr67tpt+sSEUpZrccE4GrTvxdYDewHXjXG5IvIYyJyravYc0CiiBQBDwAPuc7NB14F9gHvAfcYY+zAYOATEdkFbAHeMca859mqqe7sLK2n4MQpvpnTP7/+z4qNCOEn12STf+wkz+ty0UpZzq0dwYwxq4BVnY492uFxM/D1bs59HHi807FiYGJvg1We8crWUiJCbFwzcUi/v/dV45O5YuwgfvVBIYvGJZOSENnvMSilnHQmcIBpbGnnn7uOcfWEIQwI98yyD70hIjy2eBxBAo+8uUebgpSykCaAAPPO7uM0ttr7rfO3K0PjInjky2PZcLCae/62nZZ2u2WxKBXINAEEmOVbjzIqKYqpI+ItjeNb00fw6NVZrM4/wXdfyNP5AUpZQBNAADl44hTbj9Zz/cWpOCdqW+vWS9N5YskEPi2q5tvPbaHhTJvVISkVUDQBBJBXtpYSYhOum9I36/6cj2/kpPDUDVPYXVbP0mWbqT7dYnVISgUMTQABos3u4I0d5VwxdjADo8OsDudzrho/hD/dlENx9Wm+8cdNVJ5stjokpQKCJoAAsfFQDbWNrVzXR6t+Xqi5Ywbx4q3TqWho5oZnc/VOQKl+oAkgQKzafZzosGAuG+296ylNS0/gz7dcTFldEzc+m0tdY6vVISnl1zQBBIA2u4PV+yqYP3ZQny/8dqFmjEx0NQc1ctOftWNYqb6kCSAAbDpUQ31TG1eN7/+Zv+djdmYSf7hxCgcqTnLL81s43aJDRJXqC5oAAsCqPceJCrUxx4ubfzqbd9Fgfrd0CrvLGrj1+a20tuuMYaU8TROAn2uzO1idX8H8sYO9vvmns0XjkvnfJRPYcriW17eXWR2OUn5HE4Cfyy2upc6Hmn86u27yMCamxPH0x0W06bpBSnmUJgA/986e40SG2pg7xneafzoSEe6fn0FZ3Rne3FFudThK+RVNAH6s3dX8M+8i7x/9cy6XjxnEuGExPPNxka4eqpQHaQLwY7kltdQ2tvJlH23+OUtEuG9eJodrmvjn7s67kSqlzpcmAD/2zp7jRITYmDtmkNWhXLAvjR3MRckD+N2aIuwOY3U4SvkFTQB+qt3uYPXeCuaNHUREqO82/5wVFCR8f34mxVWNvLPnuNXhKOUXNAH4qS2Ha6nxg+afjhZlJ5M5KJqn1hzEoXcBSl0wTQB+atWe44SHBPns6J+uBAUJ987LoPDEaVbnV1gdjlI+TxOAH7I7DO/tPcG8iwYRGRpsdTgedfWEoYwcGMVvPtK7AKUulCYAP7S3vIHq0y0szE62OhSPs7nuAg5UnOLjgkqrw1HKp2kC8EMbD9UAMHPUQIsj6RvXTBzK0Nhwlq0vtjoUpXyaJgA/tPFQNaMHR5M0wLt2/vKUEFsQt16aTm5JLbtK660ORym
fpQnAz7S2O9h6uNZvf/2fdf20VAaEB7Nsg94FKHW+NAH4mZ2l9TS3ObhkVKLVofSp6LBgbpieyrt7jlNa22R1OEr5JE0AfmbjoWpEYEa6fycAgO/MTMcWJDz3SYnVoSjlk9xKACKySEQKRKRIRB7q4vUwEXnF9XquiKR1eO1h1/ECEVnY6TybiOwQkbcvtCLKaWNRDeOGxhIbGWJ1KH0uOTacaycO45WtpdQ36f7BSvVWjwlARGzA08CVQBawVESyOhW7DagzxmQATwI/d52bBVwPZAOLgGdc1zvrfmD/hVZCOTW1trOjtI6Zft7809Htl6Vzps3OS5uPWB2KUj7HnTuAaUCRMabYGNMKLAcWdyqzGHjB9fg1YL6IiOv4cmNMizGmBChyXQ8RGQ58GXj2wquhAPIO19FmN8zM8O8O4I4uSo5hzugk/rLxCM1tdqvDUcqnuJMAhgGlHZ6XuY51WcYY0w40AIk9nPtr4EHgnAu8i8gdIpInInlVVVVuhBu4Nh6qIThIuDgt3upQ+tUdl42k+nQLb+3UDWOU6g13EoB0cazzHPzuynR5XESuBiqNMdt6enNjzDJjTI4xJicpyX/WtekLmw5VMzk1zu+Wf+jJzFGJZA2JYdn6Yl0eQqlecCcBlAEpHZ4PBzrvyvFZGREJBmKB2nOcOwu4VkQO42xSmiciL51H/Mql4Uwbe8obuMTPx/93RUS4c85IDlU18klRtdXhKOUz3EkAW4FMEUkXkVCcnborO5VZCdzserwEWGOMMa7j17tGCaUDmcAWY8zDxpjhxpg01/XWGGNu9EB9AtaWklochoDqAO5oYXYy0WHBrNK9ApRyW48JwNWmfy+wGueInVeNMfki8piIXOsq9hyQKCJFwAPAQ65z84FXgX3Ae8A9xhjtqesDGw9VExYcxOTUOKtDsUR4iI35YwexOr9C9w1Wyk1uNRYbY1YBqzode7TD42bg692c+zjw+DmuvRZY604cqnsbi2q4OC2BsGDf3/3rfF01fghv7TxGbkktswJoJJRS50tnAvuBqlMtFJw4xcyMwGz+OWvO6CQiQ226ZaRSbtIE4Ac2F/v38s/uCg+xMe+iQazeW6EbxyvlBk0AfmDjoRoGhAUzbmiM1aFY7qrxQ6hpbGVLSa3VoSjl9TQB+IFNh6qZPjKBYJv+c84dk0R4SBDv7tVmIKV6ot8YPq68/gyHa5qYMTKw2//PigwN5vIxg3h3b4VOClOqB5oAfNyGQufyGJeN1lnSZ105fghVp1rYdrTO6lCU8mqaAHzcusIqhsSGkzko2upQvMa8iwYRGhykk8KU6oEmAB/WZnfwycFq5oxOwrn4qgLnbmFzRifxnjYDKXVOmgB82M7Sek61tGvzTxeuGp/M8YZmdpbppvFKdUcTgA9bX1iFLUh01msX5o8dTIhNeFebgZTqliYAH7ausIrJKXHERvj/9o+9FRMewuzMJFbtqcC5LqFSqjNNAD6q+nQLu8samKPNP926clwy5fVn2F3WYHUoSnklTQA+6pODznXvtf2/e/PHDgbg00O6R4BSXdEE4KPWF1aREBXK+GGxVofitRKiQhk5MIrtR7QjWKmuaALwQQ6HYf3BKmZnDiQoSId/nsvk1Hh2HK3TfgCluqAJwAftO36S6tOt2v7vhikj4qhpbOVobZPVoSjldTQB+KB1ruUfZmdqAujJ5JR4ALbrshBKfYEmAB+0rrCK7KExJA0IszoUrzcmeQBRoTbtB1CqC5oAfMzJ5ja2H6nT5h832YKEiSlx7CjVOwClOtME4GM2FtXQ7jA6/LMXpqTGs//4KZpa260ORSmvognAx6wrrCI6LJgpqfFWh+IzpoyIw+4wOiFMqU40AfgQYwzrC6uYOSqR0GD9p3OXdgQr1TX9FvEhh6oaKa8/w5wx2vzTG/E6IUypLmkC8CFrCyoBuEyHf/bapNQ4nRCmVCeaAHzIusIqRiVFkZIQaXUoPmdKajw1ja2U1p6xOhSlvIYmAB/R1NpObnEtc8cMsjoUn3S201z7AZT6P5oAfMSmQzW02h3M1fb/8/LZhDBNAEp9xq0EICKLRKRARIpE5KEuXg8TkVdcr+eKSFqH1x52HS8QkYWuY+EiskVEdolIvoj81FMV8ldrC6qICLExLT3B6lB80tkJYZoAlPo/PSYAEbEBTwNXAlnAUhHJ6lTsNqDOGJMBPAn83HVuFnA9kA0sAp5xXa8FmGeMmQhMAhaJyAzPVMn/GGNYW1jJzFGJhAXbrA7HZ+mEMKU+z507gGlAkTGm2BjTCiwHFncqsxh4wfX4NWC+iIjr+HJjTIsxpgQoAqYZp9Ou8iGuPzo8oxvF1Y2U1p7R5p8LNDlVJ4Qp1ZE7CWAYUNrheZnrWJdljDHtQAOQeK5zRcQmIjuBSuADY0xuV28uIneISJ6I5FVVVbkRrv9ZW+Cst3YAX5jJro7gHUd1PoBS4F4C6GrHkc6/1rsr0+25xhi7MWYSMByYJiLjunpzY8wyY0yOMSYnKSkwfwGvLahkpA7/vGAJUaGkD4zSfgClXNxJAGVASofnw4Fj3ZURkWAgFqh151xjTD2wFmcfgerkTKud3JJa5o7WX/+eMFknhCn1GXcSwFYgU0TSRSQUZ6fuyk5lVgI3ux4vAdYY5/9hK4HrXaOE0oFMYIuIJIlIHICIRABXAAcuvDr+Z3NxDa3tOvzTU6akxlN9WieEKQUQ3FMBY0y7iNwLrAZswJ+NMfki8hiQZ4xZCTwH/FVEinD+8r/edW6+iLwK7APagXuMMXYRGQK84BoRFAS8aox5uy8q6OvWFlTq8E8P6jghLDVRm9RUYOsxAQAYY1YBqzode7TD42bg692c+zjweKdju4HJvQ02EK0trOKSUYmEh+jwT08YkzyAyFAbO0vr+crkzmMZlAosOhPYi5VUN3KkpkmbfzzIFiRMGB7LDu0IVkoTgDc7u/qndgB71uTUePKPnaS5zW51KEpZShOAF1tbUMXIgVHaVu1hk1PiaHcY8o/phDAV2DQBeKnmNjubi2t085c+MCk1DtAJYUppAvBSm4praGl36OzfPjBoQDjD4iI0AaiApwnAS60rqCI8JIjpOvyzT5ydEKZUINME4KXWFVYxY6QO/+wrk1PjOdbQzImTzVaHopRlNAF4oSM1jZRUNzJ3tLb/95XJ2g+glCYAb7Su0Ln65xxt/+8z2UNjCLUFsaNUm4FU4NIE4IXWFVQxIjGS9IFRVofit8KCbWQNjdE7ABXQNAF4mZZ2OxsP1TBHm3/63KSUOHaX1dNud1gdilKW0ATgZbaW1HGmza4JoB9MTo2juc3BgYpTVoeilCU0AXiZdYWVhNqCuGRUotWh+L2zK4PuLNVmIBWYNAF4mbUFVUxLTyAy1K2FWtUFGB4fwcDoUO0HUAFLE4AXKa8/w8HK07r6Zz8RESalxOtIIBWwNAF4kXWuzd+1/b//TE6No7iqkfqmVqtDUarfaQLwIusKKxkaG07GoGirQwkYk1OcE8K0H0AFIk0AXqK13cGnRTXMGTMIEbE6nIAxISUOEZ0RrA
KTJgAvsf1oHadb2rX5p59FhwUzZvAAvQNQAUkTgJdYW1BFcJAwK0OHf/a3yalx7Cytx+EwVoeiVL/SBOAl1hVWMXVEPAPCQ6wOJeBMSomj4UwbJTWNVoeiVL/SBOAFTpxsZv/xk7r7l0UmuyaEbT+iw0FVYNEE4AV083drZSRFkzQgjJW7jlkdilL9ShOAF3h3bwXD4yMYO2SA1aEEpKAg4Tuz0thwsJq95bpRvAocmgAs1nCmjU+LqrlyXLIO/7TQt6aPIDosmD+uL7Y6FKX6jSYAi320/wRtdsOV44dYHUpAi40I4Ybpqbyz+xhHa5qsDkepfqEJwGKr9lQwJDacScPjrA4l4N06Kx1bkPCnDXoXoAKDWwlARBaJSIGIFInIQ128HiYir7hezxWRtA6vPew6XiAiC13HUkTkYxHZLyL5InK/pyrkS063tLP+YBULs5MJCtLmH6slx4Zz3eRhvJpXSvXpFqvDUarP9ZgARMQGPA1cCWQBS0Ukq1Ox24A6Y0wG8CTwc9e5WcD1QDawCHjGdb124F+MMWOBGcA9XVzT7605UElru4OrtPnHa9xx2Sha7Q5e2HjY6lCU6nPu3AFMA4qMMcXGmFZgObC4U5nFwAuux68B88XZo7kYWG6MaTHGlABFwDRjzHFjzHYAY8wpYD8w7MKr41ve23ucpAFhTB0Rb3UoyiVjUDRfGjuYFzcdobGl3epwlOpT7iSAYUBph+dlfPHL+rMyxph2oAFIdOdcV3PRZCC3qzcXkTtEJE9E8qqqqtwI1zc0tbbz8YEqFmYPxqbNP17lrrmjaDjTxvKtpT0XVsqHuZMAuvp26rxoSndlznmuiEQDrwM/MMac7OrNjTHLjDE5xpicpCT/mSm7rqCKM212rhqnzT/eZkpqPNPSE3huQzFtumG88mPuJIAyIKXD8+FA5ymTn5URkWAgFqg917kiEoLzy/9lY8wb5xO8L3t3bwUJUaFMS0+wOhTVhbvnjOJYQzMrd+rsYOW/3EkAW4FMEUkXkVCcnborO5VZCdzserwEWGOMMa7j17tGCaUDmcAWV//Ac8B+Y8yvPFERX9LcZuej/SdYkDWYYJuOxPVGc8ckkTkomhc2HbY6FKX6TI/fPq42/XuB1Tg7a181xuSLyGMicq2r2HNAoogUAQ8AD7nOzQdeBfYB7wH3GGPswCzg28A8Ednp+nOVh+vmtT45WE1jq10nf3kxEeHGGSPYXdbA7jLdK0D5p2B3ChljVgGrOh17tMPjZuDr3Zz7OPB4p2Of0HX/QEBYtfc4sREhzByla/97s+umDONn7x7g5c1HmbBEJ+op/6PtD/2std3BB/tOcMXYwYRo849XiwkPYfGkoazcdYyGM21Wh6OUx+k3UD/79FA1p5rbuWp8stWhKDd8a/oIzrTZeXN7mdWhKOVxmgD62cubjxAfGcKsjIFWh6LcMH54LBOHx/Jy7lGc4xqU8h+aAPrRwROn+HB/JTfPTCM8xGZ1OMpN35oxgoOVp9lSUmt1KEp5lCaAfrRsfTHhIUHcdEma1aGoXrhmwlAGhAfzcu5Rq0NRyqM0AfSTioZmVuws55s5KSREhVodjuqFiFAbX5synHf3HtdVQpVf0QTQT57/tAS7w/Dd2SOtDkWdhxtnpNJmN/wjTzuDlf/QBNAPTja38XLuUb48YSgpCZFWh6POQ8agAUxPT+BvW47gcGhnsPIPmgD6wd9yj3K6pZ07L9Nf/77sxhkjKK09w/qD/rMqrQpsmgD6WEu7nT9/UsKlGQMZNyzW6nDUBViYnczA6DD+Z9UBGpp0YpjyfZoA+thbO45ReaqFO+for39fFxocxJPfnEhJdSO3/GVLrzeMMcbQrstLKy+iCaAPORyGP64/RNaQGC7ViV9+YXZmEr9dOpndZQ3c8dc8mtvsbp/74xV7WfDkemobW/swQqXcpwmgD324/wSHqhq5c85InCtgK3+waFwyT3xtAp8W1XDf33e49au+8MQp/r7lKMXVjdz39+16J6C8giaAPmKM4bdrDpKSEMGXddlnv/O1qcP56bXZfLDvBA++trvHkUG//rCQqNBgfnzVWD4tquGJ1QX9FKlS3XNrOWjVex/sO8He8pM8sWSCbvrip26emcap5jZ+8X4h8VGh/PvVWV2Wyz/WwKo9FXx/Xga3XzaS0romlq0vZtywWK6dOLSfo1bq/+g3Ux9wOAxPfniQEYmRfHXyMKvDUX3onsszuGVmGs99UsLKXV1vH/nkBweJCQ/mNtckwH/7chYXp8Xz4Gu72Hesy62w3eJwGI7UNJ73+UppAugD7++rYP/xk9w/P1N//fs5EeHHXx7L1BHxPPT6booqT33u9V2l9Xy4/wS3zx5JbEQI4BxN9PS3phAbEcKdL+VR39T7TmFjDD9esZc5/7uWFzcd9kBNVCDSbycPczgMT35wkJFJUXp7HyBCbEE8fcMUIkJs3P3S9s8ND/3VB4XER4bwnUvTP3fOoAHh/P7GqZxoaOHbz23h2Q3F7DhaR2u7e53Dv/noIH/fcpRhcRH8x8p83ttb4dE6qcCgCcDDVu09TsGJU/rrP8Akx4bz26WTKao6zY/f3IMxhm1HallXWMWdc0YRHfbF7rYpqfH84hsTqT/Tyn+9s5/rntnI+J+s5ht/3MTvPjrIqeauJ5u9nHuEX394kCVTh/PBA5cxKSWO7y/fwdbDuly16h3xpU0ucnJyTF5entVhdMvuMCz69XoMsPoHl2EL0qGfgeZ3Hx3klx8U8p9fGce7e45TeOIU6x+8nMjQc4+3OHGymW1H6sg7XMe2I7XsKmtgYHQY/7poDF+bMpwg12dpdX4Fd7+0jTmjk1h2Uw4htiBqG1tZ8vuN1DS28vrdl5AxaEB/VFX5CBHZZozJ6fI1TQCe89bOcu5fvpOnbpjM1RO0+ScQORyGW1/YyoaD1dgdhkevzuLWTs0/7thVWs9P/pnPjqP1TBwey39cm43dYbjx2VzGDonhb7dP/1xSKa1t4rpnNhIWHMQb35vJ4JhwT1ZL+TBNAP2g3e5gwZPrCbEF8e79sz/7xaYCT11jK1f/7hMcxvDxj+ae9+5vDodhxc5yfvbuASpPtRAeEsTQ2Aheu3tml3tK7C1v4Jt/3ERKQiSv3z2TqC6anVTgOVcC0EZqD1m56xjF1Y388EuZ+uUf4OKjQvnnfZfy5vdmXdDWn0FBwlenDGfNj+Zy99xRXJQcwwu3Tut2Q6Fxw2J5+ltTOFBxipdzj5z3+6rAoT8RPMDhMDz9cREXJQ9gQVay1eEoL+DJXd+iw4L510UXuVV27phBzByVyLMbSrh5Zhphwbr3tOqe3gF4wPv7KjhU1cj3Ls/QX//KcnfPHUXlqRZW7Ci3OhTl5TQBXCBjDM+sPcSIxEiuGqe//pX1Ls0YSPbQGP64rhi77l6mzkETwAX6tKiG3WUN3DVnlI77V15BRLhrziiKqxv5YJ9OEFPdc+sbS0QWiUiBiBSJyENdvB4mIq+4Xs8VkbQOrz3sOl4gIgs7HP+ziFSKyF5PVMQqT39cxOCYML46Rdf8U
d7jynHJjEiM5PfrivGlkX6qf/WYAETEBjwNXAlkAUtFpPOyh7cBdcaYDOBJ4Oeuc7OA64FsYBHwjOt6AH9xHfNZO47Wsam4hu9eOlI725RXCbYFcfvskewqrWdTcY3V4Sgv5c4dwDSgyBhTbIxpBZYDizuVWQy84Hr8GjBfnDugLAaWG2NajDElQJHrehhj1gM+PXf9mbWHiI0IYen0VKtDUeoLlkwdzsDoUP6wrtjqUJSXcicBDANKOzwvcx3rsowxph1oABLdPPecROQOEckTkbyqqqrenNqnCk+c4oN9J7hlZlqX67woZbXwEBvfmZXO+sIq8o81WB2O8kLuJICuxjV2blTsrow7556TMWaZMSbHGJOTlJTUm1P71O/XHiIy1MYtM9OsDkWpbt04YwTRYcF6F6C65E4CKANSOjwfDnTe+eKzMiISDMTibN5x51yfU1rbxMpdx1g6LZV4D074UTzbM8IAAA/RSURBVMrTYiNCuGF6Ku/sPkZpbZPV4Sgv404C2Apkiki6iITi7NRd2anMSuBm1+MlwBrjHHqwErjeNUooHcgEtngmdOs890kJQQLfnd37Rb6U6m+3zEzDYeh2xzIVuHpMAK42/XuB1cB+4FVjTL6IPCYi17qKPQckikgR8ADwkOvcfOBVYB/wHnCPMcYOICJ/BzYBY0SkTERu82zV+kZru4MVO8tZNG4IQ2IjrA5HqR4NjYtgYkocq/N1ToD6PLd6L40xq4BVnY492uFxM/D1bs59HHi8i+NLexWpl1hXWEV9U5vu9at8ysLswTzxXgHH6s8wNE5/uCgnnbraSyt2lJMQFcqlmQOtDkUpty3Mdi5T8r7eBagONAH0wsnmNj7cf4JrJgwhRJd9UD5kVFI0GYOiWZ1/ol/fd3V+BU9/XMS2I3W0293b71j1Hx3A3gvv7a2gpd3BV7T5R/mghdmD+cO6YuoaW/tl9Nrh6kbu+/uOzza6jw4LZnp6AjMzBnLV+GTtQ/MC+jO2F1bsKGdEYiSTUuKsDkWpXluYnYzdYfjoQGWfv5cxhn9/ay+hrh3ynr5hCosnDaW4upH/fHsfi5/6lJPdbHqv+o8mADdVNDSzqbiGr0wahnOVC6V8y/hhsQyJDe+X0UBv7z7OhoPV/GjBaMYOieHLE4bw+HXj+fhHc3ntrkuoPt3CL1cX9Hkc6tw0Abhp5a5yjEGbf5TPEhEWZA1mfWEVTa3tffY+J5vbeOztfYwfFsu3L0n7wus5aQncdEkaL24+wq7S+j6LQ/VME4CbVuw4xsSUONIHRlkdilLnbWF2Mi3tDtYX9t26Wr9YXUDN6RYev24ctm52yPuXBaMZNCCMR97co53DFtIE4IaCilPsO36S6yYNtToUpS7ItPQE4iJD+mw00K7Sev66+Qg3XZLGhOHd95UNCA/hP67JJv/YSV7YpBvYW0UTgBtW7CzHFiRcPVETgPJtwbYg5l80mI/2n6DNw7+82+0OHnlzD0nRYTywYHSP5a8cl8zcMUn86v0Cjjec8Wgsyj2aAHrgcBhW7jzG7MyBDIwOszocpS7YwuzBnGxuJ7fYc9txNLfZeerjIvKPneQ/rskmJjykx3NEhP9cPA67Mfx05T6PxaLcp/MAerD1cC3l9Wf4fwvHWB2KUh5x2egkIkJsrM6v+GxGe0u7nbd3HeeNHWUkRoUxLT2BaekJZCRFE9RNO35dYytrDlTy/r4K1hdWc6bNzhVjB3HV+GS3Y0lJiOT78zN54r0CPtp/gvljB3ukjso9mgB68Mb2ciJDbSzI1g+m8g/hITbmjE7i/X0V3Dc/g7/lHuWlzUepPt1CWmIkB0+c/mzl0PjIEKaOSCAmPJimVjtNbXbOtLZzqrmdg5WnsTsMyTHhLJk6nC9lDeaSUYm9HiZ9++yRrNhRzqNv5TMrYyDhIbq9an/RBHAOpbVNvLGjjCVTU4gM1f9Uyn8sHDeY9/IruOR/1mB3GC4fk8R3ZqUz23VHcLS2idySWraW1LLtSB2tdgeRoTYiQoOJCrUxPD6CK8YOZkH2YMYPi72guTEhtiB+eu04lv5pM89uKObeeZmeqqbqgX6rncMv3i/AFiTcP18/kMq/zB87mIvT4skaEsPNM9MYmRT9uddHJEYxIjGKb+SkdHMFz7pkVCKLspN5Zu0hvp6TwuCY8H5530CnncDd2FvewFs7j3HrrHSSY/XDqPxLTHgI/7hrJj9dPO4LX/5WeeSqsbTbDU+8pzOE+4smgG787N0DxEeGcNfcUVaHolRASE2M5NZL03l9exm7y3SGcH/QBNCF9YVVfFJUzb3zMt0azqaU8ox7Lh/FwOhQHvvnPpy7yp6/Cz0/EAREAnA43P8gOByGn717gOHxEdw4I7UPo1JKdTYgPIQfLRhD3pE6/rn7+HldY09ZA7e/mEfmj9/lhj9t5m+5R6ltbPVwpP7B7xPAyeY2bnwul3/klbpVfuWuY+w7fpIfLRhDWLAOR1Oqv309J4WsITH8bNV+mtvsbp+342gd33l+C9c89Qm5xTUsmTqcioZmHnlzDxc//iHffi6X5VuOcuJkcx9G71v8fhRQZIgNY+DHK/YydkgM44bFdlu2pd3OL94vIHtoDNfqsg9KWcIWJPz71Vks/dNmfvl+AV+bOpzwYBsRobbP5ghUnmym4mQzFQ3NnDjZTG5JLRsOVhMXGcL/WziGmy4ZwYDwEIwx7D9+ird3H+Pt3cd56I09AIwZPIDLRg9kdmYS09ITvGbuQW5xDc3tDuaMTuqX9xNfaifLyckxeXl5vT6v+nQL1/zuE2xBwj/vvbTb3ZCe3VDMf72zn7/eNo3Zmf3zD6CU6trdL23j3b3u7V2QHBPOLbPSuHHGCKLDuv5dezYZrD9YxYaDVWwtcc5viAix8dPF2f025LU7jS3tLPz1ekJtQaz+4WUe23ZWRLYZY3K6fC0QEgDAztJ6vvGHTcwYlcjzt1z8uWVqjTH8ZeNh/uud/czKGMiLt07zVMhKqfPU2u5g46FqzrTaOdPm/NPc5sAYQ9KAMAbHhJMcE87gmHAiQnv/C/5Mq53NJTU8u6GYT4tq+MEVmdw/P9OyDZ9+sjKfFzYd5h93XkJOWoLHrnuuBOD3TUBnTUqJ4yfXZvPIm3v4zYeFPLDAubZPc5udR97cwxvby1mQNZhffmOixZEqpQBCg4OYO2ZQn10/ItTG5WMGcWnGQB56fQ+//vAgFQ3N/NdXxhHsoV/f7tpSUstfNh7mlplpHv3y70nAJACApdNS2Flax2/XFDFheBxZQ2O486/b2FPewA+vGM198zK6XfhKKeWfQmxB/OLrExgWF85v1xRx4mQzT90whahumpI87UyrnQdf20VKQgQPLurfRScDKgGICI8tHsf+46f44as7CbUF0dLu4E835fClLF3sTalAJSI8sGAMybER/NuKPSz902Z+tGAMU0fE93kiePLDQg7XNPG3707v9zXHAioBgHMlxN/fOIVrn/qU2MgQln07h4xB3jEVXillrRumpzI4Jozv/30HN/15C7YgYdzQGNfy2ImMGTyAoXHh
Hmsi2nG0jmc3FHPD9FRmZgz0yDV7I2A6gTurb2olItSmY/2VUl/Q2NLO9qN1bCmpJbeklp2l9bS2O3dQCw4ShsVHkJoQyYjESLKHxjJ1RPw5907oSku7nat/+wmnW9p5/4eXMaCPVh244E5gEVkE/AawAc8aY37W6fUw4EVgKlADfNMYc9j12sPAbYAd+L4xZrU71+xrcZFdDwVVSqmosGBmZyZ9Nhy8uc3O3vIGDlWd5khNE0dqmyitbWLlzmO8tPkoADHhwUwZEc/U1HhGJkUTHxVCfGQoCVGhxEWGcKbVzuGaJo7UNHK0pokth2s5WHma579zcZ99+fekxwQgIjbgaeBLQBmwVURWGmM67uF2G1BnjMkQkeuBnwPfFJEs4HogGxgKfCgiZzcL7emaSinlFcJDbOSkJXxhhI4xhsM1TWw7Use2I869E9YWVLl1zUEDwrhvXgaX9+FIp564cwcwDSgyxhQDiMhyYDHQ8ct6MfAT1+PXgKfEOZh2MbDcGNMClIhIket6uHFNz3n3IajY0yeXVkoFLgHSXX+WAMRBe4yDVruDNruh3e6g3W5oczgIEiE8xEZ4cBBhITZsIlAOPO/GGyWPhys930jiTgIYBnRcSKcMmN5dGWNMu4g0AImu45s7nTvM9binawIgIncAdwCkpuribEop7xYcFERwUBD4wELC7iSArno1Ovccd1emu+NddaF32RttjFkGLANnJ3D3YZ5DH2ROpZTyde6MZSoDOi6SMRw41l0ZEQkGYoHac5zrzjWVUkr1IXcSwFYgU0TSRSQUZ6fuyk5lVgI3ux4vAdYY5/jSlcD1IhImIulAJrDFzWsqpZTqQz02Abna9O8FVuMcsvlnY0y+iDwG5BljVgLPAX91dfLW4vxCx1XuVZydu+3APcYYO0BX1/R89ZRSSnUnYCeCKaVUIDjXRDC/3xFMKaVU1zQBKKVUgNIEoJRSAUoTgFJKBSif6gQWkSrgyHmePhCo9mA4VvKXuvhLPUDr4o38pR5wYXUZYYzpcpNzn0oAF0JE8rrrCfc1/lIXf6kHaF28kb/UA/quLtoEpJRSAUoTgFJKBahASgDLrA7Ag/ylLv5SD9C6eCN/qQf0UV0Cpg9AKaXU5wXSHYBSSqkONAEopVSA8vsEICKLRKRARIpE5CGr4+kNEfmziFSKyN4OxxJE5AMROej6O97KGN0lIiki8rGI7BeRfBG533Xcp+ojIuEiskVEdrnq8VPX8XQRyXXV4xXXMuc+QURsIrJDRN52PffJuojIYRHZIyI7RSTPdcynPl9niUiciLwmIgdc/89c0hd18esE0GFD+yuBLGCpa6N6X/EXYFGnYw8BHxljMoGPXM99QTvwL8aYscAM4B7Xv4Wv1acFmGeMmQhMAhaJyAzg58CTrnrUAbdZGGNv3Q/s7/Dcl+tyuTFmUocx8772+TrrN8B7xpiLgIk4/308XxdjjN/+AS4BVnd4/jDwsNVx9bIOacDeDs8LgCGux0OAAqtjPM96vQV8yZfrA0QC23HuZ10NBLuOf+5z581/cO7G9xEwD3gb5zauvlqXw8DATsd87vMFxAAluAbp9GVd/PoOgK43tB/WTVlfMdgYcxzA9fcgi+PpNRFJAyYDufhgfVxNJjuBSuAD4BBQb4xpdxXxpc/Zr4EHAYfreSK+WxcDvC8i20TkDtcxn/t8ASOBKuB5V9PcsyISRR/Uxd8TgDsb2qt+JCLRwOvAD4wxJ62O53wYY+zGmEk4fz1PA8Z2Vax/o+o9EbkaqDTGbOt4uIuiXl8Xl1nGmCk4m3zvEZHLrA7oPAUDU4DfG2MmA430UdOVvycAf9x8/oSIDAFw/V1pcTxuE5EQnF/+Lxtj3nAd9tn6GGPqgbU4+zTiROTsFqu+8jmbBVwrIoeB5TibgX6Nb9YFY8wx19+VwJs4k7Mvfr7KgDJjTK7r+Ws4E4LH6+LvCcAfN59fCdzsenwzzrZ0rycignPv6P3GmF91eMmn6iMiSSIS53ocAVyBs4PuY2CJq5jX1wPAGPOwMWa4MSYN5/8ba4wx38IH6yIiUSIy4OxjYAGwFx/7fAEYYyqAUhEZ4zo0H+e+6p6vi9UdHv3QoXIVUIiznfbHVsfTy9j/DhwH2nD+KrgNZxvtR8BB198JVsfpZl0uxdmUsBvY6fpzla/VB5gA7HDVYy/wqOv4SGALUAT8AwizOtZe1msu8Lav1sUV8y7Xn/yz/6/72uerQ30mAXmuz9kKIL4v6qJLQSilVIDy9yYgpZRS3dAEoJRSAUoTgFJKBShNAEopFaA0ASilVIDSBKCUUgFKE4BSSgWo/w+sDzGjeKMR3wAAAABJRU5ErkJggg==\n",
12670
      "text/plain": [
12671
       "<Figure size 432x288 with 1 Axes>"
12672
      ]
12673
     },
12674
     "metadata": {
12675
      "needs_background": "light"
12676
     },
12677
     "output_type": "display_data"
12678
    }
12679
   ],
12680
   "source": [
12681
    "plt.plot(test_md[['pos_idx','pred_any']].groupby('pos_idx').mean())\n",
12682
    "plt.plot([0,60],[0,0])"
12683
   ]
12684
  },
12685
  {
12686
   "cell_type": "code",
12687
   "execution_count": 74,
12688
   "metadata": {},
12689
   "outputs": [
12690
    {
12691
     "name": "stdout",
12692
     "output_type": "stream",
12693
     "text": [
12694
      "0 [9.30e-05 1.15e-04 1.49e-04 1.64e-03 9.96e-01 9.98e-01 9.99e-01]\n",
12695
      "1 [6.76e-06 8.61e-06 1.12e-05 7.63e-05 5.17e-02 3.93e-01 8.82e-01]\n",
12696
      "2 [2.12e-05 2.46e-05 3.09e-05 2.84e-04 9.83e-01 9.93e-01 9.95e-01]\n",
12697
      "3 [1.40e-05 1.65e-05 1.97e-05 8.68e-05 9.80e-01 9.93e-01 9.95e-01]\n",
12698
      "4 [2.81e-05 3.21e-05 4.08e-05 4.00e-04 9.59e-01 9.90e-01 9.92e-01]\n",
12699
      "5 [4.51e-05 5.60e-05 7.04e-05 8.55e-04 9.71e-01 9.89e-01 9.91e-01]\n"
12700
     ]
12701
    }
12702
   ],
12703
   "source": [
12704
    "# weighted models + weighted ensembling\n",
12705
    "#0 [2.14e-04 2.50e-04 3.15e-04 2.15e-03 9.88e-01 9.93e-01 9.94e-01]\n",
12706
    "#1 [4.46e-06 5.32e-06 6.88e-06 8.58e-05 1.34e-01 6.16e-01 9.24e-01]\n",
12707
    "#2 [4.88e-05 5.54e-05 6.88e-05 3.27e-04 9.65e-01 9.86e-01 9.90e-01]\n",
12708
    "#3 [1.78e-05 2.00e-05 2.42e-05 1.04e-04 9.52e-01 9.77e-01 9.81e-01]\n",
12709
    "#4 [6.56e-05 7.67e-05 9.50e-05 4.71e-04 9.41e-01 9.85e-01 9.89e-01]\n",
12710
    "#5 [9.93e-05 1.21e-04 1.53e-04 9.91e-04 9.42e-01 9.86e-01 9.92e-01]\n",
12711
    "\n",
12712
    "# weighted models + non-weighted ensembling\n",
12713
    "#0 [9.25e-05 1.11e-04 1.41e-04 1.60e-03 9.93e-01 9.97e-01 9.99e-01]\n",
12714
    "#1 [8.16e-06 9.69e-06 1.24e-05 9.28e-05 1.31e-01 5.91e-01 8.94e-01]\n",
12715
    "#2 [2.38e-05 2.66e-05 3.46e-05 2.46e-04 9.73e-01 9.91e-01 9.94e-01]\n",
12716
    "#3 [1.25e-05 1.40e-05 1.71e-05 8.06e-05 9.66e-01 9.90e-01 9.94e-01]\n",
12717
    "#4 [3.27e-05 3.80e-05 4.71e-05 3.55e-04 9.51e-01 9.91e-01 9.94e-01]\n",
12718
    "#5 [4.51e-05 5.74e-05 7.40e-05 7.90e-04 9.46e-01 9.89e-01 9.94e-01]\n",
12719
    "\n",
12720
    "# non-weighted models + non-weighted ensembling\n",
12721
    "#0 [1.10e-04 1.24e-04 1.55e-04 1.27e-03 9.93e-01 9.97e-01 9.98e-01]\n",
12722
    "#1 [8.61e-06 9.98e-06 1.23e-05 8.77e-05 1.36e-01 5.73e-01 8.74e-01]\n",
12723
    "#2 [2.34e-05 2.66e-05 3.41e-05 2.12e-04 9.73e-01 9.91e-01 9.95e-01]\n",
12724
    "#3 [1.08e-05 1.25e-05 1.50e-05 6.10e-05 9.67e-01 9.92e-01 9.96e-01]\n",
12725
    "#4 [3.18e-05 3.68e-05 4.48e-05 3.03e-04 9.51e-01 9.91e-01 9.94e-01]\n",
12726
    "#5 [4.72e-05 5.48e-05 6.86e-05 6.83e-04 9.41e-01 9.88e-01 9.92e-01]\n",
12727
    "\n",
12728
    "# STAGE2 non-weighted models + non-weighted ensembling\n",
12729
    "#0 [9.30e-05 1.15e-04 1.49e-04 1.64e-03 9.96e-01 9.98e-01 9.99e-01]\n",
12730
    "#1 [6.76e-06 8.61e-06 1.12e-05 7.63e-05 5.17e-02 3.93e-01 8.82e-01]\n",
12731
    "#2 [2.12e-05 2.46e-05 3.09e-05 2.84e-04 9.83e-01 9.93e-01 9.95e-01]\n",
12732
    "#3 [1.40e-05 1.65e-05 1.97e-05 8.68e-05 9.80e-01 9.93e-01 9.95e-01]\n",
12733
    "#4 [2.81e-05 3.21e-05 4.08e-05 4.00e-04 9.59e-01 9.90e-01 9.92e-01]\n",
12734
    "#5 [4.51e-05 5.60e-05 7.04e-05 8.55e-04 9.71e-01 9.89e-01 9.91e-01]\n",
12735
    "\n",
12736
    "np.set_printoptions(precision=2)\n",
12737
    "for k in range(6):\n",
12738
    "    print(k,np.quantile(predictions[:,k],[0.0001,0.001,0.01,0.5,0.99,0.999,0.9999]))"
12739
   ]
12740
  },
12741
  {
12742
   "cell_type": "code",
12743
   "execution_count": 75,
12744
   "metadata": {
12745
    "scrolled": true
12746
   },
12747
   "outputs": [
12748
    {
12749
     "data": {
12750
      "text/plain": [
12751
       "array([0.1376, 0.0029, 0.0464, 0.0378, 0.0449, 0.0591])"
12752
      ]
12753
     },
12754
     "execution_count": 75,
12755
     "metadata": {},
12756
     "output_type": "execute_result"
12757
    }
12758
   ],
12759
   "source": [
12760
    "# weighted models + weighted ensembling\n",
12761
    "#array([0.1361, 0.0056, 0.0429, 0.0295, 0.0468, 0.0569])\n",
12762
    "\n",
12763
    "# weighted models + non-weighted ensembling\n",
12764
    "#array([0.1335, 0.0055, 0.0423, 0.0298, 0.0466, 0.0556])\n",
12765
    "\n",
12766
    "# non-weighted models + non-weighted ensembling\n",
12767
    "#array([0.1313, 0.0057, 0.0421, 0.0297, 0.0464, 0.0544])\n",
12768
    "\n",
12769
    "# STAGE2 non-weighted models + non-weighted ensembling\n",
12770
    "#array([0.1376, 0.0029, 0.0464, 0.0378, 0.0449, 0.0591])\n",
12771
    "\n",
12772
    "# STAGE2 weighted models + weighted ensembling\n",
12773
    "#array([0.1373, 0.0028, 0.0464, 0.0379, 0.045 , 0.0589])\n",
12774
    "\n",
12775
    "np.set_printoptions(precision=4)\n",
12776
    "predictions.mean(0)"
12777
   ]
12778
  },
12779
  {
12780
   "cell_type": "code",
12781
   "execution_count": null,
12782
   "metadata": {},
12783
   "outputs": [],
12784
   "source": []
12785
  },
12786
  {
12787
   "cell_type": "code",
12788
   "execution_count": 76,
12789
   "metadata": {
12790
    "scrolled": false
12791
   },
12792
   "outputs": [
12793
    {
12794
     "data": {
12795
      "text/plain": [
12796
       "0.13762015847552184"
12797
      ]
12798
     },
12799
     "execution_count": 76,
12800
     "metadata": {},
12801
     "output_type": "execute_result"
12802
    }
12803
   ],
12804
   "source": [
12805
    "sub.loc[range(0,len(sub),6), 'Label'].mean()"
12806
   ]
12807
  },
12808
  {
12809
   "cell_type": "code",
12810
   "execution_count": 77,
12811
   "metadata": {},
12812
   "outputs": [],
12813
   "source": [
12814
    "sub = sub.sort_values('ID').reset_index(drop=True)\n",
12815
    "best_sub = pd.read_csv(PATH/'submission_stage2_3.csv').sort_values('ID').reset_index(drop=True)"
12816
   ]
12817
  },
12818
  {
12819
   "cell_type": "code",
12820
   "execution_count": 78,
12821
   "metadata": {
12822
    "scrolled": true
12823
   },
12824
   "outputs": [
12825
    {
12826
     "data": {
12827
      "text/plain": [
12828
       "0.13749181676694883"
12829
      ]
12830
     },
12831
     "execution_count": 78,
12832
     "metadata": {},
12833
     "output_type": "execute_result"
12834
    }
12835
   ],
12836
   "source": [
12837
    "best_sub.loc[range(0,len(sub),6), 'Label'].mean()"
12838
   ]
12839
  },
12840
  {
12841
   "cell_type": "code",
12842
   "execution_count": null,
12843
   "metadata": {},
12844
   "outputs": [],
12845
   "source": []
12846
  },
12847
  {
12848
   "cell_type": "code",
12849
   "execution_count": 79,
12850
   "metadata": {
12851
    "scrolled": true
12852
   },
12853
   "outputs": [
12854
    {
12855
     "data": {
12856
      "text/plain": [
12857
       "SpearmanrResult(correlation=0.985190415749868, pvalue=0.0)"
12858
      ]
12859
     },
12860
     "execution_count": 79,
12861
     "metadata": {},
12862
     "output_type": "execute_result"
12863
    }
12864
   ],
12865
   "source": [
12866
    "sp.stats.spearmanr(sub.loc[range(0,len(sub),6), 'Label'], \n",
12867
    "                   best_sub.loc[range(0,len(sub),6), 'Label'])"
12868
   ]
12869
  },
12870
  {
12871
   "cell_type": "code",
12872
   "execution_count": 95,
12873
   "metadata": {
12874
    "scrolled": true
12875
   },
12876
   "outputs": [
12877
    {
12878
     "data": {
12879
      "text/plain": [
12880
       "SpearmanrResult(correlation=0.985190415749868, pvalue=0.0)"
12881
      ]
12882
     },
12883
     "execution_count": 95,
12884
     "metadata": {},
12885
     "output_type": "execute_result"
12886
    }
12887
   ],
12888
   "source": [
12889
    "sp.stats.spearmanr(sub.loc[range(0,len(sub),6), 'Label'], \n",
12890
    "                   best_sub.loc[range(0,len(sub),6), 'Label'])"
12891
   ]
12892
  },
12893
  {
12894
   "cell_type": "code",
12895
   "execution_count": 78,
12896
   "metadata": {
12897
    "scrolled": true
12898
   },
12899
   "outputs": [
12900
    {
12901
     "data": {
12902
      "text/plain": [
12903
       "0.9992765979750622"
12904
      ]
12905
     },
12906
     "execution_count": 78,
12907
     "metadata": {},
12908
     "output_type": "execute_result"
12909
    }
12910
   ],
12911
   "source": [
12912
    "np.corrcoef(sub.sort_values('ID').reset_index(drop=True).loc[range(0,len(sub),6), 'Label'], \n",
12913
    "            best_sub.sort_values('ID').reset_index(drop=True).loc[range(0,len(sub),6), 'Label'])[0,1]"
12914
   ]
12915
  },
12916
  {
12917
   "cell_type": "code",
12918
   "execution_count": 96,
12919
   "metadata": {},
12920
   "outputs": [
12921
    {
12922
     "data": {
12923
      "text/plain": [
12924
       "0.999294961658725"
12925
      ]
12926
     },
12927
     "execution_count": 96,
12928
     "metadata": {},
12929
     "output_type": "execute_result"
12930
    }
12931
   ],
12932
   "source": [
12933
    "np.corrcoef(sub.sort_values('ID').reset_index(drop=True).loc[range(0,len(sub),6), 'Label'], \n",
12934
    "            best_sub.sort_values('ID').reset_index(drop=True).loc[range(0,len(sub),6), 'Label'])[0,1]"
12935
   ]
12936
  },
12937
  {
12938
   "cell_type": "markdown",
12939
   "metadata": {},
12940
   "source": [
12941
    "## Submission"
12942
   ]
12943
  },
12944
  {
12945
   "cell_type": "code",
12946
   "execution_count": 80,
12947
   "metadata": {},
12948
   "outputs": [
12949
    {
12950
     "name": "stdout",
12951
     "output_type": "stream",
12952
     "text": [
12953
      "100%|██████████████████████████████████████| 32.1M/32.1M [00:02<00:00, 16.2MB/s]\n",
12954
      "Successfully submitted to RSNA Intracranial Hemorrhage Detection"
12955
     ]
12956
    }
12957
   ],
12958
   "source": [
12959
    "!~/.local/bin/kaggle competitions submit rsna-intracranial-hemorrhage-detection -f ~/Hemorrhage/sub.csv -m \"GCP, safe final, take 2\""
12960
   ]
12961
  },
12962
  {
12963
   "cell_type": "code",
12964
   "execution_count": null,
12965
   "metadata": {},
12966
   "outputs": [],
12967
   "source": [
12968
    "!kaggle competitions submit rsna-intracranial-hemorrhage-detection -f C:/StudioProjects/Hemorrhage/sub.csv -m \"GCP, d161+d169+d201+s101+yd161, 8TTA, ensemble, bounds\""
12969
   ]
12970
  },
12971
  {
12972
   "cell_type": "code",
12973
   "execution_count": null,
12974
   "metadata": {},
12975
   "outputs": [],
12976
   "source": []
12977
  }
12978
 ],
12979
 "metadata": {
12980
  "kernelspec": {
12981
   "display_name": "Python 3",
12982
   "language": "python",
12983
   "name": "python3"
12984
  },
12985
  "language_info": {
12986
   "codemirror_mode": {
12987
    "name": "ipython",
12988
    "version": 3
12989
   },
12990
   "file_extension": ".py",
12991
   "mimetype": "text/x-python",
12992
   "name": "python",
12993
   "nbconvert_exporter": "python",
12994
   "pygments_lexer": "ipython3",
12995
   "version": "3.7.4"
12996
  }
12997
 },
12998
 "nbformat": 4,
12999
 "nbformat_minor": 2
13000
}