|
a |
|
b/notebooks/Training.ipynb |
|
|
1 |
{ |
|
|
2 |
"cells": [ |
|
|
3 |
{ |
|
|
4 |
"cell_type": "code", |
|
|
5 |
"execution_count": 1, |
|
|
6 |
"metadata": {}, |
|
|
7 |
"outputs": [], |
|
|
8 |
"source": [ |
|
|
9 |
"VERSION = 33\n", |
|
|
10 |
"\n", |
|
|
11 |
"FOCAL_LOSS = 0\n", |
|
|
12 |
"CLOUD_SINGLE = True\n", |
|
|
13 |
"MIXUP = False\n", |
|
|
14 |
"NO_BLACK_LOSS = True\n", |
|
|
15 |
"DATA_SMALL = False\n", |
|
|
16 |
"\n", |
|
|
17 |
"# VERSION 31 old features, no stage2 training\n", |
|
|
18 |
"# VERSION 32 old features, no stage2 training, fine-tuned weighted\n", |
|
|
19 |
"# VERSION 33 old features, with stage2 training\n", |
|
|
20 |
"# VERSION 34 old features, with stage2 training, fine-tuned weighted\n", |
|
|
21 |
"# VERSION 35 new features, with stage2 training\n", |
|
|
22 |
"# VERSION 36 new features, with stage2 training, fine-tuned weighted" |
|
|
23 |
] |
|
|
24 |
}, |
|
|
25 |
{ |
|
|
26 |
"cell_type": "code", |
|
|
27 |
"execution_count": 2, |
|
|
28 |
"metadata": {}, |
|
|
29 |
"outputs": [], |
|
|
30 |
"source": [ |
|
|
31 |
"if VERSION in [31,32]:\n", |
|
|
32 |
" TRAIN_ON_STAGE_1 = False\n", |
|
|
33 |
"else:\n", |
|
|
34 |
" TRAIN_ON_STAGE_1 = True\n", |
|
|
35 |
"\n", |
|
|
36 |
"if VERSION in [32,34,36]:\n", |
|
|
37 |
" WEIGHTED = True\n", |
|
|
38 |
"else:\n", |
|
|
39 |
" WEIGHTED = False" |
|
|
40 |
] |
|
|
41 |
}, |
|
|
42 |
{ |
|
|
43 |
"cell_type": "code", |
|
|
44 |
"execution_count": 6, |
|
|
45 |
"metadata": {}, |
|
|
46 |
"outputs": [], |
|
|
47 |
"source": [ |
|
|
48 |
"%run ./Code.ipynb" |
|
|
49 |
] |
|
|
50 |
}, |
|
|
51 |
{ |
|
|
52 |
"cell_type": "code", |
|
|
53 |
"execution_count": 4, |
|
|
54 |
"metadata": {}, |
|
|
55 |
"outputs": [], |
|
|
56 |
"source": [ |
|
|
57 |
"if VERSION in [31,32]:\n", |
|
|
58 |
" # old features, no stage2 training\n", |
|
|
59 |
" train_md, test_md = loadMetadata()\n", |
|
|
60 |
"elif VERSION in [33,34]:\n", |
|
|
61 |
" # old features, with stage2 training\n", |
|
|
62 |
" train_md, test_md = loadMetadata3()\n", |
|
|
63 |
"elif VERSION in [35,36]:\n", |
|
|
64 |
" # new features\n", |
|
|
65 |
" train_md, test_md = loadMetadata2()" |
|
|
66 |
] |
|
|
67 |
}, |
|
|
68 |
{ |
|
|
69 |
"cell_type": "code", |
|
|
70 |
"execution_count": 9, |
|
|
71 |
"metadata": {}, |
|
|
72 |
"outputs": [ |
|
|
73 |
{ |
|
|
74 |
"data": { |
|
|
75 |
"text/plain": [ |
|
|
76 |
"1.111931 230422\n", |
|
|
77 |
"0.864417 230399\n", |
|
|
78 |
"1.200910 221421\n", |
|
|
79 |
"0.446933 70301\n", |
|
|
80 |
"0.737883 129\n", |
|
|
81 |
"0.000000 124\n", |
|
|
82 |
"0.796930 1\n", |
|
|
83 |
"Name: weights, dtype: int64" |
|
|
84 |
] |
|
|
85 |
}, |
|
|
86 |
"execution_count": 9, |
|
|
87 |
"metadata": {}, |
|
|
88 |
"output_type": "execute_result" |
|
|
89 |
} |
|
|
90 |
], |
|
|
91 |
"source": [ |
|
|
92 |
"train_md.weights.value_counts()" |
|
|
93 |
] |
|
|
94 |
}, |
|
|
95 |
{ |
|
|
96 |
"cell_type": "code", |
|
|
97 |
"execution_count": 10, |
|
|
98 |
"metadata": { |
|
|
99 |
"scrolled": true |
|
|
100 |
}, |
|
|
101 |
"outputs": [ |
|
|
102 |
{ |
|
|
103 |
"data": { |
|
|
104 |
"text/plain": [ |
|
|
105 |
"0" |
|
|
106 |
] |
|
|
107 |
}, |
|
|
108 |
"execution_count": 10, |
|
|
109 |
"metadata": {}, |
|
|
110 |
"output_type": "execute_result" |
|
|
111 |
} |
|
|
112 |
], |
|
|
113 |
"source": [ |
|
|
114 |
"train_md.weights.isnull().sum()" |
|
|
115 |
] |
|
|
116 |
}, |
|
|
117 |
{ |
|
|
118 |
"cell_type": "code", |
|
|
119 |
"execution_count": 11, |
|
|
120 |
"metadata": {}, |
|
|
121 |
"outputs": [ |
|
|
122 |
{ |
|
|
123 |
"data": { |
|
|
124 |
"text/plain": [ |
|
|
125 |
"(752797, 103)" |
|
|
126 |
] |
|
|
127 |
}, |
|
|
128 |
"execution_count": 11, |
|
|
129 |
"metadata": {}, |
|
|
130 |
"output_type": "execute_result" |
|
|
131 |
} |
|
|
132 |
], |
|
|
133 |
"source": [ |
|
|
134 |
"train_md.shape" |
|
|
135 |
] |
|
|
136 |
}, |
|
|
137 |
{ |
|
|
138 |
"cell_type": "markdown", |
|
|
139 |
"metadata": {}, |
|
|
140 |
"source": [ |
|
|
141 |
"# Pre-processing features" |
|
|
142 |
] |
|
|
143 |
}, |
|
|
144 |
{ |
|
|
145 |
"cell_type": "code", |
|
|
146 |
"execution_count": 29, |
|
|
147 |
"metadata": {}, |
|
|
148 |
"outputs": [], |
|
|
149 |
"source": [ |
|
|
150 |
"_,_ = loadMetadata(True)" |
|
|
151 |
] |
|
|
152 |
}, |
|
|
153 |
{ |
|
|
154 |
"cell_type": "code", |
|
|
155 |
"execution_count": 9, |
|
|
156 |
"metadata": {}, |
|
|
157 |
"outputs": [], |
|
|
158 |
"source": [ |
|
|
159 |
"_,_ = loadMetadata3(True)" |
|
|
160 |
] |
|
|
161 |
}, |
|
|
162 |
{ |
|
|
163 |
"cell_type": "code", |
|
|
164 |
"execution_count": 8, |
|
|
165 |
"metadata": {}, |
|
|
166 |
"outputs": [], |
|
|
167 |
"source": [ |
|
|
168 |
"_,_ = loadMetadata2(True)" |
|
|
169 |
] |
|
|
170 |
}, |
|
|
171 |
{ |
|
|
172 |
"cell_type": "code", |
|
|
173 |
"execution_count": null, |
|
|
174 |
"metadata": {}, |
|
|
175 |
"outputs": [], |
|
|
176 |
"source": [] |
|
|
177 |
}, |
|
|
178 |
{ |
|
|
179 |
"cell_type": "code", |
|
|
180 |
"execution_count": 12, |
|
|
181 |
"metadata": {}, |
|
|
182 |
"outputs": [ |
|
|
183 |
{ |
|
|
184 |
"data": { |
|
|
185 |
"text/plain": [ |
|
|
186 |
"[7, 9]" |
|
|
187 |
] |
|
|
188 |
}, |
|
|
189 |
"execution_count": 12, |
|
|
190 |
"metadata": {}, |
|
|
191 |
"output_type": "execute_result" |
|
|
192 |
} |
|
|
193 |
], |
|
|
194 |
"source": [ |
|
|
195 |
"my_datasets3" |
|
|
196 |
] |
|
|
197 |
}, |
|
|
198 |
{ |
|
|
199 |
"cell_type": "code", |
|
|
200 |
"execution_count": 18, |
|
|
201 |
"metadata": {}, |
|
|
202 |
"outputs": [ |
|
|
203 |
{ |
|
|
204 |
"name": "stdout", |
|
|
205 |
"output_type": "stream", |
|
|
206 |
"text": [ |
|
|
207 |
"dataset 7 fold 0 feats size torch.Size([2697008, 552])\n", |
|
|
208 |
"dataset 7 fold 1 feats size torch.Size([2697008, 552])\n", |
|
|
209 |
"dataset 7 fold 2 feats size torch.Size([2697008, 552])\n" |
|
|
210 |
] |
|
|
211 |
} |
|
|
212 |
], |
|
|
213 |
"source": [ |
|
|
214 |
"preprocessedData(7,do_train=True,do_test=False)" |
|
|
215 |
] |
|
|
216 |
}, |
|
|
217 |
{ |
|
|
218 |
"cell_type": "code", |
|
|
219 |
"execution_count": 19, |
|
|
220 |
"metadata": {}, |
|
|
221 |
"outputs": [ |
|
|
222 |
{ |
|
|
223 |
"name": "stdout", |
|
|
224 |
"output_type": "stream", |
|
|
225 |
"text": [ |
|
|
226 |
"dataset 9 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
227 |
"dataset 9 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
228 |
"dataset 9 fold 2 feats size torch.Size([2697008, 256])\n" |
|
|
229 |
] |
|
|
230 |
} |
|
|
231 |
], |
|
|
232 |
"source": [ |
|
|
233 |
"preprocessedData(9,do_train=True,do_test=False)" |
|
|
234 |
] |
|
|
235 |
}, |
|
|
236 |
{ |
|
|
237 |
"cell_type": "code", |
|
|
238 |
"execution_count": 20, |
|
|
239 |
"metadata": {}, |
|
|
240 |
"outputs": [ |
|
|
241 |
{ |
|
|
242 |
"data": { |
|
|
243 |
"text/plain": [ |
|
|
244 |
"[11, 12, 13]" |
|
|
245 |
] |
|
|
246 |
}, |
|
|
247 |
"execution_count": 20, |
|
|
248 |
"metadata": {}, |
|
|
249 |
"output_type": "execute_result" |
|
|
250 |
} |
|
|
251 |
], |
|
|
252 |
"source": [ |
|
|
253 |
"my_datasets5" |
|
|
254 |
] |
|
|
255 |
}, |
|
|
256 |
{ |
|
|
257 |
"cell_type": "code", |
|
|
258 |
"execution_count": 23, |
|
|
259 |
"metadata": { |
|
|
260 |
"scrolled": false |
|
|
261 |
}, |
|
|
262 |
"outputs": [ |
|
|
263 |
{ |
|
|
264 |
"name": "stdout", |
|
|
265 |
"output_type": "stream", |
|
|
266 |
"text": [ |
|
|
267 |
"dataset 11 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
268 |
"dataset 11 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
269 |
"dataset 11 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
270 |
"dataset 11 fold 3 feats size torch.Size([2697008, 256])\n", |
|
|
271 |
"dataset 11 fold 4 feats size torch.Size([2697008, 256])\n" |
|
|
272 |
] |
|
|
273 |
} |
|
|
274 |
], |
|
|
275 |
"source": [ |
|
|
276 |
"preprocessedData(11,do_train=True,do_test=False, folds=range(5))" |
|
|
277 |
] |
|
|
278 |
}, |
|
|
279 |
{ |
|
|
280 |
"cell_type": "code", |
|
|
281 |
"execution_count": 24, |
|
|
282 |
"metadata": { |
|
|
283 |
"scrolled": false |
|
|
284 |
}, |
|
|
285 |
"outputs": [ |
|
|
286 |
{ |
|
|
287 |
"name": "stdout", |
|
|
288 |
"output_type": "stream", |
|
|
289 |
"text": [ |
|
|
290 |
"dataset 12 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
291 |
"dataset 12 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
292 |
"dataset 12 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
293 |
"dataset 12 fold 3 feats size torch.Size([2697008, 256])\n", |
|
|
294 |
"dataset 12 fold 4 feats size torch.Size([2697008, 256])\n" |
|
|
295 |
] |
|
|
296 |
} |
|
|
297 |
], |
|
|
298 |
"source": [ |
|
|
299 |
"preprocessedData(12,do_train=True,do_test=False, folds=range(5))" |
|
|
300 |
] |
|
|
301 |
}, |
|
|
302 |
{ |
|
|
303 |
"cell_type": "code", |
|
|
304 |
"execution_count": 26, |
|
|
305 |
"metadata": { |
|
|
306 |
"scrolled": true |
|
|
307 |
}, |
|
|
308 |
"outputs": [ |
|
|
309 |
{ |
|
|
310 |
"name": "stdout", |
|
|
311 |
"output_type": "stream", |
|
|
312 |
"text": [ |
|
|
313 |
"dataset 13 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
314 |
"dataset 13 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
315 |
"dataset 13 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
316 |
"dataset 13 fold 3 feats size torch.Size([2697008, 256])\n", |
|
|
317 |
"dataset 13 fold 4 feats size torch.Size([2697008, 256])\n" |
|
|
318 |
] |
|
|
319 |
} |
|
|
320 |
], |
|
|
321 |
"source": [ |
|
|
322 |
"preprocessedData(13,do_train=True,do_test=False, folds=range(5))" |
|
|
323 |
] |
|
|
324 |
}, |
|
|
325 |
{ |
|
|
326 |
"cell_type": "code", |
|
|
327 |
"execution_count": null, |
|
|
328 |
"metadata": {}, |
|
|
329 |
"outputs": [], |
|
|
330 |
"source": [] |
|
|
331 |
}, |
|
|
332 |
{ |
|
|
333 |
"cell_type": "code", |
|
|
334 |
"execution_count": 8, |
|
|
335 |
"metadata": {}, |
|
|
336 |
"outputs": [ |
|
|
337 |
{ |
|
|
338 |
"name": "stdout", |
|
|
339 |
"output_type": "stream", |
|
|
340 |
"text": [ |
|
|
341 |
"dataset 7 fold 0 feats size torch.Size([2697008, 552])\n", |
|
|
342 |
"dataset 7 fold 1 feats size torch.Size([2697008, 552])\n", |
|
|
343 |
"dataset 7 fold 2 feats size torch.Size([2697008, 552])\n", |
|
|
344 |
"dataset 9 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
345 |
"dataset 9 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
346 |
"dataset 9 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
347 |
"dataset 11 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
348 |
"dataset 11 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
349 |
"dataset 11 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
350 |
"dataset 11 fold 3 feats size torch.Size([2697008, 256])\n", |
|
|
351 |
"dataset 11 fold 4 feats size torch.Size([2697008, 256])\n", |
|
|
352 |
"dataset 12 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
353 |
"dataset 12 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
354 |
"dataset 12 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
355 |
"dataset 12 fold 3 feats size torch.Size([2697008, 256])\n", |
|
|
356 |
"dataset 12 fold 4 feats size torch.Size([2697008, 256])\n", |
|
|
357 |
"dataset 13 fold 0 feats size torch.Size([2697008, 256])\n", |
|
|
358 |
"dataset 13 fold 1 feats size torch.Size([2697008, 256])\n", |
|
|
359 |
"dataset 13 fold 2 feats size torch.Size([2697008, 256])\n", |
|
|
360 |
"dataset 13 fold 3 feats size torch.Size([2697008, 256])\n", |
|
|
361 |
"dataset 13 fold 4 feats size torch.Size([2697008, 256])\n" |
|
|
362 |
] |
|
|
363 |
} |
|
|
364 |
], |
|
|
365 |
"source": [ |
|
|
366 |
"for ds in my_datasets3:\n", |
|
|
367 |
" preprocessedData(ds)\n", |
|
|
368 |
"\n", |
|
|
369 |
"for ds in my_datasets5:\n", |
|
|
370 |
" preprocessedData(ds, folds=range(5))" |
|
|
371 |
] |
|
|
372 |
}, |
|
|
373 |
{ |
|
|
374 |
"cell_type": "code", |
|
|
375 |
"execution_count": 10, |
|
|
376 |
"metadata": {}, |
|
|
377 |
"outputs": [ |
|
|
378 |
{ |
|
|
379 |
"name": "stdout", |
|
|
380 |
"output_type": "stream", |
|
|
381 |
"text": [ |
|
|
382 |
"dataset 14 fold 0 feats size torch.Size([3011188, 256])\n" |
|
|
383 |
] |
|
|
384 |
}, |
|
|
385 |
{ |
|
|
386 |
"ename": "FileNotFoundError", |
|
|
387 |
"evalue": "[Errno 2] No such file or directory: '/mnt/edisk/running/yuval/model_se_resnet101_version_new_splits_stage2_type_features_test_split_0.pkl'", |
|
|
388 |
"output_type": "error", |
|
|
389 |
"traceback": [ |
|
|
390 |
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", |
|
|
391 |
"\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", |
|
|
392 |
"\u001b[0;32m<ipython-input-10-5a07cc4fa1a8>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mpreprocessedData\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m14\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfold_col\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'fold5'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mdo_test\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mdo_train\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", |
|
|
393 |
"\u001b[0;32m<ipython-input-6-51e1d35e10f9>\u001b[0m in \u001b[0;36mpreprocessedData\u001b[0;34m(dataset, folds, fold_col, do_test, do_test2, do_train)\u001b[0m\n\u001b[1;32m 62\u001b[0m .format(dataset_name.replace('_5n','').replace('_5f','').replace('_5',''),\n\u001b[1;32m 63\u001b[0m filename_add,dsft,focal,ds_num,test_fix,fold)\n\u001b[0;32m---> 64\u001b[0;31m \u001b[0mfeats\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpickle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilename\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'rb'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 65\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0;32massert\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfeats\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m8\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtest_md\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", |
|
|
394 |
"\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '/mnt/edisk/running/yuval/model_se_resnet101_version_new_splits_stage2_type_features_test_split_0.pkl'" |
|
|
395 |
] |
|
|
396 |
} |
|
|
397 |
], |
|
|
398 |
"source": [ |
|
|
399 |
"preprocessedData(14,fold_col='fold5',do_test=True,do_train=True)" |
|
|
400 |
] |
|
|
401 |
}, |
|
|
402 |
{ |
|
|
403 |
"cell_type": "code", |
|
|
404 |
"execution_count": null, |
|
|
405 |
"metadata": {}, |
|
|
406 |
"outputs": [], |
|
|
407 |
"source": [] |
|
|
408 |
}, |
|
|
409 |
{ |
|
|
410 |
"cell_type": "markdown", |
|
|
411 |
"metadata": {}, |
|
|
412 |
"source": [ |
|
|
413 |
"# Running" |
|
|
414 |
] |
|
|
415 |
}, |
|
|
416 |
{ |
|
|
417 |
"cell_type": "code", |
|
|
418 |
"execution_count": 28, |
|
|
419 |
"metadata": { |
|
|
420 |
"scrolled": true |
|
|
421 |
}, |
|
|
422 |
"outputs": [ |
|
|
423 |
{ |
|
|
424 |
"name": "stdout", |
|
|
425 |
"output_type": "stream", |
|
|
426 |
"text": [ |
|
|
427 |
"completed epochs: 0 starting now: 4\n", |
|
|
428 |
"DataSet 14 train size 17355 fold 2\n", |
|
|
429 |
"adding dummy serieses 27\n", |
|
|
430 |
"DataSet 14 valid size 4416 fold 2\n", |
|
|
431 |
"setFeats, augmentation 0\n", |
|
|
432 |
"dataset train: 17355 valid: 4416 loader train: 542 valid: 138\n", |
|
|
433 |
"starting from scratch\n", |
|
|
434 |
"setFeats, augmentation -1\n", |
|
|
435 |
"Batch 50 device: cuda time passed: 22.728 time per batch: 0.455\n", |
|
|
436 |
"Batch 100 device: cuda time passed: 39.605 time per batch: 0.396\n", |
|
|
437 |
"Batch 150 device: cuda time passed: 55.920 time per batch: 0.373\n", |
|
|
438 |
"Batch 200 device: cuda time passed: 72.699 time per batch: 0.363\n", |
|
|
439 |
"Batch 250 device: cuda time passed: 89.437 time per batch: 0.358\n", |
|
|
440 |
"Batch 300 device: cuda time passed: 105.583 time per batch: 0.352\n", |
|
|
441 |
"Batch 350 device: cuda time passed: 122.869 time per batch: 0.351\n", |
|
|
442 |
"Batch 400 device: cuda time passed: 139.694 time per batch: 0.349\n", |
|
|
443 |
"Batch 450 device: cuda time passed: 158.701 time per batch: 0.353\n", |
|
|
444 |
"Batch 500 device: cuda time passed: 176.083 time per batch: 0.352\n", |
|
|
445 |
"Batch 50 device: cuda time passed: 8.131 time per batch: 0.163\n", |
|
|
446 |
"Batch 100 device: cuda time passed: 14.930 time per batch: 0.149\n", |
|
|
447 |
"v35, d14, e1, f2, trn ll: 0.0580, val ll: 0.0650, ll_w: 0.0575, cor: 0.8338, auc: 0.9870, lr: 0.0002\n", |
|
|
448 |
"setFeats, augmentation -1\n", |
|
|
449 |
"Batch 50 device: cuda time passed: 22.873 time per batch: 0.457\n", |
|
|
450 |
"Batch 100 device: cuda time passed: 39.907 time per batch: 0.399\n", |
|
|
451 |
"Batch 150 device: cuda time passed: 57.763 time per batch: 0.385\n", |
|
|
452 |
"Batch 200 device: cuda time passed: 75.045 time per batch: 0.375\n", |
|
|
453 |
"Batch 250 device: cuda time passed: 92.730 time per batch: 0.371\n", |
|
|
454 |
"Batch 300 device: cuda time passed: 109.667 time per batch: 0.366\n", |
|
|
455 |
"Batch 350 device: cuda time passed: 126.574 time per batch: 0.362\n", |
|
|
456 |
"Batch 400 device: cuda time passed: 143.725 time per batch: 0.359\n", |
|
|
457 |
"Batch 450 device: cuda time passed: 161.952 time per batch: 0.360\n", |
|
|
458 |
"Batch 500 device: cuda time passed: 178.315 time per batch: 0.357\n", |
|
|
459 |
"Batch 50 device: cuda time passed: 8.428 time per batch: 0.169\n", |
|
|
460 |
"Batch 100 device: cuda time passed: 15.068 time per batch: 0.151\n", |
|
|
461 |
"v35, d14, e2, f2, trn ll: 0.0356, val ll: 0.0640, ll_w: 0.0562, cor: 0.8351, auc: 0.9872, lr: 0.0002\n", |
|
|
462 |
"setFeats, augmentation -1\n", |
|
|
463 |
"Batch 50 device: cuda time passed: 22.887 time per batch: 0.458\n", |
|
|
464 |
"Batch 100 device: cuda time passed: 39.853 time per batch: 0.399\n", |
|
|
465 |
"Batch 150 device: cuda time passed: 56.609 time per batch: 0.377\n", |
|
|
466 |
"Batch 200 device: cuda time passed: 73.060 time per batch: 0.365\n", |
|
|
467 |
"Batch 250 device: cuda time passed: 90.475 time per batch: 0.362\n", |
|
|
468 |
"Batch 300 device: cuda time passed: 107.703 time per batch: 0.359\n", |
|
|
469 |
"Batch 350 device: cuda time passed: 124.235 time per batch: 0.355\n", |
|
|
470 |
"Batch 400 device: cuda time passed: 141.549 time per batch: 0.354\n", |
|
|
471 |
"Batch 450 device: cuda time passed: 159.349 time per batch: 0.354\n", |
|
|
472 |
"Batch 500 device: cuda time passed: 175.549 time per batch: 0.351\n", |
|
|
473 |
"Batch 50 device: cuda time passed: 8.712 time per batch: 0.174\n", |
|
|
474 |
"Batch 100 device: cuda time passed: 15.378 time per batch: 0.154\n", |
|
|
475 |
"v35, d14, e3, f2, trn ll: 0.0341, val ll: 0.0622, ll_w: 0.0549, cor: 0.8409, auc: 0.9887, lr: 0.0002\n", |
|
|
476 |
"setFeats, augmentation -1\n", |
|
|
477 |
"Batch 50 device: cuda time passed: 23.339 time per batch: 0.467\n", |
|
|
478 |
"Batch 100 device: cuda time passed: 40.154 time per batch: 0.402\n", |
|
|
479 |
"Batch 150 device: cuda time passed: 56.698 time per batch: 0.378\n", |
|
|
480 |
"Batch 200 device: cuda time passed: 74.060 time per batch: 0.370\n", |
|
|
481 |
"Batch 250 device: cuda time passed: 91.302 time per batch: 0.365\n", |
|
|
482 |
"Batch 300 device: cuda time passed: 108.675 time per batch: 0.362\n", |
|
|
483 |
"Batch 350 device: cuda time passed: 126.067 time per batch: 0.360\n", |
|
|
484 |
"Batch 400 device: cuda time passed: 143.346 time per batch: 0.358\n", |
|
|
485 |
"Batch 450 device: cuda time passed: 162.692 time per batch: 0.362\n", |
|
|
486 |
"Batch 500 device: cuda time passed: 179.814 time per batch: 0.360\n", |
|
|
487 |
"Batch 50 device: cuda time passed: 7.879 time per batch: 0.158\n", |
|
|
488 |
"Batch 100 device: cuda time passed: 14.703 time per batch: 0.147\n", |
|
|
489 |
"v35, d14, e4, f2, trn ll: 0.0335, val ll: 0.0613, ll_w: 0.0539, cor: 0.8421, auc: 0.9887, lr: 0.0002\n", |
|
|
490 |
"total running time 900.058856010437\n", |
|
|
491 |
"completed epochs: 4 starting now: 4\n", |
|
|
492 |
"DataSet 14 train size 17355 fold 2\n", |
|
|
493 |
"adding dummy serieses 27\n", |
|
|
494 |
"DataSet 14 valid size 4416 fold 2\n", |
|
|
495 |
"setFeats, augmentation 0\n", |
|
|
496 |
"dataset train: 17355 valid: 4416 loader train: 542 valid: 138\n", |
|
|
497 |
"loading model model.b4.f2.d14.v35\n", |
|
|
498 |
"setFeats, augmentation -1\n", |
|
|
499 |
"Batch 50 device: cuda time passed: 22.315 time per batch: 0.446\n", |
|
|
500 |
"Batch 100 device: cuda time passed: 38.974 time per batch: 0.390\n", |
|
|
501 |
"Batch 150 device: cuda time passed: 56.082 time per batch: 0.374\n", |
|
|
502 |
"Batch 200 device: cuda time passed: 73.546 time per batch: 0.368\n", |
|
|
503 |
"Batch 250 device: cuda time passed: 90.717 time per batch: 0.363\n", |
|
|
504 |
"Batch 300 device: cuda time passed: 107.371 time per batch: 0.358\n", |
|
|
505 |
"Batch 350 device: cuda time passed: 124.145 time per batch: 0.355\n", |
|
|
506 |
"Batch 400 device: cuda time passed: 140.102 time per batch: 0.350\n", |
|
|
507 |
"Batch 450 device: cuda time passed: 158.547 time per batch: 0.352\n", |
|
|
508 |
"Batch 500 device: cuda time passed: 176.664 time per batch: 0.353\n", |
|
|
509 |
"Batch 50 device: cuda time passed: 8.467 time per batch: 0.169\n", |
|
|
510 |
"Batch 100 device: cuda time passed: 15.208 time per batch: 0.152\n", |
|
|
511 |
"v35, d14, e5, f2, trn ll: 0.0319, val ll: 0.0602, ll_w: 0.0530, cor: 0.8445, auc: 0.9892, lr: 2e-05\n", |
|
|
512 |
"setFeats, augmentation -1\n", |
|
|
513 |
"Batch 50 device: cuda time passed: 23.093 time per batch: 0.462\n", |
|
|
514 |
"Batch 100 device: cuda time passed: 40.190 time per batch: 0.402\n", |
|
|
515 |
"Batch 150 device: cuda time passed: 57.287 time per batch: 0.382\n", |
|
|
516 |
"Batch 200 device: cuda time passed: 74.458 time per batch: 0.372\n", |
|
|
517 |
"Batch 250 device: cuda time passed: 91.213 time per batch: 0.365\n", |
|
|
518 |
"Batch 300 device: cuda time passed: 108.222 time per batch: 0.361\n", |
|
|
519 |
"Batch 350 device: cuda time passed: 125.171 time per batch: 0.358\n", |
|
|
520 |
"Batch 400 device: cuda time passed: 142.199 time per batch: 0.355\n", |
|
|
521 |
"Batch 450 device: cuda time passed: 162.403 time per batch: 0.361\n", |
|
|
522 |
"Batch 500 device: cuda time passed: 179.653 time per batch: 0.359\n", |
|
|
523 |
"Batch 50 device: cuda time passed: 8.772 time per batch: 0.175\n", |
|
|
524 |
"Batch 100 device: cuda time passed: 15.212 time per batch: 0.152\n", |
|
|
525 |
"v35, d14, e6, f2, trn ll: 0.0316, val ll: 0.0603, ll_w: 0.0531, cor: 0.8444, auc: 0.9892, lr: 2e-05\n", |
|
|
526 |
"setFeats, augmentation -1\n", |
|
|
527 |
"Batch 50 device: cuda time passed: 21.633 time per batch: 0.433\n", |
|
|
528 |
"Batch 100 device: cuda time passed: 38.635 time per batch: 0.386\n", |
|
|
529 |
"Batch 150 device: cuda time passed: 55.617 time per batch: 0.371\n", |
|
|
530 |
"Batch 200 device: cuda time passed: 72.396 time per batch: 0.362\n", |
|
|
531 |
"Batch 250 device: cuda time passed: 89.274 time per batch: 0.357\n", |
|
|
532 |
"Batch 300 device: cuda time passed: 106.187 time per batch: 0.354\n", |
|
|
533 |
"Batch 350 device: cuda time passed: 122.741 time per batch: 0.351\n", |
|
|
534 |
"Batch 400 device: cuda time passed: 139.218 time per batch: 0.348\n", |
|
|
535 |
"Batch 450 device: cuda time passed: 158.183 time per batch: 0.352\n", |
|
|
536 |
"Batch 500 device: cuda time passed: 175.811 time per batch: 0.352\n", |
|
|
537 |
"Batch 50 device: cuda time passed: 8.583 time per batch: 0.172\n", |
|
|
538 |
"Batch 100 device: cuda time passed: 15.243 time per batch: 0.152\n", |
|
|
539 |
"v35, d14, e7, f2, trn ll: 0.0315, val ll: 0.0602, ll_w: 0.0530, cor: 0.8446, auc: 0.9892, lr: 2e-05\n", |
|
|
540 |
"setFeats, augmentation -1\n", |
|
|
541 |
"Batch 50 device: cuda time passed: 23.204 time per batch: 0.464\n", |
|
|
542 |
"Batch 100 device: cuda time passed: 40.589 time per batch: 0.406\n", |
|
|
543 |
"Batch 150 device: cuda time passed: 58.004 time per batch: 0.387\n", |
|
|
544 |
"Batch 200 device: cuda time passed: 75.275 time per batch: 0.376\n", |
|
|
545 |
"Batch 250 device: cuda time passed: 92.072 time per batch: 0.368\n", |
|
|
546 |
"Batch 300 device: cuda time passed: 109.143 time per batch: 0.364\n", |
|
|
547 |
"Batch 350 device: cuda time passed: 125.692 time per batch: 0.359\n", |
|
|
548 |
"Batch 400 device: cuda time passed: 143.440 time per batch: 0.359\n", |
|
|
549 |
"Batch 450 device: cuda time passed: 164.424 time per batch: 0.365\n", |
|
|
550 |
"Batch 500 device: cuda time passed: 181.490 time per batch: 0.363\n", |
|
|
551 |
"Batch 50 device: cuda time passed: 8.635 time per batch: 0.173\n", |
|
|
552 |
"Batch 100 device: cuda time passed: 15.140 time per batch: 0.151\n", |
|
|
553 |
"v35, d14, e8, f2, trn ll: 0.0314, val ll: 0.0601, ll_w: 0.0529, cor: 0.8448, auc: 0.9892, lr: 2e-05\n", |
|
|
554 |
"total running time 871.9309167861938\n", |
|
|
555 |
"completed epochs: 8 starting now: 3\n", |
|
|
556 |
"DataSet 14 train size 17355 fold 2\n", |
|
|
557 |
"adding dummy serieses 27\n", |
|
|
558 |
"DataSet 14 valid size 4416 fold 2\n", |
|
|
559 |
"setFeats, augmentation 0\n", |
|
|
560 |
"dataset train: 17355 valid: 4416 loader train: 542 valid: 138\n", |
|
|
561 |
"loading model model.b8.f2.d14.v35\n", |
|
|
562 |
"setFeats, augmentation -1\n", |
|
|
563 |
"Batch 50 device: cuda time passed: 22.052 time per batch: 0.441\n", |
|
|
564 |
"Batch 100 device: cuda time passed: 38.728 time per batch: 0.387\n" |
|
|
565 |
] |
|
|
566 |
}, |
|
|
567 |
{ |
|
|
568 |
"name": "stdout", |
|
|
569 |
"output_type": "stream", |
|
|
570 |
"text": [ |
|
|
571 |
"Batch 150 device: cuda time passed: 56.020 time per batch: 0.373\n", |
|
|
572 |
"Batch 200 device: cuda time passed: 73.183 time per batch: 0.366\n", |
|
|
573 |
"Batch 250 device: cuda time passed: 90.324 time per batch: 0.361\n", |
|
|
574 |
"Batch 300 device: cuda time passed: 107.782 time per batch: 0.359\n", |
|
|
575 |
"Batch 350 device: cuda time passed: 124.543 time per batch: 0.356\n", |
|
|
576 |
"Batch 400 device: cuda time passed: 141.478 time per batch: 0.354\n", |
|
|
577 |
"Batch 450 device: cuda time passed: 162.319 time per batch: 0.361\n", |
|
|
578 |
"Batch 500 device: cuda time passed: 179.659 time per batch: 0.359\n", |
|
|
579 |
"Batch 50 device: cuda time passed: 7.857 time per batch: 0.157\n", |
|
|
580 |
"Batch 100 device: cuda time passed: 14.956 time per batch: 0.150\n", |
|
|
581 |
"v35, d14, e9, f2, trn ll: 0.0313, val ll: 0.0599, ll_w: 0.0527, cor: 0.8454, auc: 0.9892, lr: 5e-06\n", |
|
|
582 |
"setFeats, augmentation -1\n", |
|
|
583 |
"Batch 50 device: cuda time passed: 23.852 time per batch: 0.477\n", |
|
|
584 |
"Batch 100 device: cuda time passed: 41.405 time per batch: 0.414\n", |
|
|
585 |
"Batch 150 device: cuda time passed: 59.015 time per batch: 0.393\n", |
|
|
586 |
"Batch 200 device: cuda time passed: 76.762 time per batch: 0.384\n", |
|
|
587 |
"Batch 250 device: cuda time passed: 94.142 time per batch: 0.377\n", |
|
|
588 |
"Batch 300 device: cuda time passed: 111.812 time per batch: 0.373\n", |
|
|
589 |
"Batch 350 device: cuda time passed: 129.992 time per batch: 0.371\n", |
|
|
590 |
"Batch 400 device: cuda time passed: 147.178 time per batch: 0.368\n", |
|
|
591 |
"Batch 450 device: cuda time passed: 168.899 time per batch: 0.375\n", |
|
|
592 |
"Batch 500 device: cuda time passed: 184.992 time per batch: 0.370\n", |
|
|
593 |
"Batch 50 device: cuda time passed: 8.119 time per batch: 0.162\n", |
|
|
594 |
"Batch 100 device: cuda time passed: 14.013 time per batch: 0.140\n", |
|
|
595 |
"v35, d14, e10, f2, trn ll: 0.0313, val ll: 0.0600, ll_w: 0.0528, cor: 0.8451, auc: 0.9893, lr: 5e-06\n", |
|
|
596 |
"setFeats, augmentation -1\n", |
|
|
597 |
"Batch 50 device: cuda time passed: 21.209 time per batch: 0.424\n", |
|
|
598 |
"Batch 100 device: cuda time passed: 37.676 time per batch: 0.377\n", |
|
|
599 |
"Batch 150 device: cuda time passed: 53.211 time per batch: 0.355\n", |
|
|
600 |
"Batch 200 device: cuda time passed: 68.886 time per batch: 0.344\n", |
|
|
601 |
"Batch 250 device: cuda time passed: 84.449 time per batch: 0.338\n", |
|
|
602 |
"Batch 300 device: cuda time passed: 100.285 time per batch: 0.334\n", |
|
|
603 |
"Batch 350 device: cuda time passed: 116.165 time per batch: 0.332\n", |
|
|
604 |
"Batch 400 device: cuda time passed: 131.615 time per batch: 0.329\n", |
|
|
605 |
"Batch 450 device: cuda time passed: 150.426 time per batch: 0.334\n", |
|
|
606 |
"Batch 500 device: cuda time passed: 166.304 time per batch: 0.333\n", |
|
|
607 |
"Batch 50 device: cuda time passed: 7.404 time per batch: 0.148\n", |
|
|
608 |
"Batch 100 device: cuda time passed: 13.992 time per batch: 0.140\n", |
|
|
609 |
"v35, d14, e11, f2, trn ll: 0.0312, val ll: 0.0600, ll_w: 0.0528, cor: 0.8450, auc: 0.9893, lr: 5e-06\n", |
|
|
610 |
"total running time 644.8997066020966\n", |
|
|
611 |
"completed epochs: 11 starting now: 2\n", |
|
|
612 |
"DataSet 14 train size 17355 fold 2\n", |
|
|
613 |
"adding dummy serieses 27\n", |
|
|
614 |
"DataSet 14 valid size 4416 fold 2\n", |
|
|
615 |
"setFeats, augmentation 0\n", |
|
|
616 |
"dataset train: 17355 valid: 4416 loader train: 542 valid: 138\n", |
|
|
617 |
"loading model model.b11.f2.d14.v35\n", |
|
|
618 |
"setFeats, augmentation -1\n", |
|
|
619 |
"Batch 50 device: cuda time passed: 21.014 time per batch: 0.420\n", |
|
|
620 |
"Batch 100 device: cuda time passed: 36.560 time per batch: 0.366\n", |
|
|
621 |
"Batch 150 device: cuda time passed: 53.272 time per batch: 0.355\n", |
|
|
622 |
"Batch 200 device: cuda time passed: 68.574 time per batch: 0.343\n", |
|
|
623 |
"Batch 250 device: cuda time passed: 83.938 time per batch: 0.336\n", |
|
|
624 |
"Batch 300 device: cuda time passed: 100.283 time per batch: 0.334\n", |
|
|
625 |
"Batch 350 device: cuda time passed: 116.620 time per batch: 0.333\n", |
|
|
626 |
"Batch 400 device: cuda time passed: 132.783 time per batch: 0.332\n", |
|
|
627 |
"Batch 450 device: cuda time passed: 152.425 time per batch: 0.339\n", |
|
|
628 |
"Batch 500 device: cuda time passed: 168.627 time per batch: 0.337\n", |
|
|
629 |
"Batch 50 device: cuda time passed: 8.257 time per batch: 0.165\n", |
|
|
630 |
"Batch 100 device: cuda time passed: 14.836 time per batch: 0.148\n", |
|
|
631 |
"v35, d14, e12, f2, trn ll: 0.0311, val ll: 0.0600, ll_w: 0.0528, cor: 0.8451, auc: 0.9893, lr: 2e-06\n", |
|
|
632 |
"setFeats, augmentation -1\n", |
|
|
633 |
"Batch 50 device: cuda time passed: 21.311 time per batch: 0.426\n", |
|
|
634 |
"Batch 100 device: cuda time passed: 38.735 time per batch: 0.387\n", |
|
|
635 |
"Batch 150 device: cuda time passed: 54.760 time per batch: 0.365\n", |
|
|
636 |
"Batch 200 device: cuda time passed: 71.364 time per batch: 0.357\n", |
|
|
637 |
"Batch 250 device: cuda time passed: 87.975 time per batch: 0.352\n", |
|
|
638 |
"Batch 300 device: cuda time passed: 104.126 time per batch: 0.347\n", |
|
|
639 |
"Batch 350 device: cuda time passed: 120.536 time per batch: 0.344\n", |
|
|
640 |
"Batch 400 device: cuda time passed: 136.741 time per batch: 0.342\n", |
|
|
641 |
"Batch 450 device: cuda time passed: 154.903 time per batch: 0.344\n", |
|
|
642 |
"Batch 500 device: cuda time passed: 173.138 time per batch: 0.346\n", |
|
|
643 |
"Batch 50 device: cuda time passed: 8.686 time per batch: 0.174\n", |
|
|
644 |
"Batch 100 device: cuda time passed: 15.208 time per batch: 0.152\n", |
|
|
645 |
"v35, d14, e13, f2, trn ll: 0.0311, val ll: 0.0599, ll_w: 0.0527, cor: 0.8454, auc: 0.9892, lr: 2e-06\n", |
|
|
646 |
"total running time 421.16584634780884\n", |
|
|
647 |
"total time 2838.7946536540985\n", |
|
|
648 |
"completed epochs: 0 starting now: 4\n", |
|
|
649 |
"DataSet 14 train size 17408 fold 3\n", |
|
|
650 |
"adding dummy serieses 16\n", |
|
|
651 |
"DataSet 14 valid size 4352 fold 3\n", |
|
|
652 |
"setFeats, augmentation 0\n", |
|
|
653 |
"dataset train: 17408 valid: 4352 loader train: 544 valid: 136\n", |
|
|
654 |
"starting from scratch\n", |
|
|
655 |
"setFeats, augmentation -1\n", |
|
|
656 |
"Batch 50 device: cuda time passed: 21.909 time per batch: 0.438\n", |
|
|
657 |
"Batch 100 device: cuda time passed: 38.295 time per batch: 0.383\n", |
|
|
658 |
"Batch 150 device: cuda time passed: 54.459 time per batch: 0.363\n", |
|
|
659 |
"Batch 200 device: cuda time passed: 71.059 time per batch: 0.355\n", |
|
|
660 |
"Batch 250 device: cuda time passed: 87.285 time per batch: 0.349\n", |
|
|
661 |
"Batch 300 device: cuda time passed: 103.278 time per batch: 0.344\n", |
|
|
662 |
"Batch 350 device: cuda time passed: 119.758 time per batch: 0.342\n", |
|
|
663 |
"Batch 400 device: cuda time passed: 136.023 time per batch: 0.340\n", |
|
|
664 |
"Batch 450 device: cuda time passed: 156.381 time per batch: 0.348\n", |
|
|
665 |
"Batch 500 device: cuda time passed: 172.724 time per batch: 0.345\n", |
|
|
666 |
"Batch 50 device: cuda time passed: 8.381 time per batch: 0.168\n", |
|
|
667 |
"Batch 100 device: cuda time passed: 15.352 time per batch: 0.154\n", |
|
|
668 |
"v35, d14, e1, f3, trn ll: 0.0574, val ll: 0.0673, ll_w: 0.0595, cor: 0.8311, auc: 0.9867, lr: 0.0002\n", |
|
|
669 |
"setFeats, augmentation -1\n", |
|
|
670 |
"Batch 50 device: cuda time passed: 21.941 time per batch: 0.439\n", |
|
|
671 |
"Batch 100 device: cuda time passed: 38.796 time per batch: 0.388\n", |
|
|
672 |
"Batch 150 device: cuda time passed: 55.281 time per batch: 0.369\n", |
|
|
673 |
"Batch 200 device: cuda time passed: 72.261 time per batch: 0.361\n", |
|
|
674 |
"Batch 250 device: cuda time passed: 88.752 time per batch: 0.355\n", |
|
|
675 |
"Batch 300 device: cuda time passed: 105.021 time per batch: 0.350\n", |
|
|
676 |
"Batch 350 device: cuda time passed: 121.855 time per batch: 0.348\n", |
|
|
677 |
"Batch 400 device: cuda time passed: 137.611 time per batch: 0.344\n", |
|
|
678 |
"Batch 450 device: cuda time passed: 158.361 time per batch: 0.352\n", |
|
|
679 |
"Batch 500 device: cuda time passed: 175.196 time per batch: 0.350\n", |
|
|
680 |
"Batch 50 device: cuda time passed: 7.713 time per batch: 0.154\n", |
|
|
681 |
"Batch 100 device: cuda time passed: 15.676 time per batch: 0.157\n", |
|
|
682 |
"v35, d14, e2, f3, trn ll: 0.0356, val ll: 0.0659, ll_w: 0.0581, cor: 0.8330, auc: 0.9871, lr: 0.0002\n", |
|
|
683 |
"setFeats, augmentation -1\n", |
|
|
684 |
"Batch 50 device: cuda time passed: 22.308 time per batch: 0.446\n", |
|
|
685 |
"Batch 100 device: cuda time passed: 38.613 time per batch: 0.386\n", |
|
|
686 |
"Batch 150 device: cuda time passed: 55.122 time per batch: 0.367\n", |
|
|
687 |
"Batch 200 device: cuda time passed: 71.951 time per batch: 0.360\n", |
|
|
688 |
"Batch 250 device: cuda time passed: 88.792 time per batch: 0.355\n", |
|
|
689 |
"Batch 300 device: cuda time passed: 105.939 time per batch: 0.353\n", |
|
|
690 |
"Batch 350 device: cuda time passed: 121.911 time per batch: 0.348\n", |
|
|
691 |
"Batch 400 device: cuda time passed: 138.362 time per batch: 0.346\n", |
|
|
692 |
"Batch 450 device: cuda time passed: 158.974 time per batch: 0.353\n", |
|
|
693 |
"Batch 500 device: cuda time passed: 175.745 time per batch: 0.351\n", |
|
|
694 |
"Batch 50 device: cuda time passed: 8.061 time per batch: 0.161\n", |
|
|
695 |
"Batch 100 device: cuda time passed: 14.448 time per batch: 0.144\n", |
|
|
696 |
"v35, d14, e3, f3, trn ll: 0.0344, val ll: 0.0648, ll_w: 0.0572, cor: 0.8354, auc: 0.9880, lr: 0.0002\n", |
|
|
697 |
"setFeats, augmentation -1\n", |
|
|
698 |
"Batch 50 device: cuda time passed: 22.337 time per batch: 0.447\n", |
|
|
699 |
"Batch 100 device: cuda time passed: 39.126 time per batch: 0.391\n", |
|
|
700 |
"Batch 150 device: cuda time passed: 56.197 time per batch: 0.375\n", |
|
|
701 |
"Batch 200 device: cuda time passed: 72.948 time per batch: 0.365\n", |
|
|
702 |
"Batch 250 device: cuda time passed: 89.869 time per batch: 0.359\n", |
|
|
703 |
"Batch 300 device: cuda time passed: 106.597 time per batch: 0.355\n", |
|
|
704 |
"Batch 350 device: cuda time passed: 123.356 time per batch: 0.352\n" |
|
|
705 |
] |
|
|
706 |
}, |
|
|
707 |
{ |
|
|
708 |
"name": "stdout", |
|
|
709 |
"output_type": "stream", |
|
|
710 |
"text": [ |
|
|
711 |
"Batch 400 device: cuda time passed: 140.828 time per batch: 0.352\n", |
|
|
712 |
"Batch 450 device: cuda time passed: 161.360 time per batch: 0.359\n", |
|
|
713 |
"Batch 500 device: cuda time passed: 178.210 time per batch: 0.356\n", |
|
|
714 |
"Batch 50 device: cuda time passed: 8.594 time per batch: 0.172\n", |
|
|
715 |
"Batch 100 device: cuda time passed: 15.062 time per batch: 0.151\n", |
|
|
716 |
"v35, d14, e4, f3, trn ll: 0.0338, val ll: 0.0646, ll_w: 0.0566, cor: 0.8354, auc: 0.9876, lr: 0.0002\n", |
|
|
717 |
"total running time 878.9186632633209\n", |
|
|
718 |
"completed epochs: 4 starting now: 4\n", |
|
|
719 |
"DataSet 14 train size 17408 fold 3\n", |
|
|
720 |
"adding dummy serieses 16\n", |
|
|
721 |
"DataSet 14 valid size 4352 fold 3\n", |
|
|
722 |
"setFeats, augmentation 0\n", |
|
|
723 |
"dataset train: 17408 valid: 4352 loader train: 544 valid: 136\n", |
|
|
724 |
"loading model model.b4.f3.d14.v35\n", |
|
|
725 |
"setFeats, augmentation -1\n", |
|
|
726 |
"Batch 50 device: cuda time passed: 22.351 time per batch: 0.447\n", |
|
|
727 |
"Batch 100 device: cuda time passed: 39.285 time per batch: 0.393\n", |
|
|
728 |
"Batch 150 device: cuda time passed: 56.332 time per batch: 0.376\n", |
|
|
729 |
"Batch 200 device: cuda time passed: 73.401 time per batch: 0.367\n", |
|
|
730 |
"Batch 250 device: cuda time passed: 90.373 time per batch: 0.361\n", |
|
|
731 |
"Batch 300 device: cuda time passed: 107.953 time per batch: 0.360\n", |
|
|
732 |
"Batch 350 device: cuda time passed: 125.840 time per batch: 0.360\n", |
|
|
733 |
"Batch 400 device: cuda time passed: 141.813 time per batch: 0.355\n", |
|
|
734 |
"Batch 450 device: cuda time passed: 163.658 time per batch: 0.364\n", |
|
|
735 |
"Batch 500 device: cuda time passed: 180.005 time per batch: 0.360\n", |
|
|
736 |
"Batch 50 device: cuda time passed: 9.095 time per batch: 0.182\n", |
|
|
737 |
"Batch 100 device: cuda time passed: 15.365 time per batch: 0.154\n", |
|
|
738 |
"v35, d14, e5, f3, trn ll: 0.0323, val ll: 0.0630, ll_w: 0.0553, cor: 0.8398, auc: 0.9887, lr: 2e-05\n", |
|
|
739 |
"setFeats, augmentation -1\n", |
|
|
740 |
"Batch 50 device: cuda time passed: 22.265 time per batch: 0.445\n", |
|
|
741 |
"Batch 100 device: cuda time passed: 38.806 time per batch: 0.388\n", |
|
|
742 |
"Batch 150 device: cuda time passed: 55.889 time per batch: 0.373\n", |
|
|
743 |
"Batch 200 device: cuda time passed: 72.973 time per batch: 0.365\n", |
|
|
744 |
"Batch 250 device: cuda time passed: 89.595 time per batch: 0.358\n", |
|
|
745 |
"Batch 300 device: cuda time passed: 106.073 time per batch: 0.354\n", |
|
|
746 |
"Batch 350 device: cuda time passed: 123.445 time per batch: 0.353\n", |
|
|
747 |
"Batch 400 device: cuda time passed: 139.717 time per batch: 0.349\n", |
|
|
748 |
"Batch 450 device: cuda time passed: 159.922 time per batch: 0.355\n", |
|
|
749 |
"Batch 500 device: cuda time passed: 177.361 time per batch: 0.355\n", |
|
|
750 |
"Batch 50 device: cuda time passed: 8.455 time per batch: 0.169\n", |
|
|
751 |
"Batch 100 device: cuda time passed: 15.167 time per batch: 0.152\n", |
|
|
752 |
"v35, d14, e6, f3, trn ll: 0.0319, val ll: 0.0627, ll_w: 0.0550, cor: 0.8406, auc: 0.9888, lr: 2e-05\n", |
|
|
753 |
"setFeats, augmentation -1\n", |
|
|
754 |
"Batch 50 device: cuda time passed: 21.948 time per batch: 0.439\n", |
|
|
755 |
"Batch 100 device: cuda time passed: 38.589 time per batch: 0.386\n", |
|
|
756 |
"Batch 150 device: cuda time passed: 56.699 time per batch: 0.378\n", |
|
|
757 |
"Batch 200 device: cuda time passed: 74.066 time per batch: 0.370\n", |
|
|
758 |
"Batch 250 device: cuda time passed: 90.412 time per batch: 0.362\n", |
|
|
759 |
"Batch 300 device: cuda time passed: 108.005 time per batch: 0.360\n", |
|
|
760 |
"Batch 350 device: cuda time passed: 124.540 time per batch: 0.356\n", |
|
|
761 |
"Batch 400 device: cuda time passed: 141.433 time per batch: 0.354\n", |
|
|
762 |
"Batch 450 device: cuda time passed: 158.940 time per batch: 0.353\n", |
|
|
763 |
"Batch 500 device: cuda time passed: 176.269 time per batch: 0.353\n", |
|
|
764 |
"Batch 50 device: cuda time passed: 7.920 time per batch: 0.158\n", |
|
|
765 |
"Batch 100 device: cuda time passed: 14.641 time per batch: 0.146\n", |
|
|
766 |
"v35, d14, e7, f3, trn ll: 0.0317, val ll: 0.0628, ll_w: 0.0550, cor: 0.8408, auc: 0.9888, lr: 2e-05\n", |
|
|
767 |
"setFeats, augmentation -1\n", |
|
|
768 |
"Batch 50 device: cuda time passed: 21.956 time per batch: 0.439\n", |
|
|
769 |
"Batch 100 device: cuda time passed: 38.717 time per batch: 0.387\n", |
|
|
770 |
"Batch 150 device: cuda time passed: 54.847 time per batch: 0.366\n", |
|
|
771 |
"Batch 200 device: cuda time passed: 73.123 time per batch: 0.366\n", |
|
|
772 |
"Batch 250 device: cuda time passed: 89.668 time per batch: 0.359\n", |
|
|
773 |
"Batch 300 device: cuda time passed: 105.873 time per batch: 0.353\n", |
|
|
774 |
"Batch 350 device: cuda time passed: 122.045 time per batch: 0.349\n", |
|
|
775 |
"Batch 400 device: cuda time passed: 138.273 time per batch: 0.346\n", |
|
|
776 |
"Batch 450 device: cuda time passed: 157.145 time per batch: 0.349\n", |
|
|
777 |
"Batch 500 device: cuda time passed: 173.020 time per batch: 0.346\n", |
|
|
778 |
"Batch 50 device: cuda time passed: 8.091 time per batch: 0.162\n", |
|
|
779 |
"Batch 100 device: cuda time passed: 14.363 time per batch: 0.144\n", |
|
|
780 |
"v35, d14, e8, f3, trn ll: 0.0317, val ll: 0.0624, ll_w: 0.0548, cor: 0.8413, auc: 0.9889, lr: 2e-05\n", |
|
|
781 |
"total running time 867.7460985183716\n", |
|
|
782 |
"completed epochs: 8 starting now: 3\n", |
|
|
783 |
"DataSet 14 train size 17408 fold 3\n", |
|
|
784 |
"adding dummy serieses 16\n", |
|
|
785 |
"DataSet 14 valid size 4352 fold 3\n", |
|
|
786 |
"setFeats, augmentation 0\n", |
|
|
787 |
"dataset train: 17408 valid: 4352 loader train: 544 valid: 136\n", |
|
|
788 |
"loading model model.b8.f3.d14.v35\n", |
|
|
789 |
"setFeats, augmentation -1\n", |
|
|
790 |
"Batch 50 device: cuda time passed: 20.904 time per batch: 0.418\n", |
|
|
791 |
"Batch 100 device: cuda time passed: 37.911 time per batch: 0.379\n", |
|
|
792 |
"Batch 150 device: cuda time passed: 54.975 time per batch: 0.367\n", |
|
|
793 |
"Batch 200 device: cuda time passed: 71.442 time per batch: 0.357\n", |
|
|
794 |
"Batch 250 device: cuda time passed: 87.521 time per batch: 0.350\n", |
|
|
795 |
"Batch 300 device: cuda time passed: 104.238 time per batch: 0.347\n", |
|
|
796 |
"Batch 350 device: cuda time passed: 121.406 time per batch: 0.347\n", |
|
|
797 |
"Batch 400 device: cuda time passed: 137.836 time per batch: 0.345\n", |
|
|
798 |
"Batch 450 device: cuda time passed: 157.379 time per batch: 0.350\n", |
|
|
799 |
"Batch 500 device: cuda time passed: 173.441 time per batch: 0.347\n", |
|
|
800 |
"Batch 50 device: cuda time passed: 8.016 time per batch: 0.160\n", |
|
|
801 |
"Batch 100 device: cuda time passed: 14.483 time per batch: 0.145\n", |
|
|
802 |
"v35, d14, e9, f3, trn ll: 0.0313, val ll: 0.0623, ll_w: 0.0547, cor: 0.8414, auc: 0.9889, lr: 5e-06\n", |
|
|
803 |
"setFeats, augmentation -1\n", |
|
|
804 |
"Batch 50 device: cuda time passed: 22.463 time per batch: 0.449\n", |
|
|
805 |
"Batch 100 device: cuda time passed: 39.561 time per batch: 0.396\n", |
|
|
806 |
"Batch 150 device: cuda time passed: 56.056 time per batch: 0.374\n", |
|
|
807 |
"Batch 200 device: cuda time passed: 72.486 time per batch: 0.362\n", |
|
|
808 |
"Batch 250 device: cuda time passed: 88.576 time per batch: 0.354\n", |
|
|
809 |
"Batch 300 device: cuda time passed: 105.400 time per batch: 0.351\n", |
|
|
810 |
"Batch 350 device: cuda time passed: 122.470 time per batch: 0.350\n", |
|
|
811 |
"Batch 400 device: cuda time passed: 138.730 time per batch: 0.347\n", |
|
|
812 |
"Batch 450 device: cuda time passed: 158.273 time per batch: 0.352\n", |
|
|
813 |
"Batch 500 device: cuda time passed: 174.807 time per batch: 0.350\n", |
|
|
814 |
"Batch 50 device: cuda time passed: 8.288 time per batch: 0.166\n", |
|
|
815 |
"Batch 100 device: cuda time passed: 14.541 time per batch: 0.145\n", |
|
|
816 |
"v35, d14, e10, f3, trn ll: 0.0314, val ll: 0.0623, ll_w: 0.0547, cor: 0.8414, auc: 0.9889, lr: 5e-06\n", |
|
|
817 |
"setFeats, augmentation -1\n", |
|
|
818 |
"Batch 50 device: cuda time passed: 23.117 time per batch: 0.462\n", |
|
|
819 |
"Batch 100 device: cuda time passed: 40.982 time per batch: 0.410\n", |
|
|
820 |
"Batch 150 device: cuda time passed: 61.655 time per batch: 0.411\n", |
|
|
821 |
"Batch 200 device: cuda time passed: 78.830 time per batch: 0.394\n", |
|
|
822 |
"Batch 250 device: cuda time passed: 94.886 time per batch: 0.380\n", |
|
|
823 |
"Batch 300 device: cuda time passed: 111.050 time per batch: 0.370\n", |
|
|
824 |
"Batch 350 device: cuda time passed: 127.401 time per batch: 0.364\n", |
|
|
825 |
"Batch 400 device: cuda time passed: 144.495 time per batch: 0.361\n", |
|
|
826 |
"Batch 450 device: cuda time passed: 163.904 time per batch: 0.364\n", |
|
|
827 |
"Batch 500 device: cuda time passed: 180.762 time per batch: 0.362\n", |
|
|
828 |
"Batch 50 device: cuda time passed: 9.296 time per batch: 0.186\n", |
|
|
829 |
"Batch 100 device: cuda time passed: 15.943 time per batch: 0.159\n", |
|
|
830 |
"v35, d14, e11, f3, trn ll: 0.0314, val ll: 0.0623, ll_w: 0.0548, cor: 0.8414, auc: 0.9890, lr: 5e-06\n", |
|
|
831 |
"total running time 650.1911387443542\n", |
|
|
832 |
"completed epochs: 11 starting now: 2\n", |
|
|
833 |
"DataSet 14 train size 17408 fold 3\n", |
|
|
834 |
"adding dummy serieses 16\n", |
|
|
835 |
"DataSet 14 valid size 4352 fold 3\n", |
|
|
836 |
"setFeats, augmentation 0\n", |
|
|
837 |
"dataset train: 17408 valid: 4352 loader train: 544 valid: 136\n", |
|
|
838 |
"loading model model.b11.f3.d14.v35\n", |
|
|
839 |
"setFeats, augmentation -1\n", |
|
|
840 |
"Batch 50 device: cuda time passed: 22.074 time per batch: 0.441\n", |
|
|
841 |
"Batch 100 device: cuda time passed: 38.499 time per batch: 0.385\n", |
|
|
842 |
"Batch 150 device: cuda time passed: 55.562 time per batch: 0.370\n", |
|
|
843 |
"Batch 200 device: cuda time passed: 71.852 time per batch: 0.359\n", |
|
|
844 |
"Batch 250 device: cuda time passed: 88.012 time per batch: 0.352\n", |
|
|
845 |
"Batch 300 device: cuda time passed: 105.386 time per batch: 0.351\n", |
|
|
846 |
"Batch 350 device: cuda time passed: 123.305 time per batch: 0.352\n", |
|
|
847 |
"Batch 400 device: cuda time passed: 140.065 time per batch: 0.350\n" |
|
|
848 |
] |
|
|
849 |
}, |
|
|
850 |
{ |
|
|
851 |
"name": "stdout", |
|
|
852 |
"output_type": "stream", |
|
|
853 |
"text": [ |
|
|
854 |
"Batch 450 device: cuda time passed: 162.016 time per batch: 0.360\n", |
|
|
855 |
"Batch 500 device: cuda time passed: 179.990 time per batch: 0.360\n", |
|
|
856 |
"Batch 50 device: cuda time passed: 8.722 time per batch: 0.174\n", |
|
|
857 |
"Batch 100 device: cuda time passed: 15.752 time per batch: 0.158\n", |
|
|
858 |
"v35, d14, e12, f3, trn ll: 0.0313, val ll: 0.0622, ll_w: 0.0546, cor: 0.8417, auc: 0.9890, lr: 2e-06\n", |
|
|
859 |
"setFeats, augmentation -1\n", |
|
|
860 |
"Batch 50 device: cuda time passed: 24.253 time per batch: 0.485\n", |
|
|
861 |
"Batch 100 device: cuda time passed: 41.866 time per batch: 0.419\n", |
|
|
862 |
"Batch 150 device: cuda time passed: 60.387 time per batch: 0.403\n", |
|
|
863 |
"Batch 200 device: cuda time passed: 77.524 time per batch: 0.388\n", |
|
|
864 |
"Batch 250 device: cuda time passed: 95.133 time per batch: 0.381\n", |
|
|
865 |
"Batch 300 device: cuda time passed: 113.060 time per batch: 0.377\n", |
|
|
866 |
"Batch 350 device: cuda time passed: 130.037 time per batch: 0.372\n", |
|
|
867 |
"Batch 400 device: cuda time passed: 146.372 time per batch: 0.366\n", |
|
|
868 |
"Batch 450 device: cuda time passed: 166.916 time per batch: 0.371\n", |
|
|
869 |
"Batch 500 device: cuda time passed: 182.928 time per batch: 0.366\n", |
|
|
870 |
"Batch 50 device: cuda time passed: 7.564 time per batch: 0.151\n", |
|
|
871 |
"Batch 100 device: cuda time passed: 14.464 time per batch: 0.145\n", |
|
|
872 |
"v35, d14, e13, f3, trn ll: 0.0313, val ll: 0.0621, ll_w: 0.0546, cor: 0.8417, auc: 0.9890, lr: 2e-06\n", |
|
|
873 |
"total running time 444.2737958431244\n", |
|
|
874 |
"total time 5680.686395645142\n", |
|
|
875 |
"completed epochs: 0 starting now: 4\n", |
|
|
876 |
"DataSet 14 train size 17376 fold 4\n", |
|
|
877 |
"adding dummy serieses 16\n", |
|
|
878 |
"DataSet 14 valid size 4384 fold 4\n", |
|
|
879 |
"setFeats, augmentation 0\n", |
|
|
880 |
"dataset train: 17376 valid: 4384 loader train: 543 valid: 137\n", |
|
|
881 |
"starting from scratch\n", |
|
|
882 |
"setFeats, augmentation -1\n", |
|
|
883 |
"Batch 50 device: cuda time passed: 21.233 time per batch: 0.425\n", |
|
|
884 |
"Batch 100 device: cuda time passed: 37.840 time per batch: 0.378\n", |
|
|
885 |
"Batch 150 device: cuda time passed: 54.944 time per batch: 0.366\n", |
|
|
886 |
"Batch 200 device: cuda time passed: 72.824 time per batch: 0.364\n", |
|
|
887 |
"Batch 250 device: cuda time passed: 89.779 time per batch: 0.359\n", |
|
|
888 |
"Batch 300 device: cuda time passed: 106.930 time per batch: 0.356\n", |
|
|
889 |
"Batch 350 device: cuda time passed: 123.451 time per batch: 0.353\n", |
|
|
890 |
"Batch 400 device: cuda time passed: 139.893 time per batch: 0.350\n", |
|
|
891 |
"Batch 450 device: cuda time passed: 157.031 time per batch: 0.349\n", |
|
|
892 |
"Batch 500 device: cuda time passed: 173.987 time per batch: 0.348\n", |
|
|
893 |
"Batch 50 device: cuda time passed: 8.944 time per batch: 0.179\n", |
|
|
894 |
"Batch 100 device: cuda time passed: 14.936 time per batch: 0.149\n", |
|
|
895 |
"v35, d14, e1, f4, trn ll: 0.0625, val ll: 0.0667, ll_w: 0.0586, cor: 0.8303, auc: 0.9854, lr: 0.0002\n", |
|
|
896 |
"setFeats, augmentation -1\n", |
|
|
897 |
"Batch 50 device: cuda time passed: 21.754 time per batch: 0.435\n", |
|
|
898 |
"Batch 100 device: cuda time passed: 38.725 time per batch: 0.387\n", |
|
|
899 |
"Batch 150 device: cuda time passed: 55.236 time per batch: 0.368\n", |
|
|
900 |
"Batch 200 device: cuda time passed: 72.718 time per batch: 0.364\n", |
|
|
901 |
"Batch 250 device: cuda time passed: 88.735 time per batch: 0.355\n", |
|
|
902 |
"Batch 300 device: cuda time passed: 105.104 time per batch: 0.350\n", |
|
|
903 |
"Batch 350 device: cuda time passed: 122.012 time per batch: 0.349\n", |
|
|
904 |
"Batch 400 device: cuda time passed: 137.964 time per batch: 0.345\n", |
|
|
905 |
"Batch 450 device: cuda time passed: 156.284 time per batch: 0.347\n", |
|
|
906 |
"Batch 500 device: cuda time passed: 173.980 time per batch: 0.348\n", |
|
|
907 |
"Batch 50 device: cuda time passed: 8.316 time per batch: 0.166\n", |
|
|
908 |
"Batch 100 device: cuda time passed: 14.987 time per batch: 0.150\n", |
|
|
909 |
"v35, d14, e2, f4, trn ll: 0.0359, val ll: 0.0657, ll_w: 0.0577, cor: 0.8311, auc: 0.9863, lr: 0.0002\n", |
|
|
910 |
"setFeats, augmentation -1\n", |
|
|
911 |
"Batch 50 device: cuda time passed: 22.364 time per batch: 0.447\n", |
|
|
912 |
"Batch 100 device: cuda time passed: 40.987 time per batch: 0.410\n", |
|
|
913 |
"Batch 150 device: cuda time passed: 58.507 time per batch: 0.390\n", |
|
|
914 |
"Batch 200 device: cuda time passed: 77.763 time per batch: 0.389\n", |
|
|
915 |
"Batch 250 device: cuda time passed: 95.192 time per batch: 0.381\n", |
|
|
916 |
"Batch 300 device: cuda time passed: 111.754 time per batch: 0.373\n", |
|
|
917 |
"Batch 350 device: cuda time passed: 129.000 time per batch: 0.369\n", |
|
|
918 |
"Batch 400 device: cuda time passed: 146.293 time per batch: 0.366\n", |
|
|
919 |
"Batch 450 device: cuda time passed: 165.444 time per batch: 0.368\n", |
|
|
920 |
"Batch 500 device: cuda time passed: 183.020 time per batch: 0.366\n", |
|
|
921 |
"Batch 50 device: cuda time passed: 7.861 time per batch: 0.157\n", |
|
|
922 |
"Batch 100 device: cuda time passed: 15.124 time per batch: 0.151\n", |
|
|
923 |
"v35, d14, e3, f4, trn ll: 0.0343, val ll: 0.0632, ll_w: 0.0554, cor: 0.8399, auc: 0.9873, lr: 0.0002\n", |
|
|
924 |
"setFeats, augmentation -1\n", |
|
|
925 |
"Batch 50 device: cuda time passed: 21.633 time per batch: 0.433\n", |
|
|
926 |
"Batch 100 device: cuda time passed: 37.934 time per batch: 0.379\n", |
|
|
927 |
"Batch 150 device: cuda time passed: 55.032 time per batch: 0.367\n", |
|
|
928 |
"Batch 200 device: cuda time passed: 72.134 time per batch: 0.361\n", |
|
|
929 |
"Batch 250 device: cuda time passed: 89.244 time per batch: 0.357\n", |
|
|
930 |
"Batch 300 device: cuda time passed: 107.215 time per batch: 0.357\n", |
|
|
931 |
"Batch 350 device: cuda time passed: 125.071 time per batch: 0.357\n", |
|
|
932 |
"Batch 400 device: cuda time passed: 142.857 time per batch: 0.357\n", |
|
|
933 |
"Batch 450 device: cuda time passed: 164.440 time per batch: 0.365\n", |
|
|
934 |
"Batch 500 device: cuda time passed: 181.712 time per batch: 0.363\n", |
|
|
935 |
"Batch 50 device: cuda time passed: 8.480 time per batch: 0.170\n", |
|
|
936 |
"Batch 100 device: cuda time passed: 15.129 time per batch: 0.151\n", |
|
|
937 |
"v35, d14, e4, f4, trn ll: 0.0335, val ll: 0.0664, ll_w: 0.0588, cor: 0.8346, auc: 0.9881, lr: 0.0002\n", |
|
|
938 |
"total running time 953.7901792526245\n", |
|
|
939 |
"completed epochs: 4 starting now: 4\n", |
|
|
940 |
"DataSet 14 train size 17376 fold 4\n", |
|
|
941 |
"adding dummy serieses 16\n", |
|
|
942 |
"DataSet 14 valid size 4384 fold 4\n", |
|
|
943 |
"setFeats, augmentation 0\n", |
|
|
944 |
"dataset train: 17376 valid: 4384 loader train: 543 valid: 137\n", |
|
|
945 |
"loading model model.b4.f4.d14.v35\n", |
|
|
946 |
"setFeats, augmentation -1\n", |
|
|
947 |
"Batch 50 device: cuda time passed: 23.867 time per batch: 0.477\n", |
|
|
948 |
"Batch 100 device: cuda time passed: 41.869 time per batch: 0.419\n", |
|
|
949 |
"Batch 150 device: cuda time passed: 59.945 time per batch: 0.400\n", |
|
|
950 |
"Batch 200 device: cuda time passed: 78.035 time per batch: 0.390\n", |
|
|
951 |
"Batch 250 device: cuda time passed: 95.686 time per batch: 0.383\n", |
|
|
952 |
"Batch 300 device: cuda time passed: 113.281 time per batch: 0.378\n", |
|
|
953 |
"Batch 350 device: cuda time passed: 130.764 time per batch: 0.374\n", |
|
|
954 |
"Batch 400 device: cuda time passed: 148.630 time per batch: 0.372\n", |
|
|
955 |
"Batch 450 device: cuda time passed: 170.233 time per batch: 0.378\n", |
|
|
956 |
"Batch 500 device: cuda time passed: 188.222 time per batch: 0.376\n", |
|
|
957 |
"Batch 50 device: cuda time passed: 8.176 time per batch: 0.164\n", |
|
|
958 |
"Batch 100 device: cuda time passed: 14.735 time per batch: 0.147\n", |
|
|
959 |
"v35, d14, e5, f4, trn ll: 0.0317, val ll: 0.0608, ll_w: 0.0533, cor: 0.8448, auc: 0.9887, lr: 2e-05\n", |
|
|
960 |
"setFeats, augmentation -1\n", |
|
|
961 |
"Batch 50 device: cuda time passed: 21.737 time per batch: 0.435\n", |
|
|
962 |
"Batch 100 device: cuda time passed: 37.853 time per batch: 0.379\n", |
|
|
963 |
"Batch 150 device: cuda time passed: 55.337 time per batch: 0.369\n", |
|
|
964 |
"Batch 200 device: cuda time passed: 71.157 time per batch: 0.356\n", |
|
|
965 |
"Batch 250 device: cuda time passed: 87.010 time per batch: 0.348\n", |
|
|
966 |
"Batch 300 device: cuda time passed: 103.209 time per batch: 0.344\n", |
|
|
967 |
"Batch 350 device: cuda time passed: 119.077 time per batch: 0.340\n", |
|
|
968 |
"Batch 400 device: cuda time passed: 135.723 time per batch: 0.339\n", |
|
|
969 |
"Batch 450 device: cuda time passed: 154.558 time per batch: 0.343\n", |
|
|
970 |
"Batch 500 device: cuda time passed: 170.455 time per batch: 0.341\n", |
|
|
971 |
"Batch 50 device: cuda time passed: 7.589 time per batch: 0.152\n", |
|
|
972 |
"Batch 100 device: cuda time passed: 14.520 time per batch: 0.145\n", |
|
|
973 |
"v35, d14, e6, f4, trn ll: 0.0314, val ll: 0.0611, ll_w: 0.0535, cor: 0.8444, auc: 0.9886, lr: 2e-05\n", |
|
|
974 |
"setFeats, augmentation -1\n", |
|
|
975 |
"Batch 50 device: cuda time passed: 20.682 time per batch: 0.414\n", |
|
|
976 |
"Batch 100 device: cuda time passed: 36.792 time per batch: 0.368\n", |
|
|
977 |
"Batch 150 device: cuda time passed: 52.970 time per batch: 0.353\n", |
|
|
978 |
"Batch 200 device: cuda time passed: 69.095 time per batch: 0.345\n", |
|
|
979 |
"Batch 250 device: cuda time passed: 85.616 time per batch: 0.342\n", |
|
|
980 |
"Batch 300 device: cuda time passed: 101.163 time per batch: 0.337\n", |
|
|
981 |
"Batch 350 device: cuda time passed: 117.018 time per batch: 0.334\n", |
|
|
982 |
"Batch 400 device: cuda time passed: 133.178 time per batch: 0.333\n", |
|
|
983 |
"Batch 450 device: cuda time passed: 151.207 time per batch: 0.336\n", |
|
|
984 |
"Batch 500 device: cuda time passed: 168.530 time per batch: 0.337\n", |
|
|
985 |
"Batch 50 device: cuda time passed: 7.677 time per batch: 0.154\n", |
|
|
986 |
"Batch 100 device: cuda time passed: 14.814 time per batch: 0.148\n", |
|
|
987 |
"v35, d14, e7, f4, trn ll: 0.0312, val ll: 0.0609, ll_w: 0.0534, cor: 0.8449, auc: 0.9886, lr: 2e-05\n", |
|
|
988 |
"setFeats, augmentation -1\n" |
|
|
989 |
] |
|
|
990 |
}, |
|
|
991 |
{ |
|
|
992 |
"name": "stdout", |
|
|
993 |
"output_type": "stream", |
|
|
994 |
"text": [ |
|
|
995 |
"Batch 50 device: cuda time passed: 21.214 time per batch: 0.424\n", |
|
|
996 |
"Batch 100 device: cuda time passed: 37.518 time per batch: 0.375\n", |
|
|
997 |
"Batch 150 device: cuda time passed: 54.071 time per batch: 0.360\n", |
|
|
998 |
"Batch 200 device: cuda time passed: 69.747 time per batch: 0.349\n", |
|
|
999 |
"Batch 250 device: cuda time passed: 85.969 time per batch: 0.344\n", |
|
|
1000 |
"Batch 300 device: cuda time passed: 102.031 time per batch: 0.340\n", |
|
|
1001 |
"Batch 350 device: cuda time passed: 119.208 time per batch: 0.341\n", |
|
|
1002 |
"Batch 400 device: cuda time passed: 134.735 time per batch: 0.337\n", |
|
|
1003 |
"Batch 450 device: cuda time passed: 154.078 time per batch: 0.342\n", |
|
|
1004 |
"Batch 500 device: cuda time passed: 170.411 time per batch: 0.341\n", |
|
|
1005 |
"Batch 50 device: cuda time passed: 8.323 time per batch: 0.166\n", |
|
|
1006 |
"Batch 100 device: cuda time passed: 14.637 time per batch: 0.146\n", |
|
|
1007 |
"v35, d14, e8, f4, trn ll: 0.0311, val ll: 0.0610, ll_w: 0.0534, cor: 0.8453, auc: 0.9885, lr: 2e-05\n", |
|
|
1008 |
"total running time 855.8814930915833\n", |
|
|
1009 |
"completed epochs: 8 starting now: 3\n", |
|
|
1010 |
"DataSet 14 train size 17376 fold 4\n", |
|
|
1011 |
"adding dummy serieses 16\n", |
|
|
1012 |
"DataSet 14 valid size 4384 fold 4\n", |
|
|
1013 |
"setFeats, augmentation 0\n", |
|
|
1014 |
"dataset train: 17376 valid: 4384 loader train: 543 valid: 137\n", |
|
|
1015 |
"loading model model.b8.f4.d14.v35\n", |
|
|
1016 |
"setFeats, augmentation -1\n", |
|
|
1017 |
"Batch 50 device: cuda time passed: 21.184 time per batch: 0.424\n", |
|
|
1018 |
"Batch 100 device: cuda time passed: 37.215 time per batch: 0.372\n", |
|
|
1019 |
"Batch 150 device: cuda time passed: 53.050 time per batch: 0.354\n", |
|
|
1020 |
"Batch 200 device: cuda time passed: 69.171 time per batch: 0.346\n", |
|
|
1021 |
"Batch 250 device: cuda time passed: 85.650 time per batch: 0.343\n", |
|
|
1022 |
"Batch 300 device: cuda time passed: 101.736 time per batch: 0.339\n", |
|
|
1023 |
"Batch 350 device: cuda time passed: 117.355 time per batch: 0.335\n", |
|
|
1024 |
"Batch 400 device: cuda time passed: 133.001 time per batch: 0.333\n", |
|
|
1025 |
"Batch 450 device: cuda time passed: 152.997 time per batch: 0.340\n", |
|
|
1026 |
"Batch 500 device: cuda time passed: 169.252 time per batch: 0.339\n", |
|
|
1027 |
"Batch 50 device: cuda time passed: 7.640 time per batch: 0.153\n", |
|
|
1028 |
"Batch 100 device: cuda time passed: 14.457 time per batch: 0.145\n", |
|
|
1029 |
"v35, d14, e9, f4, trn ll: 0.0310, val ll: 0.0609, ll_w: 0.0532, cor: 0.8452, auc: 0.9886, lr: 5e-06\n", |
|
|
1030 |
"setFeats, augmentation -1\n", |
|
|
1031 |
"Batch 50 device: cuda time passed: 20.694 time per batch: 0.414\n", |
|
|
1032 |
"Batch 100 device: cuda time passed: 37.491 time per batch: 0.375\n", |
|
|
1033 |
"Batch 150 device: cuda time passed: 53.734 time per batch: 0.358\n", |
|
|
1034 |
"Batch 200 device: cuda time passed: 69.955 time per batch: 0.350\n", |
|
|
1035 |
"Batch 250 device: cuda time passed: 85.941 time per batch: 0.344\n", |
|
|
1036 |
"Batch 300 device: cuda time passed: 101.942 time per batch: 0.340\n", |
|
|
1037 |
"Batch 350 device: cuda time passed: 117.779 time per batch: 0.337\n", |
|
|
1038 |
"Batch 400 device: cuda time passed: 133.697 time per batch: 0.334\n", |
|
|
1039 |
"Batch 450 device: cuda time passed: 151.893 time per batch: 0.338\n", |
|
|
1040 |
"Batch 500 device: cuda time passed: 169.235 time per batch: 0.338\n", |
|
|
1041 |
"Batch 50 device: cuda time passed: 8.345 time per batch: 0.167\n", |
|
|
1042 |
"Batch 100 device: cuda time passed: 14.826 time per batch: 0.148\n", |
|
|
1043 |
"v35, d14, e10, f4, trn ll: 0.0308, val ll: 0.0608, ll_w: 0.0532, cor: 0.8453, auc: 0.9887, lr: 5e-06\n", |
|
|
1044 |
"setFeats, augmentation -1\n", |
|
|
1045 |
"Batch 50 device: cuda time passed: 21.099 time per batch: 0.422\n", |
|
|
1046 |
"Batch 100 device: cuda time passed: 36.777 time per batch: 0.368\n", |
|
|
1047 |
"Batch 150 device: cuda time passed: 53.978 time per batch: 0.360\n", |
|
|
1048 |
"Batch 200 device: cuda time passed: 69.323 time per batch: 0.347\n", |
|
|
1049 |
"Batch 250 device: cuda time passed: 85.573 time per batch: 0.342\n", |
|
|
1050 |
"Batch 300 device: cuda time passed: 102.080 time per batch: 0.340\n", |
|
|
1051 |
"Batch 350 device: cuda time passed: 118.682 time per batch: 0.339\n", |
|
|
1052 |
"Batch 400 device: cuda time passed: 135.171 time per batch: 0.338\n", |
|
|
1053 |
"Batch 450 device: cuda time passed: 152.631 time per batch: 0.339\n", |
|
|
1054 |
"Batch 500 device: cuda time passed: 167.938 time per batch: 0.336\n", |
|
|
1055 |
"Batch 50 device: cuda time passed: 8.310 time per batch: 0.166\n", |
|
|
1056 |
"Batch 100 device: cuda time passed: 14.317 time per batch: 0.143\n", |
|
|
1057 |
"v35, d14, e11, f4, trn ll: 0.0308, val ll: 0.0607, ll_w: 0.0532, cor: 0.8454, auc: 0.9887, lr: 5e-06\n", |
|
|
1058 |
"total running time 623.4176635742188\n", |
|
|
1059 |
"completed epochs: 11 starting now: 2\n", |
|
|
1060 |
"DataSet 14 train size 17376 fold 4\n", |
|
|
1061 |
"adding dummy serieses 16\n", |
|
|
1062 |
"DataSet 14 valid size 4384 fold 4\n", |
|
|
1063 |
"setFeats, augmentation 0\n", |
|
|
1064 |
"dataset train: 17376 valid: 4384 loader train: 543 valid: 137\n", |
|
|
1065 |
"loading model model.b11.f4.d14.v35\n", |
|
|
1066 |
"setFeats, augmentation -1\n", |
|
|
1067 |
"Batch 50 device: cuda time passed: 21.779 time per batch: 0.436\n", |
|
|
1068 |
"Batch 100 device: cuda time passed: 38.087 time per batch: 0.381\n", |
|
|
1069 |
"Batch 150 device: cuda time passed: 53.619 time per batch: 0.357\n", |
|
|
1070 |
"Batch 200 device: cuda time passed: 69.523 time per batch: 0.348\n", |
|
|
1071 |
"Batch 250 device: cuda time passed: 85.874 time per batch: 0.343\n", |
|
|
1072 |
"Batch 300 device: cuda time passed: 101.439 time per batch: 0.338\n", |
|
|
1073 |
"Batch 350 device: cuda time passed: 117.336 time per batch: 0.335\n", |
|
|
1074 |
"Batch 400 device: cuda time passed: 133.035 time per batch: 0.333\n", |
|
|
1075 |
"Batch 450 device: cuda time passed: 152.595 time per batch: 0.339\n", |
|
|
1076 |
"Batch 500 device: cuda time passed: 169.052 time per batch: 0.338\n", |
|
|
1077 |
"Batch 50 device: cuda time passed: 8.452 time per batch: 0.169\n", |
|
|
1078 |
"Batch 100 device: cuda time passed: 14.411 time per batch: 0.144\n", |
|
|
1079 |
"v35, d14, e12, f4, trn ll: 0.0307, val ll: 0.0609, ll_w: 0.0532, cor: 0.8456, auc: 0.9886, lr: 2e-06\n", |
|
|
1080 |
"setFeats, augmentation -1\n", |
|
|
1081 |
"Batch 50 device: cuda time passed: 20.947 time per batch: 0.419\n", |
|
|
1082 |
"Batch 100 device: cuda time passed: 37.425 time per batch: 0.374\n", |
|
|
1083 |
"Batch 150 device: cuda time passed: 52.922 time per batch: 0.353\n", |
|
|
1084 |
"Batch 200 device: cuda time passed: 69.971 time per batch: 0.350\n", |
|
|
1085 |
"Batch 250 device: cuda time passed: 85.655 time per batch: 0.343\n", |
|
|
1086 |
"Batch 300 device: cuda time passed: 101.705 time per batch: 0.339\n", |
|
|
1087 |
"Batch 350 device: cuda time passed: 117.494 time per batch: 0.336\n", |
|
|
1088 |
"Batch 400 device: cuda time passed: 134.239 time per batch: 0.336\n", |
|
|
1089 |
"Batch 450 device: cuda time passed: 154.606 time per batch: 0.344\n", |
|
|
1090 |
"Batch 500 device: cuda time passed: 171.257 time per batch: 0.343\n", |
|
|
1091 |
"Batch 50 device: cuda time passed: 8.181 time per batch: 0.164\n", |
|
|
1092 |
"Batch 100 device: cuda time passed: 14.339 time per batch: 0.143\n", |
|
|
1093 |
"v35, d14, e13, f4, trn ll: 0.0308, val ll: 0.0607, ll_w: 0.0532, cor: 0.8454, auc: 0.9887, lr: 2e-06\n", |
|
|
1094 |
"total running time 417.99802231788635\n", |
|
|
1095 |
"total time 8532.541821241379\n", |
|
|
1096 |
"total time 8532.541992664337\n", |
|
|
1097 |
"total time 8532.54287481308\n" |
|
|
1098 |
] |
|
|
1099 |
} |
|
|
1100 |
], |
|
|
1101 |
"source": [ |
|
|
1102 |
"weight_decay = 1e-4\n", |
|
|
1103 |
"lrs = np.array([2e-4, 2e-5, 5e-6, 2e-6])\n", |
|
|
1104 |
"epochs = np.array([4, 4, 3, 2])\n", |
|
|
1105 |
"stg = time.time()\n", |
|
|
1106 |
"for ds in [14]:\n", |
|
|
1107 |
" #folds = getNFolds(ds)\n", |
|
|
1108 |
" for f in [2,3,4]:#range(folds):\n", |
|
|
1109 |
" for i,lr in enumerate(lrs):\n", |
|
|
1110 |
" learning_rate = lr\n", |
|
|
1111 |
" model, predictions, val_results = train_one(dataset=ds, epochs=epochs[i], bs=32, fold=f)\n", |
|
|
1112 |
" print('total time', time.time() - stg)\n", |
|
|
1113 |
" print('total time', time.time() - stg)\n", |
|
|
1114 |
"print('total time', time.time() - stg)" |
|
|
1115 |
] |
|
|
1116 |
}, |
|
|
1117 |
{ |
|
|
1118 |
"cell_type": "code", |
|
|
1119 |
"execution_count": null, |
|
|
1120 |
"metadata": {}, |
|
|
1121 |
"outputs": [], |
|
|
1122 |
"source": [] |
|
|
1123 |
}, |
|
|
1124 |
{ |
|
|
1125 |
"cell_type": "code", |
|
|
1126 |
"execution_count": 11, |
|
|
1127 |
"metadata": { |
|
|
1128 |
"scrolled": true |
|
|
1129 |
}, |
|
|
1130 |
"outputs": [ |
|
|
1131 |
{ |
|
|
1132 |
"name": "stdout", |
|
|
1133 |
"output_type": "stream", |
|
|
1134 |
"text": [ |
|
|
1135 |
"completed epochs: 0 starting now: 2\n", |
|
|
1136 |
"DataSet 14 train size 17369 fold 0\n", |
|
|
1137 |
"adding dummy serieses 9\n", |
|
|
1138 |
"DataSet 14 valid size 4384 fold 0\n", |
|
|
1139 |
"setFeats, augmentation 0\n", |
|
|
1140 |
"WeightedRandomSampler\n", |
|
|
1141 |
"dataset train: 17369 valid: 4384 loader train: 542 valid: 137\n", |
|
|
1142 |
"loading model model.b13.f0.d14.v35\n", |
|
|
1143 |
"setFeats, augmentation -1\n", |
|
|
1144 |
"Batch 50 device: cuda time passed: 21.626 time per batch: 0.433\n", |
|
|
1145 |
"Batch 100 device: cuda time passed: 37.191 time per batch: 0.372\n", |
|
|
1146 |
"Batch 150 device: cuda time passed: 53.924 time per batch: 0.359\n", |
|
|
1147 |
"Batch 200 device: cuda time passed: 69.954 time per batch: 0.350\n", |
|
|
1148 |
"Batch 250 device: cuda time passed: 85.591 time per batch: 0.342\n", |
|
|
1149 |
"Batch 300 device: cuda time passed: 101.332 time per batch: 0.338\n", |
|
|
1150 |
"Batch 350 device: cuda time passed: 118.288 time per batch: 0.338\n", |
|
|
1151 |
"Batch 400 device: cuda time passed: 134.411 time per batch: 0.336\n", |
|
|
1152 |
"Batch 450 device: cuda time passed: 155.215 time per batch: 0.345\n", |
|
|
1153 |
"Batch 500 device: cuda time passed: 171.357 time per batch: 0.343\n", |
|
|
1154 |
"Batch 50 device: cuda time passed: 8.519 time per batch: 0.170\n", |
|
|
1155 |
"Batch 100 device: cuda time passed: 14.697 time per batch: 0.147\n", |
|
|
1156 |
"v36, d14, e1, f0, trn ll: 0.0293, val ll: 0.0595, ll_w: 0.0575, cor: 0.8474, auc: 0.9893, lr: 1e-05\n", |
|
|
1157 |
"setFeats, augmentation -1\n", |
|
|
1158 |
"Batch 50 device: cuda time passed: 20.234 time per batch: 0.405\n", |
|
|
1159 |
"Batch 100 device: cuda time passed: 37.254 time per batch: 0.373\n", |
|
|
1160 |
"Batch 150 device: cuda time passed: 53.470 time per batch: 0.356\n", |
|
|
1161 |
"Batch 200 device: cuda time passed: 69.176 time per batch: 0.346\n", |
|
|
1162 |
"Batch 250 device: cuda time passed: 85.543 time per batch: 0.342\n", |
|
|
1163 |
"Batch 300 device: cuda time passed: 102.066 time per batch: 0.340\n", |
|
|
1164 |
"Batch 350 device: cuda time passed: 118.857 time per batch: 0.340\n", |
|
|
1165 |
"Batch 400 device: cuda time passed: 134.521 time per batch: 0.336\n", |
|
|
1166 |
"Batch 450 device: cuda time passed: 153.072 time per batch: 0.340\n", |
|
|
1167 |
"Batch 500 device: cuda time passed: 170.818 time per batch: 0.342\n", |
|
|
1168 |
"Batch 50 device: cuda time passed: 8.401 time per batch: 0.168\n", |
|
|
1169 |
"Batch 100 device: cuda time passed: 14.616 time per batch: 0.146\n", |
|
|
1170 |
"v36, d14, e2, f0, trn ll: 0.0302, val ll: 0.0595, ll_w: 0.0574, cor: 0.8477, auc: 0.9893, lr: 1e-05\n", |
|
|
1171 |
"total running time 458.30859661102295\n", |
|
|
1172 |
"completed epochs: 2 starting now: 1\n", |
|
|
1173 |
"DataSet 14 train size 17369 fold 0\n", |
|
|
1174 |
"adding dummy serieses 9\n", |
|
|
1175 |
"DataSet 14 valid size 4384 fold 0\n", |
|
|
1176 |
"setFeats, augmentation 0\n", |
|
|
1177 |
"WeightedRandomSampler\n", |
|
|
1178 |
"dataset train: 17369 valid: 4384 loader train: 542 valid: 137\n", |
|
|
1179 |
"loading model model.b2.f0.d14.v36\n", |
|
|
1180 |
"setFeats, augmentation -1\n", |
|
|
1181 |
"Batch 50 device: cuda time passed: 21.426 time per batch: 0.429\n", |
|
|
1182 |
"Batch 100 device: cuda time passed: 38.129 time per batch: 0.381\n", |
|
|
1183 |
"Batch 150 device: cuda time passed: 53.906 time per batch: 0.359\n", |
|
|
1184 |
"Batch 200 device: cuda time passed: 70.144 time per batch: 0.351\n", |
|
|
1185 |
"Batch 250 device: cuda time passed: 86.070 time per batch: 0.344\n", |
|
|
1186 |
"Batch 300 device: cuda time passed: 102.134 time per batch: 0.340\n", |
|
|
1187 |
"Batch 350 device: cuda time passed: 118.405 time per batch: 0.338\n", |
|
|
1188 |
"Batch 400 device: cuda time passed: 134.645 time per batch: 0.337\n", |
|
|
1189 |
"Batch 450 device: cuda time passed: 155.131 time per batch: 0.345\n", |
|
|
1190 |
"Batch 500 device: cuda time passed: 171.499 time per batch: 0.343\n", |
|
|
1191 |
"Batch 50 device: cuda time passed: 8.219 time per batch: 0.164\n", |
|
|
1192 |
"Batch 100 device: cuda time passed: 14.580 time per batch: 0.146\n", |
|
|
1193 |
"v36, d14, e3, f0, trn ll: 0.0298, val ll: 0.0595, ll_w: 0.0575, cor: 0.8472, auc: 0.9893, lr: 5e-06\n", |
|
|
1194 |
"total running time 213.19121074676514\n", |
|
|
1195 |
"total time 671.833601474762\n", |
|
|
1196 |
"completed epochs: 0 starting now: 2\n", |
|
|
1197 |
"DataSet 14 train size 17468 fold 1\n", |
|
|
1198 |
"adding dummy serieses 12\n", |
|
|
1199 |
"DataSet 14 valid size 4288 fold 1\n", |
|
|
1200 |
"setFeats, augmentation 0\n", |
|
|
1201 |
"WeightedRandomSampler\n", |
|
|
1202 |
"dataset train: 17468 valid: 4288 loader train: 545 valid: 134\n", |
|
|
1203 |
"loading model model.b13.f1.d14.v35\n", |
|
|
1204 |
"setFeats, augmentation -1\n", |
|
|
1205 |
"Batch 50 device: cuda time passed: 21.207 time per batch: 0.424\n", |
|
|
1206 |
"Batch 100 device: cuda time passed: 37.348 time per batch: 0.373\n", |
|
|
1207 |
"Batch 150 device: cuda time passed: 53.336 time per batch: 0.356\n", |
|
|
1208 |
"Batch 200 device: cuda time passed: 69.851 time per batch: 0.349\n", |
|
|
1209 |
"Batch 250 device: cuda time passed: 86.062 time per batch: 0.344\n", |
|
|
1210 |
"Batch 300 device: cuda time passed: 102.322 time per batch: 0.341\n", |
|
|
1211 |
"Batch 350 device: cuda time passed: 119.087 time per batch: 0.340\n", |
|
|
1212 |
"Batch 400 device: cuda time passed: 135.502 time per batch: 0.339\n", |
|
|
1213 |
"Batch 450 device: cuda time passed: 154.983 time per batch: 0.344\n", |
|
|
1214 |
"Batch 500 device: cuda time passed: 172.005 time per batch: 0.344\n", |
|
|
1215 |
"Batch 50 device: cuda time passed: 8.149 time per batch: 0.163\n", |
|
|
1216 |
"Batch 100 device: cuda time passed: 14.844 time per batch: 0.148\n", |
|
|
1217 |
"v36, d14, e1, f1, trn ll: 0.0290, val ll: 0.0593, ll_w: 0.0580, cor: 0.8483, auc: 0.9901, lr: 1e-05\n", |
|
|
1218 |
"setFeats, augmentation -1\n", |
|
|
1219 |
"Batch 50 device: cuda time passed: 20.749 time per batch: 0.415\n", |
|
|
1220 |
"Batch 100 device: cuda time passed: 37.137 time per batch: 0.371\n", |
|
|
1221 |
"Batch 150 device: cuda time passed: 53.310 time per batch: 0.355\n", |
|
|
1222 |
"Batch 200 device: cuda time passed: 69.517 time per batch: 0.348\n", |
|
|
1223 |
"Batch 250 device: cuda time passed: 85.902 time per batch: 0.344\n", |
|
|
1224 |
"Batch 300 device: cuda time passed: 101.682 time per batch: 0.339\n", |
|
|
1225 |
"Batch 350 device: cuda time passed: 118.095 time per batch: 0.337\n", |
|
|
1226 |
"Batch 400 device: cuda time passed: 134.853 time per batch: 0.337\n", |
|
|
1227 |
"Batch 450 device: cuda time passed: 155.186 time per batch: 0.345\n", |
|
|
1228 |
"Batch 500 device: cuda time passed: 171.021 time per batch: 0.342\n", |
|
|
1229 |
"Batch 50 device: cuda time passed: 7.627 time per batch: 0.153\n", |
|
|
1230 |
"Batch 100 device: cuda time passed: 13.832 time per batch: 0.138\n", |
|
|
1231 |
"v36, d14, e2, f1, trn ll: 0.0294, val ll: 0.0592, ll_w: 0.0579, cor: 0.8485, auc: 0.9901, lr: 1e-05\n", |
|
|
1232 |
"total running time 450.5849003791809\n", |
|
|
1233 |
"completed epochs: 2 starting now: 1\n", |
|
|
1234 |
"DataSet 14 train size 17468 fold 1\n", |
|
|
1235 |
"adding dummy serieses 12\n", |
|
|
1236 |
"DataSet 14 valid size 4288 fold 1\n", |
|
|
1237 |
"setFeats, augmentation 0\n", |
|
|
1238 |
"WeightedRandomSampler\n", |
|
|
1239 |
"dataset train: 17468 valid: 4288 loader train: 545 valid: 134\n", |
|
|
1240 |
"loading model model.b2.f1.d14.v36\n", |
|
|
1241 |
"setFeats, augmentation -1\n", |
|
|
1242 |
"Batch 50 device: cuda time passed: 21.513 time per batch: 0.430\n", |
|
|
1243 |
"Batch 100 device: cuda time passed: 37.783 time per batch: 0.378\n", |
|
|
1244 |
"Batch 150 device: cuda time passed: 53.955 time per batch: 0.360\n", |
|
|
1245 |
"Batch 200 device: cuda time passed: 70.656 time per batch: 0.353\n", |
|
|
1246 |
"Batch 250 device: cuda time passed: 86.881 time per batch: 0.348\n", |
|
|
1247 |
"Batch 300 device: cuda time passed: 102.464 time per batch: 0.342\n", |
|
|
1248 |
"Batch 350 device: cuda time passed: 119.210 time per batch: 0.341\n", |
|
|
1249 |
"Batch 400 device: cuda time passed: 135.295 time per batch: 0.338\n", |
|
|
1250 |
"Batch 450 device: cuda time passed: 155.318 time per batch: 0.345\n", |
|
|
1251 |
"Batch 500 device: cuda time passed: 171.368 time per batch: 0.343\n", |
|
|
1252 |
"Batch 50 device: cuda time passed: 8.426 time per batch: 0.169\n", |
|
|
1253 |
"Batch 100 device: cuda time passed: 14.857 time per batch: 0.149\n", |
|
|
1254 |
"v36, d14, e3, f1, trn ll: 0.0287, val ll: 0.0593, ll_w: 0.0580, cor: 0.8484, auc: 0.9900, lr: 5e-06\n", |
|
|
1255 |
"total running time 213.8144974708557\n", |
|
|
1256 |
"total time 1336.568071603775\n", |
|
|
1257 |
"completed epochs: 0 starting now: 2\n", |
|
|
1258 |
"DataSet 14 train size 17355 fold 2\n", |
|
|
1259 |
"adding dummy serieses 27\n", |
|
|
1260 |
"DataSet 14 valid size 4416 fold 2\n", |
|
|
1261 |
"setFeats, augmentation 0\n", |
|
|
1262 |
"WeightedRandomSampler\n", |
|
|
1263 |
"dataset train: 17355 valid: 4416 loader train: 542 valid: 138\n", |
|
|
1264 |
"loading model model.b13.f2.d14.v35\n", |
|
|
1265 |
"setFeats, augmentation -1\n", |
|
|
1266 |
"Batch 50 device: cuda time passed: 21.352 time per batch: 0.427\n", |
|
|
1267 |
"Batch 100 device: cuda time passed: 37.326 time per batch: 0.373\n", |
|
|
1268 |
"Batch 150 device: cuda time passed: 53.162 time per batch: 0.354\n", |
|
|
1269 |
"Batch 200 device: cuda time passed: 68.927 time per batch: 0.345\n", |
|
|
1270 |
"Batch 250 device: cuda time passed: 84.995 time per batch: 0.340\n", |
|
|
1271 |
"Batch 300 device: cuda time passed: 101.175 time per batch: 0.337\n", |
|
|
1272 |
"Batch 350 device: cuda time passed: 117.550 time per batch: 0.336\n", |
|
|
1273 |
"Batch 400 device: cuda time passed: 134.080 time per batch: 0.335\n", |
|
|
1274 |
"Batch 450 device: cuda time passed: 153.286 time per batch: 0.341\n", |
|
|
1275 |
"Batch 500 device: cuda time passed: 169.635 time per batch: 0.339\n", |
|
|
1276 |
"Batch 50 device: cuda time passed: 8.047 time per batch: 0.161\n", |
|
|
1277 |
"Batch 100 device: cuda time passed: 14.520 time per batch: 0.145\n", |
|
|
1278 |
"v36, d14, e1, f2, trn ll: 0.0284, val ll: 0.0606, ll_w: 0.0577, cor: 0.8449, auc: 0.9889, lr: 1e-05\n", |
|
|
1279 |
"setFeats, augmentation -1\n", |
|
|
1280 |
"Batch 50 device: cuda time passed: 20.672 time per batch: 0.413\n", |
|
|
1281 |
"Batch 100 device: cuda time passed: 37.469 time per batch: 0.375\n", |
|
|
1282 |
"Batch 150 device: cuda time passed: 54.116 time per batch: 0.361\n", |
|
|
1283 |
"Batch 200 device: cuda time passed: 69.517 time per batch: 0.348\n" |
|
|
1284 |
] |
|
|
1285 |
}, |
|
|
1286 |
{ |
|
|
1287 |
"name": "stdout", |
|
|
1288 |
"output_type": "stream", |
|
|
1289 |
"text": [ |
|
|
1290 |
"Batch 250 device: cuda time passed: 86.149 time per batch: 0.345\n", |
|
|
1291 |
"Batch 300 device: cuda time passed: 102.999 time per batch: 0.343\n", |
|
|
1292 |
"Batch 350 device: cuda time passed: 119.363 time per batch: 0.341\n", |
|
|
1293 |
"Batch 400 device: cuda time passed: 135.685 time per batch: 0.339\n", |
|
|
1294 |
"Batch 450 device: cuda time passed: 152.529 time per batch: 0.339\n", |
|
|
1295 |
"Batch 500 device: cuda time passed: 172.402 time per batch: 0.345\n", |
|
|
1296 |
"Batch 50 device: cuda time passed: 8.181 time per batch: 0.164\n", |
|
|
1297 |
"Batch 100 device: cuda time passed: 14.485 time per batch: 0.145\n", |
|
|
1298 |
"v36, d14, e2, f2, trn ll: 0.0292, val ll: 0.0600, ll_w: 0.0571, cor: 0.8455, auc: 0.9891, lr: 1e-05\n", |
|
|
1299 |
"total running time 443.0156455039978\n", |
|
|
1300 |
"completed epochs: 2 starting now: 1\n", |
|
|
1301 |
"DataSet 14 train size 17355 fold 2\n", |
|
|
1302 |
"adding dummy serieses 27\n", |
|
|
1303 |
"DataSet 14 valid size 4416 fold 2\n", |
|
|
1304 |
"setFeats, augmentation 0\n", |
|
|
1305 |
"WeightedRandomSampler\n", |
|
|
1306 |
"dataset train: 17355 valid: 4416 loader train: 542 valid: 138\n", |
|
|
1307 |
"loading model model.b2.f2.d14.v36\n", |
|
|
1308 |
"setFeats, augmentation -1\n", |
|
|
1309 |
"Batch 50 device: cuda time passed: 20.755 time per batch: 0.415\n", |
|
|
1310 |
"Batch 100 device: cuda time passed: 36.901 time per batch: 0.369\n", |
|
|
1311 |
"Batch 150 device: cuda time passed: 52.968 time per batch: 0.353\n", |
|
|
1312 |
"Batch 200 device: cuda time passed: 69.214 time per batch: 0.346\n", |
|
|
1313 |
"Batch 250 device: cuda time passed: 85.613 time per batch: 0.342\n", |
|
|
1314 |
"Batch 300 device: cuda time passed: 102.234 time per batch: 0.341\n", |
|
|
1315 |
"Batch 350 device: cuda time passed: 118.636 time per batch: 0.339\n", |
|
|
1316 |
"Batch 400 device: cuda time passed: 135.389 time per batch: 0.338\n", |
|
|
1317 |
"Batch 450 device: cuda time passed: 155.574 time per batch: 0.346\n", |
|
|
1318 |
"Batch 500 device: cuda time passed: 171.428 time per batch: 0.343\n", |
|
|
1319 |
"Batch 50 device: cuda time passed: 8.167 time per batch: 0.163\n", |
|
|
1320 |
"Batch 100 device: cuda time passed: 14.550 time per batch: 0.145\n", |
|
|
1321 |
"v36, d14, e3, f2, trn ll: 0.0292, val ll: 0.0600, ll_w: 0.0571, cor: 0.8454, auc: 0.9892, lr: 5e-06\n", |
|
|
1322 |
"total running time 213.9665548801422\n", |
|
|
1323 |
"total time 1993.8867774009705\n", |
|
|
1324 |
"completed epochs: 0 starting now: 2\n", |
|
|
1325 |
"DataSet 14 train size 17408 fold 3\n", |
|
|
1326 |
"adding dummy serieses 16\n", |
|
|
1327 |
"DataSet 14 valid size 4352 fold 3\n", |
|
|
1328 |
"setFeats, augmentation 0\n", |
|
|
1329 |
"WeightedRandomSampler\n", |
|
|
1330 |
"dataset train: 17408 valid: 4352 loader train: 544 valid: 136\n", |
|
|
1331 |
"loading model model.b13.f3.d14.v35\n", |
|
|
1332 |
"setFeats, augmentation -1\n", |
|
|
1333 |
"Batch 50 device: cuda time passed: 21.250 time per batch: 0.425\n", |
|
|
1334 |
"Batch 100 device: cuda time passed: 37.196 time per batch: 0.372\n", |
|
|
1335 |
"Batch 150 device: cuda time passed: 53.011 time per batch: 0.353\n", |
|
|
1336 |
"Batch 200 device: cuda time passed: 69.612 time per batch: 0.348\n", |
|
|
1337 |
"Batch 250 device: cuda time passed: 85.879 time per batch: 0.344\n", |
|
|
1338 |
"Batch 300 device: cuda time passed: 102.447 time per batch: 0.341\n", |
|
|
1339 |
"Batch 350 device: cuda time passed: 118.245 time per batch: 0.338\n", |
|
|
1340 |
"Batch 400 device: cuda time passed: 135.061 time per batch: 0.338\n", |
|
|
1341 |
"Batch 450 device: cuda time passed: 153.103 time per batch: 0.340\n", |
|
|
1342 |
"Batch 500 device: cuda time passed: 170.886 time per batch: 0.342\n", |
|
|
1343 |
"Batch 50 device: cuda time passed: 8.188 time per batch: 0.164\n", |
|
|
1344 |
"Batch 100 device: cuda time passed: 14.597 time per batch: 0.146\n", |
|
|
1345 |
"v36, d14, e1, f3, trn ll: 0.0290, val ll: 0.0626, ll_w: 0.0604, cor: 0.8411, auc: 0.9889, lr: 1e-05\n", |
|
|
1346 |
"setFeats, augmentation -1\n", |
|
|
1347 |
"Batch 50 device: cuda time passed: 20.856 time per batch: 0.417\n", |
|
|
1348 |
"Batch 100 device: cuda time passed: 36.492 time per batch: 0.365\n", |
|
|
1349 |
"Batch 150 device: cuda time passed: 52.552 time per batch: 0.350\n", |
|
|
1350 |
"Batch 200 device: cuda time passed: 68.865 time per batch: 0.344\n", |
|
|
1351 |
"Batch 250 device: cuda time passed: 85.231 time per batch: 0.341\n", |
|
|
1352 |
"Batch 300 device: cuda time passed: 102.115 time per batch: 0.340\n", |
|
|
1353 |
"Batch 350 device: cuda time passed: 118.472 time per batch: 0.338\n", |
|
|
1354 |
"Batch 400 device: cuda time passed: 134.599 time per batch: 0.336\n", |
|
|
1355 |
"Batch 450 device: cuda time passed: 152.687 time per batch: 0.339\n", |
|
|
1356 |
"Batch 500 device: cuda time passed: 170.115 time per batch: 0.340\n", |
|
|
1357 |
"Batch 50 device: cuda time passed: 8.548 time per batch: 0.171\n", |
|
|
1358 |
"Batch 100 device: cuda time passed: 14.807 time per batch: 0.148\n", |
|
|
1359 |
"v36, d14, e2, f3, trn ll: 0.0295, val ll: 0.0629, ll_w: 0.0605, cor: 0.8408, auc: 0.9889, lr: 1e-05\n", |
|
|
1360 |
"total running time 447.1415765285492\n", |
|
|
1361 |
"completed epochs: 2 starting now: 1\n", |
|
|
1362 |
"DataSet 14 train size 17408 fold 3\n", |
|
|
1363 |
"adding dummy serieses 16\n", |
|
|
1364 |
"DataSet 14 valid size 4352 fold 3\n", |
|
|
1365 |
"setFeats, augmentation 0\n", |
|
|
1366 |
"WeightedRandomSampler\n", |
|
|
1367 |
"dataset train: 17408 valid: 4352 loader train: 544 valid: 136\n", |
|
|
1368 |
"loading model model.b2.f3.d14.v36\n", |
|
|
1369 |
"setFeats, augmentation -1\n", |
|
|
1370 |
"Batch 50 device: cuda time passed: 20.516 time per batch: 0.410\n", |
|
|
1371 |
"Batch 100 device: cuda time passed: 36.334 time per batch: 0.363\n", |
|
|
1372 |
"Batch 150 device: cuda time passed: 53.084 time per batch: 0.354\n", |
|
|
1373 |
"Batch 200 device: cuda time passed: 69.037 time per batch: 0.345\n", |
|
|
1374 |
"Batch 250 device: cuda time passed: 85.908 time per batch: 0.344\n", |
|
|
1375 |
"Batch 300 device: cuda time passed: 102.228 time per batch: 0.341\n", |
|
|
1376 |
"Batch 350 device: cuda time passed: 118.540 time per batch: 0.339\n", |
|
|
1377 |
"Batch 400 device: cuda time passed: 134.860 time per batch: 0.337\n", |
|
|
1378 |
"Batch 450 device: cuda time passed: 153.588 time per batch: 0.341\n", |
|
|
1379 |
"Batch 500 device: cuda time passed: 170.057 time per batch: 0.340\n", |
|
|
1380 |
"Batch 50 device: cuda time passed: 8.858 time per batch: 0.177\n", |
|
|
1381 |
"Batch 100 device: cuda time passed: 15.062 time per batch: 0.151\n", |
|
|
1382 |
"v36, d14, e3, f3, trn ll: 0.0290, val ll: 0.0624, ll_w: 0.0601, cor: 0.8414, auc: 0.9889, lr: 5e-06\n", |
|
|
1383 |
"total running time 212.96735048294067\n", |
|
|
1384 |
"total time 2654.3308753967285\n", |
|
|
1385 |
"completed epochs: 0 starting now: 2\n", |
|
|
1386 |
"DataSet 14 train size 17376 fold 4\n", |
|
|
1387 |
"adding dummy serieses 16\n", |
|
|
1388 |
"DataSet 14 valid size 4384 fold 4\n", |
|
|
1389 |
"setFeats, augmentation 0\n", |
|
|
1390 |
"WeightedRandomSampler\n", |
|
|
1391 |
"dataset train: 17376 valid: 4384 loader train: 543 valid: 137\n", |
|
|
1392 |
"loading model model.b13.f4.d14.v35\n", |
|
|
1393 |
"setFeats, augmentation -1\n", |
|
|
1394 |
"Batch 50 device: cuda time passed: 20.938 time per batch: 0.419\n", |
|
|
1395 |
"Batch 100 device: cuda time passed: 36.742 time per batch: 0.367\n", |
|
|
1396 |
"Batch 150 device: cuda time passed: 53.118 time per batch: 0.354\n", |
|
|
1397 |
"Batch 200 device: cuda time passed: 69.207 time per batch: 0.346\n", |
|
|
1398 |
"Batch 250 device: cuda time passed: 85.657 time per batch: 0.343\n", |
|
|
1399 |
"Batch 300 device: cuda time passed: 102.577 time per batch: 0.342\n", |
|
|
1400 |
"Batch 350 device: cuda time passed: 118.931 time per batch: 0.340\n", |
|
|
1401 |
"Batch 400 device: cuda time passed: 135.803 time per batch: 0.340\n", |
|
|
1402 |
"Batch 450 device: cuda time passed: 152.349 time per batch: 0.339\n", |
|
|
1403 |
"Batch 500 device: cuda time passed: 171.693 time per batch: 0.343\n", |
|
|
1404 |
"Batch 50 device: cuda time passed: 8.196 time per batch: 0.164\n", |
|
|
1405 |
"Batch 100 device: cuda time passed: 14.573 time per batch: 0.146\n", |
|
|
1406 |
"v36, d14, e1, f4, trn ll: 0.0293, val ll: 0.0609, ll_w: 0.0584, cor: 0.8450, auc: 0.9886, lr: 1e-05\n", |
|
|
1407 |
"setFeats, augmentation -1\n", |
|
|
1408 |
"Batch 50 device: cuda time passed: 20.888 time per batch: 0.418\n", |
|
|
1409 |
"Batch 100 device: cuda time passed: 37.072 time per batch: 0.371\n", |
|
|
1410 |
"Batch 150 device: cuda time passed: 52.913 time per batch: 0.353\n", |
|
|
1411 |
"Batch 200 device: cuda time passed: 69.316 time per batch: 0.347\n", |
|
|
1412 |
"Batch 250 device: cuda time passed: 85.225 time per batch: 0.341\n", |
|
|
1413 |
"Batch 300 device: cuda time passed: 101.227 time per batch: 0.337\n", |
|
|
1414 |
"Batch 350 device: cuda time passed: 117.882 time per batch: 0.337\n", |
|
|
1415 |
"Batch 400 device: cuda time passed: 134.412 time per batch: 0.336\n", |
|
|
1416 |
"Batch 450 device: cuda time passed: 153.441 time per batch: 0.341\n", |
|
|
1417 |
"Batch 500 device: cuda time passed: 169.712 time per batch: 0.339\n", |
|
|
1418 |
"Batch 50 device: cuda time passed: 8.684 time per batch: 0.174\n", |
|
|
1419 |
"Batch 100 device: cuda time passed: 15.135 time per batch: 0.151\n", |
|
|
1420 |
"v36, d14, e2, f4, trn ll: 0.0288, val ll: 0.0610, ll_w: 0.0585, cor: 0.8453, auc: 0.9885, lr: 1e-05\n", |
|
|
1421 |
"total running time 445.4769241809845\n", |
|
|
1422 |
"completed epochs: 2 starting now: 1\n", |
|
|
1423 |
"DataSet 14 train size 17376 fold 4\n", |
|
|
1424 |
"adding dummy serieses 16\n", |
|
|
1425 |
"DataSet 14 valid size 4384 fold 4\n", |
|
|
1426 |
"setFeats, augmentation 0\n", |
|
|
1427 |
"WeightedRandomSampler\n", |
|
|
1428 |
"dataset train: 17376 valid: 4384 loader train: 543 valid: 137\n", |
|
|
1429 |
"loading model model.b2.f4.d14.v36\n", |
|
|
1430 |
"setFeats, augmentation -1\n", |
|
|
1431 |
"Batch 50 device: cuda time passed: 20.951 time per batch: 0.419\n", |
|
|
1432 |
"Batch 100 device: cuda time passed: 37.131 time per batch: 0.371\n", |
|
|
1433 |
"Batch 150 device: cuda time passed: 53.374 time per batch: 0.356\n", |
|
|
1434 |
"Batch 200 device: cuda time passed: 69.208 time per batch: 0.346\n", |
|
|
1435 |
"Batch 250 device: cuda time passed: 85.747 time per batch: 0.343\n", |
|
|
1436 |
"Batch 300 device: cuda time passed: 102.371 time per batch: 0.341\n", |
|
|
1437 |
"Batch 350 device: cuda time passed: 118.719 time per batch: 0.339\n", |
|
|
1438 |
"Batch 400 device: cuda time passed: 134.930 time per batch: 0.337\n" |
|
|
1439 |
] |
|
|
1440 |
}, |
|
|
1441 |
{ |
|
|
1442 |
"name": "stdout", |
|
|
1443 |
"output_type": "stream", |
|
|
1444 |
"text": [ |
|
|
1445 |
"Batch 450 device: cuda time passed: 154.019 time per batch: 0.342\n", |
|
|
1446 |
"Batch 500 device: cuda time passed: 170.163 time per batch: 0.340\n", |
|
|
1447 |
"Batch 50 device: cuda time passed: 7.538 time per batch: 0.151\n", |
|
|
1448 |
"Batch 100 device: cuda time passed: 15.825 time per batch: 0.158\n", |
|
|
1449 |
"v36, d14, e3, f4, trn ll: 0.0284, val ll: 0.0610, ll_w: 0.0585, cor: 0.8453, auc: 0.9885, lr: 5e-06\n", |
|
|
1450 |
"total running time 213.95540952682495\n", |
|
|
1451 |
"total time 3314.099096775055\n", |
|
|
1452 |
"total time 3314.0992426872253\n", |
|
|
1453 |
"total time 3314.099636554718\n" |
|
|
1454 |
] |
|
|
1455 |
} |
|
|
1456 |
], |
|
|
1457 |
"source": [ |
|
|
1458 |
"weight_decay = 1e-4\n", |
|
|
1459 |
"lrs = np.array([1e-5, 5e-6])\n", |
|
|
1460 |
"epochs = np.array([2, 1])\n", |
|
|
1461 |
"stg = time.time()\n", |
|
|
1462 |
"for ds in [14]:#my_datasets3+my_datasets5:\n", |
|
|
1463 |
" folds = getNFolds(ds)\n", |
|
|
1464 |
" #folds = 2\n", |
|
|
1465 |
" for f in range(folds):\n", |
|
|
1466 |
" for i,lr in enumerate(lrs):\n", |
|
|
1467 |
" learning_rate = lr\n", |
|
|
1468 |
" model, predictions, val_results = train_one(dataset=ds, epochs=epochs[i], bs=32, fold=f, init_ver=35)\n", |
|
|
1469 |
" print('total time', time.time() - stg)\n", |
|
|
1470 |
" print('total time', time.time() - stg)\n", |
|
|
1471 |
"print('total time', time.time() - stg)" |
|
|
1472 |
] |
|
|
1473 |
}, |
|
|
1474 |
{ |
|
|
1475 |
"cell_type": "code", |
|
|
1476 |
"execution_count": null, |
|
|
1477 |
"metadata": {}, |
|
|
1478 |
"outputs": [], |
|
|
1479 |
"source": [] |
|
|
1480 |
}, |
|
|
1481 |
{ |
|
|
1482 |
"cell_type": "code", |
|
|
1483 |
"execution_count": null, |
|
|
1484 |
"metadata": {}, |
|
|
1485 |
"outputs": [], |
|
|
1486 |
"source": [] |
|
|
1487 |
}, |
|
|
1488 |
{ |
|
|
1489 |
"cell_type": "code", |
|
|
1490 |
"execution_count": 12, |
|
|
1491 |
"metadata": {}, |
|
|
1492 |
"outputs": [ |
|
|
1493 |
{ |
|
|
1494 |
"name": "stdout", |
|
|
1495 |
"output_type": "stream", |
|
|
1496 |
"text": [ |
|
|
1497 |
"31 0 65 5 0\n", |
|
|
1498 |
"31 1 65 5 0\n", |
|
|
1499 |
"31 2 65 5 0\n", |
|
|
1500 |
"31 3 39 3 0\n", |
|
|
1501 |
"31 4 39 3 0\n", |
|
|
1502 |
"33 0 65 5 0\n", |
|
|
1503 |
"33 1 65 5 0\n", |
|
|
1504 |
"33 2 65 5 0\n", |
|
|
1505 |
"33 3 39 3 0\n", |
|
|
1506 |
"33 4 39 3 0\n", |
|
|
1507 |
"34 0 15 0 0\n", |
|
|
1508 |
"34 1 15 0 0\n", |
|
|
1509 |
"34 2 15 0 0\n", |
|
|
1510 |
"34 3 9 0 0\n", |
|
|
1511 |
"34 4 9 0 0\n", |
|
|
1512 |
"35 0 13 1 0\n", |
|
|
1513 |
"35 1 13 1 0\n", |
|
|
1514 |
"35 2 13 1 0\n", |
|
|
1515 |
"35 3 13 1 0\n", |
|
|
1516 |
"35 4 13 1 0\n", |
|
|
1517 |
"36 0 3 0 0\n", |
|
|
1518 |
"36 1 3 0 0\n", |
|
|
1519 |
"36 2 3 0 0\n", |
|
|
1520 |
"36 3 3 0 0\n", |
|
|
1521 |
"36 4 3 0 0\n" |
|
|
1522 |
] |
|
|
1523 |
} |
|
|
1524 |
], |
|
|
1525 |
"source": [ |
|
|
1526 |
"for ver in [31,33,34,35,36]:\n", |
|
|
1527 |
" for i in range(5):\n", |
|
|
1528 |
" stats_filename = PATH_WORK/'stats.f{}.v{}'.format(i,ver)\n", |
|
|
1529 |
" stats = pd.read_csv(stats_filename)\n", |
|
|
1530 |
" #stats = stats.loc[stats.epoch != 13]\n", |
|
|
1531 |
" #stats.to_csv(stats_filename, index=False)\n", |
|
|
1532 |
" print(ver,i,len(stats),len(stats.loc[stats.epoch==13]),len(stats.loc[stats.epoch>13]))" |
|
|
1533 |
] |
|
|
1534 |
}, |
|
|
1535 |
{ |
|
|
1536 |
"cell_type": "code", |
|
|
1537 |
"execution_count": null, |
|
|
1538 |
"metadata": {}, |
|
|
1539 |
"outputs": [], |
|
|
1540 |
"source": [] |
|
|
1541 |
}, |
|
|
1542 |
{ |
|
|
1543 |
"cell_type": "code", |
|
|
1544 |
"execution_count": 59, |
|
|
1545 |
"metadata": {}, |
|
|
1546 |
"outputs": [], |
|
|
1547 |
"source": [ |
|
|
1548 |
"stats1 = pd.concat([pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,31)) for i in range(5)] +\n", |
|
|
1549 |
" [pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,33)) for i in range(5)] + \n", |
|
|
1550 |
" [pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,35)) for i in range(5)], axis=0, sort=False)" |
|
|
1551 |
] |
|
|
1552 |
}, |
|
|
1553 |
{ |
|
|
1554 |
"cell_type": "code", |
|
|
1555 |
"execution_count": 70, |
|
|
1556 |
"metadata": {}, |
|
|
1557 |
"outputs": [], |
|
|
1558 |
"source": [ |
|
|
1559 |
"stats1 = pd.concat([pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,33)) for i in range(5)] + \n", |
|
|
1560 |
" [pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,35)) for i in range(5)], axis=0, sort=False)" |
|
|
1561 |
] |
|
|
1562 |
}, |
|
|
1563 |
{ |
|
|
1564 |
"cell_type": "code", |
|
|
1565 |
"execution_count": 71, |
|
|
1566 |
"metadata": { |
|
|
1567 |
"scrolled": false |
|
|
1568 |
}, |
|
|
1569 |
"outputs": [ |
|
|
1570 |
{ |
|
|
1571 |
"data": { |
|
|
1572 |
"text/html": [ |
|
|
1573 |
"<div>\n", |
|
|
1574 |
"<style scoped>\n", |
|
|
1575 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
1576 |
" vertical-align: middle;\n", |
|
|
1577 |
" }\n", |
|
|
1578 |
"\n", |
|
|
1579 |
" .dataframe tbody tr th {\n", |
|
|
1580 |
" vertical-align: top;\n", |
|
|
1581 |
" }\n", |
|
|
1582 |
"\n", |
|
|
1583 |
" .dataframe thead th {\n", |
|
|
1584 |
" text-align: right;\n", |
|
|
1585 |
" }\n", |
|
|
1586 |
"</style>\n", |
|
|
1587 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
1588 |
" <thead>\n", |
|
|
1589 |
" <tr style=\"text-align: right;\">\n", |
|
|
1590 |
" <th></th>\n", |
|
|
1591 |
" <th></th>\n", |
|
|
1592 |
" <th>val_loss</th>\n", |
|
|
1593 |
" <th>val_w_loss</th>\n", |
|
|
1594 |
" </tr>\n", |
|
|
1595 |
" <tr>\n", |
|
|
1596 |
" <th>dataset</th>\n", |
|
|
1597 |
" <th>ver</th>\n", |
|
|
1598 |
" <th></th>\n", |
|
|
1599 |
" <th></th>\n", |
|
|
1600 |
" </tr>\n", |
|
|
1601 |
" </thead>\n", |
|
|
1602 |
" <tbody>\n", |
|
|
1603 |
" <tr>\n", |
|
|
1604 |
" <td>7</td>\n", |
|
|
1605 |
" <td>33.0</td>\n", |
|
|
1606 |
" <td>0.062216</td>\n", |
|
|
1607 |
" <td>0.059966</td>\n", |
|
|
1608 |
" </tr>\n", |
|
|
1609 |
" <tr>\n", |
|
|
1610 |
" <td>9</td>\n", |
|
|
1611 |
" <td>33.0</td>\n", |
|
|
1612 |
" <td>0.061798</td>\n", |
|
|
1613 |
" <td>0.059631</td>\n", |
|
|
1614 |
" </tr>\n", |
|
|
1615 |
" <tr>\n", |
|
|
1616 |
" <td>11</td>\n", |
|
|
1617 |
" <td>33.0</td>\n", |
|
|
1618 |
" <td>0.060567</td>\n", |
|
|
1619 |
" <td>0.058444</td>\n", |
|
|
1620 |
" </tr>\n", |
|
|
1621 |
" <tr>\n", |
|
|
1622 |
" <td>12</td>\n", |
|
|
1623 |
" <td>33.0</td>\n", |
|
|
1624 |
" <td>0.060435</td>\n", |
|
|
1625 |
" <td>0.058339</td>\n", |
|
|
1626 |
" </tr>\n", |
|
|
1627 |
" <tr>\n", |
|
|
1628 |
" <td>13</td>\n", |
|
|
1629 |
" <td>33.0</td>\n", |
|
|
1630 |
" <td>0.060730</td>\n", |
|
|
1631 |
" <td>0.058572</td>\n", |
|
|
1632 |
" </tr>\n", |
|
|
1633 |
" <tr>\n", |
|
|
1634 |
" <td>14</td>\n", |
|
|
1635 |
" <td>35.0</td>\n", |
|
|
1636 |
" <td>0.060279</td>\n", |
|
|
1637 |
" <td>0.058197</td>\n", |
|
|
1638 |
" </tr>\n", |
|
|
1639 |
" </tbody>\n", |
|
|
1640 |
"</table>\n", |
|
|
1641 |
"</div>" |
|
|
1642 |
], |
|
|
1643 |
"text/plain": [ |
|
|
1644 |
" val_loss val_w_loss\n", |
|
|
1645 |
"dataset ver \n", |
|
|
1646 |
"7 33.0 0.062216 0.059966\n", |
|
|
1647 |
"9 33.0 0.061798 0.059631\n", |
|
|
1648 |
"11 33.0 0.060567 0.058444\n", |
|
|
1649 |
"12 33.0 0.060435 0.058339\n", |
|
|
1650 |
"13 33.0 0.060730 0.058572\n", |
|
|
1651 |
"14 35.0 0.060279 0.058197" |
|
|
1652 |
] |
|
|
1653 |
}, |
|
|
1654 |
"execution_count": 71, |
|
|
1655 |
"metadata": {}, |
|
|
1656 |
"output_type": "execute_result" |
|
|
1657 |
} |
|
|
1658 |
], |
|
|
1659 |
"source": [ |
|
|
1660 |
"stats1.loc[stats1.epoch==13].groupby(['dataset','ver'])['val_loss','val_w_loss'].mean()" |
|
|
1661 |
] |
|
|
1662 |
}, |
|
|
1663 |
{ |
|
|
1664 |
"cell_type": "code", |
|
|
1665 |
"execution_count": 76, |
|
|
1666 |
"metadata": {}, |
|
|
1667 |
"outputs": [ |
|
|
1668 |
{ |
|
|
1669 |
"data": { |
|
|
1670 |
"text/plain": [ |
|
|
1671 |
"epoch\n", |
|
|
1672 |
"1 0.069373\n", |
|
|
1673 |
"2 0.066851\n", |
|
|
1674 |
"3 0.066766\n", |
|
|
1675 |
"4 0.065325\n", |
|
|
1676 |
"5 0.063819\n", |
|
|
1677 |
"6 0.063702\n", |
|
|
1678 |
"7 0.063659\n", |
|
|
1679 |
"8 0.063762\n", |
|
|
1680 |
"9 0.063641\n", |
|
|
1681 |
"10 0.063510\n", |
|
|
1682 |
"11 0.063465\n", |
|
|
1683 |
"12 0.063588\n", |
|
|
1684 |
"13 0.058714\n", |
|
|
1685 |
"Name: val_w_loss, dtype: float64" |
|
|
1686 |
] |
|
|
1687 |
}, |
|
|
1688 |
"execution_count": 76, |
|
|
1689 |
"metadata": {}, |
|
|
1690 |
"output_type": "execute_result" |
|
|
1691 |
} |
|
|
1692 |
], |
|
|
1693 |
"source": [ |
|
|
1694 |
"stats1.groupby('epoch')['val_w_loss'].mean()" |
|
|
1695 |
] |
|
|
1696 |
}, |
|
|
1697 |
{ |
|
|
1698 |
"cell_type": "code", |
|
|
1699 |
"execution_count": 77, |
|
|
1700 |
"metadata": {}, |
|
|
1701 |
"outputs": [ |
|
|
1702 |
{ |
|
|
1703 |
"data": { |
|
|
1704 |
"text/plain": [ |
|
|
1705 |
"epoch\n", |
|
|
1706 |
"1 0.058880\n", |
|
|
1707 |
"2 0.058748\n", |
|
|
1708 |
"3 0.058762\n", |
|
|
1709 |
"Name: val_w_loss, dtype: float64" |
|
|
1710 |
] |
|
|
1711 |
}, |
|
|
1712 |
"execution_count": 77, |
|
|
1713 |
"metadata": {}, |
|
|
1714 |
"output_type": "execute_result" |
|
|
1715 |
} |
|
|
1716 |
], |
|
|
1717 |
"source": [ |
|
|
1718 |
"stats2.groupby('epoch')['val_w_loss'].mean()" |
|
|
1719 |
] |
|
|
1720 |
}, |
|
|
1721 |
{ |
|
|
1722 |
"cell_type": "code", |
|
|
1723 |
"execution_count": 78, |
|
|
1724 |
"metadata": {}, |
|
|
1725 |
"outputs": [ |
|
|
1726 |
{ |
|
|
1727 |
"data": { |
|
|
1728 |
"text/html": [ |
|
|
1729 |
"<div>\n", |
|
|
1730 |
"<style scoped>\n", |
|
|
1731 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
1732 |
" vertical-align: middle;\n", |
|
|
1733 |
" }\n", |
|
|
1734 |
"\n", |
|
|
1735 |
" .dataframe tbody tr th {\n", |
|
|
1736 |
" vertical-align: top;\n", |
|
|
1737 |
" }\n", |
|
|
1738 |
"\n", |
|
|
1739 |
" .dataframe thead th {\n", |
|
|
1740 |
" text-align: right;\n", |
|
|
1741 |
" }\n", |
|
|
1742 |
"</style>\n", |
|
|
1743 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
1744 |
" <thead>\n", |
|
|
1745 |
" <tr style=\"text-align: right;\">\n", |
|
|
1746 |
" <th></th>\n", |
|
|
1747 |
" <th>ver</th>\n", |
|
|
1748 |
" <th>dataset</th>\n", |
|
|
1749 |
" <th>epoch</th>\n", |
|
|
1750 |
" <th>fold</th>\n", |
|
|
1751 |
" <th>train_loss</th>\n", |
|
|
1752 |
" <th>val_loss</th>\n", |
|
|
1753 |
" <th>val_w_loss</th>\n", |
|
|
1754 |
" <th>val_loss2</th>\n", |
|
|
1755 |
" <th>val_w_loss2</th>\n", |
|
|
1756 |
" <th>cor</th>\n", |
|
|
1757 |
" <th>any</th>\n", |
|
|
1758 |
" <th>epidural</th>\n", |
|
|
1759 |
" <th>intraparenchymal</th>\n", |
|
|
1760 |
" <th>intraventricular</th>\n", |
|
|
1761 |
" <th>subarachnoid</th>\n", |
|
|
1762 |
" <th>subdural</th>\n", |
|
|
1763 |
" <th>train_sz</th>\n", |
|
|
1764 |
" <th>val_sz</th>\n", |
|
|
1765 |
" <th>bs</th>\n", |
|
|
1766 |
" <th>train_time</th>\n", |
|
|
1767 |
" <th>valid_time</th>\n", |
|
|
1768 |
" <th>lr</th>\n", |
|
|
1769 |
" <th>wd</th>\n", |
|
|
1770 |
" </tr>\n", |
|
|
1771 |
" </thead>\n", |
|
|
1772 |
" <tbody>\n", |
|
|
1773 |
" <tr>\n", |
|
|
1774 |
" <td>0</td>\n", |
|
|
1775 |
" <td>34</td>\n", |
|
|
1776 |
" <td>7</td>\n", |
|
|
1777 |
" <td>1</td>\n", |
|
|
1778 |
" <td>0</td>\n", |
|
|
1779 |
" <td>0.026721</td>\n", |
|
|
1780 |
" <td>0.064065</td>\n", |
|
|
1781 |
" <td>0.061943</td>\n", |
|
|
1782 |
" <td>0.058069</td>\n", |
|
|
1783 |
" <td>0.041959</td>\n", |
|
|
1784 |
" <td>0.841664</td>\n", |
|
|
1785 |
" <td>0.094596</td>\n", |
|
|
1786 |
" <td>0.017690</td>\n", |
|
|
1787 |
" <td>0.048768</td>\n", |
|
|
1788 |
" <td>0.022488</td>\n", |
|
|
1789 |
" <td>0.050100</td>\n", |
|
|
1790 |
" <td>0.078243</td>\n", |
|
|
1791 |
" <td>14526</td>\n", |
|
|
1792 |
" <td>7232</td>\n", |
|
|
1793 |
" <td>32</td>\n", |
|
|
1794 |
" <td>188.513041</td>\n", |
|
|
1795 |
" <td>42.595724</td>\n", |
|
|
1796 |
" <td>0.000010</td>\n", |
|
|
1797 |
" <td>0.0001</td>\n", |
|
|
1798 |
" </tr>\n", |
|
|
1799 |
" <tr>\n", |
|
|
1800 |
" <td>1</td>\n", |
|
|
1801 |
" <td>34</td>\n", |
|
|
1802 |
" <td>7</td>\n", |
|
|
1803 |
" <td>2</td>\n", |
|
|
1804 |
" <td>0</td>\n", |
|
|
1805 |
" <td>0.026779</td>\n", |
|
|
1806 |
" <td>0.063986</td>\n", |
|
|
1807 |
" <td>0.061799</td>\n", |
|
|
1808 |
" <td>0.058277</td>\n", |
|
|
1809 |
" <td>0.042019</td>\n", |
|
|
1810 |
" <td>0.841992</td>\n", |
|
|
1811 |
" <td>0.095012</td>\n", |
|
|
1812 |
" <td>0.017891</td>\n", |
|
|
1813 |
" <td>0.048536</td>\n", |
|
|
1814 |
" <td>0.022399</td>\n", |
|
|
1815 |
" <td>0.050454</td>\n", |
|
|
1816 |
" <td>0.078633</td>\n", |
|
|
1817 |
" <td>14526</td>\n", |
|
|
1818 |
" <td>7232</td>\n", |
|
|
1819 |
" <td>32</td>\n", |
|
|
1820 |
" <td>148.325198</td>\n", |
|
|
1821 |
" <td>42.840445</td>\n", |
|
|
1822 |
" <td>0.000010</td>\n", |
|
|
1823 |
" <td>0.0001</td>\n", |
|
|
1824 |
" </tr>\n", |
|
|
1825 |
" <tr>\n", |
|
|
1826 |
" <td>2</td>\n", |
|
|
1827 |
" <td>34</td>\n", |
|
|
1828 |
" <td>7</td>\n", |
|
|
1829 |
" <td>3</td>\n", |
|
|
1830 |
" <td>0</td>\n", |
|
|
1831 |
" <td>0.026806</td>\n", |
|
|
1832 |
" <td>0.063772</td>\n", |
|
|
1833 |
" <td>0.061700</td>\n", |
|
|
1834 |
" <td>0.057858</td>\n", |
|
|
1835 |
" <td>0.041902</td>\n", |
|
|
1836 |
" <td>0.841932</td>\n", |
|
|
1837 |
" <td>0.094341</td>\n", |
|
|
1838 |
" <td>0.017474</td>\n", |
|
|
1839 |
" <td>0.048568</td>\n", |
|
|
1840 |
" <td>0.022377</td>\n", |
|
|
1841 |
" <td>0.050502</td>\n", |
|
|
1842 |
" <td>0.077406</td>\n", |
|
|
1843 |
" <td>14526</td>\n", |
|
|
1844 |
" <td>7232</td>\n", |
|
|
1845 |
" <td>32</td>\n", |
|
|
1846 |
" <td>148.716054</td>\n", |
|
|
1847 |
" <td>42.719312</td>\n", |
|
|
1848 |
" <td>0.000005</td>\n", |
|
|
1849 |
" <td>0.0001</td>\n", |
|
|
1850 |
" </tr>\n", |
|
|
1851 |
" <tr>\n", |
|
|
1852 |
" <td>3</td>\n", |
|
|
1853 |
" <td>34</td>\n", |
|
|
1854 |
" <td>9</td>\n", |
|
|
1855 |
" <td>1</td>\n", |
|
|
1856 |
" <td>0</td>\n", |
|
|
1857 |
" <td>0.027112</td>\n", |
|
|
1858 |
" <td>0.063366</td>\n", |
|
|
1859 |
" <td>0.061400</td>\n", |
|
|
1860 |
" <td>0.059194</td>\n", |
|
|
1861 |
" <td>0.042473</td>\n", |
|
|
1862 |
" <td>0.842161</td>\n", |
|
|
1863 |
" <td>0.095822</td>\n", |
|
|
1864 |
" <td>0.016753</td>\n", |
|
|
1865 |
" <td>0.048794</td>\n", |
|
|
1866 |
" <td>0.022736</td>\n", |
|
|
1867 |
" <td>0.053430</td>\n", |
|
|
1868 |
" <td>0.081001</td>\n", |
|
|
1869 |
" <td>14526</td>\n", |
|
|
1870 |
" <td>7232</td>\n", |
|
|
1871 |
" <td>32</td>\n", |
|
|
1872 |
" <td>154.520063</td>\n", |
|
|
1873 |
" <td>41.107329</td>\n", |
|
|
1874 |
" <td>0.000010</td>\n", |
|
|
1875 |
" <td>0.0001</td>\n", |
|
|
1876 |
" </tr>\n", |
|
|
1877 |
" <tr>\n", |
|
|
1878 |
" <td>4</td>\n", |
|
|
1879 |
" <td>34</td>\n", |
|
|
1880 |
" <td>9</td>\n", |
|
|
1881 |
" <td>2</td>\n", |
|
|
1882 |
" <td>0</td>\n", |
|
|
1883 |
" <td>0.028070</td>\n", |
|
|
1884 |
" <td>0.063492</td>\n", |
|
|
1885 |
" <td>0.061455</td>\n", |
|
|
1886 |
" <td>0.059902</td>\n", |
|
|
1887 |
" <td>0.042804</td>\n", |
|
|
1888 |
" <td>0.842046</td>\n", |
|
|
1889 |
" <td>0.097219</td>\n", |
|
|
1890 |
" <td>0.016762</td>\n", |
|
|
1891 |
" <td>0.048958</td>\n", |
|
|
1892 |
" <td>0.023702</td>\n", |
|
|
1893 |
" <td>0.053533</td>\n", |
|
|
1894 |
" <td>0.081922</td>\n", |
|
|
1895 |
" <td>14526</td>\n", |
|
|
1896 |
" <td>7232</td>\n", |
|
|
1897 |
" <td>32</td>\n", |
|
|
1898 |
" <td>136.198384</td>\n", |
|
|
1899 |
" <td>41.837500</td>\n", |
|
|
1900 |
" <td>0.000010</td>\n", |
|
|
1901 |
" <td>0.0001</td>\n", |
|
|
1902 |
" </tr>\n", |
|
|
1903 |
" <tr>\n", |
|
|
1904 |
" <td>...</td>\n", |
|
|
1905 |
" <td>...</td>\n", |
|
|
1906 |
" <td>...</td>\n", |
|
|
1907 |
" <td>...</td>\n", |
|
|
1908 |
" <td>...</td>\n", |
|
|
1909 |
" <td>...</td>\n", |
|
|
1910 |
" <td>...</td>\n", |
|
|
1911 |
" <td>...</td>\n", |
|
|
1912 |
" <td>...</td>\n", |
|
|
1913 |
" <td>...</td>\n", |
|
|
1914 |
" <td>...</td>\n", |
|
|
1915 |
" <td>...</td>\n", |
|
|
1916 |
" <td>...</td>\n", |
|
|
1917 |
" <td>...</td>\n", |
|
|
1918 |
" <td>...</td>\n", |
|
|
1919 |
" <td>...</td>\n", |
|
|
1920 |
" <td>...</td>\n", |
|
|
1921 |
" <td>...</td>\n", |
|
|
1922 |
" <td>...</td>\n", |
|
|
1923 |
" <td>...</td>\n", |
|
|
1924 |
" <td>...</td>\n", |
|
|
1925 |
" <td>...</td>\n", |
|
|
1926 |
" <td>...</td>\n", |
|
|
1927 |
" <td>...</td>\n", |
|
|
1928 |
" </tr>\n", |
|
|
1929 |
" <tr>\n", |
|
|
1930 |
" <td>1</td>\n", |
|
|
1931 |
" <td>36</td>\n", |
|
|
1932 |
" <td>14</td>\n", |
|
|
1933 |
" <td>2</td>\n", |
|
|
1934 |
" <td>3</td>\n", |
|
|
1935 |
" <td>0.029522</td>\n", |
|
|
1936 |
" <td>0.062853</td>\n", |
|
|
1937 |
" <td>0.060485</td>\n", |
|
|
1938 |
" <td>0.063770</td>\n", |
|
|
1939 |
" <td>0.045538</td>\n", |
|
|
1940 |
" <td>0.840799</td>\n", |
|
|
1941 |
" <td>0.101509</td>\n", |
|
|
1942 |
" <td>0.012685</td>\n", |
|
|
1943 |
" <td>0.049486</td>\n", |
|
|
1944 |
" <td>0.034019</td>\n", |
|
|
1945 |
" <td>0.059630</td>\n", |
|
|
1946 |
" <td>0.087555</td>\n", |
|
|
1947 |
" <td>17408</td>\n", |
|
|
1948 |
" <td>4352</td>\n", |
|
|
1949 |
" <td>32</td>\n", |
|
|
1950 |
" <td>195.092067</td>\n", |
|
|
1951 |
" <td>19.647925</td>\n", |
|
|
1952 |
" <td>0.000010</td>\n", |
|
|
1953 |
" <td>0.0001</td>\n", |
|
|
1954 |
" </tr>\n", |
|
|
1955 |
" <tr>\n", |
|
|
1956 |
" <td>2</td>\n", |
|
|
1957 |
" <td>36</td>\n", |
|
|
1958 |
" <td>14</td>\n", |
|
|
1959 |
" <td>3</td>\n", |
|
|
1960 |
" <td>3</td>\n", |
|
|
1961 |
" <td>0.029033</td>\n", |
|
|
1962 |
" <td>0.062372</td>\n", |
|
|
1963 |
" <td>0.060134</td>\n", |
|
|
1964 |
" <td>0.062729</td>\n", |
|
|
1965 |
" <td>0.044978</td>\n", |
|
|
1966 |
" <td>0.841355</td>\n", |
|
|
1967 |
" <td>0.099212</td>\n", |
|
|
1968 |
" <td>0.012550</td>\n", |
|
|
1969 |
" <td>0.048394</td>\n", |
|
|
1970 |
" <td>0.033793</td>\n", |
|
|
1971 |
" <td>0.059500</td>\n", |
|
|
1972 |
" <td>0.086445</td>\n", |
|
|
1973 |
" <td>17408</td>\n", |
|
|
1974 |
" <td>4352</td>\n", |
|
|
1975 |
" <td>32</td>\n", |
|
|
1976 |
" <td>188.747055</td>\n", |
|
|
1977 |
" <td>19.592905</td>\n", |
|
|
1978 |
" <td>0.000005</td>\n", |
|
|
1979 |
" <td>0.0001</td>\n", |
|
|
1980 |
" </tr>\n", |
|
|
1981 |
" <tr>\n", |
|
|
1982 |
" <td>0</td>\n", |
|
|
1983 |
" <td>36</td>\n", |
|
|
1984 |
" <td>14</td>\n", |
|
|
1985 |
" <td>1</td>\n", |
|
|
1986 |
" <td>4</td>\n", |
|
|
1987 |
" <td>0.029257</td>\n", |
|
|
1988 |
" <td>0.060873</td>\n", |
|
|
1989 |
" <td>0.058421</td>\n", |
|
|
1990 |
" <td>0.061626</td>\n", |
|
|
1991 |
" <td>0.045347</td>\n", |
|
|
1992 |
" <td>0.845014</td>\n", |
|
|
1993 |
" <td>0.102233</td>\n", |
|
|
1994 |
" <td>0.018479</td>\n", |
|
|
1995 |
" <td>0.052477</td>\n", |
|
|
1996 |
" <td>0.020628</td>\n", |
|
|
1997 |
" <td>0.056853</td>\n", |
|
|
1998 |
" <td>0.078477</td>\n", |
|
|
1999 |
" <td>17376</td>\n", |
|
|
2000 |
" <td>4384</td>\n", |
|
|
2001 |
" <td>32</td>\n", |
|
|
2002 |
" <td>208.909939</td>\n", |
|
|
2003 |
" <td>19.676935</td>\n", |
|
|
2004 |
" <td>0.000010</td>\n", |
|
|
2005 |
" <td>0.0001</td>\n", |
|
|
2006 |
" </tr>\n", |
|
|
2007 |
" <tr>\n", |
|
|
2008 |
" <td>1</td>\n", |
|
|
2009 |
" <td>36</td>\n", |
|
|
2010 |
" <td>14</td>\n", |
|
|
2011 |
" <td>2</td>\n", |
|
|
2012 |
" <td>4</td>\n", |
|
|
2013 |
" <td>0.028809</td>\n", |
|
|
2014 |
" <td>0.060978</td>\n", |
|
|
2015 |
" <td>0.058489</td>\n", |
|
|
2016 |
" <td>0.061709</td>\n", |
|
|
2017 |
" <td>0.045331</td>\n", |
|
|
2018 |
" <td>0.845300</td>\n", |
|
|
2019 |
" <td>0.102328</td>\n", |
|
|
2020 |
" <td>0.018075</td>\n", |
|
|
2021 |
" <td>0.052644</td>\n", |
|
|
2022 |
" <td>0.021467</td>\n", |
|
|
2023 |
" <td>0.056481</td>\n", |
|
|
2024 |
" <td>0.078641</td>\n", |
|
|
2025 |
" <td>17376</td>\n", |
|
|
2026 |
" <td>4384</td>\n", |
|
|
2027 |
" <td>32</td>\n", |
|
|
2028 |
" <td>192.605031</td>\n", |
|
|
2029 |
" <td>19.740591</td>\n", |
|
|
2030 |
" <td>0.000010</td>\n", |
|
|
2031 |
" <td>0.0001</td>\n", |
|
|
2032 |
" </tr>\n", |
|
|
2033 |
" <tr>\n", |
|
|
2034 |
" <td>2</td>\n", |
|
|
2035 |
" <td>36</td>\n", |
|
|
2036 |
" <td>14</td>\n", |
|
|
2037 |
" <td>3</td>\n", |
|
|
2038 |
" <td>4</td>\n", |
|
|
2039 |
" <td>0.028370</td>\n", |
|
|
2040 |
" <td>0.060988</td>\n", |
|
|
2041 |
" <td>0.058506</td>\n", |
|
|
2042 |
" <td>0.061573</td>\n", |
|
|
2043 |
" <td>0.045219</td>\n", |
|
|
2044 |
" <td>0.845341</td>\n", |
|
|
2045 |
" <td>0.102488</td>\n", |
|
|
2046 |
" <td>0.018332</td>\n", |
|
|
2047 |
" <td>0.052359</td>\n", |
|
|
2048 |
" <td>0.020422</td>\n", |
|
|
2049 |
" <td>0.056385</td>\n", |
|
|
2050 |
" <td>0.078535</td>\n", |
|
|
2051 |
" <td>17376</td>\n", |
|
|
2052 |
" <td>4384</td>\n", |
|
|
2053 |
" <td>32</td>\n", |
|
|
2054 |
" <td>189.093271</td>\n", |
|
|
2055 |
" <td>20.038952</td>\n", |
|
|
2056 |
" <td>0.000005</td>\n", |
|
|
2057 |
" <td>0.0001</td>\n", |
|
|
2058 |
" </tr>\n", |
|
|
2059 |
" </tbody>\n", |
|
|
2060 |
"</table>\n", |
|
|
2061 |
"<p>78 rows × 23 columns</p>\n", |
|
|
2062 |
"</div>" |
|
|
2063 |
], |
|
|
2064 |
"text/plain": [ |
|
|
2065 |
" ver dataset epoch fold train_loss val_loss val_w_loss val_loss2 \\\n", |
|
|
2066 |
"0 34 7 1 0 0.026721 0.064065 0.061943 0.058069 \n", |
|
|
2067 |
"1 34 7 2 0 0.026779 0.063986 0.061799 0.058277 \n", |
|
|
2068 |
"2 34 7 3 0 0.026806 0.063772 0.061700 0.057858 \n", |
|
|
2069 |
"3 34 9 1 0 0.027112 0.063366 0.061400 0.059194 \n", |
|
|
2070 |
"4 34 9 2 0 0.028070 0.063492 0.061455 0.059902 \n", |
|
|
2071 |
".. ... ... ... ... ... ... ... ... \n", |
|
|
2072 |
"1 36 14 2 3 0.029522 0.062853 0.060485 0.063770 \n", |
|
|
2073 |
"2 36 14 3 3 0.029033 0.062372 0.060134 0.062729 \n", |
|
|
2074 |
"0 36 14 1 4 0.029257 0.060873 0.058421 0.061626 \n", |
|
|
2075 |
"1 36 14 2 4 0.028809 0.060978 0.058489 0.061709 \n", |
|
|
2076 |
"2 36 14 3 4 0.028370 0.060988 0.058506 0.061573 \n", |
|
|
2077 |
"\n", |
|
|
2078 |
" val_w_loss2 cor any epidural intraparenchymal \\\n", |
|
|
2079 |
"0 0.041959 0.841664 0.094596 0.017690 0.048768 \n", |
|
|
2080 |
"1 0.042019 0.841992 0.095012 0.017891 0.048536 \n", |
|
|
2081 |
"2 0.041902 0.841932 0.094341 0.017474 0.048568 \n", |
|
|
2082 |
"3 0.042473 0.842161 0.095822 0.016753 0.048794 \n", |
|
|
2083 |
"4 0.042804 0.842046 0.097219 0.016762 0.048958 \n", |
|
|
2084 |
".. ... ... ... ... ... \n", |
|
|
2085 |
"1 0.045538 0.840799 0.101509 0.012685 0.049486 \n", |
|
|
2086 |
"2 0.044978 0.841355 0.099212 0.012550 0.048394 \n", |
|
|
2087 |
"0 0.045347 0.845014 0.102233 0.018479 0.052477 \n", |
|
|
2088 |
"1 0.045331 0.845300 0.102328 0.018075 0.052644 \n", |
|
|
2089 |
"2 0.045219 0.845341 0.102488 0.018332 0.052359 \n", |
|
|
2090 |
"\n", |
|
|
2091 |
" intraventricular subarachnoid subdural train_sz val_sz bs \\\n", |
|
|
2092 |
"0 0.022488 0.050100 0.078243 14526 7232 32 \n", |
|
|
2093 |
"1 0.022399 0.050454 0.078633 14526 7232 32 \n", |
|
|
2094 |
"2 0.022377 0.050502 0.077406 14526 7232 32 \n", |
|
|
2095 |
"3 0.022736 0.053430 0.081001 14526 7232 32 \n", |
|
|
2096 |
"4 0.023702 0.053533 0.081922 14526 7232 32 \n", |
|
|
2097 |
".. ... ... ... ... ... .. \n", |
|
|
2098 |
"1 0.034019 0.059630 0.087555 17408 4352 32 \n", |
|
|
2099 |
"2 0.033793 0.059500 0.086445 17408 4352 32 \n", |
|
|
2100 |
"0 0.020628 0.056853 0.078477 17376 4384 32 \n", |
|
|
2101 |
"1 0.021467 0.056481 0.078641 17376 4384 32 \n", |
|
|
2102 |
"2 0.020422 0.056385 0.078535 17376 4384 32 \n", |
|
|
2103 |
"\n", |
|
|
2104 |
" train_time valid_time lr wd \n", |
|
|
2105 |
"0 188.513041 42.595724 0.000010 0.0001 \n", |
|
|
2106 |
"1 148.325198 42.840445 0.000010 0.0001 \n", |
|
|
2107 |
"2 148.716054 42.719312 0.000005 0.0001 \n", |
|
|
2108 |
"3 154.520063 41.107329 0.000010 0.0001 \n", |
|
|
2109 |
"4 136.198384 41.837500 0.000010 0.0001 \n", |
|
|
2110 |
".. ... ... ... ... \n", |
|
|
2111 |
"1 195.092067 19.647925 0.000010 0.0001 \n", |
|
|
2112 |
"2 188.747055 19.592905 0.000005 0.0001 \n", |
|
|
2113 |
"0 208.909939 19.676935 0.000010 0.0001 \n", |
|
|
2114 |
"1 192.605031 19.740591 0.000010 0.0001 \n", |
|
|
2115 |
"2 189.093271 20.038952 0.000005 0.0001 \n", |
|
|
2116 |
"\n", |
|
|
2117 |
"[78 rows x 23 columns]" |
|
|
2118 |
] |
|
|
2119 |
}, |
|
|
2120 |
"execution_count": 78, |
|
|
2121 |
"metadata": {}, |
|
|
2122 |
"output_type": "execute_result" |
|
|
2123 |
} |
|
|
2124 |
], |
|
|
2125 |
"source": [ |
|
|
2126 |
"stats2" |
|
|
2127 |
] |
|
|
2128 |
}, |
|
|
2129 |
{ |
|
|
2130 |
"cell_type": "code", |
|
|
2131 |
"execution_count": null, |
|
|
2132 |
"metadata": {}, |
|
|
2133 |
"outputs": [], |
|
|
2134 |
"source": [] |
|
|
2135 |
}, |
|
|
2136 |
{ |
|
|
2137 |
"cell_type": "code", |
|
|
2138 |
"execution_count": null, |
|
|
2139 |
"metadata": {}, |
|
|
2140 |
"outputs": [], |
|
|
2141 |
"source": [] |
|
|
2142 |
}, |
|
|
2143 |
{ |
|
|
2144 |
"cell_type": "code", |
|
|
2145 |
"execution_count": 18, |
|
|
2146 |
"metadata": {}, |
|
|
2147 |
"outputs": [], |
|
|
2148 |
"source": [ |
|
|
2149 |
"stats1 = stats1.loc[stats1.epoch==13]" |
|
|
2150 |
] |
|
|
2151 |
}, |
|
|
2152 |
{ |
|
|
2153 |
"cell_type": "code", |
|
|
2154 |
"execution_count": 64, |
|
|
2155 |
"metadata": {}, |
|
|
2156 |
"outputs": [], |
|
|
2157 |
"source": [ |
|
|
2158 |
"stats2 = pd.concat([pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,34)) for i in range(5)] +\n", |
|
|
2159 |
" [pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(i,36)) for i in range(5)], axis=0, sort=False)" |
|
|
2160 |
] |
|
|
2161 |
}, |
|
|
2162 |
{ |
|
|
2163 |
"cell_type": "code", |
|
|
2164 |
"execution_count": 65, |
|
|
2165 |
"metadata": { |
|
|
2166 |
"scrolled": false |
|
|
2167 |
}, |
|
|
2168 |
"outputs": [ |
|
|
2169 |
{ |
|
|
2170 |
"data": { |
|
|
2171 |
"text/html": [ |
|
|
2172 |
"<div>\n", |
|
|
2173 |
"<style scoped>\n", |
|
|
2174 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
2175 |
" vertical-align: middle;\n", |
|
|
2176 |
" }\n", |
|
|
2177 |
"\n", |
|
|
2178 |
" .dataframe tbody tr th {\n", |
|
|
2179 |
" vertical-align: top;\n", |
|
|
2180 |
" }\n", |
|
|
2181 |
"\n", |
|
|
2182 |
" .dataframe thead th {\n", |
|
|
2183 |
" text-align: right;\n", |
|
|
2184 |
" }\n", |
|
|
2185 |
"</style>\n", |
|
|
2186 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
2187 |
" <thead>\n", |
|
|
2188 |
" <tr style=\"text-align: right;\">\n", |
|
|
2189 |
" <th></th>\n", |
|
|
2190 |
" <th></th>\n", |
|
|
2191 |
" <th>val_loss</th>\n", |
|
|
2192 |
" <th>val_w_loss</th>\n", |
|
|
2193 |
" </tr>\n", |
|
|
2194 |
" <tr>\n", |
|
|
2195 |
" <th>dataset</th>\n", |
|
|
2196 |
" <th>ver</th>\n", |
|
|
2197 |
" <th></th>\n", |
|
|
2198 |
" <th></th>\n", |
|
|
2199 |
" </tr>\n", |
|
|
2200 |
" </thead>\n", |
|
|
2201 |
" <tbody>\n", |
|
|
2202 |
" <tr>\n", |
|
|
2203 |
" <td>7</td>\n", |
|
|
2204 |
" <td>34</td>\n", |
|
|
2205 |
" <td>0.062340</td>\n", |
|
|
2206 |
" <td>0.060022</td>\n", |
|
|
2207 |
" </tr>\n", |
|
|
2208 |
" <tr>\n", |
|
|
2209 |
" <td>9</td>\n", |
|
|
2210 |
" <td>34</td>\n", |
|
|
2211 |
" <td>0.061991</td>\n", |
|
|
2212 |
" <td>0.059712</td>\n", |
|
|
2213 |
" </tr>\n", |
|
|
2214 |
" <tr>\n", |
|
|
2215 |
" <td>11</td>\n", |
|
|
2216 |
" <td>34</td>\n", |
|
|
2217 |
" <td>0.060771</td>\n", |
|
|
2218 |
" <td>0.058518</td>\n", |
|
|
2219 |
" </tr>\n", |
|
|
2220 |
" <tr>\n", |
|
|
2221 |
" <td>12</td>\n", |
|
|
2222 |
" <td>34</td>\n", |
|
|
2223 |
" <td>0.060592</td>\n", |
|
|
2224 |
" <td>0.058365</td>\n", |
|
|
2225 |
" </tr>\n", |
|
|
2226 |
" <tr>\n", |
|
|
2227 |
" <td>13</td>\n", |
|
|
2228 |
" <td>34</td>\n", |
|
|
2229 |
" <td>0.060883</td>\n", |
|
|
2230 |
" <td>0.058596</td>\n", |
|
|
2231 |
" </tr>\n", |
|
|
2232 |
" <tr>\n", |
|
|
2233 |
" <td>14</td>\n", |
|
|
2234 |
" <td>36</td>\n", |
|
|
2235 |
" <td>0.060440</td>\n", |
|
|
2236 |
" <td>0.058245</td>\n", |
|
|
2237 |
" </tr>\n", |
|
|
2238 |
" </tbody>\n", |
|
|
2239 |
"</table>\n", |
|
|
2240 |
"</div>" |
|
|
2241 |
], |
|
|
2242 |
"text/plain": [ |
|
|
2243 |
" val_loss val_w_loss\n", |
|
|
2244 |
"dataset ver \n", |
|
|
2245 |
"7 34 0.062340 0.060022\n", |
|
|
2246 |
"9 34 0.061991 0.059712\n", |
|
|
2247 |
"11 34 0.060771 0.058518\n", |
|
|
2248 |
"12 34 0.060592 0.058365\n", |
|
|
2249 |
"13 34 0.060883 0.058596\n", |
|
|
2250 |
"14 36 0.060440 0.058245" |
|
|
2251 |
] |
|
|
2252 |
}, |
|
|
2253 |
"execution_count": 65, |
|
|
2254 |
"metadata": {}, |
|
|
2255 |
"output_type": "execute_result" |
|
|
2256 |
} |
|
|
2257 |
], |
|
|
2258 |
"source": [ |
|
|
2259 |
"stats2.loc[stats2.epoch==3].groupby(['dataset','ver'])['val_loss','val_w_loss'].mean()" |
|
|
2260 |
] |
|
|
2261 |
}, |
|
|
2262 |
{ |
|
|
2263 |
"cell_type": "code", |
|
|
2264 |
"execution_count": 19, |
|
|
2265 |
"metadata": {}, |
|
|
2266 |
"outputs": [], |
|
|
2267 |
"source": [ |
|
|
2268 |
"stats2 = stats2.loc[stats2.epoch==3]" |
|
|
2269 |
] |
|
|
2270 |
}, |
|
|
2271 |
{ |
|
|
2272 |
"cell_type": "code", |
|
|
2273 |
"execution_count": 24, |
|
|
2274 |
"metadata": {}, |
|
|
2275 |
"outputs": [], |
|
|
2276 |
"source": [ |
|
|
2277 |
"stats1['weighted'] = False\n", |
|
|
2278 |
"stats2['weighted'] = True" |
|
|
2279 |
] |
|
|
2280 |
}, |
|
|
2281 |
{ |
|
|
2282 |
"cell_type": "code", |
|
|
2283 |
"execution_count": 25, |
|
|
2284 |
"metadata": {}, |
|
|
2285 |
"outputs": [], |
|
|
2286 |
"source": [ |
|
|
2287 |
"stats = pd.concat([stats1,stats2],axis=0,sort=False)" |
|
|
2288 |
] |
|
|
2289 |
}, |
|
|
2290 |
{ |
|
|
2291 |
"cell_type": "code", |
|
|
2292 |
"execution_count": 28, |
|
|
2293 |
"metadata": {}, |
|
|
2294 |
"outputs": [], |
|
|
2295 |
"source": [ |
|
|
2296 |
"stats['name'] = [getDSName(ds) for ds in stats.dataset.values]" |
|
|
2297 |
] |
|
|
2298 |
}, |
|
|
2299 |
{ |
|
|
2300 |
"cell_type": "code", |
|
|
2301 |
"execution_count": 31, |
|
|
2302 |
"metadata": {}, |
|
|
2303 |
"outputs": [], |
|
|
2304 |
"source": [ |
|
|
2305 |
"stats['type'] = np.where(stats.ver.isin([31,32]), 'old feats, no stage1', \n", |
|
|
2306 |
" np.where(stats.ver.isin([33,34]), 'old feats, with stage1', 'new feats, with stage 1'))" |
|
|
2307 |
] |
|
|
2308 |
}, |
|
|
2309 |
{ |
|
|
2310 |
"cell_type": "code", |
|
|
2311 |
"execution_count": 41, |
|
|
2312 |
"metadata": {}, |
|
|
2313 |
"outputs": [], |
|
|
2314 |
"source": [ |
|
|
2315 |
"stats['name'] = pd.Categorical(stats['name'], \\\n", |
|
|
2316 |
" ['Densenet161_F3','se_resnext101_32x4d_F3','se_resnet101_F5','se_resnet101_focal_F5','se_resnext101_32x4d_F5'])" |
|
|
2317 |
] |
|
|
2318 |
}, |
|
|
2319 |
{ |
|
|
2320 |
"cell_type": "code", |
|
|
2321 |
"execution_count": 40, |
|
|
2322 |
"metadata": {}, |
|
|
2323 |
"outputs": [], |
|
|
2324 |
"source": [ |
|
|
2325 |
"stats['type'] = pd.Categorical(stats['type'], \\\n", |
|
|
2326 |
" ['old feats, no stage1', 'old feats, with stage1', 'new feats, with stage 1'])" |
|
|
2327 |
] |
|
|
2328 |
}, |
|
|
2329 |
{ |
|
|
2330 |
"cell_type": "code", |
|
|
2331 |
"execution_count": 44, |
|
|
2332 |
"metadata": {}, |
|
|
2333 |
"outputs": [], |
|
|
2334 |
"source": [ |
|
|
2335 |
"stats.val_loss2 = np.where(stats.val_loss2 == 0, np.nan, stats.val_loss2)\n", |
|
|
2336 |
"stats.val_w_loss2 = np.where(stats.val_w_loss2 == 0, np.nan, stats.val_w_loss2)" |
|
|
2337 |
] |
|
|
2338 |
}, |
|
|
2339 |
{ |
|
|
2340 |
"cell_type": "code", |
|
|
2341 |
"execution_count": 46, |
|
|
2342 |
"metadata": {}, |
|
|
2343 |
"outputs": [ |
|
|
2344 |
{ |
|
|
2345 |
"data": { |
|
|
2346 |
"text/html": [ |
|
|
2347 |
"<div>\n", |
|
|
2348 |
"<style scoped>\n", |
|
|
2349 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
2350 |
" vertical-align: middle;\n", |
|
|
2351 |
" }\n", |
|
|
2352 |
"\n", |
|
|
2353 |
" .dataframe tbody tr th {\n", |
|
|
2354 |
" vertical-align: top;\n", |
|
|
2355 |
" }\n", |
|
|
2356 |
"\n", |
|
|
2357 |
" .dataframe thead th {\n", |
|
|
2358 |
" text-align: right;\n", |
|
|
2359 |
" }\n", |
|
|
2360 |
"</style>\n", |
|
|
2361 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
2362 |
" <thead>\n", |
|
|
2363 |
" <tr style=\"text-align: right;\">\n", |
|
|
2364 |
" <th></th>\n", |
|
|
2365 |
" <th></th>\n", |
|
|
2366 |
" <th></th>\n", |
|
|
2367 |
" <th>val_loss</th>\n", |
|
|
2368 |
" <th>val_w_loss</th>\n", |
|
|
2369 |
" <th>val_loss2</th>\n", |
|
|
2370 |
" <th>val_w_loss2</th>\n", |
|
|
2371 |
" </tr>\n", |
|
|
2372 |
" <tr>\n", |
|
|
2373 |
" <th>name</th>\n", |
|
|
2374 |
" <th>weighted</th>\n", |
|
|
2375 |
" <th>type</th>\n", |
|
|
2376 |
" <th></th>\n", |
|
|
2377 |
" <th></th>\n", |
|
|
2378 |
" <th></th>\n", |
|
|
2379 |
" <th></th>\n", |
|
|
2380 |
" </tr>\n", |
|
|
2381 |
" </thead>\n", |
|
|
2382 |
" <tbody>\n", |
|
|
2383 |
" <tr>\n", |
|
|
2384 |
" <td rowspan=\"6\" valign=\"top\">Densenet161_F3</td>\n", |
|
|
2385 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2386 |
" <td>old feats, no stage1</td>\n", |
|
|
2387 |
" <td>0.062832</td>\n", |
|
|
2388 |
" <td>0.060421</td>\n", |
|
|
2389 |
" <td>NaN</td>\n", |
|
|
2390 |
" <td>NaN</td>\n", |
|
|
2391 |
" </tr>\n", |
|
|
2392 |
" <tr>\n", |
|
|
2393 |
" <td>old feats, with stage1</td>\n", |
|
|
2394 |
" <td>0.062216</td>\n", |
|
|
2395 |
" <td>0.059966</td>\n", |
|
|
2396 |
" <td>0.063667</td>\n", |
|
|
2397 |
" <td>0.045864</td>\n", |
|
|
2398 |
" </tr>\n", |
|
|
2399 |
" <tr>\n", |
|
|
2400 |
" <td>new feats, with stage 1</td>\n", |
|
|
2401 |
" <td>NaN</td>\n", |
|
|
2402 |
" <td>NaN</td>\n", |
|
|
2403 |
" <td>NaN</td>\n", |
|
|
2404 |
" <td>NaN</td>\n", |
|
|
2405 |
" </tr>\n", |
|
|
2406 |
" <tr>\n", |
|
|
2407 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2408 |
" <td>old feats, no stage1</td>\n", |
|
|
2409 |
" <td>NaN</td>\n", |
|
|
2410 |
" <td>NaN</td>\n", |
|
|
2411 |
" <td>NaN</td>\n", |
|
|
2412 |
" <td>NaN</td>\n", |
|
|
2413 |
" </tr>\n", |
|
|
2414 |
" <tr>\n", |
|
|
2415 |
" <td>old feats, with stage1</td>\n", |
|
|
2416 |
" <td>0.062340</td>\n", |
|
|
2417 |
" <td>0.060022</td>\n", |
|
|
2418 |
" <td>0.064111</td>\n", |
|
|
2419 |
" <td>0.045960</td>\n", |
|
|
2420 |
" </tr>\n", |
|
|
2421 |
" <tr>\n", |
|
|
2422 |
" <td>new feats, with stage 1</td>\n", |
|
|
2423 |
" <td>NaN</td>\n", |
|
|
2424 |
" <td>NaN</td>\n", |
|
|
2425 |
" <td>NaN</td>\n", |
|
|
2426 |
" <td>NaN</td>\n", |
|
|
2427 |
" </tr>\n", |
|
|
2428 |
" <tr>\n", |
|
|
2429 |
" <td rowspan=\"6\" valign=\"top\">se_resnext101_32x4d_F3</td>\n", |
|
|
2430 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2431 |
" <td>old feats, no stage1</td>\n", |
|
|
2432 |
" <td>0.062391</td>\n", |
|
|
2433 |
" <td>0.060030</td>\n", |
|
|
2434 |
" <td>NaN</td>\n", |
|
|
2435 |
" <td>NaN</td>\n", |
|
|
2436 |
" </tr>\n", |
|
|
2437 |
" <tr>\n", |
|
|
2438 |
" <td>old feats, with stage1</td>\n", |
|
|
2439 |
" <td>0.061798</td>\n", |
|
|
2440 |
" <td>0.059631</td>\n", |
|
|
2441 |
" <td>0.063498</td>\n", |
|
|
2442 |
" <td>0.045743</td>\n", |
|
|
2443 |
" </tr>\n", |
|
|
2444 |
" <tr>\n", |
|
|
2445 |
" <td>new feats, with stage 1</td>\n", |
|
|
2446 |
" <td>NaN</td>\n", |
|
|
2447 |
" <td>NaN</td>\n", |
|
|
2448 |
" <td>NaN</td>\n", |
|
|
2449 |
" <td>NaN</td>\n", |
|
|
2450 |
" </tr>\n", |
|
|
2451 |
" <tr>\n", |
|
|
2452 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2453 |
" <td>old feats, no stage1</td>\n", |
|
|
2454 |
" <td>NaN</td>\n", |
|
|
2455 |
" <td>NaN</td>\n", |
|
|
2456 |
" <td>NaN</td>\n", |
|
|
2457 |
" <td>NaN</td>\n", |
|
|
2458 |
" </tr>\n", |
|
|
2459 |
" <tr>\n", |
|
|
2460 |
" <td>old feats, with stage1</td>\n", |
|
|
2461 |
" <td>0.061991</td>\n", |
|
|
2462 |
" <td>0.059712</td>\n", |
|
|
2463 |
" <td>0.064051</td>\n", |
|
|
2464 |
" <td>0.045793</td>\n", |
|
|
2465 |
" </tr>\n", |
|
|
2466 |
" <tr>\n", |
|
|
2467 |
" <td>new feats, with stage 1</td>\n", |
|
|
2468 |
" <td>NaN</td>\n", |
|
|
2469 |
" <td>NaN</td>\n", |
|
|
2470 |
" <td>NaN</td>\n", |
|
|
2471 |
" <td>NaN</td>\n", |
|
|
2472 |
" </tr>\n", |
|
|
2473 |
" <tr>\n", |
|
|
2474 |
" <td rowspan=\"6\" valign=\"top\">se_resnet101_F5</td>\n", |
|
|
2475 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2476 |
" <td>old feats, no stage1</td>\n", |
|
|
2477 |
" <td>0.060777</td>\n", |
|
|
2478 |
" <td>0.058524</td>\n", |
|
|
2479 |
" <td>NaN</td>\n", |
|
|
2480 |
" <td>NaN</td>\n", |
|
|
2481 |
" </tr>\n", |
|
|
2482 |
" <tr>\n", |
|
|
2483 |
" <td>old feats, with stage1</td>\n", |
|
|
2484 |
" <td>0.060435</td>\n", |
|
|
2485 |
" <td>0.058339</td>\n", |
|
|
2486 |
" <td>0.062473</td>\n", |
|
|
2487 |
" <td>0.045330</td>\n", |
|
|
2488 |
" </tr>\n", |
|
|
2489 |
" <tr>\n", |
|
|
2490 |
" <td>new feats, with stage 1</td>\n", |
|
|
2491 |
" <td>0.060279</td>\n", |
|
|
2492 |
" <td>0.058197</td>\n", |
|
|
2493 |
" <td>0.059201</td>\n", |
|
|
2494 |
" <td>0.042987</td>\n", |
|
|
2495 |
" </tr>\n", |
|
|
2496 |
" <tr>\n", |
|
|
2497 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2498 |
" <td>old feats, no stage1</td>\n", |
|
|
2499 |
" <td>NaN</td>\n", |
|
|
2500 |
" <td>NaN</td>\n", |
|
|
2501 |
" <td>NaN</td>\n", |
|
|
2502 |
" <td>NaN</td>\n", |
|
|
2503 |
" </tr>\n", |
|
|
2504 |
" <tr>\n", |
|
|
2505 |
" <td>old feats, with stage1</td>\n", |
|
|
2506 |
" <td>0.060592</td>\n", |
|
|
2507 |
" <td>0.058365</td>\n", |
|
|
2508 |
" <td>0.063225</td>\n", |
|
|
2509 |
" <td>0.045505</td>\n", |
|
|
2510 |
" </tr>\n", |
|
|
2511 |
" <tr>\n", |
|
|
2512 |
" <td>new feats, with stage 1</td>\n", |
|
|
2513 |
" <td>0.060440</td>\n", |
|
|
2514 |
" <td>0.058245</td>\n", |
|
|
2515 |
" <td>0.059749</td>\n", |
|
|
2516 |
" <td>0.043057</td>\n", |
|
|
2517 |
" </tr>\n", |
|
|
2518 |
" <tr>\n", |
|
|
2519 |
" <td rowspan=\"6\" valign=\"top\">se_resnet101_focal_F5</td>\n", |
|
|
2520 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2521 |
" <td>old feats, no stage1</td>\n", |
|
|
2522 |
" <td>0.061076</td>\n", |
|
|
2523 |
" <td>0.058784</td>\n", |
|
|
2524 |
" <td>NaN</td>\n", |
|
|
2525 |
" <td>NaN</td>\n", |
|
|
2526 |
" </tr>\n", |
|
|
2527 |
" <tr>\n", |
|
|
2528 |
" <td>old feats, with stage1</td>\n", |
|
|
2529 |
" <td>0.060730</td>\n", |
|
|
2530 |
" <td>0.058572</td>\n", |
|
|
2531 |
" <td>0.062898</td>\n", |
|
|
2532 |
" <td>0.045629</td>\n", |
|
|
2533 |
" </tr>\n", |
|
|
2534 |
" <tr>\n", |
|
|
2535 |
" <td>new feats, with stage 1</td>\n", |
|
|
2536 |
" <td>NaN</td>\n", |
|
|
2537 |
" <td>NaN</td>\n", |
|
|
2538 |
" <td>NaN</td>\n", |
|
|
2539 |
" <td>NaN</td>\n", |
|
|
2540 |
" </tr>\n", |
|
|
2541 |
" <tr>\n", |
|
|
2542 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2543 |
" <td>old feats, no stage1</td>\n", |
|
|
2544 |
" <td>NaN</td>\n", |
|
|
2545 |
" <td>NaN</td>\n", |
|
|
2546 |
" <td>NaN</td>\n", |
|
|
2547 |
" <td>NaN</td>\n", |
|
|
2548 |
" </tr>\n", |
|
|
2549 |
" <tr>\n", |
|
|
2550 |
" <td>old feats, with stage1</td>\n", |
|
|
2551 |
" <td>0.060883</td>\n", |
|
|
2552 |
" <td>0.058596</td>\n", |
|
|
2553 |
" <td>0.063621</td>\n", |
|
|
2554 |
" <td>0.045774</td>\n", |
|
|
2555 |
" </tr>\n", |
|
|
2556 |
" <tr>\n", |
|
|
2557 |
" <td>new feats, with stage 1</td>\n", |
|
|
2558 |
" <td>NaN</td>\n", |
|
|
2559 |
" <td>NaN</td>\n", |
|
|
2560 |
" <td>NaN</td>\n", |
|
|
2561 |
" <td>NaN</td>\n", |
|
|
2562 |
" </tr>\n", |
|
|
2563 |
" <tr>\n", |
|
|
2564 |
" <td rowspan=\"6\" valign=\"top\">se_resnext101_32x4d_F5</td>\n", |
|
|
2565 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2566 |
" <td>old feats, no stage1</td>\n", |
|
|
2567 |
" <td>0.061160</td>\n", |
|
|
2568 |
" <td>0.058838</td>\n", |
|
|
2569 |
" <td>NaN</td>\n", |
|
|
2570 |
" <td>NaN</td>\n", |
|
|
2571 |
" </tr>\n", |
|
|
2572 |
" <tr>\n", |
|
|
2573 |
" <td>old feats, with stage1</td>\n", |
|
|
2574 |
" <td>0.060567</td>\n", |
|
|
2575 |
" <td>0.058444</td>\n", |
|
|
2576 |
" <td>0.063178</td>\n", |
|
|
2577 |
" <td>0.045846</td>\n", |
|
|
2578 |
" </tr>\n", |
|
|
2579 |
" <tr>\n", |
|
|
2580 |
" <td>new feats, with stage 1</td>\n", |
|
|
2581 |
" <td>NaN</td>\n", |
|
|
2582 |
" <td>NaN</td>\n", |
|
|
2583 |
" <td>NaN</td>\n", |
|
|
2584 |
" <td>NaN</td>\n", |
|
|
2585 |
" </tr>\n", |
|
|
2586 |
" <tr>\n", |
|
|
2587 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2588 |
" <td>old feats, no stage1</td>\n", |
|
|
2589 |
" <td>NaN</td>\n", |
|
|
2590 |
" <td>NaN</td>\n", |
|
|
2591 |
" <td>NaN</td>\n", |
|
|
2592 |
" <td>NaN</td>\n", |
|
|
2593 |
" </tr>\n", |
|
|
2594 |
" <tr>\n", |
|
|
2595 |
" <td>old feats, with stage1</td>\n", |
|
|
2596 |
" <td>0.060771</td>\n", |
|
|
2597 |
" <td>0.058518</td>\n", |
|
|
2598 |
" <td>0.063957</td>\n", |
|
|
2599 |
" <td>0.046079</td>\n", |
|
|
2600 |
" </tr>\n", |
|
|
2601 |
" <tr>\n", |
|
|
2602 |
" <td>new feats, with stage 1</td>\n", |
|
|
2603 |
" <td>NaN</td>\n", |
|
|
2604 |
" <td>NaN</td>\n", |
|
|
2605 |
" <td>NaN</td>\n", |
|
|
2606 |
" <td>NaN</td>\n", |
|
|
2607 |
" </tr>\n", |
|
|
2608 |
" </tbody>\n", |
|
|
2609 |
"</table>\n", |
|
|
2610 |
"</div>" |
|
|
2611 |
], |
|
|
2612 |
"text/plain": [ |
|
|
2613 |
" val_loss val_w_loss \\\n", |
|
|
2614 |
"name weighted type \n", |
|
|
2615 |
"Densenet161_F3 False old feats, no stage1 0.062832 0.060421 \n", |
|
|
2616 |
" old feats, with stage1 0.062216 0.059966 \n", |
|
|
2617 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2618 |
" True old feats, no stage1 NaN NaN \n", |
|
|
2619 |
" old feats, with stage1 0.062340 0.060022 \n", |
|
|
2620 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2621 |
"se_resnext101_32x4d_F3 False old feats, no stage1 0.062391 0.060030 \n", |
|
|
2622 |
" old feats, with stage1 0.061798 0.059631 \n", |
|
|
2623 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2624 |
" True old feats, no stage1 NaN NaN \n", |
|
|
2625 |
" old feats, with stage1 0.061991 0.059712 \n", |
|
|
2626 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2627 |
"se_resnet101_F5 False old feats, no stage1 0.060777 0.058524 \n", |
|
|
2628 |
" old feats, with stage1 0.060435 0.058339 \n", |
|
|
2629 |
" new feats, with stage 1 0.060279 0.058197 \n", |
|
|
2630 |
" True old feats, no stage1 NaN NaN \n", |
|
|
2631 |
" old feats, with stage1 0.060592 0.058365 \n", |
|
|
2632 |
" new feats, with stage 1 0.060440 0.058245 \n", |
|
|
2633 |
"se_resnet101_focal_F5 False old feats, no stage1 0.061076 0.058784 \n", |
|
|
2634 |
" old feats, with stage1 0.060730 0.058572 \n", |
|
|
2635 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2636 |
" True old feats, no stage1 NaN NaN \n", |
|
|
2637 |
" old feats, with stage1 0.060883 0.058596 \n", |
|
|
2638 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2639 |
"se_resnext101_32x4d_F5 False old feats, no stage1 0.061160 0.058838 \n", |
|
|
2640 |
" old feats, with stage1 0.060567 0.058444 \n", |
|
|
2641 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2642 |
" True old feats, no stage1 NaN NaN \n", |
|
|
2643 |
" old feats, with stage1 0.060771 0.058518 \n", |
|
|
2644 |
" new feats, with stage 1 NaN NaN \n", |
|
|
2645 |
"\n", |
|
|
2646 |
" val_loss2 \\\n", |
|
|
2647 |
"name weighted type \n", |
|
|
2648 |
"Densenet161_F3 False old feats, no stage1 NaN \n", |
|
|
2649 |
" old feats, with stage1 0.063667 \n", |
|
|
2650 |
" new feats, with stage 1 NaN \n", |
|
|
2651 |
" True old feats, no stage1 NaN \n", |
|
|
2652 |
" old feats, with stage1 0.064111 \n", |
|
|
2653 |
" new feats, with stage 1 NaN \n", |
|
|
2654 |
"se_resnext101_32x4d_F3 False old feats, no stage1 NaN \n", |
|
|
2655 |
" old feats, with stage1 0.063498 \n", |
|
|
2656 |
" new feats, with stage 1 NaN \n", |
|
|
2657 |
" True old feats, no stage1 NaN \n", |
|
|
2658 |
" old feats, with stage1 0.064051 \n", |
|
|
2659 |
" new feats, with stage 1 NaN \n", |
|
|
2660 |
"se_resnet101_F5 False old feats, no stage1 NaN \n", |
|
|
2661 |
" old feats, with stage1 0.062473 \n", |
|
|
2662 |
" new feats, with stage 1 0.059201 \n", |
|
|
2663 |
" True old feats, no stage1 NaN \n", |
|
|
2664 |
" old feats, with stage1 0.063225 \n", |
|
|
2665 |
" new feats, with stage 1 0.059749 \n", |
|
|
2666 |
"se_resnet101_focal_F5 False old feats, no stage1 NaN \n", |
|
|
2667 |
" old feats, with stage1 0.062898 \n", |
|
|
2668 |
" new feats, with stage 1 NaN \n", |
|
|
2669 |
" True old feats, no stage1 NaN \n", |
|
|
2670 |
" old feats, with stage1 0.063621 \n", |
|
|
2671 |
" new feats, with stage 1 NaN \n", |
|
|
2672 |
"se_resnext101_32x4d_F5 False old feats, no stage1 NaN \n", |
|
|
2673 |
" old feats, with stage1 0.063178 \n", |
|
|
2674 |
" new feats, with stage 1 NaN \n", |
|
|
2675 |
" True old feats, no stage1 NaN \n", |
|
|
2676 |
" old feats, with stage1 0.063957 \n", |
|
|
2677 |
" new feats, with stage 1 NaN \n", |
|
|
2678 |
"\n", |
|
|
2679 |
" val_w_loss2 \n", |
|
|
2680 |
"name weighted type \n", |
|
|
2681 |
"Densenet161_F3 False old feats, no stage1 NaN \n", |
|
|
2682 |
" old feats, with stage1 0.045864 \n", |
|
|
2683 |
" new feats, with stage 1 NaN \n", |
|
|
2684 |
" True old feats, no stage1 NaN \n", |
|
|
2685 |
" old feats, with stage1 0.045960 \n", |
|
|
2686 |
" new feats, with stage 1 NaN \n", |
|
|
2687 |
"se_resnext101_32x4d_F3 False old feats, no stage1 NaN \n", |
|
|
2688 |
" old feats, with stage1 0.045743 \n", |
|
|
2689 |
" new feats, with stage 1 NaN \n", |
|
|
2690 |
" True old feats, no stage1 NaN \n", |
|
|
2691 |
" old feats, with stage1 0.045793 \n", |
|
|
2692 |
" new feats, with stage 1 NaN \n", |
|
|
2693 |
"se_resnet101_F5 False old feats, no stage1 NaN \n", |
|
|
2694 |
" old feats, with stage1 0.045330 \n", |
|
|
2695 |
" new feats, with stage 1 0.042987 \n", |
|
|
2696 |
" True old feats, no stage1 NaN \n", |
|
|
2697 |
" old feats, with stage1 0.045505 \n", |
|
|
2698 |
" new feats, with stage 1 0.043057 \n", |
|
|
2699 |
"se_resnet101_focal_F5 False old feats, no stage1 NaN \n", |
|
|
2700 |
" old feats, with stage1 0.045629 \n", |
|
|
2701 |
" new feats, with stage 1 NaN \n", |
|
|
2702 |
" True old feats, no stage1 NaN \n", |
|
|
2703 |
" old feats, with stage1 0.045774 \n", |
|
|
2704 |
" new feats, with stage 1 NaN \n", |
|
|
2705 |
"se_resnext101_32x4d_F5 False old feats, no stage1 NaN \n", |
|
|
2706 |
" old feats, with stage1 0.045846 \n", |
|
|
2707 |
" new feats, with stage 1 NaN \n", |
|
|
2708 |
" True old feats, no stage1 NaN \n", |
|
|
2709 |
" old feats, with stage1 0.046079 \n", |
|
|
2710 |
" new feats, with stage 1 NaN " |
|
|
2711 |
] |
|
|
2712 |
}, |
|
|
2713 |
"execution_count": 46, |
|
|
2714 |
"metadata": {}, |
|
|
2715 |
"output_type": "execute_result" |
|
|
2716 |
} |
|
|
2717 |
], |
|
|
2718 |
"source": [ |
|
|
2719 |
"stats.groupby(['name','weighted','type'])['val_loss','val_w_loss','val_loss2','val_w_loss2'].mean()" |
|
|
2720 |
] |
|
|
2721 |
}, |
|
|
2722 |
{ |
|
|
2723 |
"cell_type": "code", |
|
|
2724 |
"execution_count": 53, |
|
|
2725 |
"metadata": { |
|
|
2726 |
"scrolled": false |
|
|
2727 |
}, |
|
|
2728 |
"outputs": [ |
|
|
2729 |
{ |
|
|
2730 |
"data": { |
|
|
2731 |
"text/html": [ |
|
|
2732 |
"<div>\n", |
|
|
2733 |
"<style scoped>\n", |
|
|
2734 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
2735 |
" vertical-align: middle;\n", |
|
|
2736 |
" }\n", |
|
|
2737 |
"\n", |
|
|
2738 |
" .dataframe tbody tr th {\n", |
|
|
2739 |
" vertical-align: top;\n", |
|
|
2740 |
" }\n", |
|
|
2741 |
"\n", |
|
|
2742 |
" .dataframe thead th {\n", |
|
|
2743 |
" text-align: right;\n", |
|
|
2744 |
" }\n", |
|
|
2745 |
"</style>\n", |
|
|
2746 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
2747 |
" <thead>\n", |
|
|
2748 |
" <tr style=\"text-align: right;\">\n", |
|
|
2749 |
" <th></th>\n", |
|
|
2750 |
" <th></th>\n", |
|
|
2751 |
" <th></th>\n", |
|
|
2752 |
" <th>val_loss</th>\n", |
|
|
2753 |
" <th>val_w_loss</th>\n", |
|
|
2754 |
" <th>val_loss2</th>\n", |
|
|
2755 |
" <th>val_w_loss2</th>\n", |
|
|
2756 |
" </tr>\n", |
|
|
2757 |
" <tr>\n", |
|
|
2758 |
" <th>fold</th>\n", |
|
|
2759 |
" <th>weighted</th>\n", |
|
|
2760 |
" <th>type</th>\n", |
|
|
2761 |
" <th></th>\n", |
|
|
2762 |
" <th></th>\n", |
|
|
2763 |
" <th></th>\n", |
|
|
2764 |
" <th></th>\n", |
|
|
2765 |
" </tr>\n", |
|
|
2766 |
" </thead>\n", |
|
|
2767 |
" <tbody>\n", |
|
|
2768 |
" <tr>\n", |
|
|
2769 |
" <td rowspan=\"6\" valign=\"top\">0</td>\n", |
|
|
2770 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2771 |
" <td>old feats, no stage1</td>\n", |
|
|
2772 |
" <td>0.059207</td>\n", |
|
|
2773 |
" <td>0.057173</td>\n", |
|
|
2774 |
" <td>NaN</td>\n", |
|
|
2775 |
" <td>NaN</td>\n", |
|
|
2776 |
" </tr>\n", |
|
|
2777 |
" <tr>\n", |
|
|
2778 |
" <td>old feats, with stage1</td>\n", |
|
|
2779 |
" <td>0.059376</td>\n", |
|
|
2780 |
" <td>0.057322</td>\n", |
|
|
2781 |
" <td>0.071586</td>\n", |
|
|
2782 |
" <td>0.052126</td>\n", |
|
|
2783 |
" </tr>\n", |
|
|
2784 |
" <tr>\n", |
|
|
2785 |
" <td>new feats, with stage 1</td>\n", |
|
|
2786 |
" <td>0.059336</td>\n", |
|
|
2787 |
" <td>0.057380</td>\n", |
|
|
2788 |
" <td>0.067331</td>\n", |
|
|
2789 |
" <td>0.048938</td>\n", |
|
|
2790 |
" </tr>\n", |
|
|
2791 |
" <tr>\n", |
|
|
2792 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2793 |
" <td>old feats, no stage1</td>\n", |
|
|
2794 |
" <td>NaN</td>\n", |
|
|
2795 |
" <td>NaN</td>\n", |
|
|
2796 |
" <td>NaN</td>\n", |
|
|
2797 |
" <td>NaN</td>\n", |
|
|
2798 |
" </tr>\n", |
|
|
2799 |
" <tr>\n", |
|
|
2800 |
" <td>old feats, with stage1</td>\n", |
|
|
2801 |
" <td>0.059580</td>\n", |
|
|
2802 |
" <td>0.057412</td>\n", |
|
|
2803 |
" <td>0.072157</td>\n", |
|
|
2804 |
" <td>0.052435</td>\n", |
|
|
2805 |
" </tr>\n", |
|
|
2806 |
" <tr>\n", |
|
|
2807 |
" <td>new feats, with stage 1</td>\n", |
|
|
2808 |
" <td>0.059511</td>\n", |
|
|
2809 |
" <td>0.057492</td>\n", |
|
|
2810 |
" <td>0.067691</td>\n", |
|
|
2811 |
" <td>0.049024</td>\n", |
|
|
2812 |
" </tr>\n", |
|
|
2813 |
" <tr>\n", |
|
|
2814 |
" <td rowspan=\"6\" valign=\"top\">1</td>\n", |
|
|
2815 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2816 |
" <td>old feats, no stage1</td>\n", |
|
|
2817 |
" <td>0.059903</td>\n", |
|
|
2818 |
" <td>0.058524</td>\n", |
|
|
2819 |
" <td>NaN</td>\n", |
|
|
2820 |
" <td>NaN</td>\n", |
|
|
2821 |
" </tr>\n", |
|
|
2822 |
" <tr>\n", |
|
|
2823 |
" <td>old feats, with stage1</td>\n", |
|
|
2824 |
" <td>0.059548</td>\n", |
|
|
2825 |
" <td>0.058294</td>\n", |
|
|
2826 |
" <td>0.055482</td>\n", |
|
|
2827 |
" <td>0.040188</td>\n", |
|
|
2828 |
" </tr>\n", |
|
|
2829 |
" <tr>\n", |
|
|
2830 |
" <td>new feats, with stage 1</td>\n", |
|
|
2831 |
" <td>0.059209</td>\n", |
|
|
2832 |
" <td>0.057936</td>\n", |
|
|
2833 |
" <td>0.052200</td>\n", |
|
|
2834 |
" <td>0.038043</td>\n", |
|
|
2835 |
" </tr>\n", |
|
|
2836 |
" <tr>\n", |
|
|
2837 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2838 |
" <td>old feats, no stage1</td>\n", |
|
|
2839 |
" <td>NaN</td>\n", |
|
|
2840 |
" <td>NaN</td>\n", |
|
|
2841 |
" <td>NaN</td>\n", |
|
|
2842 |
" <td>NaN</td>\n", |
|
|
2843 |
" </tr>\n", |
|
|
2844 |
" <tr>\n", |
|
|
2845 |
" <td>old feats, with stage1</td>\n", |
|
|
2846 |
" <td>0.059670</td>\n", |
|
|
2847 |
" <td>0.058308</td>\n", |
|
|
2848 |
" <td>0.056408</td>\n", |
|
|
2849 |
" <td>0.040399</td>\n", |
|
|
2850 |
" </tr>\n", |
|
|
2851 |
" <tr>\n", |
|
|
2852 |
" <td>new feats, with stage 1</td>\n", |
|
|
2853 |
" <td>0.059323</td>\n", |
|
|
2854 |
" <td>0.057955</td>\n", |
|
|
2855 |
" <td>0.052909</td>\n", |
|
|
2856 |
" <td>0.038214</td>\n", |
|
|
2857 |
" </tr>\n", |
|
|
2858 |
" <tr>\n", |
|
|
2859 |
" <td rowspan=\"6\" valign=\"top\">2</td>\n", |
|
|
2860 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2861 |
" <td>old feats, no stage1</td>\n", |
|
|
2862 |
" <td>0.060600</td>\n", |
|
|
2863 |
" <td>0.057828</td>\n", |
|
|
2864 |
" <td>NaN</td>\n", |
|
|
2865 |
" <td>NaN</td>\n", |
|
|
2866 |
" </tr>\n", |
|
|
2867 |
" <tr>\n", |
|
|
2868 |
" <td>old feats, with stage1</td>\n", |
|
|
2869 |
" <td>0.060391</td>\n", |
|
|
2870 |
" <td>0.057761</td>\n", |
|
|
2871 |
" <td>0.056373</td>\n", |
|
|
2872 |
" <td>0.040194</td>\n", |
|
|
2873 |
" </tr>\n", |
|
|
2874 |
" <tr>\n", |
|
|
2875 |
" <td>new feats, with stage 1</td>\n", |
|
|
2876 |
" <td>0.059884</td>\n", |
|
|
2877 |
" <td>0.057178</td>\n", |
|
|
2878 |
" <td>0.053371</td>\n", |
|
|
2879 |
" <td>0.037849</td>\n", |
|
|
2880 |
" </tr>\n", |
|
|
2881 |
" <tr>\n", |
|
|
2882 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2883 |
" <td>old feats, no stage1</td>\n", |
|
|
2884 |
" <td>NaN</td>\n", |
|
|
2885 |
" <td>NaN</td>\n", |
|
|
2886 |
" <td>NaN</td>\n", |
|
|
2887 |
" <td>NaN</td>\n", |
|
|
2888 |
" </tr>\n", |
|
|
2889 |
" <tr>\n", |
|
|
2890 |
" <td>old feats, with stage1</td>\n", |
|
|
2891 |
" <td>0.060492</td>\n", |
|
|
2892 |
" <td>0.057737</td>\n", |
|
|
2893 |
" <td>0.056875</td>\n", |
|
|
2894 |
" <td>0.040117</td>\n", |
|
|
2895 |
" </tr>\n", |
|
|
2896 |
" <tr>\n", |
|
|
2897 |
" <td>new feats, with stage 1</td>\n", |
|
|
2898 |
" <td>0.060004</td>\n", |
|
|
2899 |
" <td>0.057138</td>\n", |
|
|
2900 |
" <td>0.053842</td>\n", |
|
|
2901 |
" <td>0.037853</td>\n", |
|
|
2902 |
" </tr>\n", |
|
|
2903 |
" <tr>\n", |
|
|
2904 |
" <td rowspan=\"6\" valign=\"top\">3</td>\n", |
|
|
2905 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2906 |
" <td>old feats, no stage1</td>\n", |
|
|
2907 |
" <td>0.062978</td>\n", |
|
|
2908 |
" <td>0.060445</td>\n", |
|
|
2909 |
" <td>NaN</td>\n", |
|
|
2910 |
" <td>NaN</td>\n", |
|
|
2911 |
" </tr>\n", |
|
|
2912 |
" <tr>\n", |
|
|
2913 |
" <td>old feats, with stage1</td>\n", |
|
|
2914 |
" <td>0.062238</td>\n", |
|
|
2915 |
" <td>0.060029</td>\n", |
|
|
2916 |
" <td>0.063756</td>\n", |
|
|
2917 |
" <td>0.045654</td>\n", |
|
|
2918 |
" </tr>\n", |
|
|
2919 |
" <tr>\n", |
|
|
2920 |
" <td>new feats, with stage 1</td>\n", |
|
|
2921 |
" <td>0.062283</td>\n", |
|
|
2922 |
" <td>0.060144</td>\n", |
|
|
2923 |
" <td>0.062267</td>\n", |
|
|
2924 |
" <td>0.044982</td>\n", |
|
|
2925 |
" </tr>\n", |
|
|
2926 |
" <tr>\n", |
|
|
2927 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2928 |
" <td>old feats, no stage1</td>\n", |
|
|
2929 |
" <td>NaN</td>\n", |
|
|
2930 |
" <td>NaN</td>\n", |
|
|
2931 |
" <td>NaN</td>\n", |
|
|
2932 |
" <td>NaN</td>\n", |
|
|
2933 |
" </tr>\n", |
|
|
2934 |
" <tr>\n", |
|
|
2935 |
" <td>old feats, with stage1</td>\n", |
|
|
2936 |
" <td>0.062409</td>\n", |
|
|
2937 |
" <td>0.060069</td>\n", |
|
|
2938 |
" <td>0.064277</td>\n", |
|
|
2939 |
" <td>0.045677</td>\n", |
|
|
2940 |
" </tr>\n", |
|
|
2941 |
" <tr>\n", |
|
|
2942 |
" <td>new feats, with stage 1</td>\n", |
|
|
2943 |
" <td>0.062372</td>\n", |
|
|
2944 |
" <td>0.060134</td>\n", |
|
|
2945 |
" <td>0.062729</td>\n", |
|
|
2946 |
" <td>0.044978</td>\n", |
|
|
2947 |
" </tr>\n", |
|
|
2948 |
" <tr>\n", |
|
|
2949 |
" <td rowspan=\"6\" valign=\"top\">4</td>\n", |
|
|
2950 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
2951 |
" <td>old feats, no stage1</td>\n", |
|
|
2952 |
" <td>0.061198</td>\n", |
|
|
2953 |
" <td>0.058650</td>\n", |
|
|
2954 |
" <td>NaN</td>\n", |
|
|
2955 |
" <td>NaN</td>\n", |
|
|
2956 |
" </tr>\n", |
|
|
2957 |
" <tr>\n", |
|
|
2958 |
" <td>old feats, with stage1</td>\n", |
|
|
2959 |
" <td>0.060621</td>\n", |
|
|
2960 |
" <td>0.058288</td>\n", |
|
|
2961 |
" <td>0.065170</td>\n", |
|
|
2962 |
" <td>0.048486</td>\n", |
|
|
2963 |
" </tr>\n", |
|
|
2964 |
" <tr>\n", |
|
|
2965 |
" <td>new feats, with stage 1</td>\n", |
|
|
2966 |
" <td>0.060685</td>\n", |
|
|
2967 |
" <td>0.058348</td>\n", |
|
|
2968 |
" <td>0.060837</td>\n", |
|
|
2969 |
" <td>0.045122</td>\n", |
|
|
2970 |
" </tr>\n", |
|
|
2971 |
" <tr>\n", |
|
|
2972 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
2973 |
" <td>old feats, no stage1</td>\n", |
|
|
2974 |
" <td>NaN</td>\n", |
|
|
2975 |
" <td>NaN</td>\n", |
|
|
2976 |
" <td>NaN</td>\n", |
|
|
2977 |
" <td>NaN</td>\n", |
|
|
2978 |
" </tr>\n", |
|
|
2979 |
" <tr>\n", |
|
|
2980 |
" <td>old feats, with stage1</td>\n", |
|
|
2981 |
" <td>0.060810</td>\n", |
|
|
2982 |
" <td>0.058297</td>\n", |
|
|
2983 |
" <td>0.066407</td>\n", |
|
|
2984 |
" <td>0.048896</td>\n", |
|
|
2985 |
" </tr>\n", |
|
|
2986 |
" <tr>\n", |
|
|
2987 |
" <td>new feats, with stage 1</td>\n", |
|
|
2988 |
" <td>0.060988</td>\n", |
|
|
2989 |
" <td>0.058506</td>\n", |
|
|
2990 |
" <td>0.061573</td>\n", |
|
|
2991 |
" <td>0.045219</td>\n", |
|
|
2992 |
" </tr>\n", |
|
|
2993 |
" </tbody>\n", |
|
|
2994 |
"</table>\n", |
|
|
2995 |
"</div>" |
|
|
2996 |
], |
|
|
2997 |
"text/plain": [ |
|
|
2998 |
" val_loss val_w_loss val_loss2 \\\n", |
|
|
2999 |
"fold weighted type \n", |
|
|
3000 |
"0 False old feats, no stage1 0.059207 0.057173 NaN \n", |
|
|
3001 |
" old feats, with stage1 0.059376 0.057322 0.071586 \n", |
|
|
3002 |
" new feats, with stage 1 0.059336 0.057380 0.067331 \n", |
|
|
3003 |
" True old feats, no stage1 NaN NaN NaN \n", |
|
|
3004 |
" old feats, with stage1 0.059580 0.057412 0.072157 \n", |
|
|
3005 |
" new feats, with stage 1 0.059511 0.057492 0.067691 \n", |
|
|
3006 |
"1 False old feats, no stage1 0.059903 0.058524 NaN \n", |
|
|
3007 |
" old feats, with stage1 0.059548 0.058294 0.055482 \n", |
|
|
3008 |
" new feats, with stage 1 0.059209 0.057936 0.052200 \n", |
|
|
3009 |
" True old feats, no stage1 NaN NaN NaN \n", |
|
|
3010 |
" old feats, with stage1 0.059670 0.058308 0.056408 \n", |
|
|
3011 |
" new feats, with stage 1 0.059323 0.057955 0.052909 \n", |
|
|
3012 |
"2 False old feats, no stage1 0.060600 0.057828 NaN \n", |
|
|
3013 |
" old feats, with stage1 0.060391 0.057761 0.056373 \n", |
|
|
3014 |
" new feats, with stage 1 0.059884 0.057178 0.053371 \n", |
|
|
3015 |
" True old feats, no stage1 NaN NaN NaN \n", |
|
|
3016 |
" old feats, with stage1 0.060492 0.057737 0.056875 \n", |
|
|
3017 |
" new feats, with stage 1 0.060004 0.057138 0.053842 \n", |
|
|
3018 |
"3 False old feats, no stage1 0.062978 0.060445 NaN \n", |
|
|
3019 |
" old feats, with stage1 0.062238 0.060029 0.063756 \n", |
|
|
3020 |
" new feats, with stage 1 0.062283 0.060144 0.062267 \n", |
|
|
3021 |
" True old feats, no stage1 NaN NaN NaN \n", |
|
|
3022 |
" old feats, with stage1 0.062409 0.060069 0.064277 \n", |
|
|
3023 |
" new feats, with stage 1 0.062372 0.060134 0.062729 \n", |
|
|
3024 |
"4 False old feats, no stage1 0.061198 0.058650 NaN \n", |
|
|
3025 |
" old feats, with stage1 0.060621 0.058288 0.065170 \n", |
|
|
3026 |
" new feats, with stage 1 0.060685 0.058348 0.060837 \n", |
|
|
3027 |
" True old feats, no stage1 NaN NaN NaN \n", |
|
|
3028 |
" old feats, with stage1 0.060810 0.058297 0.066407 \n", |
|
|
3029 |
" new feats, with stage 1 0.060988 0.058506 0.061573 \n", |
|
|
3030 |
"\n", |
|
|
3031 |
" val_w_loss2 \n", |
|
|
3032 |
"fold weighted type \n", |
|
|
3033 |
"0 False old feats, no stage1 NaN \n", |
|
|
3034 |
" old feats, with stage1 0.052126 \n", |
|
|
3035 |
" new feats, with stage 1 0.048938 \n", |
|
|
3036 |
" True old feats, no stage1 NaN \n", |
|
|
3037 |
" old feats, with stage1 0.052435 \n", |
|
|
3038 |
" new feats, with stage 1 0.049024 \n", |
|
|
3039 |
"1 False old feats, no stage1 NaN \n", |
|
|
3040 |
" old feats, with stage1 0.040188 \n", |
|
|
3041 |
" new feats, with stage 1 0.038043 \n", |
|
|
3042 |
" True old feats, no stage1 NaN \n", |
|
|
3043 |
" old feats, with stage1 0.040399 \n", |
|
|
3044 |
" new feats, with stage 1 0.038214 \n", |
|
|
3045 |
"2 False old feats, no stage1 NaN \n", |
|
|
3046 |
" old feats, with stage1 0.040194 \n", |
|
|
3047 |
" new feats, with stage 1 0.037849 \n", |
|
|
3048 |
" True old feats, no stage1 NaN \n", |
|
|
3049 |
" old feats, with stage1 0.040117 \n", |
|
|
3050 |
" new feats, with stage 1 0.037853 \n", |
|
|
3051 |
"3 False old feats, no stage1 NaN \n", |
|
|
3052 |
" old feats, with stage1 0.045654 \n", |
|
|
3053 |
" new feats, with stage 1 0.044982 \n", |
|
|
3054 |
" True old feats, no stage1 NaN \n", |
|
|
3055 |
" old feats, with stage1 0.045677 \n", |
|
|
3056 |
" new feats, with stage 1 0.044978 \n", |
|
|
3057 |
"4 False old feats, no stage1 NaN \n", |
|
|
3058 |
" old feats, with stage1 0.048486 \n", |
|
|
3059 |
" new feats, with stage 1 0.045122 \n", |
|
|
3060 |
" True old feats, no stage1 NaN \n", |
|
|
3061 |
" old feats, with stage1 0.048896 \n", |
|
|
3062 |
" new feats, with stage 1 0.045219 " |
|
|
3063 |
] |
|
|
3064 |
}, |
|
|
3065 |
"execution_count": 53, |
|
|
3066 |
"metadata": {}, |
|
|
3067 |
"output_type": "execute_result" |
|
|
3068 |
} |
|
|
3069 |
], |
|
|
3070 |
"source": [ |
|
|
3071 |
"stats.loc[stats.name=='se_resnet101_F5']\\\n", |
|
|
3072 |
" .groupby(['fold','weighted','type'])['val_loss','val_w_loss','val_loss2','val_w_loss2'].mean()" |
|
|
3073 |
] |
|
|
3074 |
}, |
|
|
3075 |
{ |
|
|
3076 |
"cell_type": "code", |
|
|
3077 |
"execution_count": null, |
|
|
3078 |
"metadata": {}, |
|
|
3079 |
"outputs": [], |
|
|
3080 |
"source": [] |
|
|
3081 |
}, |
|
|
3082 |
{ |
|
|
3083 |
"cell_type": "code", |
|
|
3084 |
"execution_count": null, |
|
|
3085 |
"metadata": {}, |
|
|
3086 |
"outputs": [], |
|
|
3087 |
"source": [] |
|
|
3088 |
}, |
|
|
3089 |
{ |
|
|
3090 |
"cell_type": "code", |
|
|
3091 |
"execution_count": 21, |
|
|
3092 |
"metadata": {}, |
|
|
3093 |
"outputs": [ |
|
|
3094 |
{ |
|
|
3095 |
"data": { |
|
|
3096 |
"text/plain": [ |
|
|
3097 |
"14 5\n", |
|
|
3098 |
"13 5\n", |
|
|
3099 |
"12 5\n", |
|
|
3100 |
"11 5\n", |
|
|
3101 |
"9 3\n", |
|
|
3102 |
"7 3\n", |
|
|
3103 |
"Name: dataset, dtype: int64" |
|
|
3104 |
] |
|
|
3105 |
}, |
|
|
3106 |
"execution_count": 21, |
|
|
3107 |
"metadata": {}, |
|
|
3108 |
"output_type": "execute_result" |
|
|
3109 |
} |
|
|
3110 |
], |
|
|
3111 |
"source": [ |
|
|
3112 |
"stats2.dataset.value_counts()" |
|
|
3113 |
] |
|
|
3114 |
}, |
|
|
3115 |
{ |
|
|
3116 |
"cell_type": "code", |
|
|
3117 |
"execution_count": 39, |
|
|
3118 |
"metadata": {}, |
|
|
3119 |
"outputs": [], |
|
|
3120 |
"source": [ |
|
|
3121 |
"stats2 = pd.concat([pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(0,32)),\n", |
|
|
3122 |
" pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(1,32)),\n", |
|
|
3123 |
" pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(0,34)),\n", |
|
|
3124 |
" pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(1,34))], axis=0, sort=False)" |
|
|
3125 |
] |
|
|
3126 |
}, |
|
|
3127 |
{ |
|
|
3128 |
"cell_type": "code", |
|
|
3129 |
"execution_count": 40, |
|
|
3130 |
"metadata": {}, |
|
|
3131 |
"outputs": [ |
|
|
3132 |
{ |
|
|
3133 |
"data": { |
|
|
3134 |
"text/html": [ |
|
|
3135 |
"<div>\n", |
|
|
3136 |
"<style scoped>\n", |
|
|
3137 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
3138 |
" vertical-align: middle;\n", |
|
|
3139 |
" }\n", |
|
|
3140 |
"\n", |
|
|
3141 |
" .dataframe tbody tr th {\n", |
|
|
3142 |
" vertical-align: top;\n", |
|
|
3143 |
" }\n", |
|
|
3144 |
"\n", |
|
|
3145 |
" .dataframe thead th {\n", |
|
|
3146 |
" text-align: right;\n", |
|
|
3147 |
" }\n", |
|
|
3148 |
"</style>\n", |
|
|
3149 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
3150 |
" <thead>\n", |
|
|
3151 |
" <tr style=\"text-align: right;\">\n", |
|
|
3152 |
" <th></th>\n", |
|
|
3153 |
" <th>dataset</th>\n", |
|
|
3154 |
" <th>epoch</th>\n", |
|
|
3155 |
" <th>fold</th>\n", |
|
|
3156 |
" <th>train_loss</th>\n", |
|
|
3157 |
" <th>val_loss</th>\n", |
|
|
3158 |
" <th>val_w_loss</th>\n", |
|
|
3159 |
" <th>val_loss2</th>\n", |
|
|
3160 |
" <th>val_w_loss2</th>\n", |
|
|
3161 |
" <th>cor</th>\n", |
|
|
3162 |
" <th>any</th>\n", |
|
|
3163 |
" <th>epidural</th>\n", |
|
|
3164 |
" <th>intraparenchymal</th>\n", |
|
|
3165 |
" <th>intraventricular</th>\n", |
|
|
3166 |
" <th>subarachnoid</th>\n", |
|
|
3167 |
" <th>subdural</th>\n", |
|
|
3168 |
" <th>train_sz</th>\n", |
|
|
3169 |
" <th>val_sz</th>\n", |
|
|
3170 |
" <th>bs</th>\n", |
|
|
3171 |
" <th>train_time</th>\n", |
|
|
3172 |
" <th>valid_time</th>\n", |
|
|
3173 |
" <th>lr</th>\n", |
|
|
3174 |
" <th>wd</th>\n", |
|
|
3175 |
" <th>ver</th>\n", |
|
|
3176 |
" </tr>\n", |
|
|
3177 |
" </thead>\n", |
|
|
3178 |
" <tbody>\n", |
|
|
3179 |
" <tr>\n", |
|
|
3180 |
" <td>12</td>\n", |
|
|
3181 |
" <td>12</td>\n", |
|
|
3182 |
" <td>13</td>\n", |
|
|
3183 |
" <td>0</td>\n", |
|
|
3184 |
" <td>0.031208</td>\n", |
|
|
3185 |
" <td>0.059201</td>\n", |
|
|
3186 |
" <td>0.064282</td>\n", |
|
|
3187 |
" <td>0.000000</td>\n", |
|
|
3188 |
" <td>0.0</td>\n", |
|
|
3189 |
" <td>0.847493</td>\n", |
|
|
3190 |
" <td>0.094982</td>\n", |
|
|
3191 |
" <td>0.014763</td>\n", |
|
|
3192 |
" <td>0.044750</td>\n", |
|
|
3193 |
" <td>0.025390</td>\n", |
|
|
3194 |
" <td>0.064951</td>\n", |
|
|
3195 |
" <td>0.074592</td>\n", |
|
|
3196 |
" <td>15619</td>\n", |
|
|
3197 |
" <td>3936</td>\n", |
|
|
3198 |
" <td>32</td>\n", |
|
|
3199 |
" <td>85.367667</td>\n", |
|
|
3200 |
" <td>10.784082</td>\n", |
|
|
3201 |
" <td>0.000002</td>\n", |
|
|
3202 |
" <td>0.0001</td>\n", |
|
|
3203 |
" <td>NaN</td>\n", |
|
|
3204 |
" </tr>\n", |
|
|
3205 |
" <tr>\n", |
|
|
3206 |
" <td>25</td>\n", |
|
|
3207 |
" <td>14</td>\n", |
|
|
3208 |
" <td>13</td>\n", |
|
|
3209 |
" <td>0</td>\n", |
|
|
3210 |
" <td>0.031730</td>\n", |
|
|
3211 |
" <td>0.059394</td>\n", |
|
|
3212 |
" <td>0.063065</td>\n", |
|
|
3213 |
" <td>0.067177</td>\n", |
|
|
3214 |
" <td>NaN</td>\n", |
|
|
3215 |
" <td>0.847427</td>\n", |
|
|
3216 |
" <td>0.114279</td>\n", |
|
|
3217 |
" <td>0.018076</td>\n", |
|
|
3218 |
" <td>0.043661</td>\n", |
|
|
3219 |
" <td>0.037124</td>\n", |
|
|
3220 |
" <td>0.069421</td>\n", |
|
|
3221 |
" <td>0.073398</td>\n", |
|
|
3222 |
" <td>17369</td>\n", |
|
|
3223 |
" <td>4384</td>\n", |
|
|
3224 |
" <td>32</td>\n", |
|
|
3225 |
" <td>101.753389</td>\n", |
|
|
3226 |
" <td>11.775365</td>\n", |
|
|
3227 |
" <td>0.000002</td>\n", |
|
|
3228 |
" <td>0.0001</td>\n", |
|
|
3229 |
" <td>NaN</td>\n", |
|
|
3230 |
" </tr>\n", |
|
|
3231 |
" <tr>\n", |
|
|
3232 |
" <td>12</td>\n", |
|
|
3233 |
" <td>12</td>\n", |
|
|
3234 |
" <td>13</td>\n", |
|
|
3235 |
" <td>1</td>\n", |
|
|
3236 |
" <td>0.031786</td>\n", |
|
|
3237 |
" <td>0.059864</td>\n", |
|
|
3238 |
" <td>0.058066</td>\n", |
|
|
3239 |
" <td>0.000000</td>\n", |
|
|
3240 |
" <td>0.0</td>\n", |
|
|
3241 |
" <td>0.846629</td>\n", |
|
|
3242 |
" <td>0.097579</td>\n", |
|
|
3243 |
" <td>0.014948</td>\n", |
|
|
3244 |
" <td>0.040305</td>\n", |
|
|
3245 |
" <td>0.025653</td>\n", |
|
|
3246 |
" <td>0.063112</td>\n", |
|
|
3247 |
" <td>0.079871</td>\n", |
|
|
3248 |
" <td>15699</td>\n", |
|
|
3249 |
" <td>3840</td>\n", |
|
|
3250 |
" <td>32</td>\n", |
|
|
3251 |
" <td>85.526455</td>\n", |
|
|
3252 |
" <td>10.400574</td>\n", |
|
|
3253 |
" <td>0.000002</td>\n", |
|
|
3254 |
" <td>0.0001</td>\n", |
|
|
3255 |
" <td>NaN</td>\n", |
|
|
3256 |
" </tr>\n", |
|
|
3257 |
" <tr>\n", |
|
|
3258 |
" <td>25</td>\n", |
|
|
3259 |
" <td>14</td>\n", |
|
|
3260 |
" <td>13</td>\n", |
|
|
3261 |
" <td>1</td>\n", |
|
|
3262 |
" <td>0.031240</td>\n", |
|
|
3263 |
" <td>0.059391</td>\n", |
|
|
3264 |
" <td>0.057033</td>\n", |
|
|
3265 |
" <td>0.052189</td>\n", |
|
|
3266 |
" <td>NaN</td>\n", |
|
|
3267 |
" <td>0.848211</td>\n", |
|
|
3268 |
" <td>0.093570</td>\n", |
|
|
3269 |
" <td>0.009090</td>\n", |
|
|
3270 |
" <td>0.037775</td>\n", |
|
|
3271 |
" <td>0.020982</td>\n", |
|
|
3272 |
" <td>0.047940</td>\n", |
|
|
3273 |
" <td>0.062393</td>\n", |
|
|
3274 |
" <td>17468</td>\n", |
|
|
3275 |
" <td>4288</td>\n", |
|
|
3276 |
" <td>32</td>\n", |
|
|
3277 |
" <td>103.794368</td>\n", |
|
|
3278 |
" <td>11.503633</td>\n", |
|
|
3279 |
" <td>0.000002</td>\n", |
|
|
3280 |
" <td>0.0001</td>\n", |
|
|
3281 |
" <td>NaN</td>\n", |
|
|
3282 |
" </tr>\n", |
|
|
3283 |
" <tr>\n", |
|
|
3284 |
" <td>12</td>\n", |
|
|
3285 |
" <td>12</td>\n", |
|
|
3286 |
" <td>13</td>\n", |
|
|
3287 |
" <td>0</td>\n", |
|
|
3288 |
" <td>0.031486</td>\n", |
|
|
3289 |
" <td>0.059400</td>\n", |
|
|
3290 |
" <td>0.063696</td>\n", |
|
|
3291 |
" <td>0.071379</td>\n", |
|
|
3292 |
" <td>NaN</td>\n", |
|
|
3293 |
" <td>0.847333</td>\n", |
|
|
3294 |
" <td>0.121656</td>\n", |
|
|
3295 |
" <td>0.020638</td>\n", |
|
|
3296 |
" <td>0.043654</td>\n", |
|
|
3297 |
" <td>0.038130</td>\n", |
|
|
3298 |
" <td>0.077089</td>\n", |
|
|
3299 |
" <td>0.076830</td>\n", |
|
|
3300 |
" <td>17369</td>\n", |
|
|
3301 |
" <td>4384</td>\n", |
|
|
3302 |
" <td>32</td>\n", |
|
|
3303 |
" <td>99.668121</td>\n", |
|
|
3304 |
" <td>10.958552</td>\n", |
|
|
3305 |
" <td>0.000002</td>\n", |
|
|
3306 |
" <td>0.0001</td>\n", |
|
|
3307 |
" <td>33.0</td>\n", |
|
|
3308 |
" </tr>\n", |
|
|
3309 |
" <tr>\n", |
|
|
3310 |
" <td>12</td>\n", |
|
|
3311 |
" <td>12</td>\n", |
|
|
3312 |
" <td>13</td>\n", |
|
|
3313 |
" <td>1</td>\n", |
|
|
3314 |
" <td>0.032215</td>\n", |
|
|
3315 |
" <td>0.059726</td>\n", |
|
|
3316 |
" <td>0.057000</td>\n", |
|
|
3317 |
" <td>0.055488</td>\n", |
|
|
3318 |
" <td>NaN</td>\n", |
|
|
3319 |
" <td>0.846940</td>\n", |
|
|
3320 |
" <td>0.098863</td>\n", |
|
|
3321 |
" <td>0.011571</td>\n", |
|
|
3322 |
" <td>0.040962</td>\n", |
|
|
3323 |
" <td>0.021129</td>\n", |
|
|
3324 |
" <td>0.051022</td>\n", |
|
|
3325 |
" <td>0.066003</td>\n", |
|
|
3326 |
" <td>17468</td>\n", |
|
|
3327 |
" <td>4288</td>\n", |
|
|
3328 |
" <td>32</td>\n", |
|
|
3329 |
" <td>103.740352</td>\n", |
|
|
3330 |
" <td>10.660052</td>\n", |
|
|
3331 |
" <td>0.000002</td>\n", |
|
|
3332 |
" <td>0.0001</td>\n", |
|
|
3333 |
" <td>33.0</td>\n", |
|
|
3334 |
" </tr>\n", |
|
|
3335 |
" </tbody>\n", |
|
|
3336 |
"</table>\n", |
|
|
3337 |
"</div>" |
|
|
3338 |
], |
|
|
3339 |
"text/plain": [ |
|
|
3340 |
" dataset epoch fold train_loss val_loss val_w_loss val_loss2 \\\n", |
|
|
3341 |
"12 12 13 0 0.031208 0.059201 0.064282 0.000000 \n", |
|
|
3342 |
"25 14 13 0 0.031730 0.059394 0.063065 0.067177 \n", |
|
|
3343 |
"12 12 13 1 0.031786 0.059864 0.058066 0.000000 \n", |
|
|
3344 |
"25 14 13 1 0.031240 0.059391 0.057033 0.052189 \n", |
|
|
3345 |
"12 12 13 0 0.031486 0.059400 0.063696 0.071379 \n", |
|
|
3346 |
"12 12 13 1 0.032215 0.059726 0.057000 0.055488 \n", |
|
|
3347 |
"\n", |
|
|
3348 |
" val_w_loss2 cor any epidural intraparenchymal \\\n", |
|
|
3349 |
"12 0.0 0.847493 0.094982 0.014763 0.044750 \n", |
|
|
3350 |
"25 NaN 0.847427 0.114279 0.018076 0.043661 \n", |
|
|
3351 |
"12 0.0 0.846629 0.097579 0.014948 0.040305 \n", |
|
|
3352 |
"25 NaN 0.848211 0.093570 0.009090 0.037775 \n", |
|
|
3353 |
"12 NaN 0.847333 0.121656 0.020638 0.043654 \n", |
|
|
3354 |
"12 NaN 0.846940 0.098863 0.011571 0.040962 \n", |
|
|
3355 |
"\n", |
|
|
3356 |
" intraventricular subarachnoid subdural train_sz val_sz bs \\\n", |
|
|
3357 |
"12 0.025390 0.064951 0.074592 15619 3936 32 \n", |
|
|
3358 |
"25 0.037124 0.069421 0.073398 17369 4384 32 \n", |
|
|
3359 |
"12 0.025653 0.063112 0.079871 15699 3840 32 \n", |
|
|
3360 |
"25 0.020982 0.047940 0.062393 17468 4288 32 \n", |
|
|
3361 |
"12 0.038130 0.077089 0.076830 17369 4384 32 \n", |
|
|
3362 |
"12 0.021129 0.051022 0.066003 17468 4288 32 \n", |
|
|
3363 |
"\n", |
|
|
3364 |
" train_time valid_time lr wd ver \n", |
|
|
3365 |
"12 85.367667 10.784082 0.000002 0.0001 NaN \n", |
|
|
3366 |
"25 101.753389 11.775365 0.000002 0.0001 NaN \n", |
|
|
3367 |
"12 85.526455 10.400574 0.000002 0.0001 NaN \n", |
|
|
3368 |
"25 103.794368 11.503633 0.000002 0.0001 NaN \n", |
|
|
3369 |
"12 99.668121 10.958552 0.000002 0.0001 33.0 \n", |
|
|
3370 |
"12 103.740352 10.660052 0.000002 0.0001 33.0 " |
|
|
3371 |
] |
|
|
3372 |
}, |
|
|
3373 |
"execution_count": 40, |
|
|
3374 |
"metadata": {}, |
|
|
3375 |
"output_type": "execute_result" |
|
|
3376 |
} |
|
|
3377 |
], |
|
|
3378 |
"source": [ |
|
|
3379 |
"stats1.loc[stats1.epoch == 13]" |
|
|
3380 |
] |
|
|
3381 |
}, |
|
|
3382 |
{ |
|
|
3383 |
"cell_type": "code", |
|
|
3384 |
"execution_count": 41, |
|
|
3385 |
"metadata": {}, |
|
|
3386 |
"outputs": [ |
|
|
3387 |
{ |
|
|
3388 |
"data": { |
|
|
3389 |
"text/html": [ |
|
|
3390 |
"<div>\n", |
|
|
3391 |
"<style scoped>\n", |
|
|
3392 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
3393 |
" vertical-align: middle;\n", |
|
|
3394 |
" }\n", |
|
|
3395 |
"\n", |
|
|
3396 |
" .dataframe tbody tr th {\n", |
|
|
3397 |
" vertical-align: top;\n", |
|
|
3398 |
" }\n", |
|
|
3399 |
"\n", |
|
|
3400 |
" .dataframe thead th {\n", |
|
|
3401 |
" text-align: right;\n", |
|
|
3402 |
" }\n", |
|
|
3403 |
"</style>\n", |
|
|
3404 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
3405 |
" <thead>\n", |
|
|
3406 |
" <tr style=\"text-align: right;\">\n", |
|
|
3407 |
" <th></th>\n", |
|
|
3408 |
" <th>dataset</th>\n", |
|
|
3409 |
" <th>epoch</th>\n", |
|
|
3410 |
" <th>fold</th>\n", |
|
|
3411 |
" <th>train_loss</th>\n", |
|
|
3412 |
" <th>val_loss</th>\n", |
|
|
3413 |
" <th>val_w_loss</th>\n", |
|
|
3414 |
" <th>val_loss2</th>\n", |
|
|
3415 |
" <th>val_w_loss2</th>\n", |
|
|
3416 |
" <th>cor</th>\n", |
|
|
3417 |
" <th>any</th>\n", |
|
|
3418 |
" <th>epidural</th>\n", |
|
|
3419 |
" <th>intraparenchymal</th>\n", |
|
|
3420 |
" <th>intraventricular</th>\n", |
|
|
3421 |
" <th>subarachnoid</th>\n", |
|
|
3422 |
" <th>subdural</th>\n", |
|
|
3423 |
" <th>train_sz</th>\n", |
|
|
3424 |
" <th>val_sz</th>\n", |
|
|
3425 |
" <th>bs</th>\n", |
|
|
3426 |
" <th>train_time</th>\n", |
|
|
3427 |
" <th>valid_time</th>\n", |
|
|
3428 |
" <th>lr</th>\n", |
|
|
3429 |
" <th>wd</th>\n", |
|
|
3430 |
" <th>ver</th>\n", |
|
|
3431 |
" </tr>\n", |
|
|
3432 |
" </thead>\n", |
|
|
3433 |
" <tbody>\n", |
|
|
3434 |
" <tr>\n", |
|
|
3435 |
" <td>5</td>\n", |
|
|
3436 |
" <td>14</td>\n", |
|
|
3437 |
" <td>3</td>\n", |
|
|
3438 |
" <td>0</td>\n", |
|
|
3439 |
" <td>0.032872</td>\n", |
|
|
3440 |
" <td>0.061043</td>\n", |
|
|
3441 |
" <td>0.062914</td>\n", |
|
|
3442 |
" <td>0.066554</td>\n", |
|
|
3443 |
" <td>0.066554</td>\n", |
|
|
3444 |
" <td>0.844028</td>\n", |
|
|
3445 |
" <td>0.112525</td>\n", |
|
|
3446 |
" <td>0.017403</td>\n", |
|
|
3447 |
" <td>0.044180</td>\n", |
|
|
3448 |
" <td>0.036614</td>\n", |
|
|
3449 |
" <td>0.069472</td>\n", |
|
|
3450 |
" <td>0.073159</td>\n", |
|
|
3451 |
" <td>17369</td>\n", |
|
|
3452 |
" <td>4384</td>\n", |
|
|
3453 |
" <td>32</td>\n", |
|
|
3454 |
" <td>101.308828</td>\n", |
|
|
3455 |
" <td>11.260699</td>\n", |
|
|
3456 |
" <td>0.000005</td>\n", |
|
|
3457 |
" <td>0.0001</td>\n", |
|
|
3458 |
" <td>NaN</td>\n", |
|
|
3459 |
" </tr>\n", |
|
|
3460 |
" <tr>\n", |
|
|
3461 |
" <td>8</td>\n", |
|
|
3462 |
" <td>12</td>\n", |
|
|
3463 |
" <td>3</td>\n", |
|
|
3464 |
" <td>0</td>\n", |
|
|
3465 |
" <td>0.033388</td>\n", |
|
|
3466 |
" <td>0.060360</td>\n", |
|
|
3467 |
" <td>0.063039</td>\n", |
|
|
3468 |
" <td>0.000000</td>\n", |
|
|
3469 |
" <td>0.000000</td>\n", |
|
|
3470 |
" <td>0.845265</td>\n", |
|
|
3471 |
" <td>0.097268</td>\n", |
|
|
3472 |
" <td>0.014937</td>\n", |
|
|
3473 |
" <td>0.045735</td>\n", |
|
|
3474 |
" <td>0.025769</td>\n", |
|
|
3475 |
" <td>0.065610</td>\n", |
|
|
3476 |
" <td>0.075936</td>\n", |
|
|
3477 |
" <td>15619</td>\n", |
|
|
3478 |
" <td>3936</td>\n", |
|
|
3479 |
" <td>32</td>\n", |
|
|
3480 |
" <td>82.747959</td>\n", |
|
|
3481 |
" <td>10.047513</td>\n", |
|
|
3482 |
" <td>0.000005</td>\n", |
|
|
3483 |
" <td>0.0001</td>\n", |
|
|
3484 |
" <td>NaN</td>\n", |
|
|
3485 |
" </tr>\n", |
|
|
3486 |
" <tr>\n", |
|
|
3487 |
" <td>2</td>\n", |
|
|
3488 |
" <td>14</td>\n", |
|
|
3489 |
" <td>3</td>\n", |
|
|
3490 |
" <td>1</td>\n", |
|
|
3491 |
" <td>0.033915</td>\n", |
|
|
3492 |
" <td>0.060324</td>\n", |
|
|
3493 |
" <td>0.056965</td>\n", |
|
|
3494 |
" <td>0.051529</td>\n", |
|
|
3495 |
" <td>0.051529</td>\n", |
|
|
3496 |
" <td>0.846076</td>\n", |
|
|
3497 |
" <td>0.091700</td>\n", |
|
|
3498 |
" <td>0.009166</td>\n", |
|
|
3499 |
" <td>0.037559</td>\n", |
|
|
3500 |
" <td>0.020248</td>\n", |
|
|
3501 |
" <td>0.049256</td>\n", |
|
|
3502 |
" <td>0.061077</td>\n", |
|
|
3503 |
" <td>17468</td>\n", |
|
|
3504 |
" <td>4288</td>\n", |
|
|
3505 |
" <td>32</td>\n", |
|
|
3506 |
" <td>102.540290</td>\n", |
|
|
3507 |
" <td>10.781580</td>\n", |
|
|
3508 |
" <td>0.000005</td>\n", |
|
|
3509 |
" <td>0.0001</td>\n", |
|
|
3510 |
" <td>NaN</td>\n", |
|
|
3511 |
" </tr>\n", |
|
|
3512 |
" <tr>\n", |
|
|
3513 |
" <td>5</td>\n", |
|
|
3514 |
" <td>12</td>\n", |
|
|
3515 |
" <td>3</td>\n", |
|
|
3516 |
" <td>1</td>\n", |
|
|
3517 |
" <td>0.034051</td>\n", |
|
|
3518 |
" <td>0.060730</td>\n", |
|
|
3519 |
" <td>0.057532</td>\n", |
|
|
3520 |
" <td>0.000000</td>\n", |
|
|
3521 |
" <td>0.000000</td>\n", |
|
|
3522 |
" <td>0.844560</td>\n", |
|
|
3523 |
" <td>0.099342</td>\n", |
|
|
3524 |
" <td>0.015278</td>\n", |
|
|
3525 |
" <td>0.041240</td>\n", |
|
|
3526 |
" <td>0.025786</td>\n", |
|
|
3527 |
" <td>0.063912</td>\n", |
|
|
3528 |
" <td>0.080207</td>\n", |
|
|
3529 |
" <td>15699</td>\n", |
|
|
3530 |
" <td>3840</td>\n", |
|
|
3531 |
" <td>32</td>\n", |
|
|
3532 |
" <td>83.967543</td>\n", |
|
|
3533 |
" <td>9.888514</td>\n", |
|
|
3534 |
" <td>0.000005</td>\n", |
|
|
3535 |
" <td>0.0001</td>\n", |
|
|
3536 |
" <td>NaN</td>\n", |
|
|
3537 |
" </tr>\n", |
|
|
3538 |
" <tr>\n", |
|
|
3539 |
" <td>2</td>\n", |
|
|
3540 |
" <td>12</td>\n", |
|
|
3541 |
" <td>3</td>\n", |
|
|
3542 |
" <td>0</td>\n", |
|
|
3543 |
" <td>0.032771</td>\n", |
|
|
3544 |
" <td>0.060638</td>\n", |
|
|
3545 |
" <td>0.063045</td>\n", |
|
|
3546 |
" <td>0.070920</td>\n", |
|
|
3547 |
" <td>0.070920</td>\n", |
|
|
3548 |
" <td>0.844702</td>\n", |
|
|
3549 |
" <td>0.120086</td>\n", |
|
|
3550 |
" <td>0.020185</td>\n", |
|
|
3551 |
" <td>0.044548</td>\n", |
|
|
3552 |
" <td>0.037809</td>\n", |
|
|
3553 |
" <td>0.075970</td>\n", |
|
|
3554 |
" <td>0.077759</td>\n", |
|
|
3555 |
" <td>17369</td>\n", |
|
|
3556 |
" <td>4384</td>\n", |
|
|
3557 |
" <td>32</td>\n", |
|
|
3558 |
" <td>95.730286</td>\n", |
|
|
3559 |
" <td>10.998797</td>\n", |
|
|
3560 |
" <td>0.000005</td>\n", |
|
|
3561 |
" <td>0.0001</td>\n", |
|
|
3562 |
" <td>34.0</td>\n", |
|
|
3563 |
" </tr>\n", |
|
|
3564 |
" <tr>\n", |
|
|
3565 |
" <td>2</td>\n", |
|
|
3566 |
" <td>12</td>\n", |
|
|
3567 |
" <td>3</td>\n", |
|
|
3568 |
" <td>1</td>\n", |
|
|
3569 |
" <td>0.035328</td>\n", |
|
|
3570 |
" <td>0.060772</td>\n", |
|
|
3571 |
" <td>0.057422</td>\n", |
|
|
3572 |
" <td>0.055588</td>\n", |
|
|
3573 |
" <td>0.055588</td>\n", |
|
|
3574 |
" <td>0.844641</td>\n", |
|
|
3575 |
" <td>0.098851</td>\n", |
|
|
3576 |
" <td>0.011631</td>\n", |
|
|
3577 |
" <td>0.040441</td>\n", |
|
|
3578 |
" <td>0.021717</td>\n", |
|
|
3579 |
" <td>0.052439</td>\n", |
|
|
3580 |
" <td>0.065184</td>\n", |
|
|
3581 |
" <td>17468</td>\n", |
|
|
3582 |
" <td>4288</td>\n", |
|
|
3583 |
" <td>32</td>\n", |
|
|
3584 |
" <td>98.306380</td>\n", |
|
|
3585 |
" <td>10.804755</td>\n", |
|
|
3586 |
" <td>0.000005</td>\n", |
|
|
3587 |
" <td>0.0001</td>\n", |
|
|
3588 |
" <td>34.0</td>\n", |
|
|
3589 |
" </tr>\n", |
|
|
3590 |
" </tbody>\n", |
|
|
3591 |
"</table>\n", |
|
|
3592 |
"</div>" |
|
|
3593 |
], |
|
|
3594 |
"text/plain": [ |
|
|
3595 |
" dataset epoch fold train_loss val_loss val_w_loss val_loss2 \\\n", |
|
|
3596 |
"5 14 3 0 0.032872 0.061043 0.062914 0.066554 \n", |
|
|
3597 |
"8 12 3 0 0.033388 0.060360 0.063039 0.000000 \n", |
|
|
3598 |
"2 14 3 1 0.033915 0.060324 0.056965 0.051529 \n", |
|
|
3599 |
"5 12 3 1 0.034051 0.060730 0.057532 0.000000 \n", |
|
|
3600 |
"2 12 3 0 0.032771 0.060638 0.063045 0.070920 \n", |
|
|
3601 |
"2 12 3 1 0.035328 0.060772 0.057422 0.055588 \n", |
|
|
3602 |
"\n", |
|
|
3603 |
" val_w_loss2 cor any epidural intraparenchymal \\\n", |
|
|
3604 |
"5 0.066554 0.844028 0.112525 0.017403 0.044180 \n", |
|
|
3605 |
"8 0.000000 0.845265 0.097268 0.014937 0.045735 \n", |
|
|
3606 |
"2 0.051529 0.846076 0.091700 0.009166 0.037559 \n", |
|
|
3607 |
"5 0.000000 0.844560 0.099342 0.015278 0.041240 \n", |
|
|
3608 |
"2 0.070920 0.844702 0.120086 0.020185 0.044548 \n", |
|
|
3609 |
"2 0.055588 0.844641 0.098851 0.011631 0.040441 \n", |
|
|
3610 |
"\n", |
|
|
3611 |
" intraventricular subarachnoid subdural train_sz val_sz bs train_time \\\n", |
|
|
3612 |
"5 0.036614 0.069472 0.073159 17369 4384 32 101.308828 \n", |
|
|
3613 |
"8 0.025769 0.065610 0.075936 15619 3936 32 82.747959 \n", |
|
|
3614 |
"2 0.020248 0.049256 0.061077 17468 4288 32 102.540290 \n", |
|
|
3615 |
"5 0.025786 0.063912 0.080207 15699 3840 32 83.967543 \n", |
|
|
3616 |
"2 0.037809 0.075970 0.077759 17369 4384 32 95.730286 \n", |
|
|
3617 |
"2 0.021717 0.052439 0.065184 17468 4288 32 98.306380 \n", |
|
|
3618 |
"\n", |
|
|
3619 |
" valid_time lr wd ver \n", |
|
|
3620 |
"5 11.260699 0.000005 0.0001 NaN \n", |
|
|
3621 |
"8 10.047513 0.000005 0.0001 NaN \n", |
|
|
3622 |
"2 10.781580 0.000005 0.0001 NaN \n", |
|
|
3623 |
"5 9.888514 0.000005 0.0001 NaN \n", |
|
|
3624 |
"2 10.998797 0.000005 0.0001 34.0 \n", |
|
|
3625 |
"2 10.804755 0.000005 0.0001 34.0 " |
|
|
3626 |
] |
|
|
3627 |
}, |
|
|
3628 |
"execution_count": 41, |
|
|
3629 |
"metadata": {}, |
|
|
3630 |
"output_type": "execute_result" |
|
|
3631 |
} |
|
|
3632 |
], |
|
|
3633 |
"source": [ |
|
|
3634 |
"stats2.loc[stats2.epoch == 3]" |
|
|
3635 |
] |
|
|
3636 |
}, |
|
|
3637 |
{ |
|
|
3638 |
"cell_type": "code", |
|
|
3639 |
"execution_count": 64, |
|
|
3640 |
"metadata": {}, |
|
|
3641 |
"outputs": [], |
|
|
3642 |
"source": [ |
|
|
3643 |
"stats = pd.concat([stats1.loc[stats1.epoch == 13], stats2.loc[stats2.epoch == 3]], sort=False)" |
|
|
3644 |
] |
|
|
3645 |
}, |
|
|
3646 |
{ |
|
|
3647 |
"cell_type": "code", |
|
|
3648 |
"execution_count": 68, |
|
|
3649 |
"metadata": {}, |
|
|
3650 |
"outputs": [], |
|
|
3651 |
"source": [ |
|
|
3652 |
"stats['name'] = np.where(stats['dataset'] == 14, 'new feats', \n", |
|
|
3653 |
" np.where(stats['ver'].isnull(), 'old feats, train', 'old feats, test+train'))" |
|
|
3654 |
] |
|
|
3655 |
}, |
|
|
3656 |
{ |
|
|
3657 |
"cell_type": "code", |
|
|
3658 |
"execution_count": 69, |
|
|
3659 |
"metadata": {}, |
|
|
3660 |
"outputs": [], |
|
|
3661 |
"source": [ |
|
|
3662 |
"stats['weighted_training'] = stats.epoch == 3" |
|
|
3663 |
] |
|
|
3664 |
}, |
|
|
3665 |
{ |
|
|
3666 |
"cell_type": "code", |
|
|
3667 |
"execution_count": 70, |
|
|
3668 |
"metadata": {}, |
|
|
3669 |
"outputs": [ |
|
|
3670 |
{ |
|
|
3671 |
"data": { |
|
|
3672 |
"text/html": [ |
|
|
3673 |
"<div>\n", |
|
|
3674 |
"<style scoped>\n", |
|
|
3675 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
3676 |
" vertical-align: middle;\n", |
|
|
3677 |
" }\n", |
|
|
3678 |
"\n", |
|
|
3679 |
" .dataframe tbody tr th {\n", |
|
|
3680 |
" vertical-align: top;\n", |
|
|
3681 |
" }\n", |
|
|
3682 |
"\n", |
|
|
3683 |
" .dataframe thead th {\n", |
|
|
3684 |
" text-align: right;\n", |
|
|
3685 |
" }\n", |
|
|
3686 |
"</style>\n", |
|
|
3687 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
3688 |
" <thead>\n", |
|
|
3689 |
" <tr style=\"text-align: right;\">\n", |
|
|
3690 |
" <th></th>\n", |
|
|
3691 |
" <th></th>\n", |
|
|
3692 |
" <th></th>\n", |
|
|
3693 |
" <th>val_loss</th>\n", |
|
|
3694 |
" <th>val_w_loss</th>\n", |
|
|
3695 |
" </tr>\n", |
|
|
3696 |
" <tr>\n", |
|
|
3697 |
" <th>fold</th>\n", |
|
|
3698 |
" <th>weighted_training</th>\n", |
|
|
3699 |
" <th>name</th>\n", |
|
|
3700 |
" <th></th>\n", |
|
|
3701 |
" <th></th>\n", |
|
|
3702 |
" </tr>\n", |
|
|
3703 |
" </thead>\n", |
|
|
3704 |
" <tbody>\n", |
|
|
3705 |
" <tr>\n", |
|
|
3706 |
" <td rowspan=\"6\" valign=\"top\">0</td>\n", |
|
|
3707 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
3708 |
" <td>new feats</td>\n", |
|
|
3709 |
" <td>0.059394</td>\n", |
|
|
3710 |
" <td>0.063065</td>\n", |
|
|
3711 |
" </tr>\n", |
|
|
3712 |
" <tr>\n", |
|
|
3713 |
" <td>old feats, test+train</td>\n", |
|
|
3714 |
" <td>0.059400</td>\n", |
|
|
3715 |
" <td>0.063696</td>\n", |
|
|
3716 |
" </tr>\n", |
|
|
3717 |
" <tr>\n", |
|
|
3718 |
" <td>old feats, train</td>\n", |
|
|
3719 |
" <td>0.059201</td>\n", |
|
|
3720 |
" <td>0.064282</td>\n", |
|
|
3721 |
" </tr>\n", |
|
|
3722 |
" <tr>\n", |
|
|
3723 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
3724 |
" <td>new feats</td>\n", |
|
|
3725 |
" <td>0.061043</td>\n", |
|
|
3726 |
" <td>0.062914</td>\n", |
|
|
3727 |
" </tr>\n", |
|
|
3728 |
" <tr>\n", |
|
|
3729 |
" <td>old feats, test+train</td>\n", |
|
|
3730 |
" <td>0.060638</td>\n", |
|
|
3731 |
" <td>0.063045</td>\n", |
|
|
3732 |
" </tr>\n", |
|
|
3733 |
" <tr>\n", |
|
|
3734 |
" <td>old feats, train</td>\n", |
|
|
3735 |
" <td>0.060360</td>\n", |
|
|
3736 |
" <td>0.063039</td>\n", |
|
|
3737 |
" </tr>\n", |
|
|
3738 |
" <tr>\n", |
|
|
3739 |
" <td rowspan=\"6\" valign=\"top\">1</td>\n", |
|
|
3740 |
" <td rowspan=\"3\" valign=\"top\">False</td>\n", |
|
|
3741 |
" <td>new feats</td>\n", |
|
|
3742 |
" <td>0.059391</td>\n", |
|
|
3743 |
" <td>0.057033</td>\n", |
|
|
3744 |
" </tr>\n", |
|
|
3745 |
" <tr>\n", |
|
|
3746 |
" <td>old feats, test+train</td>\n", |
|
|
3747 |
" <td>0.059726</td>\n", |
|
|
3748 |
" <td>0.057000</td>\n", |
|
|
3749 |
" </tr>\n", |
|
|
3750 |
" <tr>\n", |
|
|
3751 |
" <td>old feats, train</td>\n", |
|
|
3752 |
" <td>0.059864</td>\n", |
|
|
3753 |
" <td>0.058066</td>\n", |
|
|
3754 |
" </tr>\n", |
|
|
3755 |
" <tr>\n", |
|
|
3756 |
" <td rowspan=\"3\" valign=\"top\">True</td>\n", |
|
|
3757 |
" <td>new feats</td>\n", |
|
|
3758 |
" <td>0.060324</td>\n", |
|
|
3759 |
" <td>0.056965</td>\n", |
|
|
3760 |
" </tr>\n", |
|
|
3761 |
" <tr>\n", |
|
|
3762 |
" <td>old feats, test+train</td>\n", |
|
|
3763 |
" <td>0.060772</td>\n", |
|
|
3764 |
" <td>0.057422</td>\n", |
|
|
3765 |
" </tr>\n", |
|
|
3766 |
" <tr>\n", |
|
|
3767 |
" <td>old feats, train</td>\n", |
|
|
3768 |
" <td>0.060730</td>\n", |
|
|
3769 |
" <td>0.057532</td>\n", |
|
|
3770 |
" </tr>\n", |
|
|
3771 |
" </tbody>\n", |
|
|
3772 |
"</table>\n", |
|
|
3773 |
"</div>" |
|
|
3774 |
], |
|
|
3775 |
"text/plain": [ |
|
|
3776 |
" val_loss val_w_loss\n", |
|
|
3777 |
"fold weighted_training name \n", |
|
|
3778 |
"0 False new feats 0.059394 0.063065\n", |
|
|
3779 |
" old feats, test+train 0.059400 0.063696\n", |
|
|
3780 |
" old feats, train 0.059201 0.064282\n", |
|
|
3781 |
" True new feats 0.061043 0.062914\n", |
|
|
3782 |
" old feats, test+train 0.060638 0.063045\n", |
|
|
3783 |
" old feats, train 0.060360 0.063039\n", |
|
|
3784 |
"1 False new feats 0.059391 0.057033\n", |
|
|
3785 |
" old feats, test+train 0.059726 0.057000\n", |
|
|
3786 |
" old feats, train 0.059864 0.058066\n", |
|
|
3787 |
" True new feats 0.060324 0.056965\n", |
|
|
3788 |
" old feats, test+train 0.060772 0.057422\n", |
|
|
3789 |
" old feats, train 0.060730 0.057532" |
|
|
3790 |
] |
|
|
3791 |
}, |
|
|
3792 |
"execution_count": 70, |
|
|
3793 |
"metadata": {}, |
|
|
3794 |
"output_type": "execute_result" |
|
|
3795 |
} |
|
|
3796 |
], |
|
|
3797 |
"source": [ |
|
|
3798 |
"stats.groupby(['fold','weighted_training','name'])[['val_loss','val_w_loss']].mean()" |
|
|
3799 |
] |
|
|
3800 |
}, |
|
|
3801 |
{ |
|
|
3802 |
"cell_type": "code", |
|
|
3803 |
"execution_count": null, |
|
|
3804 |
"metadata": {}, |
|
|
3805 |
"outputs": [], |
|
|
3806 |
"source": [] |
|
|
3807 |
}, |
|
|
3808 |
{ |
|
|
3809 |
"cell_type": "markdown", |
|
|
3810 |
"metadata": {}, |
|
|
3811 |
"source": [ |
|
|
3812 |
"# Analysis" |
|
|
3813 |
] |
|
|
3814 |
}, |
|
|
3815 |
{ |
|
|
3816 |
"cell_type": "code", |
|
|
3817 |
"execution_count": 13, |
|
|
3818 |
"metadata": { |
|
|
3819 |
"scrolled": true |
|
|
3820 |
}, |
|
|
3821 |
"outputs": [ |
|
|
3822 |
{ |
|
|
3823 |
"data": { |
|
|
3824 |
"text/html": [ |
|
|
3825 |
"<div>\n", |
|
|
3826 |
"<style scoped>\n", |
|
|
3827 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
3828 |
" vertical-align: middle;\n", |
|
|
3829 |
" }\n", |
|
|
3830 |
"\n", |
|
|
3831 |
" .dataframe tbody tr th {\n", |
|
|
3832 |
" vertical-align: top;\n", |
|
|
3833 |
" }\n", |
|
|
3834 |
"\n", |
|
|
3835 |
" .dataframe thead th {\n", |
|
|
3836 |
" text-align: right;\n", |
|
|
3837 |
" }\n", |
|
|
3838 |
"</style>\n", |
|
|
3839 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
3840 |
" <thead>\n", |
|
|
3841 |
" <tr style=\"text-align: right;\">\n", |
|
|
3842 |
" <th></th>\n", |
|
|
3843 |
" <th>val_loss</th>\n", |
|
|
3844 |
" <th>val_w_loss</th>\n", |
|
|
3845 |
" </tr>\n", |
|
|
3846 |
" <tr>\n", |
|
|
3847 |
" <th>dataset</th>\n", |
|
|
3848 |
" <th></th>\n", |
|
|
3849 |
" <th></th>\n", |
|
|
3850 |
" </tr>\n", |
|
|
3851 |
" </thead>\n", |
|
|
3852 |
" <tbody>\n", |
|
|
3853 |
" <tr>\n", |
|
|
3854 |
" <td>6</td>\n", |
|
|
3855 |
" <td>0.063542</td>\n", |
|
|
3856 |
" <td>0.069056</td>\n", |
|
|
3857 |
" </tr>\n", |
|
|
3858 |
" <tr>\n", |
|
|
3859 |
" <td>7</td>\n", |
|
|
3860 |
" <td>0.062726</td>\n", |
|
|
3861 |
" <td>0.068622</td>\n", |
|
|
3862 |
" </tr>\n", |
|
|
3863 |
" <tr>\n", |
|
|
3864 |
" <td>8</td>\n", |
|
|
3865 |
" <td>0.064020</td>\n", |
|
|
3866 |
" <td>0.069777</td>\n", |
|
|
3867 |
" </tr>\n", |
|
|
3868 |
" <tr>\n", |
|
|
3869 |
" <td>9</td>\n", |
|
|
3870 |
" <td>0.062367</td>\n", |
|
|
3871 |
" <td>0.067991</td>\n", |
|
|
3872 |
" </tr>\n", |
|
|
3873 |
" <tr>\n", |
|
|
3874 |
" <td>10</td>\n", |
|
|
3875 |
" <td>0.062012</td>\n", |
|
|
3876 |
" <td>0.067773</td>\n", |
|
|
3877 |
" </tr>\n", |
|
|
3878 |
" <tr>\n", |
|
|
3879 |
" <td>11</td>\n", |
|
|
3880 |
" <td>0.061149</td>\n", |
|
|
3881 |
" <td>0.066677</td>\n", |
|
|
3882 |
" </tr>\n", |
|
|
3883 |
" <tr>\n", |
|
|
3884 |
" <td>12</td>\n", |
|
|
3885 |
" <td>0.060799</td>\n", |
|
|
3886 |
" <td>0.066053</td>\n", |
|
|
3887 |
" </tr>\n", |
|
|
3888 |
" <tr>\n", |
|
|
3889 |
" <td>13</td>\n", |
|
|
3890 |
" <td>0.061087</td>\n", |
|
|
3891 |
" <td>0.066270</td>\n", |
|
|
3892 |
" </tr>\n", |
|
|
3893 |
" </tbody>\n", |
|
|
3894 |
"</table>\n", |
|
|
3895 |
"</div>" |
|
|
3896 |
], |
|
|
3897 |
"text/plain": [ |
|
|
3898 |
" val_loss val_w_loss\n", |
|
|
3899 |
"dataset \n", |
|
|
3900 |
"6 0.063542 0.069056\n", |
|
|
3901 |
"7 0.062726 0.068622\n", |
|
|
3902 |
"8 0.064020 0.069777\n", |
|
|
3903 |
"9 0.062367 0.067991\n", |
|
|
3904 |
"10 0.062012 0.067773\n", |
|
|
3905 |
"11 0.061149 0.066677\n", |
|
|
3906 |
"12 0.060799 0.066053\n", |
|
|
3907 |
"13 0.061087 0.066270" |
|
|
3908 |
] |
|
|
3909 |
}, |
|
|
3910 |
"execution_count": 13, |
|
|
3911 |
"metadata": {}, |
|
|
3912 |
"output_type": "execute_result" |
|
|
3913 |
} |
|
|
3914 |
], |
|
|
3915 |
"source": [ |
|
|
3916 |
"stats = pd.concat([pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(f,31)) for f in range(5)],axis=0)\n", |
|
|
3917 |
"stats.loc[stats.epoch==13].groupby('dataset')[['val_loss','val_w_loss']].mean()" |
|
|
3918 |
] |
|
|
3919 |
}, |
|
|
3920 |
{ |
|
|
3921 |
"cell_type": "code", |
|
|
3922 |
"execution_count": 11, |
|
|
3923 |
"metadata": { |
|
|
3924 |
"scrolled": true |
|
|
3925 |
}, |
|
|
3926 |
"outputs": [ |
|
|
3927 |
{ |
|
|
3928 |
"data": { |
|
|
3929 |
"text/html": [ |
|
|
3930 |
"<div>\n", |
|
|
3931 |
"<style scoped>\n", |
|
|
3932 |
" .dataframe tbody tr th:only-of-type {\n", |
|
|
3933 |
" vertical-align: middle;\n", |
|
|
3934 |
" }\n", |
|
|
3935 |
"\n", |
|
|
3936 |
" .dataframe tbody tr th {\n", |
|
|
3937 |
" vertical-align: top;\n", |
|
|
3938 |
" }\n", |
|
|
3939 |
"\n", |
|
|
3940 |
" .dataframe thead th {\n", |
|
|
3941 |
" text-align: right;\n", |
|
|
3942 |
" }\n", |
|
|
3943 |
"</style>\n", |
|
|
3944 |
"<table border=\"1\" class=\"dataframe\">\n", |
|
|
3945 |
" <thead>\n", |
|
|
3946 |
" <tr style=\"text-align: right;\">\n", |
|
|
3947 |
" <th></th>\n", |
|
|
3948 |
" <th>val_loss</th>\n", |
|
|
3949 |
" <th>val_w_loss</th>\n", |
|
|
3950 |
" </tr>\n", |
|
|
3951 |
" <tr>\n", |
|
|
3952 |
" <th>dataset</th>\n", |
|
|
3953 |
" <th></th>\n", |
|
|
3954 |
" <th></th>\n", |
|
|
3955 |
" </tr>\n", |
|
|
3956 |
" </thead>\n", |
|
|
3957 |
" <tbody>\n", |
|
|
3958 |
" <tr>\n", |
|
|
3959 |
" <td>6</td>\n", |
|
|
3960 |
" <td>0.064378</td>\n", |
|
|
3961 |
" <td>0.068053</td>\n", |
|
|
3962 |
" </tr>\n", |
|
|
3963 |
" <tr>\n", |
|
|
3964 |
" <td>7</td>\n", |
|
|
3965 |
" <td>0.063824</td>\n", |
|
|
3966 |
" <td>0.067963</td>\n", |
|
|
3967 |
" </tr>\n", |
|
|
3968 |
" <tr>\n", |
|
|
3969 |
" <td>8</td>\n", |
|
|
3970 |
" <td>0.065129</td>\n", |
|
|
3971 |
" <td>0.068768</td>\n", |
|
|
3972 |
" </tr>\n", |
|
|
3973 |
" <tr>\n", |
|
|
3974 |
" <td>9</td>\n", |
|
|
3975 |
" <td>0.062979</td>\n", |
|
|
3976 |
" <td>0.067057</td>\n", |
|
|
3977 |
" </tr>\n", |
|
|
3978 |
" <tr>\n", |
|
|
3979 |
" <td>10</td>\n", |
|
|
3980 |
" <td>0.062737</td>\n", |
|
|
3981 |
" <td>0.067054</td>\n", |
|
|
3982 |
" </tr>\n", |
|
|
3983 |
" <tr>\n", |
|
|
3984 |
" <td>11</td>\n", |
|
|
3985 |
" <td>0.061973</td>\n", |
|
|
3986 |
" <td>0.065464</td>\n", |
|
|
3987 |
" </tr>\n", |
|
|
3988 |
" <tr>\n", |
|
|
3989 |
" <td>12</td>\n", |
|
|
3990 |
" <td>0.061613</td>\n", |
|
|
3991 |
" <td>0.064652</td>\n", |
|
|
3992 |
" </tr>\n", |
|
|
3993 |
" <tr>\n", |
|
|
3994 |
" <td>13</td>\n", |
|
|
3995 |
" <td>0.061888</td>\n", |
|
|
3996 |
" <td>0.064962</td>\n", |
|
|
3997 |
" </tr>\n", |
|
|
3998 |
" </tbody>\n", |
|
|
3999 |
"</table>\n", |
|
|
4000 |
"</div>" |
|
|
4001 |
], |
|
|
4002 |
"text/plain": [ |
|
|
4003 |
" val_loss val_w_loss\n", |
|
|
4004 |
"dataset \n", |
|
|
4005 |
"6 0.064378 0.068053\n", |
|
|
4006 |
"7 0.063824 0.067963\n", |
|
|
4007 |
"8 0.065129 0.068768\n", |
|
|
4008 |
"9 0.062979 0.067057\n", |
|
|
4009 |
"10 0.062737 0.067054\n", |
|
|
4010 |
"11 0.061973 0.065464\n", |
|
|
4011 |
"12 0.061613 0.064652\n", |
|
|
4012 |
"13 0.061888 0.064962" |
|
|
4013 |
] |
|
|
4014 |
}, |
|
|
4015 |
"execution_count": 11, |
|
|
4016 |
"metadata": {}, |
|
|
4017 |
"output_type": "execute_result" |
|
|
4018 |
} |
|
|
4019 |
], |
|
|
4020 |
"source": [ |
|
|
4021 |
"stats = pd.concat([pd.read_csv(PATH_WORK/'stats.f{}.v{}'.format(f,32)) for f in range(5)],axis=0)\n", |
|
|
4022 |
"stats.loc[stats.epoch==3].groupby('dataset')[['val_loss','val_w_loss']].mean()" |
|
|
4023 |
] |
|
|
4024 |
}, |
|
|
4025 |
{ |
|
|
4026 |
"cell_type": "code", |
|
|
4027 |
"execution_count": 14, |
|
|
4028 |
"metadata": {}, |
|
|
4029 |
"outputs": [ |
|
|
4030 |
{ |
|
|
4031 |
"data": { |
|
|
4032 |
"text/plain": [ |
|
|
4033 |
"[<matplotlib.lines.Line2D at 0x7efd58fd4b90>]" |
|
|
4034 |
] |
|
|
4035 |
}, |
|
|
4036 |
"execution_count": 14, |
|
|
4037 |
"metadata": {}, |
|
|
4038 |
"output_type": "execute_result" |
|
|
4039 |
}, |
|
|
4040 |
{ |
|
|
4041 |
"data": { |
|
|
4042 |
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD4CAYAAADlwTGnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de5xdZX3v8c939t5zyWUmZBgIuUCYEMQAEcOAGaXlkgOCWlIUMZRSbKnIUaytbRGPtq8eL1VsS7CntJaKiqJyq7axRKJys9UQGSIJhBAIEcgkQCa3SUJmMrff+WOvxJ3JTGZnZpI9e/b3/XrNa+/1rGdlfguS9d3PevZaSxGBmZmVnrJCF2BmZoXhADAzK1EOADOzEuUAMDMrUQ4AM7MSlS50AYfi6KOPjunTpxe6DDOzovLkk09ujoi63u1FFQDTp0+nqamp0GWYmRUVSS/31e5TQGZmJcoBYGZWovIKAEkXS1ojaa2km/pYXyHpnmT9MknTc9bNlrRU0ipJT0uqTNqvTJZXSnpQ0tHDtVNmZjawAQNAUgq4DbgEmAVcKWlWr27XAtsi4iRgIXBzsm0auAu4PiJOBc4DOpP2rwDnR8RsYCVww7DskZmZ5SWfEcDZwNqIWBcRHcDdwPxefeYDdybv7wfmSRJwEbAyIlYARMSWiOgGlPyMTfpVAxuHvDdmZpa3fAJgCrA+Z7k5aeuzT0R0Aa1ALXAyEJKWSFou6cakTyfwv4GnyR74ZwF39PXLJV0nqUlSU0tLS947ZmZmB5dPAKiPtt63EO2vTxo4B7gqeb1M0jxJGbIB8FZgMtlTQJ/q65dHxO0R0RARDXV1B3yN1czMBimfAGgGpuUsT+XA0zX7+iTn92uArUn7YxGxOSJ2A4uBOcAZABHxYmTvR30v8PYh7MdBfXvpS/xwhc8wmZnlyicAngBmSjpRUjmwAFjUq88i4Jrk/eXAw8mBfQkwW9KYJBjOBZ4FNgCzJO39SH8hsHpou9K/+55s5ttL+7wOwsysZA14JXBEdEm6gezBPAV8PSJWSfos0BQRi8iev/+2pLVkP/kvSLbdJukWsiESwOKIeABA0v8FfiapE3gZ+OCw712isb6Wb/z8Jdo6uqkqTx2uX2NmVlRUTE8Ea2hoiMHcCuKRNZv4w288wXf++G284yRfbmBmpUXSkxHR0Lu9JK4EPmv6RFJlYumLWwpdipnZiFESATCuIs3pU2pYus4BYGa2V0kEAEDjjFpWrN/OG3u6Cl2KmdmIUDoBUF9LV0/Q9PK2QpdiZjYilEwANEw/inSZeNyngczMgBIKgDHlad4ybYIngs3MEiUTAJA9DfT0hlZ2eR7AzKzEAmBGLd09wRO/3lroUszMCq6kAuDME46iPFXmr4OamVFiAVCZSXHG8RM8EWxmRokFAGTnAZ7Z0MqO9s5Cl2JmVlAlFwBz62vpCfjlOs8DmFlpK7kAeOvxEyhPex7AzKzkAqAyk+LM44/y9QBmVvJKLgAg+3XQ1a/tYPvujkKXYmZWMCUbABGwzNcDmFkJK8kAmD21hspMmU8DmVlJK8kAqEinaDhhoq8HMLOSllcASLpY0hpJayXd1Mf6Ckn3JOuXSZqes262pKWSVkl6WlKlpPGSnsr52Szp1uHbrYE1zqjludd2smXXniP5a83MRowBA0BSCrgNuASYBVwpaVavbtcC2yLiJGAhcHOybRq4C7g+Ik4FzgM6I2JnRJyx94fsQ+G/P0z7lJe59bWA5wHMrHTlMwI4G1gbEesiogO4G5jfq8984M7k/f3APEkCLgJWRsQKgIjYEhHduRtKmgkcA/z34Hfj0M2eWsOY8pTnAcysZOUTAFOA9TnLzUlbn30iogtoBWqBk4GQtETSckk39vHnXwncExHR1y+XdJ2kJklNLS0teZSbn0yqjIbpngcws9KVTwCoj7beB+v++qSBc4CrktfLJM3r1W8B8L
3+fnlE3B4RDRHRUFdXl0e5+Wusr+WFTbto2el5ADMrPfkEQDMwLWd5KrCxvz7Jef8aYGvS/lhEbI6I3cBiYM7ejSS9BUhHxJOD3oMhaJyRnQfwKMDMSlE+AfAEMFPSiZLKyX5iX9SrzyLgmuT95cDDySmdJcBsSWOSYDgXeDZnuys5yKf/w+20ydWMq0j7vkBmVpLSA3WIiC5JN5A9mKeAr0fEKkmfBZoiYhFwB/BtSWvJfvJfkGy7TdItZEMkgMUR8UDOH38F8K5h3aNDkE6VcfaJE3ncE8FmVoIGDACAiFhM9vRNbttf57xvB97fz7Z3kf0qaF/r6vOu9DBprK/l4ec28fqOdo6trix0OWZmR0xJXgmca+/1AJ4HMLNSU/IBMGtyNdWVaV8PYGYlp+QDIFUmzj6x1hPBZlZySj4AIPt10Je37Gbj9rZCl2JmdsQ4AMhOBAM+DWRmJcUBAJwyaTwTxmQ8EWxmJcUBAJSVibedONHzAGZWUhwAicb6Wpq3tbF+6+5Cl2JmdkQ4ABKNM44G8CjAzEqGAyBx8rHjmDi23LeFMLOS4QBISGJufXYeoJ9HE5iZjSoOgByN9bW82trOK54HMLMS4ADIsff5AL4ewMxKgQMgx4y6cdSNr/BEsJmVBAdAjuw8QC1LX/Q8gJmNfg6AXhrra9m0cw/rNr9R6FLMzA4rB0Avc+snAn4+gJmNfnkFgKSLJa2RtFbSTX2sr5B0T7J+maTpOetmS1oqaZWkpyVVJu3lkm6X9Lyk5yS9b7h2aihOPHosx1ZXeCLYzEa9AR8JKSkF3AZcCDQDT0haFBG5D3e/FtgWESdJWgDcDHwgeRD8XcDVEbFCUi3QmWzzaWBTRJwsqQyYOHy7NXiSaKyv5X/WZucBJBW6JDOzwyKfEcDZwNqIWBcRHcDdwPxefeYDdybv7wfmKXvkvAhYGRErACJiS0R0J/3+CPhi0t4TEZuHtivDp3FGLZt37WHtpl2FLsXM7LDJJwCmAOtzlpuTtj77REQX0ArUAicDIWmJpOWSbgSQNCHZ7nNJ+32Sjh3CfgyrxnrfF8jMRr98AqCvcyC9vyPZX580cA5wVfJ6maR5SftU4OcRMQdYCvx9n79cuk5Sk6SmlpaWPModumkTq5hcU+l5ADMb1fIJgGZgWs7yVGBjf32S8/41wNak/bGI2BwRu4HFwBxgC7Ab+EGy/X1J+wEi4vaIaIiIhrq6urx2aqgkMXdGLct+vZWeHl8PYGajUz4B8AQwU9KJksqBBcCiXn0WAdck7y8HHo7slVRLgNmSxiTBcC7wbLLuh8B5yTbzgGcZQRrra9n6RgfPb9pZ6FLMzA6LAb8FFBFdkm4gezBPAV+PiFWSPgs0RcQi4A7g25LWkv3kvyDZdpukW8iGSACLI+KB5I/+ZLLNrUAL8IfDvG9DkntfoFMmVRe4GjOz4adiuuVBQ0NDNDU1HbHf91tffpg3T6rm9j9oOGK/08xsuEl6MiIOOJD5SuCDmHui5wHMbPRyABxE44xaWts6Wf3ajkKXYmY27BwAB+HnA5jZaOYAOIjjaqqYXjvGN4Yzs1HJATCAxuR6gG7PA5jZKOMAGMDc+lp2tnexamNroUsxMxtWDoABNNZ7HsDMRicHwACOqa6kvm6s5wHMbNRxAOShsb6WJ17aRld3T6FLMTMbNg6APDTOqGXXni6e3uB5ADMbPRwAeZi7dx7Ap4HMbBRxAOTh6HEVzDxmnCeCzWxUcQDkqXFGLU0vbaOjy/MAZjY6OADy1FhfS1tnN09v2F7oUszMhoUDIE9v8/UAZjbKOADyNHFsOadMGu+JYDMbNRwAh2BufXYeYE9Xd6FLMTMbMgfAIWicUcuerh6eesXzAGZW/PIKAEkXS1ojaa2km/pYXyHpnmT9MknTc9bNlrRU0ipJT0uqTNofTf7Mp5KfY4Zrpw6XuSfWIsHj67YWuhQzsyEbMAAkpYDbgEuAWcCVkmb16nYtsC0iTgIWAj
cn26aBu4DrI+JU4DygM2e7qyLijORn01B35nCrGZNh1nHVLF23udClmJkNWT4jgLOBtRGxLiI6gLuB+b36zAfuTN7fD8yTJOAiYGVErACIiC0RUdQn0Bvra1n+ynbaO4t6N8zM8gqAKcD6nOXmpK3PPhHRBbQCtcDJQEhaImm5pBt7bfeN5PTPXyWBcQBJ10lqktTU0tKSR7mH19z6Wjq6elj+yrZCl2JmNiT5BEBfB+bej8fqr08aOAe4Knm9TNK8ZP1VEXE68FvJz9V9/fKIuD0iGiKioa6uLo9yD6+z6ydSJnjc1wOYWZHLJwCagWk5y1OBjf31Sc771wBbk/bHImJzROwGFgNzACJiQ/K6E/gu2VNNI151ZYbTptT4egAzK3r5BMATwExJJ0oqBxYAi3r1WQRck7y/HHg4IgJYAsyWNCYJhnOBZyWlJR0NICkDvAd4Zui7c2Q01tfy1PrttHV4HsDMiteAAZCc07+B7MF8NXBvRKyS9FlJlybd7gBqJa0FPgHclGy7DbiFbIg8BSyPiAeACmCJpJVJ+wbg34Z1zw6juTNq6ewOnnzZ8wBmVrzS+XSKiMVkT9/ktv11zvt24P39bHsX2a+C5ra9AZx5qMWOFGdNn0iqTCxdt5lzZh5d6HLMzAbFVwIPwriKNKdPqfGN4cysqDkABqlxRi0rm1t5Y09XoUsxMxsUB8AgNdbX0tUT/PcLvirYzIqTA2CQzpo+kalHVfGX961gmb8SamZFyAEwSFXlKe67vpFjqiv4g6//kkeeG/G3MjIz248DYAiOq6ni3g83MvPYcXzoW03851MbCl2SmVneHABDVDuugu9+aC5zTjiKP73nKe56/OVCl2RmlhcHwDCorszwrT86m/PfdAyf+Y9n+OdH1xa6JDOzATkAhkllJsW/Xn0m88+YzJcfXMMXf7Sa7N0wzMxGpryuBLb8ZFJlLLziDMZXpvnXx9axo62Lz//uaaTK+rzTtZlZQTkAhllZmfjc/NOorszwz4++yI72ThZecQblaQ+2zGxkcQAcBpK48eJTqKnK8MUfPceu9i6++vtnUlWeKnRpZmb7+GPpYfThc2fwpfeezs9eaOHqO5bR2tY58EZmZkeIA+AwW3D28fzTlXNY0bydK29/nM279hS6JDMzwAFwRLx79nH82x80sG7zLq746lI2bG8rdElmZg6AI+W8Nx3DXde+jZZde3j/v/yCF1t2FbokMytxDoAjqGH6RO6+bi4d3T28/6tLeWZDa6FLMrMS5gA4wk6dXMO9H26kKpPiytsf55e/3lroksysROUVAJIulrRG0lpJN/WxvkLSPcn6ZZKm56ybLWmppFWSnpZU2WvbRZKK5oHww6G+bhz3Xd9IXXUFV9+xzHcSNbOCGDAAJKWA24BLgFnAlZJm9ep2LbAtIk4CFgI3J9umyT4P+PqIOBU4D9j3XUhJ7wVK8mT45AlV3JdzJ9FFKzYWuiQzKzH5jADOBtZGxLqI6ADuBub36jMfuDN5fz8wT5KAi4CVEbECICK2REQ3gKRxwCeAzw99N4pT7p1EP373r/jOMt9J1MyOnHwCYAqwPme5OWnrs09EdAGtQC1wMhCSlkhaLunGnG0+B/wDsPtgv1zSdZKaJDW1tLTkUW5xyb2T6Kd/4DuJmtmRk8+tIPq6k1nv21z21ycNnAOcRfZA/5CkJ4EtwEkR8We58wV9iYjbgdsBGhoaRuXtNffeSfTP713Blx9cQ8vOPbz79OOYPKGKY8ZXkE55rt7Mhl8+AdAMTMtZngr0PmG9t09zct6/BtiatD8WEZsBJC0G5pA973+mpJeSGo6R9GhEnDf4XSlumVQZCz9wBtVVab7x85f4xs9fAiBVJiZVVzJ5QiWTJ1Tt+5mSs1xdmSls8WZWlDTQPeuTA/rzwDxgA/AE8HsRsSqnz0eB0yPiekkLgPdGxBWSjgIeIjsK6AAeBBZGxAM5204H/isiThuo2IaGhmhqajq0PSxCL7
bsYv3W3Wzc3s7G7W1s3N7Ghu1tbGxt49Xt7XT17P//bHxFmuNyAmHKhKpsYNRklyfVVJLxKMKsZEl6MiIaercPOAKIiC5JNwBLgBTw9YhYJemzQFNELALuAL4taS3ZT/4Lkm23SbqFbGgEsDj34G99m1E3jhl14/pc190TbN61JxsI+37a2bC9jVdb21jZ3MrWNzr220aCY8dX8s5Tj+VvLj2V7Py8mZW6AUcAI0mpjACGqq2jm42tbTmjh3ZWNm/n0TUtfPdDb+PtM44udIlmdgQNegRgxaeqPHXAKKK9s5tz/+4Rbv3JCzTW13oUYGa+FUSpqMyk+Mh5J/HLl7ay9MUthS7HzEYAB0AJ+cBZ05hUXcmtP33BD6w3MwdAKanMpPjI+TP45Utb+YVHAWYlzwFQYq5o2DsKeN6jALMS5wAoMXtHAU+8tM2jALMS5wAoQXvnAhb+xKMAs1LmAChBFekUHz1/Bk0vb+Pnaz0KMCtVDoASdcVZ0ziuppKFngswK1kOgBJVkU7xkfNP4smXt/E/azcXuhwzKwAHQAm7omEqx9X4ugCzUuUAKGG5o4D/fsGjALNS4wAocVc0TGVyja8LMCtFDoASt3cUsPyV7R4FmJUYB4Dx/mQU4G8EmZUWB4DtGwX86pXt/MyjALOS4QAwIHuPIM8FmJUWB4ABUJ4u46MXZEcBjz3fUuhyzOwIyCsAJF0saY2ktZJu6mN9haR7kvXLkge97103W9JSSaskPS2pMml/UNKKpP2rklLDtVM2OO8/cxpTJlT5ugCzEjFgACQH5tuAS4BZwJWSZvXqdi2wLSJOAhYCNyfbpoG7gOsj4lTgPKAz2eaKiHgLcBpQB7x/yHtjQ1KeLuOj55/EU+s9CjArBfmMAM4G1kbEuojoAO4G5vfqMx+4M3l/PzBP2YfOXgSsjIgVABGxJSK6k/c7kv5poBzwR84R4PIzpzJlQhULPQowG/XyCYApwPqc5eakrc8+EdEFtAK1wMlASFoiabmkG3M3krQE2ATsJBscB5B0naQmSU0tLf5UerjtHQWsWL+dRz0KMBvV8gkA9dHW+6Nhf33SwDnAVcnrZZLm7esQ8U7gOKACuKCvXx4Rt0dEQ0Q01NXV5VGuDdXeUYDnAsxGt3wCoBmYlrM8FdjYX5/kvH8NsDVpfywiNkfEbmAxMCd3w4hoBxZx4GklK5DydBk3XJCMAtZ4FGA2WuUTAE8AMyWdKKkcWED2gJ1rEXBN8v5y4OHIfnRcAsyWNCYJhnOBZyWNk3Qc7AuMdwHPDX13bLi8b87eUYCvCzAbrQYMgOSc/g1kD+argXsjYpWkz0q6NOl2B1AraS3wCeCmZNttwC1kQ+QpYHlEPACMBRZJWgmsIDsP8NVh3TMbkvJ0GR+74CRWNLd6FGA2SqmYPt01NDREU1NTocsoGR1dPVzwD48ycWw5//nRd5D9YpeZFRtJT0ZEQ+92Xwls/SpPl3HD+SexsrmVR9ZsKnQ5ZjbMHAB2UO87cypTj/I3gsxGIweAHVQmlZ0LWNncysPPeRRgNpo4AGxA750zlWkTPQowG20cADagTKqMj50/k6c3eBRgNpo4ACwvl82Z4lGA2SjjALC85I4CHlrtUYDZaOAAsLxdNmcKx08cw60P+epgs9HAAWB5y6Sy9wh6ZsMOfupRgFnRcwDYIbnsrckowPcIMit6DgA7JHuvC1i10aMAs2LnALBDdtlbp3BCrUcBZsXOAWCHLJ3K3iNo1cYd/OTZ1wtdjpkNkgPABuU3owBfF2BWrBwANijpVBkfu2Amz766g4U/fYHHnm9hzWs7ad3d6UAwKxLpQhdgxet3z5jMnb94iX986IX92iszZUyqruSY6komVVcyqaaSY5P3x1ZXcGx1drk87c8fZoXkALBBS6fK+P5H3s6r29t5fWc7r7W28/qO5HXnHl5vbeep9dt5bVU7HV09B2xfO7Y8CYmKXiGR/Zk4tpwJYzJUZlIF2Duz0c8BYE
OSSZVxfO0Yjq8d02+fiKC1rZPXdvwmJF7fsYfXdrTzems7r+1o5+kNO9jyxh76OntUmSnjqDHlTBhTzoSqDEeNzVBTVc5RYzJMGJPJac++ThiTDY5MyiMMs4PJKwAkXQx8BUgBX4uIL/VaXwF8CzgT2AJ8ICJeStbNBv4VqAZ6gLPIzj3cB8wAuoEfRsRNw7A/NgJJSg7K5Zwyqbrffp3dPWzauYfXWtvZtKOdbbs72ba7g+27O9i+u5Ntuztpbevg+dd3sX13J9t3d9DV0/98w7iKdBIQGY4aU05NVSYJksx+77PL2UCpqcqQdnBYiRgwACSlgNuAC4Fm4AlJiyLi2Zxu1wLbIuIkSQuAm4EPSEoDdwFXR8QKSbVAJ1AB/H1EPCKpHHhI0iUR8aPh3T0rJplUGVMmVDFlQlVe/SOCXXu6kjDoZHtbRzYkdnfsC4/WvSHS1knztja27e5gR1snB8kNxlekqekVHBPGZJhQVb7fiGNCzgikujLjOQ0rOvmMAM4G1kbEOgBJdwPzgdwAmA/8TfL+fuCflH2C+EXAyohYARARW5I+u4FHkrYOScuBqUPbFSs1khhfmWF8ZYZpE/Pfrqcn2Nnexfa2jiQ4OveNMvYGSWvSvm13Bxu2te3rc7DgqEiXMb4yzbiKNOP2vlZkGF+Z3q99/L71GcZVpA9YX5EeWXMeEUF3T9DZHXR099DR1UNn929+OrqCzu4eeiIYX5mhujJNdVWGinQZ2cPAyNDdE+xq72JHeye79nRRXzd2xP23PtLyCYApwPqc5Wbgbf31iYguSa1ALXAyEJKWAHXA3RHx5dwNJU0AfofsKaYDSLoOuA7g+OOPz6Ncs4MrKxM1YzLUjMlwQm3+2/X0BDv3dCXhsH94tO7uZFdHF7vau9i1p4ud7dn3G7a3sWtPJ7vas20HO2W1V3mqbF+AZFKiTNkfKRt6AsrKyLaRtImkH4jfLOe+at966OoJOrp66Nh7IE8O4h37Duo9+w74nd09fc7N5LMf1VXp/UKhujIbiNn32dfxlWmqKzMHrB9bntoXIBFBe2cPO9o72dneSWtbFzvbO9nRnrwmyzuTA/zO9i52tO2/vGtP1371vXfOFG654oxD37FRJJ8A6CvCe/916K9PGjiH7Hn/3WRP9TwZEQ8BJKeIvgf8494RxgF/SMTtwO0ADQ0N/oK5FUxZmaipys4THE//k979iQj2dPXsOxjtau9iZxIO+0Jj32v2oNXVHQRBTw/0RBDJnxORXe4J+mjLvu/uiX3b7G2LZJtMSmRSZYyrSFOeKiOTKiOTLiOT0r7l8nTymvTNpH/Tvq9tX18hxM492QPvjpyD8o7237S92tq+731754HfDNvvv7eguiqDIK/wTJWJ6sokcKrSjK/IMP3oMckoMb1fuCxbt5V/X97MB98+ndlTJxzy/8vRIp8AaAam5SxPBTb206c5OajXAFuT9sciYjOApMXAHOChZLvbgRci4tZB74FZkZBEZSZFZSZF3fiKQpdTcB1dPQcExM72/QNkR3snPRHJwTuzb0Sxb9SQM4qoyqTyPuV0yWmTeHTNJj7/X6u558NzR9SpqiMpnwB4Apgp6URgA7AA+L1efRYB1wBLgcuBhyNi76mfGyWNATqAc4GFAJI+TzYo/ng4dsTMikt5uozacRXUjjvyYTi+MsOfXXgyn/mPZ1iy6nUuPm3SEa9hJBjwawsR0QXcACwBVgP3RsQqSZ+VdGnS7Q6gVtJa4BPATcm224BbyIbIU8DyiHhA0lTg08AsYLmkpyQ5CMzsiFlw1jRmHjOOL/5odZ8XKpYCFdN9WxoaGqKpqanQZZjZKPHomk188BtP8Jl3v5k//q36Qpdz2CRzrw292/3FZTMrWee96Rh+a+bR/L+H17J9d0ehyzniHABmVtI+/e43s7O9k6/0uqlhKXAAmFlJO2VSNR8463i+vfRl1rXsKnQ5R5QDwMxK3icuPJmKdBlf+tFzhS7liHIAmF
nJqxtfwUfOP4kfP/s6S1/cMvAGo4QDwMwMuPacE5lcU8kXFj9LTx637BgNHABmZkBlJsUnLzmFZzbs4Ae/2lDoco4IB4CZWeJ3Zk/mLVNr+Lsla9jd0TXwBkXOAWBmligrE595zyxe29HOv/3s14Uu57BzAJiZ5Thr+kTedfokvvrYi7y+o73Q5RxWDgAzs14+efEpdPX08A8/XlPoUg4rB4CZWS8n1I7lg2+fzn1PNrNqY2uhyzlsHABmZn244YKZTKjK8IUHVlNMN808FA4AM7M+1FRl+Pi8mfzixS08/NymQpdzWDgAzMz6cdXcE6g/eixfWLyazu7R98wAB4CZWT8yqTL+z7vezLqWN/juslcKXc6wcwCYmR3EvDcfQ2N9Lbf+9Hla2zoLXc6wyisAJF0saY2ktZJu6mN9haR7kvXLJE3PWTdb0lJJqyQ9Lakyaf+CpPWSSuv+q2ZWVCTx6Xe/me1tndz2yNpClzOsBgwASSngNuASss/wvVLSrF7drgW2RcRJZB/6fnOybRq4C7g+Ik4FzgP2RugPgbOHYR/MzA6r06bUcPmcqXzz5y/xypbdhS5n2OQzAjgbWBsR6yKiA7gbmN+rz3zgzuT9/cA8SQIuAlZGxAqAiNgSEd3J+8cj4tXh2Akzs8PtL975JlJl4uYHR88zA/IJgCnA+pzl5qStzz4R0QW0ArXAyUBIWiJpuaQbh16ymdmRd2x1JR8+t54Hnn6Vppe2FrqcYZFPAKiPtt5XRfTXJw2cA1yVvF4mad6hFCjpOklNkppaWloOZVMzs2F13W/Xc2x1BZ97YPWoeGZAPgHQDEzLWZ4KbOyvT3LevwbYmrQ/FhGbI2I3sBiYcygFRsTtEdEQEQ11dXWHsqmZ2bAaU57mLy56EyvWb+eHK3sfBotPPgHwBDBT0omSyoEFwKJefRYB1yTvLwcejuy100uA2ZLGJMFwLvDs8JRuZnbkvW/OVE6dXM2XH1xDe2d3ocsZkgEDIDmnfwPZg/lq4N6IWCXps5IuTbrdAdRKWgt8Argp2XYbcAvZEHkKWB4RDwBI+rKkZmCMpGZJfzO8u2ZmNvzKyrJfC92wvY07/qe4nxmgYrrJUUNDQzQ1NRW6DDMzPvStJk+dSY4AAAVCSURBVH6xdjOP/uX51I2vKHQ5ByXpyYho6N3uK4HNzAbhU5ecwp6uHhb+9PlClzJoDgAzs0GorxvH7889gbt/+QprXttZ6HIGxQFgZjZIH583k3EVab6weHWhSxkUB4CZ2SAdNbacP5k3k58938Kja4rvmQEOADOzIbi68QROqB3D3y5eTVeRPTPAAWBmNgQV6RSfuuQUnn99F/c0rR94gxEkXegCzMyK3TtPncTZ0yey8CfP09bRTUW6jPJ0GRXpFOXpMspTZVRk9r6mei2XUZFK7VsuK+vrzjqHhwPAzGyIJPFX75nF733tcT7/wNAmhDMp9RkUP/zYOVRmUsNUcZYDwMxsGJw+tYblf3UhbZ3d7OnsoaO7hz2d3XR099DR1cOerr2v3fuW9/TT3tdy+jCMDBwAZmbDJJMqI5Mqg8pCV5IfTwKbmZUoB4CZWYlyAJiZlSgHgJlZiXIAmJmVKAeAmVmJcgCYmZUoB4CZWYkqqkdCSmoBXi50Hb0cDWwudBF5KqZaobjqLaZaobjqLaZaYWTWe0JE1PVuLKoAGIkkNfX1rM2RqJhqheKqt5hqheKqt5hqheKq16eAzMxKlAPAzKxEOQCG7vZCF3AIiqlWKK56i6lWKK56i6lWKKJ6PQdgZlaiPAIwMytRDgAzsxLlABgESdMkPSJptaRVkj5e6JoGIikl6VeS/qvQtQxE0gRJ90t6Lvlv3Fjomg5G0p8lfw+ekfQ9SSPmcSCSvi5pk6RnctomSvqJpBeS16MKWWOufur9u+TvwkpJP5A0oZA17tVXrTnr/kJSSDq6ELXlywEwOF3An0fEm4G5wEclzSpwTQP5ODC0h5UeOV8BHoyIU4
C3MILrljQF+BOgISJOA1LAgsJWtZ9vAhf3arsJeCgiZgIPJcsjxTc5sN6fAKdFxGzgeeBTR7qofnyTA2tF0jTgQuCVI13QoXIADEJEvBoRy5P3O8keoKYUtqr+SZoKvBv4WqFrGYikauC3gTsAIqIjIrYXtqoBpYEqSWlgDLCxwPXsExE/A7b2ap4P3Jm8vxP43SNa1EH0VW9E/DgiupLFx4GpR7ywPvTz3xZgIXAjMOK/YeMAGCJJ04G3AssKW8lB3Ur2L2RPoQvJQz3QAnwjOWX1NUljC11UfyJiA/D3ZD/tvQq0RsSPC1vVgI6NiFch+2EGOKbA9RyKPwJ+VOgi+iPpUmBDRKwodC35cAAMgaRxwL8DfxoROwpdT18kvQfYFBFPFrqWPKWBOcC/RMRbgTcYWaco9pOcP58PnAhMBsZK+v3CVjU6Sfo02dOv3yl0LX2RNAb4NPDXha4lXw6AQZKUIXvw/05EfL/Q9RzEO4BLJb0E3A1cIOmuwpZ0UM1Ac0TsHVHdTzYQRqr/Bfw6IloiohP4PvD2Atc0kNclHQeQvG4qcD0DknQN8B7gqhi5Fy/NIPtBYEXy720qsFzSpIJWdRAOgEGQJLLnqFdHxC2FrudgIuJTETE1IqaTnZx8OCJG7CfUiHgNWC/pTUnTPODZApY0kFeAuZLGJH8v5jGCJ60Ti4BrkvfXAP9ZwFoGJOli4JPApRGxu9D19Ccino6IYyJievLvrRmYk/ydHpEcAIPzDuBqsp+mn0p+3lXookaRjwHfkbQSOAP42wLX069kpHI/sBx4muy/qRFzKwBJ3wOWAm+S1CzpWuBLwIWSXiD7bZUvFbLGXP3U+0/AeOAnyb+1rxa0yEQ/tRYV3wrCzKxEeQRgZlaiHABmZiXKAWBmVqIcAGZmJcoBYGZWohwAZmYlygFgZlai/j/ydXzjs9/peQAAAABJRU5ErkJggg==\n", |
|
|
4043 |
"text/plain": [ |
|
|
4044 |
"<Figure size 432x288 with 1 Axes>" |
|
|
4045 |
] |
|
|
4046 |
}, |
|
|
4047 |
"metadata": { |
|
|
4048 |
"needs_background": "light" |
|
|
4049 |
}, |
|
|
4050 |
"output_type": "display_data" |
|
|
4051 |
} |
|
|
4052 |
], |
|
|
4053 |
"source": [ |
|
|
4054 |
"plt.plot(stats.groupby('epoch').mean().val_loss)" |
|
|
4055 |
] |
|
|
4056 |
}, |
|
|
4057 |
{ |
|
|
4058 |
"cell_type": "code", |
|
|
4059 |
"execution_count": null, |
|
|
4060 |
"metadata": {}, |
|
|
4061 |
"outputs": [], |
|
|
4062 |
"source": [] |
|
|
4063 |
}, |
|
|
4064 |
{ |
|
|
4065 |
"cell_type": "code", |
|
|
4066 |
"execution_count": 34, |
|
|
4067 |
"metadata": { |
|
|
4068 |
"scrolled": true |
|
|
4069 |
}, |
|
|
4070 |
"outputs": [ |
|
|
4071 |
{ |
|
|
4072 |
"data": { |
|
|
4073 |
"text/plain": [ |
|
|
4074 |
"(4368, 60, 6)" |
|
|
4075 |
] |
|
|
4076 |
}, |
|
|
4077 |
"execution_count": 34, |
|
|
4078 |
"metadata": {}, |
|
|
4079 |
"output_type": "execute_result" |
|
|
4080 |
} |
|
|
4081 |
], |
|
|
4082 |
"source": [ |
|
|
4083 |
"predictions.shape" |
|
|
4084 |
] |
|
|
4085 |
}, |
|
|
4086 |
{ |
|
|
4087 |
"cell_type": "code", |
|
|
4088 |
"execution_count": 35, |
|
|
4089 |
"metadata": {}, |
|
|
4090 |
"outputs": [ |
|
|
4091 |
{ |
|
|
4092 |
"data": { |
|
|
4093 |
"text/plain": [ |
|
|
4094 |
"array([0.26533565, 0.14922237, 0.21536556, 0.21641548, 0.21140262,\n", |
|
|
4095 |
" 0.19743267], dtype=float32)" |
|
|
4096 |
] |
|
|
4097 |
}, |
|
|
4098 |
"execution_count": 35, |
|
|
4099 |
"metadata": {}, |
|
|
4100 |
"output_type": "execute_result" |
|
|
4101 |
} |
|
|
4102 |
], |
|
|
4103 |
"source": [ |
|
|
4104 |
"predictions.mean((0,1))" |
|
|
4105 |
] |
|
|
4106 |
}, |
|
|
4107 |
{ |
|
|
4108 |
"cell_type": "code", |
|
|
4109 |
"execution_count": 36, |
|
|
4110 |
"metadata": {}, |
|
|
4111 |
"outputs": [ |
|
|
4112 |
{ |
|
|
4113 |
"data": { |
|
|
4114 |
"text/plain": [ |
|
|
4115 |
"[<matplotlib.lines.Line2D at 0x7f7449493790>]" |
|
|
4116 |
] |
|
|
4117 |
}, |
|
|
4118 |
"execution_count": 36, |
|
|
4119 |
"metadata": {}, |
|
|
4120 |
"output_type": "execute_result" |
|
|
4121 |
}, |
|
|
4122 |
{ |
|
|
4123 |
"data": { |
|
|
4124 |
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dd3hUZd7/8fc3nYSSQmiBFCCU0BISehMUBUWQ1VURhUWUxQV1F7fo6vrs6j5bdNey+2NVRMSyiKCCiAgqSlNaKAEChBJaCJAECCW93L8/MvpEDGQg5cyc+b6uKxeZM2dmPjeED4dT7iPGGJRSStmXl9UBlFJK1S0teqWUsjkteqWUsjkteqWUsjkteqWUsjkfqwNcqmnTpiY6OtrqGEop5Va2bNmSY4wJr+o5lyv66OhokpOTrY6hlFJuRUSOXO453XWjlFI2p0WvlFI2p0WvlFI2p0WvlFI2p0WvlFI2p0WvlFI2p0WvlFI251TRi8gIEUkTkQMi8vgV1rtDRIyIJDkeR4tIgYhsd3y9WlvBL5WbX8yLX+xj78nzdfURSinllqq9YEpEvIGZwHAgA9gsIkuMMbsvWa8R8Aiw8ZK3OGiMia+lvJfPifDKqoNcKCzl6Vvj6vrjlFLKbTizRd8bOGCMSTfGFAPzgTFVrPcs8BxQWIv5nNYk0JdhnZqxJCWT0rJyKyIopZRLcqboI4BjlR5nOJZ9T0QSgDbGmKVVvD5GRLaJyGoRGVTVB4jIFBFJFpHk7OxsZ7P/yNieEeRcLGLtgZxrfg+llLIbZ4peqlj2/f0HRcQLeBF4rIr1TgCRxpgEYAYwT0Qa/+jNjJlljEkyxiSFh1c5J49ThnZsRnCgL4u2Hr/m91BKKbtxpugzgDaVHrcGMis9bgR0BVaJyGGgL7BERJKMMUXGmNMAxpgtwEGgQ20Er4qfjxejurfk890nuVhUWlcfo5RSbsWZot8MxIpIjIj4AXcDS7570hhzzhjT1BgTbYyJBjYAo40xySIS7jiYi4i0BWKB9FofRSVjE1pTWFLOZztP1OXHKKWU26i26I0xpcB0YAWwB1hgjEkVkWdEZHQ1Lx8M7BCRFOADYKox5kxNQ19Jz8hgosMCWbRNd98opRQ4OR+9MWYZsOySZU9fZt3rKn3/IfBhDfJdNRHhtoQIXl65nxPnCmjZpEF9frxSSrkcW14ZOzYhAmNg8bbM6ldWSimbs2XRR4UF0TMymEXbMjDGVP8CpZSyMVsWPcDYnq3Zd+oiqZk6JYJSyrPZtuhHdWuJr7foQVmllMezbdGHBPkxtGMzPt6uUyIopTybbYse4CeOKRHW6ZQISikPZuuiH9qpGU0a+LIwOcPqKEopZRlbF72/jzd3JrVmRepJTp6zZFJNpZSynK2LHuDevlGUGcO8TUetjqKUUpawfdFHhQVxXYdw3tt0lOJSPSirlPI8ti96gAn9osm+UMSK1JNWR1FKqXrnEUU/pEM4kaGBvL3+sNVRlFKq3nlE0Xt5Cff1jWLz4bPs1itllVIexiOKHuCnSa0J8PXinQ2HrY6ilFL1ymOKPjjQjzE9Ili8LZNz+SVWx1FKqXrjMUUPcF+/KApKyli45Vj1KyullE14VNF3jWhCYlQI7244Qnm5Tl+slPIMHlX0ABP6RXH4dD5r9mdbHUUppeqFxxX9yK4tadrQnzfWHbI6ilJK1QuPK3o/Hy+mDmnL2v05rN6nW/VKKfvzuKKHioOykaGB/OXTPZTpvnqllM15ZNH7+3jzxMhOpJ26wIJkPQNHKWVvHln0ACO6tqBXdAj//DyNi0WlVsdRSqk647FFLyI8eUscOReLeWXVAavjKKVUnfHYogeIbxPMmPhWzF57iOO5BVbHUUqpOuHRRQ/w2xGdAHh++V6LkyilVN3w+KKPCG7A5IExLN6eScqxXKvjKKVUrfP4ogd46Lp2NG3ox18/22N1FKWUqn
VOFb2IjBCRNBE5ICKPX2G9O0TEiEhSpWVPOF6XJiI31Ubo2tYowJdJA2LYkH5GbyKulLKdaoteRLyBmcBIIA4YJyJxVazXCHgE2FhpWRxwN9AFGAH8x/F+Lmd4XHMAVu49ZXESpZSqXc5s0fcGDhhj0o0xxcB8YEwV6z0LPAdU3iQeA8w3xhQZYw4BBxzv53JimzWkTWgDvtytRa+Ushdnij4CqHz5aIZj2fdEJAFoY4xZerWvdbx+iogki0hydrY188+ICDd0bs43B0+TX6wXUCml7MOZopcqln0/QYyIeAEvAo9d7Wu/X2DMLGNMkjEmKTw83IlIdeOGzs0pLi1n7f4cyzIopVRtc6boM4A2lR63BjIrPW4EdAVWichhoC+wxHFAtrrXupTeMaE0CvBh5R7dfaOUsg9nin4zECsiMSLiR8XB1SXfPWmMOWeMaWqMiTbGRAMbgNHGmGTHeneLiL+IxACxwKZaH0Ut8fX2YkiHcL7am6V3oFJK2Ua1RW+MKQWmAyuAPcACY0yqiDwjIqOreW0qsADYDSwHphljymoeu+4Mj2tOzsVitmfoxVNKKXvwcWYlY8wyYNkly56+zLrXXfL4f4H/vcZ89e66Ds3w9hK+3H2KnpEhVsdRSqka0ytjL9Ek0Jde0SGs3JNldRSllKoVWvRVuKFzc9JOXeDYmXyroyilVI1p0Vfh+s4VV8l+qWffKKVsQIu+CjFNg2gXHqRFr5SyBS36y7ghrjkb089wvrDE6ihKKVUjWvSXcUPn5pSWG9bss2ZKBqWUqi1a9JfRMzKEkEBfneRMKeX2tOgvw9tLGNqpGV+nZVNcWm51HKWUumZa9FcwukcrzhWU8EmKy07Po5RS1dKiv4IhHcLp2LwRs9akY4zOfaOUck9a9FcgIkwZ3Ja0UxdYpQdllVJuSou+Grf2aEWLxgG8tvqg1VGUUuqaaNFXw8/Hi8kDK24cnnJMZ7RUSrkfLXon3N27DY38fZi1Jt3qKEopddW06J3QKMCX8X2j+GzXCY6e1onOlFLuRYveSZMGROPtJcxep1v1Sin3okXvpOaNAxibEMGC5GOcySu2Oo5SSjlNi/4qTBnclsKSct5ef9jqKEopm8k6X1hnV+Fr0V+F9s0acUPnZrz17WHyi0utjqOUspHHP9rJ2P98UycXZ2rRX6WHrmvP2fwSnvlkt9VRlFI2kZ59ka/2ZjE8rjkiUuvvr0V/lRKjQvjFde2Yv/kYH28/bnUcpZQNvPnNYfy8vRjfJ6pO3l+L/hrMGN6BpKgQfv/RTtKzL1odRynlxs7ll/DBlgzGxLcivJF/nXyGFv018PH24l/jEvD18WL6vG0UlpRZHUkp5abe23yUgpIyJg2IqbPP0KK/Rq2CG/DCnT3YfeI8f1m2x+o4Sik3VFJWzlvfHqZ/uzDiWjWus8/Roq+BYZ2a8+CgGN5ef4RlO09YHUcp5WaW7zrJiXOF3F+HW/OgRV9jv7mpE/FtgvndBzvIOKvTIyilnDfnm0NEhwUyrFOzOv0cLfoa8vPx4t/jEigoKeOd9UesjqOUchNbj55l29FcJg2Iwcur9k+prEyLvha0CQ3kuo7hfLw9k7JyvROVUqp6c9YdolGAD3cktq7zz3Kq6EVkhIikicgBEXm8iuenishOEdkuIutEJM6xPFpEChzLt4vIq7U9AFcxJj6Ck+cL2Zh+2uooSikXdzy3gM92nWRc70iC/H3q/POqLXoR8QZmAiOBOGDcd0VeyTxjTDdjTDzwHPBCpecOGmPiHV9Tayu4q7mhc3Ma+vuwWC+iUkpV47v5sib2j66Xz3Nmi743cMAYk26MKQbmA2Mqr2CMOV/pYRDgcfsvGvh5M6JrCz7beVLPq1dKXVZeUSnvbTzKiC4tiAhuUC+f6UzRRwDHKj3OcCz7ARGZJiIHqdiif6TSUzEisk1EVovIoKo+QESmiEiyiCRnZ7vvTbhvi4/gQlEpK/dkWR1FKeWiPtyawfnCUu
4fWLenVFbmTNFXdTj4R1vsxpiZxph2wO+ApxyLTwCRxpgEYAYwT0R+dFWAMWaWMSbJGJMUHh7ufHoX069dGM0a+evuG6VUlcrLDXPWHSK+TTCJUSH19rnOFH0G0KbS49ZA5hXWnw/cBmCMKTLGnHZ8vwU4CHS4tqiuz9tLGBPfilVpWZzVm5MopS6xcm8Wh0/n88Cg+tuaB+eKfjMQKyIxIuIH3A0sqbyCiMRWengLsN+xPNxxMBcRaQvEAra+F9+Y+AhKygyf6pWySqlLvLEunYjgBozo0qJeP7faojfGlALTgRXAHmCBMSZVRJ4RkdGO1aaLSKqIbKdiF81Ex/LBwA4RSQE+AKYaY87U+ihcSJdWjYlt1lCnMFZK/cCu4+fYkH6Gif2j8PGu30uYnDqB0xizDFh2ybKnK33/6GVe9yHwYU0CuhsR4baECJ5fkcaxM/m0CQ20OpJSygXMWXeIID9v7uoVWe+frVfG1oHRPVoBsCTlSocylFKe4tT5Qj7ZkclPk9rQpIFvvX++Fn0daBMaSO/oUD7amlEn939USrmXt9cfprTcMGlAtCWfr0VfR8YktOJgdh6pmeerX1kpZVsFxWX8d+NRhnduTlRYkCUZtOjryC3dWuLn7cXcbw9bHUUpZaGPtmWQm1/CA4PaWpZBi76OBAf6MWlANB9syWDr0bNWx1FKWaC83PDGukN0i2hCr+j6u0DqUlr0dejh62Np3tifpz/epdMXK+WBlqeeJD07jwcGxSBSt3POX4kWfR1q6O/Dk7fEsev4eeZvPmp1HKVUPSovN/xr5X7ahgcxqnsrS7No0dexW7u3pE9MKM+vSNNpEZTyIJ/vPsXekxd4eFh7vOv4DlLV0aKvYyLCM2O6cqGwlOdWpFkdRylVD4yp2JqPDgvkVou35kGLvl50bNGIif2imb/5KDsycq2Oo5SqY1/uyWL3ifNMHxZb79MdVMX6BB7il8NjCQvy5+mPUynXA7NK2ZYxhpdX7iMyNJDb4q3fmgct+nrTOMCX39/cie3Hcnk/+Vj1L1BKuaWv07LYdfw804e2d4mtedCir1djEyLo2zaU/1mSyga9ibhStmOM4eUv99M6pAFje/7oRnyW0aKvRyLCK+MTiQwN5MG3k9lzQqdHUMpOVu3LJiXjHNOGtsfXRbbmQYu+3oUE+fH2/b0J8vNh4pxNHDuTb3UkpVQt+G5rPiK4Abf3bG11nB/QordAq+AGvD25N4UlZUycs4nTF4usjqSUqqFVadlsP5bLL4a2w8/HtarVtdJ4kA7NGzHnZ704nlvA/XM3k1dUanUkpdQ1Kis3/O2zvUSFBfLTxDbVv6CeadFbKCk6lJn39GRX5nkenb9N565Xyk19uDWDtFMX+O1NnVxuax606C13Q1xznhjZiS/3ZLF0h95QXCl3U1hSxotf7KNHm2Bu7la/N/12lha9C5g0IIZuEU14duluzheWWB1HKXUV3vzmMCfOFfLEyE6WzlB5JVr0LsDbS/jL2G7kXCzihc/3WR1HKeWks3nF/GfVAYZ1akbftmFWx7ksLXoX0a11E+7rG8Xb6w+zM+Oc1XGUUk74f18fIK+olN+N6GR1lCvSonchj93UkbCG/jy5eKfeqEQpF3fsTD7vrD/CHYmt6diikdVxrkiL3oU0DvDlD6Pi2JFxjv9uPGJ1HKXUFfzz8zRE4FfDO1gdpVpa9C7m1u4tGRTblOeXp5F1vtDqOEqpKuzIyGXx9kzuHxhDyyYNrI5TLS16F/PdjUqKysp5Zuluq+MopS5RVFrGbxbuILyRP1OHtLM6jlO06F1QTNMgpg9tz9IdJ5i9Nt3qOEqpSl7+cj9ppy7w99u70aSBr9VxnOJjdQBVtelD25N28gJ//nQPzRoHMLqHa9zAQClPtvXoWV5dfZA7k1ozrFNzq+M4zaktehEZISJpInJARB6v4vmpIrJTRLaLyDoRiav03BOO16WJyE21Gd7OvLyEf97Zg94xofx6QQ
rrD+r89UpZqaC4jF8vSKFF4wCeGhVX/QtcSLVFLyLewExgJBAHjKtc5A7zjDHdjDHxwHPAC47XxgF3A12AEcB/HO+nnBDg683r9yURFRbIlHeS2XtS569XyirPr0gjPSeP5+7oQeMA99hl8x1ntuh7AweMMenGmGJgPjCm8grGmMoNFAR8dxL4GGC+MabIGHMIOOB4P+WkJoG+zL2/N4F+3vxszmYycwusjqSUx9mQfpo53xxiQr8oBsY2tTrOVXOm6COAyjc5zXAs+wERmSYiB6nYon/kKl87RUSSRSQ5Ozvb2eweIyK4AXMn9SavqJSfvbmJs3nFVkdSymNcLCrlNx+kEBUWyOMjXfsK2MtxpuirmqXnR5dtGmNmGmPaAb8DnrrK184yxiQZY5LCw8OdiOR5OrdszGv3JXL4dD73zN7IGS17perFX5btIeNsAf/4aQ8C/dzz/BVnij4DqDyTfmsg8wrrzwduu8bXqivo374psyckkZ59kXte36B3plKqjq3el828jUd5YGAMvaJDrY5zzZwp+s1ArIjEiIgfFQdXl1ReQURiKz28Bdjv+H4JcLeI+ItIDBALbKp5bM81uEM4b0zsxeHTedzz+kZytOyVqhPn8kv43Qc7aN+sIY/d2NHqODVSbdEbY0qB6cAKYA+wwBiTKiLPiMhox2rTRSRVRLYDM4CJjtemAguA3cByYJoxpqwOxuFRBsY2Zc7EXhw5k8e4WRvIvqBlr1Rt+9MnqWRfLOKFO3sQ4OveJwuKq92+LikpySQnJ1sdwy1sSD/NpDc30yo4gAU/70dYQ3+rIyllC8t3nWTqu1t45PpYZrjBpGUAIrLFGJNU1XM6BYIb69s2jLmTepFxtoAZC1Io16mNlaqx0xeLeHLRTrq0aszDw9pbHadWaNG7uT5tw/jDqDhW78tm9jqdF0epmjDG8OSiXVwoLOWFO+Px9bZHRdpjFB5ufJ9IRnZtwXPL09h+LNfqOEq5rY+3Z7I89SQzbuzg8jcTuRpa9DYgIvztJ91p3jiAh9/bqjcYV+oapJ28wO8X7SQpKoQHB7W1Ok6t0qK3iSaBvvxrXAKZuYU88eFOXO0gu1Ku7Fx+CVPeSaahvw8zx/fE26uqaz3dlxa9jSRGhfDYjR34dOcJ5m8+Vv0LlFKUlRsenr+NzNwCXrk3keaNA6yOVOu06G1m6uB2DGzflD8uSWXb0bNWx1HK5f3j8zTW7MvmmTFdSYwKsTpOndCitxkvL+GFu3oQGuTHHa+u5y/L9lBQrNeoKVWVpTsyeWXVQe7pE8m43pFWx6kzWvQ21KxRAMsfHcxPE1sza006N760mrX7dVZQpSrbc+I8v1m4g8SoEP54axer49QpLXqbahLoy99u7878KX3x9fLivjc2MeP97TrrpVJAZm4BD7yVTOMGPrwyvid+PvauQnuPTtG3bRjLHh3Ew8PasyQlk5Evr2FDut6WUHmurAuFjJ+9kfMFJbwxsRfNbHjw9VJa9B4gwNebx27syMfTBxDo58M9r2/gXyv3U6ZTJigPk5tfzIQ3NnHyXCFz7+9F14gmVkeqF1r0HqRLqyZ88vBAbu3Rihe+2MeEORvJulBodSyl6sWFwhImztlEek4esycmkRjlvvPLXy0teg/T0N+Hl+6K5++3d2PLkbPc/PI61h/UXTnK3gqKy5g8N5nUzPP8556eDGjvfvd9rQkteg8kItzVK5KPpw2kSQMfpryTzKnzumWv7KmkrJyp725h85EzvHhXPDfENbc6Ur3TovdgHVs0YvbEXhSXlvM/H6daHUepWmeM4alFu1i9L5u/ju3GrT1aWR3JElr0Hi6maRC/vKEDy1NPsnzXCavjKFWrXll9kPeTj/HwsPbcbeMLoqqjRa94YFAMnVs25umPUzlXoDNfKntYuiOT55anMbpHK7e5S1Rd0aJX+Hp78ffbu5FzsYi/fbbX6jhK1diWI2eZsSCFpKgQnrujOyL2mo3yamnRKwC6tw
5m8sAY3tt0VC+oUm7t6Ol8Hnw7mVZNApg1Icntb+xdG7To1fd+NbwDbUIb8PuPdlJYohOhKfdzvrCESXM3UW4Mb07qTWiQn9WRXIIWvfpeoJ8PfxnbjfScPF78cp/evES5lfJyw2MLUjhyOp/X7k0kpmmQ1ZFchha9+oFBseHckdia11anc9drG0g+fMbqSEo55ZXVB/li9ymevKUzfdqGWR3HpWjRqx/560+68extXTl0Oo87Xl3PA29tZu/J81bHUuqy1u7P5p+fV5xh87P+0VbHcTniav89T0pKMsnJyVbHUEB+cSlvfnOYV1cf5GJRKWPjI/jtiE60aGL/2f6U+zieW8Cof60lvJE/i6dVTNzniURkizEmqarndIteXVagnw/ThrZn7W+HMmVwW5buPMH1/1zFa6sPUlxabnU8pSgqLeMX726hpMzw6r2JHlvy1dGiV9UKDvTjiZGd+fJXQ+jXLoy/fraXES+v0btWKcv96ZPdpGSc4x8/7UHb8IZWx3FZWvTKaZFhgcye2Is5P0uirNxw3xubeOjdLWTphGjKAou2ZTBv41Eeuq4dI7q2sDqOS3Oq6EVkhIikicgBEXm8iudniMhuEdkhIitFJKrSc2Uist3xtaQ2wytrDOvUnBW/HMyvb+zAV3uzuOmlNTpPjqpXh3PyeGrRLnpHh/KYh09v4Ixqi15EvIGZwEggDhgnInGXrLYNSDLGdAc+AJ6r9FyBMSbe8TW6lnIriwX4ejN9WCyfPjKINqGBTH13K48tSOFCoc6Vo+pWcWk5j87fhreX8OLd8fh4646J6jjzO9QbOGCMSTfGFAPzgTGVVzDGfG2MyXc83AC0rt2YylW1b9aQDx/qzyPD2rNoWwYjXlrLpkN67r2qO//8Io2UjHP8/fbuRAQ3sDqOW3Cm6COAY5UeZziWXc5k4LNKjwNEJFlENojIbVW9QESmONZJzs7WA3zuxtfbixk3dmTh1P74eAt3zVrPOxuOWB1L2dDa/dm8tjqdcb0jGdmtpdVx3IYzRV/VtG9VnnwvIvcCScDzlRZHOs7tvAd4SUTa/ejNjJlljEkyxiSFh4c7EUm5osSoEJY9MohhHZvx9Me7WJKSaXUkZSOnLxYxY0EK7Zs15OlRl+49VlfiTNFnAG0qPW4N/OhvsIjcADwJjDbGFH233BiT6fg1HVgFJNQgr3JxQf4+zBzfk17Rocx4fzur0rKsjqRswBjDrxemcK6ghH/dnUADP52R8mo4U/SbgVgRiRERP+Bu4Adnz4hIAvAaFSWfVWl5iIj4O75vCgwAdtdWeOWaAny9mT0xiQ7NG/HQu1vZcuSs1ZGUm5v59QG+Tsvm9yM7EdeqsdVx3E61RW+MKQWmAyuAPcACY0yqiDwjIt+dRfM80BBYeMlplJ2BZBFJAb4G/maM0aL3AI0DfHnr/t40b+zP/XM3k3bygtWRlJuat/Eo//h8H6N7tGKizmNzTXSuG1Wnjp3J5/ZXvgXgg6n9iQwLtDiRcifLdp5g2rytDOkQzusTkvDVUykvS+e6UZZpExrI25N7U1hSxm3/+UanTVBOW7s/m0fnbyMxMoRXxidqydeA/s6pOtepRWMWTRtA04Z+TJiziZe/3E95uWv9T1K5lm1Hz/Lzd7bQLrwhb0zspQdfa0iLXtWLduENWTxtALfFR/Dil/v42dzNnMkrtjqWckH7T11g0tzNNG3oz9v396ZJoK/VkdyeFr2qN4F+PrxwZw/+d2xXNhw8zah/rWXbUT0jR/2fs3nF3P/WZny9vXh3ch+aNdZ7H9QGLXpVr0SE8X2i+PCh/nh5CXe+tp431h3S+9MqSsvKeWT+Nk6dK2LWfYl64L4WadErS3Rr3YRPHx7EkA7NeHbpbqa+u4VzBTohmid7fkUaa/fn8OxtXUiIDLE6jq1o0SvLNAn05fUJiTx5c2dW7sli1L/XsiMj1+pYygIfbz/Oa2vSua9vFHf1irQ6ju1o0StLiQgPDm7L+z
/vR1mZ4Y5XdEI0T5OaeY7ffbiDXtEh/EHnsKkTWvTKJSRGhfDpI4MY0D6MPyzexR+XpFKmp2Da3tm8Yn7+zhaCG/jxn/GJ+PloJdUF/V1VLiMkyI/ZE3vxwMAY5n57mClvJ5NXVGp1LFVHDuXkMfHNTWRdKOK1+xIJb+RvdSTb0qJXLsXbS3hqVBzPjunC12lZ3Pnaek7pPWltpbzcMPebQ4x8eQ2Hc/L4f+MS6NEm2OpYtqZFr1zSff2imT0xiUM5edw28xv2nDhvdSRVCzLO5nPvGxv54ye76ds2jM9/NYQbu+iNveuaFr1yWcM6NWfh1H6UG8Odr67nYPZFqyOpGliYfIwRL60l5Vguf/tJN978WS9aNNELouqDFr1yaV1aNeHDh/rj6+PFQ+9uIb9Y99m7m7Jyw58+SeU3H+ygS6vGLP/lYO7uHYlIVTevU3VBi165vNYhgbx0Vzz7sy7y1OJdehWtG7lYVMoDb23mzW8Oc/+AGOY92Jc2oXrFa33TolduYXCHcB4ZFstHW4/z/uZj1b9AWS7jbD53vPIta/bn8OfbuvL0rXF4e+lWvBV8rA6glLMeuT6WrUfP8vSSVLpGNKFrRBOrI6nL2Hb0LA++vYWi0jLmTurFoNhwqyN5NN2iV27D20t46a54QgP9+MV/t+rcOC7IGMN7m45y16wNNPDz4qOH+mvJuwAteuVWwhr6M3N8Apm5Bfx6YYrur3cheUWlzFiQwhMf7aRPTCiLfzGA2OaNrI6l0KJXbigxKpTHR3bii92neG5FmtVxFBU3Cxkz8xsWbz/OjOEdmDupN2EN9UpXV6H76JVbmjwwhvScPF5ZdZCwID8eGNTW6kge66OtGTy5aBdB/t68O7kPA9o3tTqSuoQWvXJLIsKzY7pyNq+YP3+6h7CGfoxNaG11LI9yMPsizy7dzaq0bPrEhPLvcQl6RygXpUWv3Ja3l/DS3fHkztnMbxbuIDjQj6Edm1kdy/YuFJbw768O8OY3h/D38ebJmzszaUA0Pt66J9hV6Z+Mcmv+Pt7MmpBIxxaNeOjdLWw5ovegrStl5YaFyccY+o/VzFqTztiECL7+9XU8OLitlryLE1c7ayEpKckkJydbHUO5mewLRdzx6u9Vb/8AAApSSURBVLfk5pcwd1IvvRVdLco6X8iC5GO8t+kYx3MLSIgM5o+3dtEZJ12MiGwxxiRV+ZwWvbKLo6fzGff6Bk6dL+RXwzswdUg7vRLzGpWXG9YdyGHexqN8uecUpeWG/u3CuK9vFDd1aYGX/r66nCsVve6jV7YRGRbIskcH8dTiXTy/Io01+7J58a54WgU3sDqayzPGsO/URdYfzGF9+mk2HjpDbn4JIYG+3D8whnG9I4lpGmR1THWNdIte2Y4xho+2Hufpj3fh7SX89SfduaV7S6tjuZTCkjJ2HT/H1qNn2Xokl82Hz3A6rxiA1iEN6Nc2jCEdwxke1xx/H2+L0ypn1HiLXkRGAC8D3sBsY8zfLnl+BvAAUApkA/cbY444npsIPOVY9c/GmLeuaRRKOUlEuD2xNYlRITz6/namzdvKom3NmD4slngP3a9sjGHX8fMs3ZHJhvTT7D5xnpKyio28NqENGNwhnH7twujXNkxnl7SharfoRcQb2AcMBzKAzcA4Y8zuSusMBTYaY/JF5CHgOmPMXSISCiQDSYABtgCJxpjLnhqhW/SqNpWUlTNrTTqz1qRzrqCEAe3DmDa0Pf3ahnnEfOgHsi6yJCWTT1IyOZSTh6+3kBAZQs/IEBIig0mIDKZZIz333Q5qukXfGzhgjEl3vNl8YAzwfdEbY76utP4G4F7H9zcBXxhjzjhe+wUwAnjvageh1LXw9fZi2tD2TOwfzbyNR3h97SHueX0jCZHBPHlzZ5KiQ62OWKvKyg3bj+WyKi2LL/dksefEeUSgb0wYUwa3ZWTXFgQH+lkdU9UzZ4o+Aqg8AXgG0OcK608GPrvCayMufYGITAGmAERGRjoRSa
mr09DfhymD2zGhXzQLt2Tw6qqDjJ+9kdcnJDG4g3vPrnihsISv9mbx9d4sVu/L5mx+Cd5eQs/IYP4wKo5R3VvSXK9Y9WjOFH1V/7+tcn+PiNxLxW6aIVfzWmPMLGAWVOy6cSKTUtckwNeb+/pGcUu3loyfvZEH305m9sQkt5tK90JhCSv3ZLF0xwnW7M+muLSc0KCKK4Ov69SMIbHhNAn0tTqmchHOFH0G0KbS49ZA5qUricgNwJPAEGNMUaXXXnfJa1ddS1ClalNokB//faAP97y+gQfeco+yP5dfwsq9p1i28+T35d6icQDj+0RyS7eWJESG6HUDqkrOHIz1oeJg7PXAcSoOxt5jjEmttE4C8AEwwhizv9LyUCoOwPZ0LNpKxcHYM5f7PD0Yq+rTmbxi7nl9A4dy8nhjYi8GxrrWzIunLxbx+e5TfLbrJN8eyKG03NCicQAju7VgVPeWJLQJ0YuXFFDDg7HGmFIRmQ6soOL0yjnGmFQReQZINsYsAZ4HGgILHWcyHDXGjDbGnBGRZ6n4xwHgmSuVvFL1LTTIj3kP9uWe1zcw+a3NPDUqjob+3hQUl1NYUkZBSRmBft6M6NqClk3q58KrrAuFrNh1kmU7T7Lx0GnKDUSGBjJ5YAwjuragR+tgLXd1VfSCKaX4vy37vScvVPm8CPRrG8bYhAhGdmtJQ//avaj89MUilu08wac7T7Dx0BmMgXbhQdzcrSUjurYgrmVjjzgdVF07netGKScUlZaRnp1HgK83DXy9CfD1IsDXm5PnClm8/TiLth3nyOl8Any9GB7Xgv7twugVHUK78IbXXMJ7TpznzW8OsXh7JsWl5bRv1pCbu7Xklm4t6dD82t9XeR4teqVqgTGGrUfP8tHW4yzfdfL7KQNCAn1JjAqhV3QoY3tGVHsBUnm54eu0LN5Yd4hvD56mga83tydGcG/fKDq1aFwfQ1E2pEWvVC0zxnAoJ4/kI2dJPnyG5CNnHf8b8OLePlH8fEg7whv98J6pufnFLEzO4N2NRzhyOp+WTQKY0C+acb3b6EVMqsa06JWqB4dz8vj3VwdYtC0DPx8v7usbxZTB7cjMLeCdDUf4JCWTotJykqJCmNA/mpFdW+CrN+xQtUSLXql6dCgnj39/tZ/F244jIpSVG4L8vLktoWL3TOeWuntG1T4teqUscCgnj/9uOEJkWCBjEyJoFKBXqqq6ozceUcoCMU2DeGpUnNUxlNKbgyullN1p0SullM1p0SullM1p0SullM1p0SullM1p0SullM1p0SullM1p0SullM253JWxIpINHKnBWzQFcmopjtXsNBaw13jsNBbQ8bgyZ8cSZYyp8n6YLlf0NSUiyZe7DNjd2GksYK/x2GksoONxZbUxFt11o5RSNqdFr5RSNmfHop9ldYBaZKexgL3GY6exgI7HldV4LLbbR6+UUuqH7LhFr5RSqhIteqWUsjnbFL2IjBCRNBE5ICKPW53naonIHBHJEpFdlZaFisgXIrLf8WuIlRmdJSJtRORrEdkjIqki8qhjubuOJ0BENolIimM8f3IsjxGRjY7xvC8ibnOHbxHxFpFtIrLU8didx3JYRHaKyHYRSXYsc8ufNQARCRaRD0Rkr+PvUL+ajscWRS8i3sBMYCQQB4wTEXe7tc9cYMQlyx4HVhpjYoGVjsfuoBR4zBjTGegLTHP8ebjreIqAYcaYHkA8MEJE+gJ/B150jOcsMNnCjFfrUWBPpcfuPBaAocaY+Ernm7vrzxrAy8ByY0wnoAcVf041G48xxu2/gH7AikqPnwCesDrXNYwjGthV6XEa0NLxfUsgzeqM1ziuj4HhdhgPEAhsBfpQcbWij2P5D34GXfkLaO0oi2HAUkDcdSyOvIeBppcsc8ufNaAxcAjHiTK1NR5bbNEDEcCxSo8zHMvcXXNjzAkAx6/NLM5z1UQkGkgANuLG43Hs6tgOZAFfAAeBXGNMqWMVd/qZewn4LVDueByG+44FwACfi8
gWEZniWOauP2ttgWzgTceutdkiEkQNx2OXopcqlul5oxYTkYbAh8AvjTHnrc5TE8aYMmNMPBVbw72BzlWtVr+prp6IjAKyjDFbKi+uYlWXH0slA4wxPanYdTtNRAZbHagGfICewCvGmAQgj1rY7WSXos8A2lR63BrItChLbTolIi0BHL9mWZzHaSLiS0XJ/9cY85FjsduO5zvGmFxgFRXHHoJFxMfxlLv8zA0ARovIYWA+FbtvXsI9xwKAMSbT8WsWsIiKf4jd9WctA8gwxmx0PP6AiuKv0XjsUvSbgVjHmQN+wN3AEosz1YYlwETH9xOp2Nft8kREgDeAPcaYFyo95a7jCReRYMf3DYAbqDhA9jVwh2M1txiPMeYJY0xrY0w0FX9PvjLGjMcNxwIgIkEi0ui774EbgV246c+aMeYkcExEOjoWXQ/spqbjsfrgQy0exLgZ2EfFvtMnrc5zDfnfA04AJVT8qz6Zin2nK4H9jl9Drc7p5FgGUvFf/x3AdsfXzW48nu7ANsd4dgFPO5a3BTYBB4CFgL/VWa9yXNcBS915LI7cKY6v1O/+7rvrz5ojezyQ7Ph5WwyE1HQ8OgWCUkrZnF123SillLoMLXqllLI5LXqllLI5LXqllLI5LXqllLI5LXqllLI5LXqllLK5/w8YUfT+2+pufgAAAABJRU5ErkJggg==\n", |
|
|
4125 |
"text/plain": [ |
|
|
4126 |
"<Figure size 432x288 with 1 Axes>" |
|
|
4127 |
] |
|
|
4128 |
}, |
|
|
4129 |
"metadata": { |
|
|
4130 |
"needs_background": "light" |
|
|
4131 |
}, |
|
|
4132 |
"output_type": "display_data" |
|
|
4133 |
} |
|
|
4134 |
], |
|
|
4135 |
"source": [ |
|
|
4136 |
"plt.plot(predictions.mean(0)[:,0])" |
|
|
4137 |
] |
|
|
4138 |
}, |
|
|
4139 |
{ |
|
|
4140 |
"cell_type": "code", |
|
|
4141 |
"execution_count": 37, |
|
|
4142 |
"metadata": {}, |
|
|
4143 |
"outputs": [ |
|
|
4144 |
{ |
|
|
4145 |
"data": { |
|
|
4146 |
"text/plain": [ |
|
|
4147 |
"[<matplotlib.lines.Line2D at 0x7f744941c650>]" |
|
|
4148 |
] |
|
|
4149 |
}, |
|
|
4150 |
"execution_count": 37, |
|
|
4151 |
"metadata": {}, |
|
|
4152 |
"output_type": "execute_result" |
|
|
4153 |
}, |
|
|
4154 |
{ |
|
|
4155 |
"data": { |
|
|
4156 |
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXxU9b3/8dcnk5WQkD0kJEASwhKQzbDJKiLgUrdaldZq60K1tdraXpfeVvuzt7dXvbd6tbYu1autWjdapYIiCoLshH1LIAkBkkBWCHuSyXx/f2SwMQaYJJOcmTOf5+ORR2bOnDP5fCF55+R7vuf7FWMMSiml7CvI6gKUUkp1LQ16pZSyOQ16pZSyOQ16pZSyOQ16pZSyuWCrC2gtISHB9O/f3+oylFLKr2zYsKHaGJPY1ms+F/T9+/cnLy/P6jKUUsqviMi+s72mXTdKKWVzGvRKKWVzGvRKKWVzGvRKKWVzGvRKKWVzGvRKKWVzGvRKKWVzPjeOXlnreL2TL3ZXUX28nllDe5MUHW51SUqpTtKgVxyqO82nuypYvLOC1UU1NDS5AHh0/g6mDEzkutFpzMxJJjzEYXGlSqmO0KAPYE0uwwPvbWXexlIA+sX34JYJ/bg0J5m4yFDe31zGPzaWce/fNhEVFswVw1O4bnQauf1iCQoSi6tXSnlKfG2FqdzcXKNTIHQ9Ywy/fH87b6zdz20TM5gzNp0BST0R+WqAu1yGNcU1zNtYxkfbD3KyoYm02AiuHdWHa0f1ITOxp0UtUEq1JCIbjDG5bb6mQR+Yfr94N898tocfTM3k4cuGeHTMyQYni3Yc4u8by1hZWI3LwKi+MTx5/QgGJGngK2UlDXr1Fa+tKuHR+Tu4ITeNx785/Gtn8Z6oOHqa+ZvLeWF5ESLCW3PHk6Vn90pZ5lxBr8MrA8z8LeX8+p87mDEkmf+89oIOhTxAcnQ4d07J5G93jscYw5wX11BcddzL1SqlvEGDPoAs313Fz97ZzJj+cfzh26MIdnT+vz87OYo37xxPk8sw56U17K0+4YVKlVLepEEfIE42OPnJ25vJSuzJS7fkenWo5MDkKN64cxyNTc1n9iUa9kr5FA36APH2+gPUnmjgP64ZRq+IEK+//+De0bxxxzjqnU3MeWkNB2pPev1rKKU6xqOgF5HZIlIgIoUi8lAbr98vIjtFZKuIfCYi/Vq81iQim90f871ZvPJMg9PFS8uLGds/jtz+cV32dYakRPPGHeM5Ue/k7jc2UO9s6rKvpZTy3HmDXkQcwHPAZUAOMEdEclrttgnINcYMB94Dnmjx2iljzEj3x1Veqlu1w/wt5ZTXnebuaVld/rVyUqP5nxtGsr3sKP+5YFeXfz2l1Pl5ckY/Fig0xhQbYxqAt4CrW+5gjFlqjDnzt/oaIM27ZaqOcrkMzy8rYnDvKKYNanPdYK+7NCeZOyZl8NrqfSzcdrBbvqZS6uw8Cfo+wIEWz0vd287mduCjFs/DRSRPRNaIyDVtHSAic9375FVVVXlQkvLU4l0VFFYe5+5pWR0eStkRD8wezMj0GB58byv7avTirFJW8iTo20qHNu+yEpGbgVzgyRab+7oH8X8beFpEvtZ/YIx50RiTa4zJTUzsnrPOQGCM4Y+fF9E3rgdXXJDSrV87NDiIP3x7FCLwozc3an+9UhbyJOhLgfQWz9OA8tY7icgM4N+Bq4wx9We2G2PK3Z+Lgc+BUZ2oV7XD6uIathw4wg+mZnplzHx7pcX20P56pXyAJz/964FsEckQkVDgJuAro2dEZBTwAs0hX9lie6yIhLkfJwATgZ3eKl6d258+LyIxKoxvjrbukknL/vqPtx+yrA6lAtl5g94Y4wTuARYBu4B3jDE7ROQxETkziuZJoCfwbqthlEOAPBHZAiwF/ssYo0HfDbaV1vHFnmpun5Rh+TzyD142mMG9o3ji43yaXL41t5JSgcCj+e
iNMQuBha22PdLi8YyzHLcKuKAzBaqO+dOyQqLCg/nOuL5Wl0KII4h7pg/gnjc3sXjnIWYP697rBUoFOr0z1oYO1Z3mo+2HuHl8P6LCvX8XbEdcNiyFfvE9+NOyYnxtxlSl7E6D3obmbynDGLghN/38O3cTR5Bw5+RMthw4wuriGqvLUSqgaNDb0AebyxmR1ouMhEirS/mK6y9MI6FnKM8vK7a6FKUCiga9zRRWHmNH+VGuHnmue9qsER7i4PsTM1i+u4od5XVWl6NUwNCgt5n3N5UTJHDlCN+84Hnz+H70DAvmBT2rV6rbaNDbiDGGD7aUMXFAAklR4VaX06ZeESF8e1xfPtxarlMZK9VNNOhtZOP+IxyoPeWT3TYt3TYxA0eQ8NIXelavVHfQoLeRDzaXERYcxKyhyVaXck69e4Vz7ag+vL3+ANXH689/gFKqUzTobaKxycWCrQeZMSTZZ8bOn8vcKVk0NLl4bVWJ1aUoZXsa9DaxorCamhMNXD0y1epSPDIgqSezh/bm1ZUlHD7RYHU5StmaBr1NfLCpjF4RIUwblGR1KR776aUDOd7g5PllRVaXopStadDbwMkGJ5/srODyC1IIDfaf/9KByVFcO7IPr64qoeLoaavLUcq2/CcV1Fkt3lnByYYmv+m2aemnlw7EZQzPLtljdSlK2ZYGvQ18sLmc1F7hjO0fZ3Up7ZYe14ObxvTlrXUHdMlBpbqIBr2fO3yigeW7q/jGyFSCgrpvTVhv+vH0AQQ7hKc/1bN6pbqCBr2fW1pQidNlun1NWG9Kig7n1ov68/7mMgoOHbO6HKVsR4Pezy3JryQxKoxhqb2sLqVT7p6aRc/QYP77kwKrS1HKdjTo/ZizycXy3VVMG5jot902Z8T0CGXulEwW76xg0/7DVpejlK1o0PuxDfsOc/S0k+mD/Wfs/Ll8f1IG8ZGhelavlJdp0PuxpQVVhDiESdkJVpfiFT3Dgrl7WhYrC2vYsK/W6nKUsg0Nej+2NL+SMf3j/GJuG0/NGduX2B4h/HGp3i2rlLdo0PupsiOnKKg4ZptumzMiw4L5/sQMPsuvZGf5UavLUcoWNOj91JL8SgC/mtvGU7dO6E9kqIM/6Rw4SnmFBr2fWppfSd+4HmQl+tYC4N7Qq0cIN0/ox4Kt5eyt1rtlleosDXo/dLqxiVVF1UwfnISIfw+rPJs7JmUS4gjiBT2rV6rTNOj90OriGk43urjYZv3zLSVGhXHjmHTmbSyl/Mgpq8tRyq9p0PuhpfmVRIQ4GJfhf5OYtcfcKZkYg64tq1QnadD7GWMMS/IrmTggnvAQh9XldKm02B5cPbIPf1u3nxpdW1apDvMo6EVktogUiEihiDzUxuv3i8hOEdkqIp+JSL8Wr90qInvcH7d6s/hAVFh5nNLDp2zdbdPS3dOyqHe6+L+VJVaXopTfOm/Qi4gDeA64DMgB5ohITqvdNgG5xpjhwHvAE+5j44BHgXHAWOBREYn1XvmB58ywyottOKyyLQOSenLZsN68tqqE4/VOq8tRyi95ckY/Fig0xhQbYxqAt4CrW+5gjFlqjDnpfroGSHM/ngUsNsbUGmMOA4uB2d4pPTAtya9kcO8oUmMirC6l29wxOZNj9U7e31RmdSlK+SVPgr4PcKDF81L3trO5HfioPceKyFwRyRORvKqqKg9KCkxHTzeSt++w7e6GPZ9R6THkpETz+pp9GGOsLkcpv+NJ0Lc1ULvNnzYRuRnIBZ5sz7HGmBeNMbnGmNzExEQPSgpMX+yupsllAqZ//gwR4ebx/cg/dIyNOoWxUu3mSdCXAuktnqcB5a13EpEZwL8DVxlj6ttzrPLMst2VRIcHMyo9xupSut3VI1PpGRbM62v2W12KUn7Hk6BfD2SLSIaIhAI3AfNb7iAio4AXaA75yhYvLQJmikis+yLsTPc21U7GGJbtrmJydiLBjsAbFRsZFsx1o/uwYOtBak80WF2OUn7lvIlhjHEC99Ac0LuAd4
wxO0TkMRG5yr3bk0BP4F0R2Swi893H1gK/ofmXxXrgMfc21U75h45RcbSeqYMCt2vr5vH9aGhy8W7egfPvrJT6UrAnOxljFgILW217pMXjGec49hXglY4WqJp9XtB8kXrqwMAN+oHJUYzNiOONtfu5c3Km3y+fqFR3Cbw+AD+1bHclQ1KiSY4Ot7oUS908vh/7a0+yfI+OzlLKUxr0fuDY6UbySg4H9Nn8GbOH9iahZ6helFWqHTTo/cCqohqcLsO0AO6fPyM0OIgbctNZkl9Bmc5qqZRHNOj9wOcFVfQMC+bCfjp7BDSvK2uAt9bpWb1SntCg93HGGJbvrmLigHhCAnBYZVvS43pw8aAk3lp/gMYml9XlKOXzNDl8XGHlccqOnGLqwMC6G/Z8vjOuL1XH6lmxp9rqUpTyeRr0Pm7ZbvewSu2f/4pJ2Qn0CHV8OZunUursNOh93LLdVWQn9aRPAM1W6YmwYAcTBySwJL9SJzpT6jw06H3YyQYna4trdVjlWUwfnETZkVPsrjhudSlK+TQNeh+2priGhiYX0wJkkZH2OrP4inbfKHVuGvQ+7POCKiJCHIzJ0GGVbendK5yhqdEsya+wuhSlfJoGvQ9btruKi7LiCQu29yLgnTF9cBIb9h3myEmd0VKps9Gg91F7q0+wr+akjrY5j+mDk3CZf41OUkp9nQa9j1pW0NzvPE3Hz5/TiLQY4iNDtZ9eqXPQoPdRSwqqyEyIpG98D6tL8WlBQcLUQYks211Fk0uHWSrVFg16H3Si3smaopqAWwS8o6YPTuLIyUY26XqySrVJg94HrSyspqHJpUHvocnZiQQHCZ9p941SbdKg90FLCyrpGRZMbv84q0vxC70iQsjtH8tSDXql2qRB72OMMSzJr2TKwARCg/W/x1PTByeRf+iYzlGvVBs0SXzMjvKjVByt//KuT+WZ6YOTAb1LVqm2aND7mCX5lYig0x60U1ZiJH3jemj3jVJt0KD3MUvyKxmeFkNiVJjVpfgVEWH64CRWFlZzqqHJ6nKU8ika9D6k+ng9W0qPMF3P5jtk+uAk6p0uVhfrYiRKtaRB70M+L6jCGLhkiAZ9R4zLjCMixMGyAp0OQamWNOh9yNL8SpKiwhiaGm11KX4pLNjBhKx4nfdGqVY06H1EY5OL5buruHhQEiJidTl+a+rAREpqTlJSfcLqUpTyGRr0PmJ9SS3H6p1M126bTjmzGtfyPXpWr9QZGvQ+Yml+JaGOICYNSLC6FL/WPyGSfvE9tJ9eqRY8CnoRmS0iBSJSKCIPtfH6FBHZKCJOEbm+1WtNIrLZ/THfW4XbzWf5lYzLjCMyLNjqUvze1IGJrCqqod6pwyyVAg+CXkQcwHPAZUAOMEdEclrtth/4HvBmG29xyhgz0v1xVSfrtaV9NScorjqhk5h5ydSBiZxqbCKvRGezVAo8O6MfCxQaY4qNMQ3AW8DVLXcwxpQYY7YCri6o0fbO3LavQe8d4zPjCXGIjr5Rys2ToO8DHGjxvNS9zVPhIpInImtE5Jp2VRcgluRXkpkYSb/4SKtLsYXIsGDG9I/Tfnql3DwJ+rbG+rVnKZ++xphc4NvA0yKS9bUvIDLX/csgr6oqsH44T9Q7WVtcq3fDetnUgYkUVBzjYJ3OZqmUJ0FfCqS3eJ4GlHv6BYwx5e7PxcDnwKg29nnRGJNrjMlNTAysxbBXFdXQ0OTiYu228aozi6ov1+4bpTwK+vVAtohkiEgocBPg0egZEYkVkTD34wRgIrCzo8Xa0ZL8SiJDHYzRRUa8alByFMnRYdpPrxQeBL0xxgncAywCdgHvGGN2iMhjInIVgIiMEZFS4FvACyKyw334ECBPRLYAS4H/MsZo0LsZY/i8oJJJ2brIiLeJCFMHJvLFnmqcTTpGQAU2jwZtG2MWAgtbbXukxeP1NHfptD5uFXBBJ2u0reY+5NP8ZEa21aXY0tSBSbyTV8rmA0d0WUYV0PQ00kJnhlXqIiNdY9KABI
IE7b5RAU+D3kKf51cxNDWa5Ohwq0uxpV49QhjVN1aDXgU8DXqL1J1sZMP+w7o2bBebOjCRraV1VB+vt7oUpSyjQW+R5XuqaHIZLh4cWMNJu9uZ2SxX7NFVp1Tg0qC3yNKCSmJ6hDAyPdbqUmztgj69iO0RouPpVUDToLeAy2VYVlDF1IGJOIJ0kZGuFBQkTByQwIrCaoxpzw3dStmHBr0FtpbVUXOiQScx6yaTsxOoPFbP7orjVpeilCU06C2wNL+SIIEp2do/3x0muf+dv9BVp1SA0qC3wNKCSkb1jSU2MtTqUgJCn5gIMhMiWVGoF2RVYNKg72ZVx+rZWlrHxYP0bL47TcpOYG1xra46pQKSBn03O3Pzjs5W2b0mDUjgVGMTG/cdsboUpbqdBn03W5pfSVJUGDkp0VaXElDGZ8XjCBJWFGo/vQo8GvTdyOUyrCisZurARER0WGV3ig4PYVR6jN44pQKSBn032nnwKHWnGpk4IMHqUgLSpOwEtpbVceRkg9WlKNWtNOi70eqiGgAmZMVbXElgmpydgDGwsrDG6lKU6lYa9N1oVVE1WYmROlulRUakxRAVFqz99CrgaNB3k8YmF+v21urZvIWCHUGMz4rniz06HYIKLBr03WRbWR0nGpq4KEv75600OTuB0sOn2Fdz0upSlOo2GvTd5Ez//PhMPaO30iT3hfAv9C5ZFUA06LvJqqJqhqREE6fTHlgqIyGSPjERrNB5b1QA0aDvBqcbm8grOcxF2j9vORFhcnYCq4pqcDa5rC5HqW6hQd8NNu0/Qr3TpUHvIyZlJ3DstJOtZXVWl6JUt9Cg7wari6oJEhiTEWd1KQqYmJWAiC4vqAKHBn03WF1cwwVpMUSHh1hdigJiI0MZmR7D3zeW0uDU7htlfxr0Xexkg5NN+49ot42PuXd6NiU1J3l9zT6rS1Gqy2nQd7H1JYdxuowGvY+ZNiiRydkJPLNkD3UnG60uR6kupUHfxVYVVRPiEHL7af+8LxERfnH5EOpONfLskj1Wl6NUl9Kg72Kri2oY1TeWiFCH1aWoVoakRHPDhem8trqEkuoTVpejVJfRoO9Cdaca2V5Wp902PuxnMwcS4gji8Y/zrS5FqS7jUdCLyGwRKRCRQhF5qI3Xp4jIRhFxisj1rV67VUT2uD9u9Vbh/mDd3lpcBibotAc+Kyk6nLumZvHR9kOsL6m1uhylusR5g15EHMBzwGVADjBHRHJa7bYf+B7wZqtj44BHgXHAWOBREYntfNn+YVVRNeEhQYzsG2N1Keoc7pycSe/ocP7jw524XDqrpbIfT87oxwKFxphiY0wD8BZwdcsdjDElxpitQOtBybOAxcaYWmPMYWAxMNsLdfuF1UU1jOkfR1iw9s/7sohQBz+fNYgtpXX8c2u51eUo5XWeBH0f4ECL56XubZ7w6FgRmSsieSKSV1Vlj8mmqo7Vk3/omM4/7yeuG9WHoanRPLV4t85Vr2zHk6BvaxVrT38SPDrWGPOiMSbXGJObmJjo4Vv7tlVFzbfXT9L1Yf1CUJBwy4R+lNScZHvZUavLUcqrPAn6UiC9xfM0wNO/bztzrF9bsaeamB4hDE3tZXUpykMzc3rjCBIWbDtodSlKeZUnQb8eyBaRDBEJBW4C5nv4/ouAmSIS674IO9O9zdaMMawsrOairHgcQW39UaN8UWxkKBdlxbNgW7l23yhbOW/QG2OcwD00B/Qu4B1jzA4ReUxErgIQkTEiUgp8C3hBRHa4j60FfkPzL4v1wGPubbZWXH2C8rrTTNRuG79z5fAUDtSe0u4bZSvBnuxkjFkILGy17ZEWj9fT3C3T1rGvAK90oka/s7JQ++f91cyc3vziH9tZsO0gF6Rpt5uyB70ztgus2FNNelwE/eIjrS5FtdOZ7puF2w5q942yDQ16L3M2uVhdXKNn837sigtS2F97kh3l2n2j7EGD3su2ltVx7LRT++f92MyhOvpG2YsGvZet3FONCFyUpUHvr+
K0+0bZjAa9l60orGZoajRxkaFWl6I64fILUthXo903yh406L3oRL2TjfsPa7eNDcxyd98s1O4bZQMa9F60rqSWxiajF2JtIC4ylAmZ2n2j7EGD3otW7qkmNDiIMf112UA7uPyCFEq0+0bZgAa9F60orGZM/1jCQ3RaYjuYNTRZu2+ULWjQe0nlsdPkHzqm/fM2Et8zTLtvlC1o0HvJ6qIaQKc9sJtrR/WhpOYkn+yssLoUpTpMg95LvthTTa8InZbYbq4emUpmQiT/vaiAJl1mUPkpDXovODMt8cQBOi2x3QQ7grh/5kD2VB7ng81lVpejVIdo0HtBUdVxDuq0xLZ1+bCU5mUGP91Ng7P1sshK+T4Nei/4dFclABcPSrK4EtUVgoKEn88axIHaU7y9fr/V5SjVbhr0XvDpzgpyUqJJjYmwuhTVRaYNTGRs/zieWVLIqYYmq8tRql006Dup9kQDG/cfZkZOstWlqC4kIvzb7EFUHavn1VUlVpejVLto0HfS0vxKXAZmDNFuG7sb0z+Oiwcl8vyyIupONVpdTkDZXlbHT97axJ+/KGZr6RGcTXqtpD08WkpQnd2nuypIigpjmA6rDAg/nzWIK55ZwUvLi/n5rEFWlxMwXlhezIdby3l/czkAkaEORveLZXxmPDeNSSe+Z5jFFfo2PaPvhHpnE8t3V3HJkGSCdFhlQBia2osrh6fwysq9VB2rt7qcgFDvbGJpfiU35qaz5uFLeGbOKK4bnUbVsXqeXFTA5CeW8sTH+Rw+0WB1qT5Lg74T1hbXcqKhSbttAsz9lw6k3uniuaWFVpcSEFYV1nC83smsob3p3Sucq0ak8ptrhvHxT6bw6f1TuGRIMn9aVsTkJ5byP58UUHdSu9Va06DvhE93VRAeEqTj5wNMZmJPvnVhGm+u3U/p4ZNWl2N7H28/RM+wYC4aEP+11wYkRfHsnFF8fN8UpgxM4NklhUx6Ygk7dcbRr9Cg7yBjDJ/tqmTSgESdrTIA3XtJNgg889mec+6nFw07p8llWLyrgumDkwgLPvvP2aDeUfzxOxey8N7JOJsMb67b141V+j4N+g7adfAYZUdOabdNgEqNieC74/vx3oZSCiuPt7nPnopjjP3Pz/jtgp3dXJ19rC+ppfZEA7OG9vZo/5zUaKYNSmTRjgpcOjfRlzToO+izXc2zGU4frEEfqH44LYuIEAdPLd79tddqjtdz22vrqTvVyEtf7GX+lnILKvR/H28/RGhwENMGJXp8zOxhvak6Vs+G/Ye7sDL/okHfQZ/mVzIiPYak6HCrS1EWie8Zxu2TM1mw7SDby+q+3F7vbOIHf91A5dF63p47ntx+sTw0byt7Ko5ZWK3/McbwyY5DTMlOJDLM85Hg0wcnEeoI4qNth7qwOv+iQd8BlUdPs+XAEWbo2XzAu2NyBjE9QnhyUQHQHE4Pz9tG3r7D/M8NI8jtH8dz3xlNj1AHP3h9A8frnRZX3PWMMbyz/gB5JbWdWrBlW1kd5XWnmTW0fXedR4WHMDk7gUU7DumCMW4a9B2wJL95EjOd9kBFh4fww2lZLNtdxdriGv74eRF/31TG/ZcO5MrhqQAkR4fz7JzRlFSf4MH3tto+fLaXHeWBeVu5/vnVzHxqOa+s2MuRk+0f4/7x9kM4goQZQ9r/czZrWG/KjpxiW4u/tAKZR0EvIrNFpEBECkXkoTZeDxORt92vrxWR/u7t/UXklIhsdn88793yrfHprkr6xEQwuHeU1aUoH3DLhP4kR4dx/ztbeHJRAdeMTOXH0wd8ZZ8JWfE8MHswC7Yd5JWVJdYU2k2W7W4+EXr0Gzn0CAvmsQ93MvY/P+Onb29my4EjHr/Poh2HGJcRR2xkaLtruHRI83q/H21vu/vGGMMP39jAHa/lUVJ9ot3v72/OG/Qi4gCeAy4DcoA5IpLTarfbgcPGmAHAU8DjLV4rMsaMdH/c5aW6LXO6sYkVhVVcMiQJEb0bVkF4iIN7L8mm7MgpLu
wXy399c3ib3xs/mJLJzJxkfrdwFx9vP2TbUSHLdldxQZ9efH9iBh/8aCIL7p3EjbnpfLqzgqufW8ktr6xj3d7ac75HYeUxiqpOMHuYZ6NtWouNDGVCZjwfb2+7+2bBtoMs3HaIzwsqmfn0cn6/eDenG+07K6knZ/RjgUJjTLExpgF4C7i61T5XA6+5H78HXCI2TcFlu6s43ejikg78Oans68bcdJ64fjh/viX3rPdViAj/fcMI+sb34K7XNzDp8SU8/nG+rS7S1p1qZOP+I0wd+K9RMkNTe/Gba4ax6uHpPDh7MDvK6rjhhdXc8MJqvthT1WYQf+w+E5+Z07Ggh+bRN3urT1DQ6t/3dGMTv1uYz5CUaL548GJmD+3NM5/t4dKnln05ms5uPAn6PsCBFs9L3dva3McY4wTqgDO3sWWIyCYRWSYik9v6AiIyV0TyRCSvqqqqXQ3obu/mlZIYFcbErK/fpacCV7AjiBty08/bzRAdHsKCH0/mmTmjGNg7iheXF3PpU8v5xrMr+Mem0m6qtuusKqymyWWY2sZwyKjwEO6elsWKB6fzyJU57Ks5wXdfXsc1z63k4+0Hv/IXzqIdFYxMj6F3r46Paps5NBmRf/3SOOPPXxRTduQUv7pyCCm9InhmzijevGMcoY4gbn8tjx/8NY/q4/aax8iToG/rzLz1r+Cz7XMQ6GuMGQXcD7wpItFf29GYF40xucaY3MREz8fLdrfKo6dZWlDJdaP7EOzQ69iqYyJCHVw1IpVXvz+WNQ9fwq+uzKGxycVP397CL9/fRqMf3027fE8VUWHBjEyPOes+EaEObpuUwfIHLua31w7j8MlG7np9IzOeWsY76w+wt/oE28rqOtxtc0ZSVDi5/WK/EvQVR0/zx8+LmDU0mYuy/jV1yUUDEvjovik8MHsQS/OrmP30chbvtM/ZvSdpVQqkt3ieBrS+++PLfUQkGOgF1Bpj6o0xNQDGmA1AETCws0Vb5e+bymhyGW7ITT//zkp5IDEqjNsnZbDg3sn8YGomr6/Zz3dfXkutH87EaIxhWUEVEwckEOLBiVBYsIPvjOvHkp9N5dk5owgPdvDAvK3Meno5gMd3w57L7GEp5B86xl73BdcnFxXgbDL84kai5uoAAA+SSURBVPIhX9s3NDiIH04bwPwfTyQxKpw7/5LHQ/O22mJIrCdBvx7IFpEMEQkFbgLmt9pnPnCr+/H1wBJjjBGRRPfFXEQkE8gGir1Tevc6MzY4t18sWYk9rS5H2YwjSHj4siE8deMINu4/wlV/WEH+oX9NzFV3qpEPNpdx79828Y1nV1BY6b1+/S0Hjpz34qgnCiuPU153us1um3MJdgTxjRGpLLh3En+5bSxj+scyY0gyGQmRna7pzF8FH20/yLbSOt7bUMr3J/WnX/zZ33tw72je/9FF3DU1i7fzDnD5/37Bhn2d//ex0nlvNzPGOEXkHmAR4ABeMcbsEJHHgDxjzHzgZeCvIlII1NL8ywBgCvCYiDiBJuAuY4xf/ott2HeY4uoT3DUty+pSlI1dOyqNjISezP1LHtf9cRXfn9ifTfubg9jpMsRHhuIyhjkvreWtueM7fdLR2ORi7l/zqDnewEu35HJxJ24CXLa7+fralIEd634VEaYMTOzw8W3pExPBiLRefLTtEEvzK0noGco9Fw8473FhwQ4eumww0wcncf87m/nuy+tY84tLiA4P8Vpt3cmjjmZjzEJjzEBjTJYx5rfubY+4Qx5jzGljzLeMMQOMMWONMcXu7fOMMUONMSOMMaONMf/suqZ0rbfXHyAy1MEVF6RYXYqyuZHpMfzzx5PITo7iuaVFVB+vZ+6UTObdfRHr/n0G7941AWMMc15cQ3FV2xOqeWrRjkNUHK0nLjKUu9/YQF5Jx8/Dlu2uYkBST/rERHSqJm+bNaw328rqWF9ymJ/NHERUO8J6bEYc/3vTSE42NPn1iBy9ouiB4/VOFmw7yJXDU9s154ZSHZUcHc7f77
6IvF/O4JOfTuWB2YO5sF8sjiBhQFIUb945niaXYc5Lazp1w89rq0roG9eDD++dREqvCG57df1Xuow8daqhibV7a78yrNJXXDas+eRsSEp0h66vjUqPJbVXOB9uOejt0rqNBr0HFmwt52RDEzeMSbO6FBVAHEFCwlnWQh2YHMUbd46jwelizktr2F/T/gVQdpQ3n+XeMqEfSVHh/OW2sUSEOrjl5XUcqG3f+63ZW0OD0+WTQZ+REMmvv5HD0zeOxNGBJT+DgoTLL0hh+Z4qv10UXoPeA+/klZKVGMnovrFWl6LUlwb3juaNO8ZzqrGJm15czetr9rVrxavXVpUQEeLgW+6z3PS4HvzltnHUO1189+W17VoTd1lBFeEhQYzNiGt3O7rD9yZmMKgTU5ZcMTyFxibjt0MuNejPo7DyGBv2HeaG3HSd8kD5nJzUaF6/fRyhwUH88v3tTHp8KZf+fhm/XbCTVYXVZ51mofZEAx9sLufa0X3oFfGvPutBvaN45Xu5HDp6mm89v4q31u3nVMP5pwZYvqeKcRnxtl1tbWR6DH1iIvhwq3+uK6BBfx7v5pXiCBKuHd36ZmClfMOwPr1Y+vNpfHr/FH55xRCSo8N5bdU+vv3ntfz8vS1tTjHw1vr91Dtd3Dqh/9deu7BfHK98bwzhIQ4e+vs2JvzXZ/xu4a6zduccqD1JcdUJn+y28RYR4crhKazYU92hmTitpkF/Do1NLuZtLGX64CSSonSBEeW7RJov0t4xOZPX7xjHpkcu5UcXZ/H3jWU84Z4r/wxnk4vXV+/joqz4s3ZnXJSVwEf3TebtueO5KCueP6/Yy9QnlzL3L3lsK/3q1L9nhlW2d/y8v7lyeCpOl2HRDv9b0ESHkJzDpzsrqD7eoHfCKr8TGRbMz2cO4vDJRv70eRG9o8O59aL+AHy6q4LyutM8etXQc76HiDAuM55xmfGUHznFm2v389c1+/jkDyuYMSSZn8zIZlifXizbXUWfmAgyvXCDky8b1ie6eYTS1oPcOKav1eW0iwb9WZxscPLbhbvITIhs13qVSvkKEeGxq4ZSebSeX/9zB0lRYVx2QQqvriqhT0xEuxb0SI2J4OezBvGDqZm8urKEl74o5spnK7g0J5nVRTVcNTLV9tewznTfvLC8mNoTDcR1YJ58q2jXzVn8zye7KT18it9dd4FH83Yo5YuCHUE8O2cUo9JjuO/tzfx1dQlrimv57oR+HRpqGBUewo8vyWbFQ9P56YyBrCmu4Xi9k2k27p9v6YrhKTS5zNdmxPR1mmBt2HzgCP+3ci/fGdeXcZk6HbHybxGhDl6+dQxpsRH86oMdhAUHcWMnuyOjw0O4b0Y2Kx6czku35HZouT9/lJMSTUZCJAu2+dfoGw36VhqcLh58bytJUeE8dNlgq8tRyitiI0N57ftj6RMTwXfG9evQ8nxt6RURwqU5yQR14K8Df3Sm+2Z1UY1fzVmvQd/K88uKKKg4xn9cM6xdc2Io5evS43qw7N+m8asrvz5Fr/LcFcNTcBnOuh6tL9Kgb6Gw8hh/WFLIlcNTmJETGH+KqsAS7Aiy/UXTrjYoOYoBST1Z4Ec3T2nQu7lchgfnbaNHmINfn2fYmVIqcIkIV1yQwtq9tVQcPW11OR7RoHd7dVUJG/Yd5ldX5Jx1IimllAK4emQqxsD7m8qsLsUjGvTAhn21/O6jXVwyOInrdKoDpdR5ZCb2ZHTfGOZtLG1ziglfE/BBX3n0NHe/vpHUmAh+f8NI7b9USnnkmxemsbviONvK6s6/s8UCOugbnC5++MZGjp128vzNF9Krh46yUUp55srhqYQGBzFvQ6nVpZxXQAf9bxfsJG/fYR6/fjhDUqKtLkcp5Ud6RYQwMyeZD7aUU+88/1TOVgrYoJ+3oZTXVu/jjkkZXDUi1epylFJ+6JsXpnHkZCNL8yutLuWcAjLot5fV8Yt/bGN8Zpze/aqU6rDJAxJIigrjvQ2+PfomoGavPFR3mueXFf
Hmuv3ER4byh2+PJlgnLFNKdVCwI4hrR/Xh5RV7qT5e77NDswMi5Q7WneKRD7Yz5YmlvL5mH9eMTOXduyb47H+KUsp/fPPCNJwuwwebffdOWVue0btchsKq42zaf5i1xbV8uPUgLmO4/sI0fnTxANLjelhdolLKJgYmRzE8rRfvbSjl9kkZVpfTJtsE/eETDbyyci+b9h9hy4EjHKt3As1Xxr95YRo/nJalAa+U6hLfHJ3Go/N3sLP8KDmpvjeCzzZBH+wQXlhWTHZyT64elcqo9FhG9Y0hIyFSb4JSSnWpq0ak8h8LdjJvYyk5qTlWl/M1tgn6qPAQtv56JuEhDqtLUUoFmNjIUC4ZnMz7m8p46LLBPrcqnW9V00ka8kopq1x/YRo1Jxr4ydubqTrmW4uSeBT0IjJbRApEpFBEHmrj9TARedv9+loR6d/itYfd2wtEZJb3SldKKd9xyZAkfjpjIIt3VDDj98t4e/1+n5nw7LxBLyIO4DngMiAHmCMirTuhbgcOG2MGAE8Bj7uPzQFuAoYCs4E/ut9PKaVsRUS4b0Y2C++bzKDeUTw4bxs3vriGwsrjVpfmUR/9WKDQGFMMICJvAVcDO1vsczXwa/fj94A/SPMV0KuBt4wx9cBeESl0v99q75SvlFK+ZUBST966czzvbjjAbxfs4vL//YI+sREIgIDQ/EvBZQxNLoOzyf3ZZRiaGs1rt431ek2eBH0f4ECL56XAuLPtY4xxikgdEO/evqbVsV+b8F1E5gJzAfr27etp7Uop5ZOCgoQbx/Rl+uBk/vR5EVXH6zHGYAAMGAxBIgQHCY6gIEIcgiNI6NtFQ8A9Cfq2xia27ng62z6eHIsx5kXgRYDc3Fzf6NRSSqlOSowK45FvWD/c0pOLsaVAeovnaUDre32/3EdEgoFeQK2HxyqllOpCngT9eiBbRDJEJJTmi6vzW+0zH7jV/fh6YIlpvtw8H7jJPSonA8gG1nmndKWUUp44b9eNu8/9HmAR4ABeMcbsEJHHgDxjzHzgZeCv7outtTT/MsC93zs0X7h1Aj8yxvj2DP1KKWUz4ivjPM/Izc01eXl5VpehlFJ+RUQ2GGNy23rNVnfGKqWU+joNeqWUsjkNeqWUsjkNeqWUsjmfuxgrIlXAvk68RQJQ7aVyrGantoC92mOntoC2x5d52pZ+xpjEtl7wuaDvLBHJO9uVZ39jp7aAvdpjp7aAtseXeaMt2nWjlFI2p0GvlFI2Z8egf9HqArzITm0Be7XHTm0BbY8v63RbbNdHr5RS6qvseEavlFKqBQ16pZSyOdsE/fkWMPd1IvKKiFSKyPYW2+JEZLGI7HF/jrWyRk+JSLqILBWRXSKyQ0Tuc2/31/aEi8g6Ednibs//c2/PEJG17va87Z7G2y+IiENENonIh+7n/tyWEhHZJiKbRSTPvc0vv9cARCRGRN4TkXz3z9CEzrbHFkHv4QLmvu5VmhdQb+kh4DNjTDbwmfu5P3ACPzPGDAHGAz9y/3/4a3vqgenGmBHASGC2iIwHHgeecrfnMHC7hTW2133ArhbP/bktABcbY0a2GG/ur99rAP8LfGyMGQyMoPn/qXPtMcb4/QcwAVjU4vnDwMNW19WBdvQHtrd4XgCkuB+nAAVW19jBdn0AXGqH9gA9gI00r5tcDQS7t3/le9CXP2he6e0zYDrwIc1LfvplW9z1lgAJrbb55fcaEA3sxT1QxlvtscUZPW0vYP61Rcj9ULIx5iCA+3OSxfW0m4j0B0YBa/Hj9ri7OjYDlcBioAg4Yoxxunfxp++5p4EHAJf7eTz+2xZoXof6ExHZICJz3dv89XstE6gC/s/dtfZnEYmkk+2xS9B7tAi56l4i0hOYB/zEGHPU6no6wxjTZIwZSfPZ8FhgSFu7dW9V7SciVwKVxpgNLTe3savPt6WFicaY0TR33f5IRKZYXVAnBAOjgT8ZY0YBJ/BCt5Ndgt6ui5BXiEgKgPtzpcX1eExEQmgO+TeMMX
93b/bb9pxhjDkCfE7ztYcYETmzHKe/fM9NBK4SkRLgLZq7b57GP9sCgDGm3P25EvgHzb+I/fV7rRQoNcasdT9/j+bg71R77BL0nixg7o9aLrp+K8193T5PRITmdYR3GWN+3+Ilf21PoojEuB9HADNovkC2FLjevZtftMcY87AxJs0Y05/mn5Mlxpjv4IdtARCRSBGJOvMYmAlsx0+/14wxh4ADIjLIvekSmtfc7lx7rL744MWLGJcDu2nuO/13q+vpQP1/Aw4CjTT/Vr+d5r7Tz4A97s9xVtfpYVsm0fyn/1Zgs/vjcj9uz3Bgk7s924FH3NszgXVAIfAuEGZ1re1s1zTgQ39ui7vuLe6PHWd+9v31e81d+0ggz/399j4Q29n26BQISillc3bpulFKKXUWGvRKKWVzGvRKKWVzGvRKKWVzGvRKKWVzGvRKKWVzGvRKKWVz/x+R4dzubPrqGAAAAABJRU5ErkJggg==\n", |
|
|
4157 |
"text/plain": [ |
|
|
4158 |
"<Figure size 432x288 with 1 Axes>" |
|
|
4159 |
] |
|
|
4160 |
}, |
|
|
4161 |
"metadata": { |
|
|
4162 |
"needs_background": "light" |
|
|
4163 |
}, |
|
|
4164 |
"output_type": "display_data" |
|
|
4165 |
} |
|
|
4166 |
], |
|
|
4167 |
"source": [ |
|
|
4168 |
"dd = train_md.loc[train_md.fold5==4].copy()\n", |
|
|
4169 |
"dd['res'] = val_results[:,0]\n", |
|
|
4170 |
"\n", |
|
|
4171 |
"plt.plot(dd[['res','pos_idx']].groupby('pos_idx').mean().values)" |
|
|
4172 |
] |
|
|
4173 |
}, |
|
|
4174 |
{ |
|
|
4175 |
"cell_type": "code", |
|
|
4176 |
"execution_count": null, |
|
|
4177 |
"metadata": {}, |
|
|
4178 |
"outputs": [], |
|
|
4179 |
"source": [] |
|
|
4180 |
} |
|
|
4181 |
], |
|
|
4182 |
"metadata": { |
|
|
4183 |
"kernelspec": { |
|
|
4184 |
"display_name": "Python 3", |
|
|
4185 |
"language": "python", |
|
|
4186 |
"name": "python3" |
|
|
4187 |
}, |
|
|
4188 |
"language_info": { |
|
|
4189 |
"codemirror_mode": { |
|
|
4190 |
"name": "ipython", |
|
|
4191 |
"version": 3 |
|
|
4192 |
}, |
|
|
4193 |
"file_extension": ".py", |
|
|
4194 |
"mimetype": "text/x-python", |
|
|
4195 |
"name": "python", |
|
|
4196 |
"nbconvert_exporter": "python", |
|
|
4197 |
"pygments_lexer": "ipython3", |
|
|
4198 |
"version": "3.7.4" |
|
|
4199 |
} |
|
|
4200 |
}, |
|
|
4201 |
"nbformat": 4, |
|
|
4202 |
"nbformat_minor": 2 |
|
|
4203 |
} |