[19:52:49.514] Namespace(base_lr=0.01, batch_size=4, consistency=0.1, consistency_rampup=40.0, consistency_type='mse', dataset_name='LA', deterministic=1, ema_decay=0.99, exp='vnet', gpu='0', labeled_bs=2, labelnum=25, max_iterations=6000, max_samples=123, model='URPC', root_path='/data/omnisky/postgraduate/Yb/data_set/LASet/data', seed=1337)
[19:52:52.278] 12 iterations per epoch
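Note: in this URPC-style semi-supervised run (see the Namespace above), the logged total loss is normally the supervised loss plus a consistency term whose weight ramps up over consistency_rampup epochs, which is why "loss" and "supervised_loss" differ only around the fifth decimal early in training. Below is a minimal sketch of that weighting, assuming the common sigmoid ramp-up used in Mean-Teacher-style trainers; the function names and exact formula are illustrative, not taken from this log.

```python
# Hedged sketch (not the exact training script): how the logged "loss" likely
# relates to "supervised_loss" given consistency=0.1, consistency_rampup=40.0
# and 12 iterations per epoch from the Namespace above.
import numpy as np

def sigmoid_rampup(current_epoch: float, rampup_length: float) -> float:
    """Exponential sigmoid ramp-up from ~0 to 1 over `rampup_length` epochs."""
    if rampup_length == 0:
        return 1.0
    phase = 1.0 - np.clip(current_epoch, 0.0, rampup_length) / rampup_length
    return float(np.exp(-5.0 * phase * phase))

def consistency_weight(iter_num: int,
                       consistency: float = 0.1,
                       consistency_rampup: float = 40.0,
                       iters_per_epoch: int = 12) -> float:
    # Weight multiplying the unsupervised (consistency) part of the loss.
    return consistency * sigmoid_rampup(iter_num / iters_per_epoch,
                                        consistency_rampup)

# Example: near iteration 1 the weight is roughly 0.1 * exp(-5) ≈ 7e-4, so
# total_loss = supervised_loss + weight * consistency_loss sits only a few
# 1e-5 above supervised_loss, consistent with the entries logged below.
```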
[19:52:54.100] iteration 1 : loss : 0.761397, supervised_loss: 0.761365
[19:52:54.958] iteration 2 : loss : 0.709286, supervised_loss: 0.709248
[19:52:55.815] iteration 3 : loss : 0.669152, supervised_loss: 0.669123
[19:52:56.672] iteration 4 : loss : 0.662713, supervised_loss: 0.662674
[19:52:57.536] iteration 5 : loss : 0.663392, supervised_loss: 0.663344
[19:52:58.402] iteration 6 : loss : 0.619559, supervised_loss: 0.619520
[19:52:59.266] iteration 7 : loss : 0.631882, supervised_loss: 0.631843
[19:53:00.132] iteration 8 : loss : 0.596151, supervised_loss: 0.596112
[19:53:00.997] iteration 9 : loss : 0.574272, supervised_loss: 0.574236
[19:53:01.866] iteration 10 : loss : 0.548257, supervised_loss: 0.548220
[19:53:02.732] iteration 11 : loss : 0.549875, supervised_loss: 0.549841
[19:53:03.598] iteration 12 : loss : 0.518709, supervised_loss: 0.518675
[19:53:05.130] iteration 13 : loss : 0.546023, supervised_loss: 0.545978
[19:53:05.999] iteration 14 : loss : 0.541097, supervised_loss: 0.541064
[19:53:06.870] iteration 15 : loss : 0.532779, supervised_loss: 0.532741
[19:53:07.741] iteration 16 : loss : 0.526418, supervised_loss: 0.526391
[19:53:08.615] iteration 17 : loss : 0.522547, supervised_loss: 0.522523
[19:53:09.488] iteration 18 : loss : 0.493390, supervised_loss: 0.493359
[19:53:10.361] iteration 19 : loss : 0.523968, supervised_loss: 0.523947
[19:53:11.234] iteration 20 : loss : 0.486748, supervised_loss: 0.486724
[19:53:12.105] iteration 21 : loss : 0.484271, supervised_loss: 0.484243
[19:53:12.984] iteration 22 : loss : 0.476106, supervised_loss: 0.476069
[19:53:13.864] iteration 23 : loss : 0.495235, supervised_loss: 0.495201
[19:53:14.743] iteration 24 : loss : 0.455921, supervised_loss: 0.455884
[19:53:16.289] iteration 25 : loss : 0.494340, supervised_loss: 0.494317
[19:53:17.168] iteration 26 : loss : 0.450715, supervised_loss: 0.450684
[19:53:18.049] iteration 27 : loss : 0.447848, supervised_loss: 0.447821
[19:53:18.931] iteration 28 : loss : 0.439581, supervised_loss: 0.439557
[19:53:19.815] iteration 29 : loss : 0.460827, supervised_loss: 0.460796
[19:53:20.698] iteration 30 : loss : 0.446577, supervised_loss: 0.446544
[19:53:21.582] iteration 31 : loss : 0.437183, supervised_loss: 0.437147
[19:53:22.466] iteration 32 : loss : 0.443239, supervised_loss: 0.443206
[19:53:23.352] iteration 33 : loss : 0.444360, supervised_loss: 0.444326
[19:53:24.240] iteration 34 : loss : 0.420783, supervised_loss: 0.420764
[19:53:25.130] iteration 35 : loss : 0.444360, supervised_loss: 0.444341
[19:53:26.021] iteration 36 : loss : 0.468335, supervised_loss: 0.468314
[19:53:27.572] iteration 37 : loss : 0.441917, supervised_loss: 0.441894
[19:53:28.463] iteration 38 : loss : 0.472819, supervised_loss: 0.472798
[19:53:29.354] iteration 39 : loss : 0.431891, supervised_loss: 0.431867
[19:53:30.246] iteration 40 : loss : 0.445723, supervised_loss: 0.445696
[19:53:31.139] iteration 41 : loss : 0.426518, supervised_loss: 0.426500
[19:53:32.032] iteration 42 : loss : 0.428812, supervised_loss: 0.428789
[19:53:32.926] iteration 43 : loss : 0.465814, supervised_loss: 0.465792
[19:53:33.820] iteration 44 : loss : 0.409396, supervised_loss: 0.409375
[19:53:34.715] iteration 45 : loss : 0.444170, supervised_loss: 0.444154
[19:53:35.611] iteration 46 : loss : 0.410404, supervised_loss: 0.410381
[19:53:36.507] iteration 47 : loss : 0.426669, supervised_loss: 0.426647
[19:53:37.405] iteration 48 : loss : 0.422162, supervised_loss: 0.422137
[19:53:38.909] iteration 49 : loss : 0.408449, supervised_loss: 0.408425
[19:53:39.808] iteration 50 : loss : 0.435756, supervised_loss: 0.435733
[19:53:40.707] iteration 51 : loss : 0.395119, supervised_loss: 0.395099
[19:53:41.608] iteration 52 : loss : 0.394767, supervised_loss: 0.394747
[19:53:42.508] iteration 53 : loss : 0.392013, supervised_loss: 0.391994
[19:53:43.411] iteration 54 : loss : 0.401127, supervised_loss: 0.401112
[19:53:44.314] iteration 55 : loss : 0.455325, supervised_loss: 0.455306
[19:53:45.217] iteration 56 : loss : 0.436367, supervised_loss: 0.436346
[19:53:46.121] iteration 57 : loss : 0.418364, supervised_loss: 0.418349
[19:53:47.025] iteration 58 : loss : 0.385960, supervised_loss: 0.385935
[19:53:47.930] iteration 59 : loss : 0.394588, supervised_loss: 0.394571
[19:53:48.888] iteration 60 : loss : 0.403471, supervised_loss: 0.403455
[19:53:50.460] iteration 61 : loss : 0.425145, supervised_loss: 0.425132
[19:53:51.367] iteration 62 : loss : 0.392827, supervised_loss: 0.392813
[19:53:52.274] iteration 63 : loss : 0.413513, supervised_loss: 0.413497
[19:53:53.182] iteration 64 : loss : 0.393619, supervised_loss: 0.393602
[19:53:54.090] iteration 65 : loss : 0.438238, supervised_loss: 0.438216
[19:53:54.996] iteration 66 : loss : 0.368493, supervised_loss: 0.368479
[19:53:55.904] iteration 67 : loss : 0.398137, supervised_loss: 0.398123
[19:53:56.812] iteration 68 : loss : 0.381407, supervised_loss: 0.381393
[19:53:57.722] iteration 69 : loss : 0.389354, supervised_loss: 0.389328
[19:53:58.633] iteration 70 : loss : 0.422169, supervised_loss: 0.422148
[19:53:59.542] iteration 71 : loss : 0.378010, supervised_loss: 0.377994
[19:54:00.451] iteration 72 : loss : 0.412618, supervised_loss: 0.412604
[19:54:02.016] iteration 73 : loss : 0.409433, supervised_loss: 0.409419
[19:54:02.924] iteration 74 : loss : 0.385972, supervised_loss: 0.385954
[19:54:03.833] iteration 75 : loss : 0.367908, supervised_loss: 0.367895
[19:54:04.744] iteration 76 : loss : 0.375672, supervised_loss: 0.375655
[19:54:05.653] iteration 77 : loss : 0.413692, supervised_loss: 0.413672
[19:54:06.562] iteration 78 : loss : 0.367702, supervised_loss: 0.367688
[19:54:07.472] iteration 79 : loss : 0.384176, supervised_loss: 0.384163
[19:54:08.381] iteration 80 : loss : 0.458058, supervised_loss: 0.458046
[19:54:09.291] iteration 81 : loss : 0.374405, supervised_loss: 0.374396
[19:54:10.201] iteration 82 : loss : 0.448014, supervised_loss: 0.448001
[19:54:11.110] iteration 83 : loss : 0.372919, supervised_loss: 0.372907
[19:54:12.020] iteration 84 : loss : 0.374638, supervised_loss: 0.374624
[19:54:13.544] iteration 85 : loss : 0.378189, supervised_loss: 0.378179
[19:54:14.453] iteration 86 : loss : 0.398672, supervised_loss: 0.398655
[19:54:15.362] iteration 87 : loss : 0.442182, supervised_loss: 0.442170
[19:54:16.271] iteration 88 : loss : 0.398883, supervised_loss: 0.398874
[19:54:17.181] iteration 89 : loss : 0.392269, supervised_loss: 0.392254
[19:54:18.090] iteration 90 : loss : 0.380537, supervised_loss: 0.380524
[19:54:19.000] iteration 91 : loss : 0.385586, supervised_loss: 0.385568
[19:54:19.911] iteration 92 : loss : 0.395113, supervised_loss: 0.395093
[19:54:20.820] iteration 93 : loss : 0.364821, supervised_loss: 0.364808
[19:54:21.730] iteration 94 : loss : 0.370417, supervised_loss: 0.370401
[19:54:22.642] iteration 95 : loss : 0.383953, supervised_loss: 0.383940
[19:54:23.552] iteration 96 : loss : 0.375714, supervised_loss: 0.375704
[19:54:25.100] iteration 97 : loss : 0.370056, supervised_loss: 0.370046
[19:54:26.009] iteration 98 : loss : 0.374664, supervised_loss: 0.374648
[19:54:26.919] iteration 99 : loss : 0.391347, supervised_loss: 0.391335
[19:54:27.828] iteration 100 : loss : 0.355122, supervised_loss: 0.355110
[19:54:28.737] iteration 101 : loss : 0.358730, supervised_loss: 0.358717
[19:54:29.648] iteration 102 : loss : 0.371447, supervised_loss: 0.371435
[19:54:30.558] iteration 103 : loss : 0.389484, supervised_loss: 0.389474
[19:54:31.468] iteration 104 : loss : 0.396378, supervised_loss: 0.396368
[19:54:32.378] iteration 105 : loss : 0.417482, supervised_loss: 0.417470
[19:54:33.288] iteration 106 : loss : 0.361507, supervised_loss: 0.361496
[19:54:34.195] iteration 107 : loss : 0.383458, supervised_loss: 0.383445
[19:54:35.107] iteration 108 : loss : 0.374438, supervised_loss: 0.374420
[19:54:36.713] iteration 109 : loss : 0.352865, supervised_loss: 0.352856
[19:54:37.623] iteration 110 : loss : 0.358184, supervised_loss: 0.358172
[19:54:38.533] iteration 111 : loss : 0.386390, supervised_loss: 0.386377
[19:54:39.442] iteration 112 : loss : 0.412814, supervised_loss: 0.412798
[19:54:40.353] iteration 113 : loss : 0.370752, supervised_loss: 0.370741
[19:54:41.264] iteration 114 : loss : 0.393380, supervised_loss: 0.393364
[19:54:42.174] iteration 115 : loss : 0.423599, supervised_loss: 0.423586
[19:54:43.084] iteration 116 : loss : 0.365046, supervised_loss: 0.365034
[19:54:43.995] iteration 117 : loss : 0.370660, supervised_loss: 0.370648
[19:54:44.904] iteration 118 : loss : 0.378319, supervised_loss: 0.378305
[19:54:45.814] iteration 119 : loss : 0.348674, supervised_loss: 0.348664
[19:54:46.725] iteration 120 : loss : 0.373081, supervised_loss: 0.373070
[19:54:48.228] iteration 121 : loss : 0.361632, supervised_loss: 0.361620
[19:54:49.137] iteration 122 : loss : 0.427052, supervised_loss: 0.427040
[19:54:50.048] iteration 123 : loss : 0.404821, supervised_loss: 0.404810
[19:54:50.959] iteration 124 : loss : 0.367005, supervised_loss: 0.366990
[19:54:51.869] iteration 125 : loss : 0.354891, supervised_loss: 0.354882
[19:54:52.779] iteration 126 : loss : 0.381762, supervised_loss: 0.381752
[19:54:53.690] iteration 127 : loss : 0.379795, supervised_loss: 0.379783
[19:54:54.601] iteration 128 : loss : 0.370010, supervised_loss: 0.369995
[19:54:55.511] iteration 129 : loss : 0.340662, supervised_loss: 0.340650
[19:54:56.423] iteration 130 : loss : 0.361661, supervised_loss: 0.361650
[19:54:57.333] iteration 131 : loss : 0.381241, supervised_loss: 0.381226
[19:54:58.244] iteration 132 : loss : 0.357323, supervised_loss: 0.357313
[19:54:59.770] iteration 133 : loss : 0.373851, supervised_loss: 0.373837
[19:55:00.680] iteration 134 : loss : 0.382526, supervised_loss: 0.382518
[19:55:01.590] iteration 135 : loss : 0.388288, supervised_loss: 0.388275
[19:55:02.502] iteration 136 : loss : 0.359590, supervised_loss: 0.359582
[19:55:03.411] iteration 137 : loss : 0.365793, supervised_loss: 0.365781
[19:55:04.322] iteration 138 : loss : 0.353429, supervised_loss: 0.353418
[19:55:05.233] iteration 139 : loss : 0.396450, supervised_loss: 0.396437
[19:55:06.144] iteration 140 : loss : 0.353394, supervised_loss: 0.353385
[19:55:07.055] iteration 141 : loss : 0.347822, supervised_loss: 0.347808
[19:55:07.966] iteration 142 : loss : 0.343312, supervised_loss: 0.343298
[19:55:08.878] iteration 143 : loss : 0.353316, supervised_loss: 0.353306
[19:55:09.789] iteration 144 : loss : 0.332910, supervised_loss: 0.332901
[19:55:11.294] iteration 145 : loss : 0.355541, supervised_loss: 0.355530
[19:55:12.206] iteration 146 : loss : 0.357820, supervised_loss: 0.357806
[19:55:13.118] iteration 147 : loss : 0.395824, supervised_loss: 0.395814
[19:55:14.030] iteration 148 : loss : 0.357302, supervised_loss: 0.357289
[19:55:14.942] iteration 149 : loss : 0.343466, supervised_loss: 0.343454
[19:55:15.853] iteration 150 : loss : 0.338091, supervised_loss: 0.338079
[19:55:16.764] iteration 151 : loss : 0.347499, supervised_loss: 0.347486
[19:55:17.675] iteration 152 : loss : 0.374085, supervised_loss: 0.374074
[19:55:18.587] iteration 153 : loss : 0.354198, supervised_loss: 0.354185
[19:55:19.498] iteration 154 : loss : 0.375349, supervised_loss: 0.375329
[19:55:20.409] iteration 155 : loss : 0.346524, supervised_loss: 0.346510
[19:55:21.322] iteration 156 : loss : 0.368909, supervised_loss: 0.368896
[19:55:22.862] iteration 157 : loss : 0.386614, supervised_loss: 0.386594
[19:55:23.773] iteration 158 : loss : 0.357545, supervised_loss: 0.357530
[19:55:24.684] iteration 159 : loss : 0.345704, supervised_loss: 0.345692
[19:55:25.595] iteration 160 : loss : 0.357951, supervised_loss: 0.357935
[19:55:26.507] iteration 161 : loss : 0.344297, supervised_loss: 0.344283
[19:55:27.417] iteration 162 : loss : 0.340591, supervised_loss: 0.340578
[19:55:28.329] iteration 163 : loss : 0.349683, supervised_loss: 0.349667
[19:55:29.241] iteration 164 : loss : 0.356857, supervised_loss: 0.356842
[19:55:30.152] iteration 165 : loss : 0.337483, supervised_loss: 0.337471
[19:55:31.064] iteration 166 : loss : 0.359038, supervised_loss: 0.359025
[19:55:31.975] iteration 167 : loss : 0.384834, supervised_loss: 0.384823
[19:55:32.889] iteration 168 : loss : 0.379505, supervised_loss: 0.379490
[19:55:34.490] iteration 169 : loss : 0.335842, supervised_loss: 0.335826
[19:55:35.401] iteration 170 : loss : 0.385904, supervised_loss: 0.385887
[19:55:36.312] iteration 171 : loss : 0.353604, supervised_loss: 0.353586
[19:55:37.223] iteration 172 : loss : 0.343591, supervised_loss: 0.343580
[19:55:38.135] iteration 173 : loss : 0.334786, supervised_loss: 0.334772
[19:55:39.048] iteration 174 : loss : 0.333657, supervised_loss: 0.333643
[19:55:39.960] iteration 175 : loss : 0.363710, supervised_loss: 0.363693
[19:55:40.871] iteration 176 : loss : 0.342932, supervised_loss: 0.342917
[19:55:41.782] iteration 177 : loss : 0.335119, supervised_loss: 0.335109
[19:55:42.695] iteration 178 : loss : 0.431081, supervised_loss: 0.431066
[19:55:43.607] iteration 179 : loss : 0.348258, supervised_loss: 0.348239
[19:55:44.518] iteration 180 : loss : 0.340147, supervised_loss: 0.340133
[19:55:46.115] iteration 181 : loss : 0.366257, supervised_loss: 0.366246
[19:55:47.026] iteration 182 : loss : 0.354538, supervised_loss: 0.354523
[19:55:47.937] iteration 183 : loss : 0.359514, supervised_loss: 0.359501
[19:55:48.849] iteration 184 : loss : 0.369172, supervised_loss: 0.369161
[19:55:49.762] iteration 185 : loss : 0.331231, supervised_loss: 0.331221
[19:55:50.674] iteration 186 : loss : 0.360715, supervised_loss: 0.360699
[19:55:51.585] iteration 187 : loss : 0.382443, supervised_loss: 0.382431
[19:55:52.497] iteration 188 : loss : 0.329869, supervised_loss: 0.329857
[19:55:53.408] iteration 189 : loss : 0.340778, supervised_loss: 0.340767
[19:55:54.322] iteration 190 : loss : 0.362002, supervised_loss: 0.361991
[19:55:55.235] iteration 191 : loss : 0.326925, supervised_loss: 0.326910
[19:55:56.146] iteration 192 : loss : 0.358362, supervised_loss: 0.358349
[19:55:57.657] iteration 193 : loss : 0.338789, supervised_loss: 0.338778
[19:55:58.568] iteration 194 : loss : 0.362743, supervised_loss: 0.362734
[19:55:59.479] iteration 195 : loss : 0.383072, supervised_loss: 0.383060
[19:56:00.392] iteration 196 : loss : 0.321621, supervised_loss: 0.321609
[19:56:01.304] iteration 197 : loss : 0.344835, supervised_loss: 0.344822
[19:56:02.215] iteration 198 : loss : 0.340318, supervised_loss: 0.340306
[19:56:03.128] iteration 199 : loss : 0.338157, supervised_loss: 0.338143
[19:56:04.041] iteration 200 : loss : 0.341405, supervised_loss: 0.341390
[19:56:04.955] iteration 201 : loss : 0.361051, supervised_loss: 0.361039
[19:56:05.867] iteration 202 : loss : 0.330867, supervised_loss: 0.330845
[19:56:06.780] iteration 203 : loss : 0.412807, supervised_loss: 0.412794
[19:56:07.691] iteration 204 : loss : 0.334709, supervised_loss: 0.334696
[19:56:09.195] iteration 205 : loss : 0.369561, supervised_loss: 0.369552
[19:56:10.103] iteration 206 : loss : 0.328078, supervised_loss: 0.328062
[19:56:11.013] iteration 207 : loss : 0.357356, supervised_loss: 0.357342
[19:56:11.925] iteration 208 : loss : 0.331273, supervised_loss: 0.331258
[19:56:12.837] iteration 209 : loss : 0.355952, supervised_loss: 0.355941
[19:56:13.750] iteration 210 : loss : 0.337147, supervised_loss: 0.337130
[19:56:14.662] iteration 211 : loss : 0.347014, supervised_loss: 0.347003
[19:56:15.574] iteration 212 : loss : 0.395980, supervised_loss: 0.395971
[19:56:16.486] iteration 213 : loss : 0.327025, supervised_loss: 0.327011
[19:56:17.400] iteration 214 : loss : 0.325279, supervised_loss: 0.325264
[19:56:18.313] iteration 215 : loss : 0.358153, supervised_loss: 0.358141
[19:56:19.226] iteration 216 : loss : 0.352351, supervised_loss: 0.352338
[19:56:20.845] iteration 217 : loss : 0.329770, supervised_loss: 0.329755
[19:56:21.757] iteration 218 : loss : 0.338608, supervised_loss: 0.338597
[19:56:22.669] iteration 219 : loss : 0.345479, supervised_loss: 0.345468
[19:56:23.581] iteration 220 : loss : 0.337470, supervised_loss: 0.337459
[19:56:24.493] iteration 221 : loss : 0.335259, supervised_loss: 0.335247
[19:56:25.403] iteration 222 : loss : 0.348006, supervised_loss: 0.347989
[19:56:26.317] iteration 223 : loss : 0.421433, supervised_loss: 0.421418
[19:56:27.230] iteration 224 : loss : 0.407104, supervised_loss: 0.407097
[19:56:28.141] iteration 225 : loss : 0.333807, supervised_loss: 0.333793
[19:56:29.054] iteration 226 : loss : 0.337817, supervised_loss: 0.337802
[19:56:29.967] iteration 227 : loss : 0.336385, supervised_loss: 0.336373
[19:56:30.880] iteration 228 : loss : 0.340036, supervised_loss: 0.340025
[19:56:32.410] iteration 229 : loss : 0.361827, supervised_loss: 0.361808
[19:56:33.321] iteration 230 : loss : 0.349311, supervised_loss: 0.349300
[19:56:34.232] iteration 231 : loss : 0.351902, supervised_loss: 0.351885
[19:56:35.144] iteration 232 : loss : 0.371578, supervised_loss: 0.371558
[19:56:36.056] iteration 233 : loss : 0.334305, supervised_loss: 0.334289
[19:56:36.968] iteration 234 : loss : 0.339564, supervised_loss: 0.339552
[19:56:37.879] iteration 235 : loss : 0.346775, supervised_loss: 0.346764
[19:56:38.791] iteration 236 : loss : 0.354693, supervised_loss: 0.354682
[19:56:39.703] iteration 237 : loss : 0.338988, supervised_loss: 0.338974
[19:56:40.619] iteration 238 : loss : 0.363649, supervised_loss: 0.363640
[19:56:41.533] iteration 239 : loss : 0.326435, supervised_loss: 0.326425
[19:56:42.448] iteration 240 : loss : 0.380645, supervised_loss: 0.380638
[19:56:44.061] iteration 241 : loss : 0.341883, supervised_loss: 0.341872
[19:56:44.971] iteration 242 : loss : 0.330313, supervised_loss: 0.330306
[19:56:45.884] iteration 243 : loss : 0.327276, supervised_loss: 0.327264
[19:56:46.796] iteration 244 : loss : 0.318457, supervised_loss: 0.318444
[19:56:47.709] iteration 245 : loss : 0.334999, supervised_loss: 0.334990
[19:56:48.621] iteration 246 : loss : 0.393346, supervised_loss: 0.393336
[19:56:49.532] iteration 247 : loss : 0.329920, supervised_loss: 0.329906
[19:56:50.445] iteration 248 : loss : 0.341573, supervised_loss: 0.341561
[19:56:51.357] iteration 249 : loss : 0.318755, supervised_loss: 0.318746
[19:56:52.268] iteration 250 : loss : 0.349481, supervised_loss: 0.349470
[19:56:53.181] iteration 251 : loss : 0.371031, supervised_loss: 0.371021
[19:56:54.094] iteration 252 : loss : 0.332378, supervised_loss: 0.332369
[19:56:55.697] iteration 253 : loss : 0.334516, supervised_loss: 0.334504
[19:56:56.607] iteration 254 : loss : 0.315189, supervised_loss: 0.315177
[19:56:57.519] iteration 255 : loss : 0.331934, supervised_loss: 0.331924
[19:56:58.431] iteration 256 : loss : 0.315002, supervised_loss: 0.314987
[19:56:59.345] iteration 257 : loss : 0.365865, supervised_loss: 0.365858
[19:57:00.259] iteration 258 : loss : 0.334369, supervised_loss: 0.334356
[19:57:01.171] iteration 259 : loss : 0.355853, supervised_loss: 0.355840
[19:57:02.083] iteration 260 : loss : 0.339178, supervised_loss: 0.339169
[19:57:02.994] iteration 261 : loss : 0.326101, supervised_loss: 0.326091
[19:57:03.907] iteration 262 : loss : 0.367423, supervised_loss: 0.367411
[19:57:04.820] iteration 263 : loss : 0.324509, supervised_loss: 0.324499
[19:57:05.731] iteration 264 : loss : 0.338746, supervised_loss: 0.338737
[19:57:07.287] iteration 265 : loss : 0.312035, supervised_loss: 0.312016
[19:57:08.200] iteration 266 : loss : 0.314238, supervised_loss: 0.314226
[19:57:09.113] iteration 267 : loss : 0.322056, supervised_loss: 0.322044
[19:57:10.025] iteration 268 : loss : 0.348057, supervised_loss: 0.348044
[19:57:10.935] iteration 269 : loss : 0.345027, supervised_loss: 0.345016
[19:57:11.847] iteration 270 : loss : 0.343328, supervised_loss: 0.343315
[19:57:12.759] iteration 271 : loss : 0.323574, supervised_loss: 0.323559
[19:57:13.671] iteration 272 : loss : 0.337990, supervised_loss: 0.337979
[19:57:14.584] iteration 273 : loss : 0.338964, supervised_loss: 0.338952
[19:57:15.495] iteration 274 : loss : 0.362429, supervised_loss: 0.362417
[19:57:16.408] iteration 275 : loss : 0.330547, supervised_loss: 0.330534
[19:57:17.321] iteration 276 : loss : 0.349223, supervised_loss: 0.349212
[19:57:18.829] iteration 277 : loss : 0.334510, supervised_loss: 0.334502
[19:57:19.741] iteration 278 : loss : 0.343433, supervised_loss: 0.343423
[19:57:20.653] iteration 279 : loss : 0.317562, supervised_loss: 0.317551
[19:57:21.565] iteration 280 : loss : 0.375963, supervised_loss: 0.375956
[19:57:22.476] iteration 281 : loss : 0.317505, supervised_loss: 0.317493
[19:57:23.390] iteration 282 : loss : 0.313827, supervised_loss: 0.313815
[19:57:24.302] iteration 283 : loss : 0.316357, supervised_loss: 0.316348
[19:57:25.214] iteration 284 : loss : 0.346725, supervised_loss: 0.346711
[19:57:26.125] iteration 285 : loss : 0.354585, supervised_loss: 0.354571
[19:57:27.037] iteration 286 : loss : 0.316593, supervised_loss: 0.316580
[19:57:27.950] iteration 287 : loss : 0.356878, supervised_loss: 0.356868
[19:57:28.863] iteration 288 : loss : 0.336747, supervised_loss: 0.336737
[19:57:30.378] iteration 289 : loss : 0.308917, supervised_loss: 0.308901
[19:57:31.289] iteration 290 : loss : 0.326321, supervised_loss: 0.326310
[19:57:32.200] iteration 291 : loss : 0.321411, supervised_loss: 0.321400
[19:57:33.111] iteration 292 : loss : 0.317568, supervised_loss: 0.317557
[19:57:34.023] iteration 293 : loss : 0.334050, supervised_loss: 0.334041
[19:57:34.935] iteration 294 : loss : 0.317905, supervised_loss: 0.317894
[19:57:35.847] iteration 295 : loss : 0.316138, supervised_loss: 0.316128
[19:57:36.759] iteration 296 : loss : 0.353655, supervised_loss: 0.353643
[19:57:37.671] iteration 297 : loss : 0.334526, supervised_loss: 0.334512
[19:57:38.583] iteration 298 : loss : 0.376440, supervised_loss: 0.376426
[19:57:39.495] iteration 299 : loss : 0.344693, supervised_loss: 0.344680
[19:57:40.405] iteration 300 : loss : 0.317414, supervised_loss: 0.317401
[19:57:41.914] iteration 301 : loss : 0.321034, supervised_loss: 0.321019
[19:57:42.825] iteration 302 : loss : 0.317579, supervised_loss: 0.317559
[19:57:43.735] iteration 303 : loss : 0.315157, supervised_loss: 0.315144
[19:57:44.647] iteration 304 : loss : 0.314866, supervised_loss: 0.314850
[19:57:45.558] iteration 305 : loss : 0.323312, supervised_loss: 0.323298
[19:57:46.469] iteration 306 : loss : 0.318726, supervised_loss: 0.318712
[19:57:47.380] iteration 307 : loss : 0.368011, supervised_loss: 0.367991
[19:57:48.293] iteration 308 : loss : 0.329158, supervised_loss: 0.329141
[19:57:49.206] iteration 309 : loss : 0.319077, supervised_loss: 0.319062
[19:57:50.120] iteration 310 : loss : 0.382883, supervised_loss: 0.382865
[19:57:51.031] iteration 311 : loss : 0.340567, supervised_loss: 0.340550
[19:57:51.943] iteration 312 : loss : 0.339290, supervised_loss: 0.339280
[19:57:53.447] iteration 313 : loss : 0.322313, supervised_loss: 0.322301
[19:57:54.358] iteration 314 : loss : 0.323867, supervised_loss: 0.323856
[19:57:55.271] iteration 315 : loss : 0.317314, supervised_loss: 0.317298
[19:57:56.182] iteration 316 : loss : 0.362432, supervised_loss: 0.362422
[19:57:57.093] iteration 317 : loss : 0.326934, supervised_loss: 0.326921
[19:57:58.005] iteration 318 : loss : 0.327174, supervised_loss: 0.327164
[19:57:58.918] iteration 319 : loss : 0.321734, supervised_loss: 0.321719
[19:57:59.828] iteration 320 : loss : 0.311584, supervised_loss: 0.311575
[19:58:00.740] iteration 321 : loss : 0.317604, supervised_loss: 0.317590
[19:58:01.653] iteration 322 : loss : 0.324612, supervised_loss: 0.324602
[19:58:02.566] iteration 323 : loss : 0.317492, supervised_loss: 0.317475
[19:58:03.475] iteration 324 : loss : 0.322114, supervised_loss: 0.322099
[19:58:05.092] iteration 325 : loss : 0.322462, supervised_loss: 0.322445
[19:58:06.004] iteration 326 : loss : 0.335356, supervised_loss: 0.335339
[19:58:06.914] iteration 327 : loss : 0.321660, supervised_loss: 0.321649
[19:58:07.826] iteration 328 : loss : 0.323685, supervised_loss: 0.323670
[19:58:08.738] iteration 329 : loss : 0.305265, supervised_loss: 0.305251
[19:58:09.651] iteration 330 : loss : 0.317550, supervised_loss: 0.317537
[19:58:10.561] iteration 331 : loss : 0.325907, supervised_loss: 0.325892
[19:58:11.473] iteration 332 : loss : 0.345158, supervised_loss: 0.345141
[19:58:12.384] iteration 333 : loss : 0.320708, supervised_loss: 0.320693
[19:58:13.297] iteration 334 : loss : 0.351973, supervised_loss: 0.351960
[19:58:14.209] iteration 335 : loss : 0.316612, supervised_loss: 0.316598
[19:58:15.120] iteration 336 : loss : 0.327807, supervised_loss: 0.327793
[19:58:16.681] iteration 337 : loss : 0.314248, supervised_loss: 0.314236
[19:58:17.593] iteration 338 : loss : 0.311685, supervised_loss: 0.311670
[19:58:18.505] iteration 339 : loss : 0.313472, supervised_loss: 0.313455
[19:58:19.414] iteration 340 : loss : 0.309787, supervised_loss: 0.309770
[19:58:20.325] iteration 341 : loss : 0.337426, supervised_loss: 0.337412
[19:58:21.236] iteration 342 : loss : 0.310266, supervised_loss: 0.310250
[19:58:22.147] iteration 343 : loss : 0.343686, supervised_loss: 0.343672
[19:58:23.064] iteration 344 : loss : 0.339000, supervised_loss: 0.338988
[19:58:23.974] iteration 345 : loss : 0.310572, supervised_loss: 0.310559
[19:58:24.886] iteration 346 : loss : 0.312047, supervised_loss: 0.312032
[19:58:25.799] iteration 347 : loss : 0.355796, supervised_loss: 0.355786
[19:58:26.711] iteration 348 : loss : 0.309682, supervised_loss: 0.309668
[19:58:28.272] iteration 349 : loss : 0.313374, supervised_loss: 0.313359
[19:58:29.183] iteration 350 : loss : 0.311415, supervised_loss: 0.311402
[19:58:30.094] iteration 351 : loss : 0.308914, supervised_loss: 0.308895
[19:58:31.005] iteration 352 : loss : 0.320681, supervised_loss: 0.320667
[19:58:31.916] iteration 353 : loss : 0.312517, supervised_loss: 0.312502
[19:58:32.828] iteration 354 : loss : 0.352931, supervised_loss: 0.352914
[19:58:33.740] iteration 355 : loss : 0.331767, supervised_loss: 0.331755
[19:58:34.652] iteration 356 : loss : 0.311887, supervised_loss: 0.311869
[19:58:35.562] iteration 357 : loss : 0.335590, supervised_loss: 0.335577
[19:58:36.473] iteration 358 : loss : 0.313119, supervised_loss: 0.313105
[19:58:37.384] iteration 359 : loss : 0.335953, supervised_loss: 0.335936
[19:58:38.297] iteration 360 : loss : 0.312205, supervised_loss: 0.312190
[19:58:39.912] iteration 361 : loss : 0.316773, supervised_loss: 0.316752
[19:58:40.823] iteration 362 : loss : 0.316218, supervised_loss: 0.316208
[19:58:41.735] iteration 363 : loss : 0.328137, supervised_loss: 0.328120
[19:58:42.646] iteration 364 : loss : 0.316774, supervised_loss: 0.316756
[19:58:43.556] iteration 365 : loss : 0.327035, supervised_loss: 0.327017
[19:58:44.467] iteration 366 : loss : 0.335070, supervised_loss: 0.335052
[19:58:45.378] iteration 367 : loss : 0.326125, supervised_loss: 0.326114
[19:58:46.289] iteration 368 : loss : 0.301562, supervised_loss: 0.301548
[19:58:47.201] iteration 369 : loss : 0.321256, supervised_loss: 0.321243
[19:58:48.113] iteration 370 : loss : 0.319194, supervised_loss: 0.319173
[19:58:49.025] iteration 371 : loss : 0.312155, supervised_loss: 0.312143
[19:58:49.936] iteration 372 : loss : 0.309926, supervised_loss: 0.309914
[19:58:51.484] iteration 373 : loss : 0.361364, supervised_loss: 0.361349
[19:58:52.396] iteration 374 : loss : 0.396280, supervised_loss: 0.396269
[19:58:53.307] iteration 375 : loss : 0.321749, supervised_loss: 0.321737
[19:58:54.219] iteration 376 : loss : 0.310183, supervised_loss: 0.310164
[19:58:55.130] iteration 377 : loss : 0.319398, supervised_loss: 0.319384
[19:58:56.041] iteration 378 : loss : 0.310728, supervised_loss: 0.310713
[19:58:56.952] iteration 379 : loss : 0.320514, supervised_loss: 0.320498
[19:58:57.864] iteration 380 : loss : 0.327670, supervised_loss: 0.327659
[19:58:58.776] iteration 381 : loss : 0.316106, supervised_loss: 0.316090
[19:58:59.688] iteration 382 : loss : 0.306905, supervised_loss: 0.306890
[19:59:00.599] iteration 383 : loss : 0.315687, supervised_loss: 0.315671
[19:59:01.511] iteration 384 : loss : 0.320232, supervised_loss: 0.320219
[19:59:03.123] iteration 385 : loss : 0.331072, supervised_loss: 0.331057
[19:59:04.035] iteration 386 : loss : 0.316874, supervised_loss: 0.316860
[19:59:04.946] iteration 387 : loss : 0.322086, supervised_loss: 0.322074
[19:59:05.857] iteration 388 : loss : 0.351388, supervised_loss: 0.351377
[19:59:06.770] iteration 389 : loss : 0.310444, supervised_loss: 0.310430
[19:59:07.683] iteration 390 : loss : 0.309378, supervised_loss: 0.309365
[19:59:08.594] iteration 391 : loss : 0.305973, supervised_loss: 0.305960
[19:59:09.505] iteration 392 : loss : 0.297029, supervised_loss: 0.297017
[19:59:10.416] iteration 393 : loss : 0.313821, supervised_loss: 0.313809
[19:59:11.329] iteration 394 : loss : 0.310846, supervised_loss: 0.310834
[19:59:12.240] iteration 395 : loss : 0.328270, supervised_loss: 0.328259
[19:59:13.152] iteration 396 : loss : 0.311440, supervised_loss: 0.311428
[19:59:14.688] iteration 397 : loss : 0.313559, supervised_loss: 0.313544
[19:59:15.600] iteration 398 : loss : 0.344896, supervised_loss: 0.344878
[19:59:16.510] iteration 399 : loss : 0.324569, supervised_loss: 0.324555
[19:59:17.420] iteration 400 : loss : 0.306919, supervised_loss: 0.306906
[19:59:18.332] iteration 401 : loss : 0.315103, supervised_loss: 0.315087
[19:59:19.245] iteration 402 : loss : 0.324210, supervised_loss: 0.324198
[19:59:20.157] iteration 403 : loss : 0.301235, supervised_loss: 0.301211
[19:59:21.067] iteration 404 : loss : 0.309533, supervised_loss: 0.309521
[19:59:21.978] iteration 405 : loss : 0.306256, supervised_loss: 0.306238
[19:59:22.893] iteration 406 : loss : 0.314566, supervised_loss: 0.314556
[19:59:23.805] iteration 407 : loss : 0.337684, supervised_loss: 0.337673
[19:59:24.716] iteration 408 : loss : 0.321112, supervised_loss: 0.321099
[19:59:26.184] iteration 409 : loss : 0.305972, supervised_loss: 0.305959
[19:59:27.095] iteration 410 : loss : 0.318553, supervised_loss: 0.318540
[19:59:28.006] iteration 411 : loss : 0.309373, supervised_loss: 0.309359
[19:59:28.917] iteration 412 : loss : 0.309287, supervised_loss: 0.309268
[19:59:29.831] iteration 413 : loss : 0.315253, supervised_loss: 0.315233
[19:59:30.742] iteration 414 : loss : 0.336092, supervised_loss: 0.336081
[19:59:31.654] iteration 415 : loss : 0.355327, supervised_loss: 0.355310
[19:59:32.566] iteration 416 : loss : 0.321589, supervised_loss: 0.321578
[19:59:33.477] iteration 417 : loss : 0.309960, supervised_loss: 0.309950
[19:59:34.390] iteration 418 : loss : 0.311703, supervised_loss: 0.311683
[19:59:35.300] iteration 419 : loss : 0.304983, supervised_loss: 0.304963
[19:59:36.212] iteration 420 : loss : 0.314518, supervised_loss: 0.314498
[19:59:37.727] iteration 421 : loss : 0.334277, supervised_loss: 0.334264
[19:59:38.640] iteration 422 : loss : 0.316944, supervised_loss: 0.316929
[19:59:39.551] iteration 423 : loss : 0.316434, supervised_loss: 0.316418
[19:59:40.462] iteration 424 : loss : 0.332379, supervised_loss: 0.332362
[19:59:41.375] iteration 425 : loss : 0.331448, supervised_loss: 0.331435
[19:59:42.287] iteration 426 : loss : 0.307462, supervised_loss: 0.307452
[19:59:43.199] iteration 427 : loss : 0.313407, supervised_loss: 0.313393
[19:59:44.110] iteration 428 : loss : 0.313432, supervised_loss: 0.313420
[19:59:45.022] iteration 429 : loss : 0.294750, supervised_loss: 0.294737
[19:59:45.933] iteration 430 : loss : 0.320006, supervised_loss: 0.319990
[19:59:46.845] iteration 431 : loss : 0.308199, supervised_loss: 0.308178
[19:59:47.756] iteration 432 : loss : 0.349291, supervised_loss: 0.349280
[19:59:49.382] iteration 433 : loss : 0.313645, supervised_loss: 0.313624
[19:59:50.294] iteration 434 : loss : 0.334596, supervised_loss: 0.334583
[19:59:51.205] iteration 435 : loss : 0.318434, supervised_loss: 0.318420
[19:59:52.117] iteration 436 : loss : 0.314086, supervised_loss: 0.314071
[19:59:53.028] iteration 437 : loss : 0.319615, supervised_loss: 0.319602
[19:59:53.939] iteration 438 : loss : 0.309334, supervised_loss: 0.309318
[19:59:54.851] iteration 439 : loss : 0.321579, supervised_loss: 0.321566
[19:59:55.763] iteration 440 : loss : 0.315118, supervised_loss: 0.315104
[19:59:56.675] iteration 441 : loss : 0.305158, supervised_loss: 0.305145
[19:59:57.587] iteration 442 : loss : 0.301240, supervised_loss: 0.301229
[19:59:58.500] iteration 443 : loss : 0.312463, supervised_loss: 0.312445
[19:59:59.505] iteration 444 : loss : 0.332992, supervised_loss: 0.332978
[20:00:01.058] iteration 445 : loss : 0.304455, supervised_loss: 0.304443
[20:00:01.970] iteration 446 : loss : 0.317355, supervised_loss: 0.317340
[20:00:02.880] iteration 447 : loss : 0.364954, supervised_loss: 0.364940
[20:00:03.792] iteration 448 : loss : 0.304051, supervised_loss: 0.304037
[20:00:04.703] iteration 449 : loss : 0.319760, supervised_loss: 0.319748
[20:00:05.614] iteration 450 : loss : 0.321474, supervised_loss: 0.321460
[20:00:06.525] iteration 451 : loss : 0.303980, supervised_loss: 0.303968
[20:00:07.438] iteration 452 : loss : 0.305392, supervised_loss: 0.305375
[20:00:08.351] iteration 453 : loss : 0.302777, supervised_loss: 0.302758
[20:00:09.263] iteration 454 : loss : 0.331210, supervised_loss: 0.331187
[20:00:10.174] iteration 455 : loss : 0.322453, supervised_loss: 0.322428
[20:00:11.087] iteration 456 : loss : 0.307164, supervised_loss: 0.307143
[20:00:12.649] iteration 457 : loss : 0.314690, supervised_loss: 0.314676
[20:00:13.561] iteration 458 : loss : 0.304608, supervised_loss: 0.304593
[20:00:14.471] iteration 459 : loss : 0.305185, supervised_loss: 0.305172
[20:00:15.384] iteration 460 : loss : 0.312910, supervised_loss: 0.312892
[20:00:16.297] iteration 461 : loss : 0.327964, supervised_loss: 0.327942
[20:00:17.207] iteration 462 : loss : 0.314003, supervised_loss: 0.313982
[20:00:18.119] iteration 463 : loss : 0.329990, supervised_loss: 0.329958
[20:00:19.030] iteration 464 : loss : 0.307117, supervised_loss: 0.307101
[20:00:19.942] iteration 465 : loss : 0.304841, supervised_loss: 0.304821
[20:00:20.853] iteration 466 : loss : 0.301938, supervised_loss: 0.301926
[20:00:21.764] iteration 467 : loss : 0.326746, supervised_loss: 0.326732
[20:00:22.677] iteration 468 : loss : 0.308977, supervised_loss: 0.308960
[20:00:24.177] iteration 469 : loss : 0.337135, supervised_loss: 0.337120
[20:00:25.087] iteration 470 : loss : 0.315365, supervised_loss: 0.315354
[20:00:25.998] iteration 471 : loss : 0.306005, supervised_loss: 0.305985
[20:00:26.911] iteration 472 : loss : 0.309740, supervised_loss: 0.309726
[20:00:27.823] iteration 473 : loss : 0.311468, supervised_loss: 0.311455
[20:00:28.734] iteration 474 : loss : 0.329884, supervised_loss: 0.329869
[20:00:29.646] iteration 475 : loss : 0.325645, supervised_loss: 0.325628
[20:00:30.557] iteration 476 : loss : 0.300374, supervised_loss: 0.300359
[20:00:31.470] iteration 477 : loss : 0.301477, supervised_loss: 0.301460
[20:00:32.382] iteration 478 : loss : 0.297493, supervised_loss: 0.297471
[20:00:33.293] iteration 479 : loss : 0.327671, supervised_loss: 0.327654
[20:00:34.205] iteration 480 : loss : 0.299303, supervised_loss: 0.299290
[20:00:35.717] iteration 481 : loss : 0.295429, supervised_loss: 0.295411
[20:00:36.627] iteration 482 : loss : 0.336496, supervised_loss: 0.336481
[20:00:37.539] iteration 483 : loss : 0.325262, supervised_loss: 0.325242
[20:00:38.450] iteration 484 : loss : 0.329813, supervised_loss: 0.329790
[20:00:39.363] iteration 485 : loss : 0.301848, supervised_loss: 0.301838
[20:00:40.275] iteration 486 : loss : 0.301602, supervised_loss: 0.301592
[20:00:41.185] iteration 487 : loss : 0.314992, supervised_loss: 0.314976
[20:00:42.096] iteration 488 : loss : 0.307906, supervised_loss: 0.307891
[20:00:43.008] iteration 489 : loss : 0.321270, supervised_loss: 0.321254
[20:00:43.921] iteration 490 : loss : 0.304878, supervised_loss: 0.304862
[20:00:44.834] iteration 491 : loss : 0.306353, supervised_loss: 0.306330
[20:00:45.746] iteration 492 : loss : 0.312779, supervised_loss: 0.312761
[20:00:47.334] iteration 493 : loss : 0.302390, supervised_loss: 0.302373
[20:00:48.245] iteration 494 : loss : 0.293151, supervised_loss: 0.293136
[20:00:49.156] iteration 495 : loss : 0.316885, supervised_loss: 0.316871
[20:00:50.068] iteration 496 : loss : 0.300723, supervised_loss: 0.300711
[20:00:50.979] iteration 497 : loss : 0.292651, supervised_loss: 0.292635
[20:00:51.891] iteration 498 : loss : 0.306042, supervised_loss: 0.306032
[20:00:52.802] iteration 499 : loss : 0.340615, supervised_loss: 0.340594
[20:00:53.713] iteration 500 : loss : 0.318381, supervised_loss: 0.318369
[20:00:54.627] iteration 501 : loss : 0.319468, supervised_loss: 0.319455
[20:00:55.540] iteration 502 : loss : 0.304893, supervised_loss: 0.304872
[20:00:56.451] iteration 503 : loss : 0.296153, supervised_loss: 0.296138
[20:00:57.362] iteration 504 : loss : 0.308430, supervised_loss: 0.308414
[20:00:58.976] iteration 505 : loss : 0.305917, supervised_loss: 0.305904
[20:00:59.887] iteration 506 : loss : 0.309515, supervised_loss: 0.309502
[20:01:00.798] iteration 507 : loss : 0.308644, supervised_loss: 0.308630
[20:01:01.709] iteration 508 : loss : 0.305527, supervised_loss: 0.305514
[20:01:02.621] iteration 509 : loss : 0.298281, supervised_loss: 0.298263
[20:01:03.533] iteration 510 : loss : 0.306813, supervised_loss: 0.306800
[20:01:04.443] iteration 511 : loss : 0.295974, supervised_loss: 0.295957
[20:01:05.355] iteration 512 : loss : 0.298302, supervised_loss: 0.298289
[20:01:06.266] iteration 513 : loss : 0.311494, supervised_loss: 0.311473
[20:01:07.176] iteration 514 : loss : 0.325405, supervised_loss: 0.325392
[20:01:08.087] iteration 515 : loss : 0.319959, supervised_loss: 0.319949
[20:01:09.000] iteration 516 : loss : 0.296884, supervised_loss: 0.296866
[20:01:10.481] iteration 517 : loss : 0.304250, supervised_loss: 0.304240
[20:01:11.393] iteration 518 : loss : 0.344544, supervised_loss: 0.344531
[20:01:12.303] iteration 519 : loss : 0.291655, supervised_loss: 0.291642
[20:01:13.215] iteration 520 : loss : 0.314587, supervised_loss: 0.314569
[20:01:14.128] iteration 521 : loss : 0.319647, supervised_loss: 0.319631
[20:01:15.038] iteration 522 : loss : 0.337362, supervised_loss: 0.337350
[20:01:15.950] iteration 523 : loss : 0.312157, supervised_loss: 0.312144
[20:01:16.884] iteration 524 : loss : 0.304026, supervised_loss: 0.304008
[20:01:17.797] iteration 525 : loss : 0.325472, supervised_loss: 0.325458
[20:01:18.708] iteration 526 : loss : 0.318851, supervised_loss: 0.318836
[20:01:19.620] iteration 527 : loss : 0.304666, supervised_loss: 0.304651
[20:01:20.532] iteration 528 : loss : 0.320443, supervised_loss: 0.320426
[20:01:22.126] iteration 529 : loss : 0.317867, supervised_loss: 0.317849
[20:01:23.037] iteration 530 : loss : 0.310719, supervised_loss: 0.310699
[20:01:23.950] iteration 531 : loss : 0.303747, supervised_loss: 0.303730
[20:01:24.863] iteration 532 : loss : 0.317534, supervised_loss: 0.317516
[20:01:25.775] iteration 533 : loss : 0.312521, supervised_loss: 0.312508
[20:01:26.687] iteration 534 : loss : 0.314953, supervised_loss: 0.314930
[20:01:27.599] iteration 535 : loss : 0.308774, supervised_loss: 0.308756
[20:01:28.511] iteration 536 : loss : 0.319219, supervised_loss: 0.319206
[20:01:29.423] iteration 537 : loss : 0.303888, supervised_loss: 0.303872
[20:01:30.335] iteration 538 : loss : 0.325353, supervised_loss: 0.325334
[20:01:31.245] iteration 539 : loss : 0.303120, supervised_loss: 0.303104
[20:01:32.158] iteration 540 : loss : 0.291648, supervised_loss: 0.291627
[20:01:33.667] iteration 541 : loss : 0.304418, supervised_loss: 0.304402
[20:01:34.578] iteration 542 : loss : 0.318145, supervised_loss: 0.318134
[20:01:35.492] iteration 543 : loss : 0.319809, supervised_loss: 0.319795
[20:01:36.405] iteration 544 : loss : 0.338298, supervised_loss: 0.338278
[20:01:37.315] iteration 545 : loss : 0.312845, supervised_loss: 0.312827
[20:01:38.227] iteration 546 : loss : 0.304894, supervised_loss: 0.304878
[20:01:39.139] iteration 547 : loss : 0.312012, supervised_loss: 0.311996
[20:01:40.051] iteration 548 : loss : 0.305217, supervised_loss: 0.305201
[20:01:40.962] iteration 549 : loss : 0.303056, supervised_loss: 0.303043
[20:01:41.872] iteration 550 : loss : 0.304246, supervised_loss: 0.304228
[20:01:42.786] iteration 551 : loss : 0.315187, supervised_loss: 0.315174
[20:01:43.699] iteration 552 : loss : 0.292571, supervised_loss: 0.292556
[20:01:45.254] iteration 553 : loss : 0.304245, supervised_loss: 0.304226
[20:01:46.164] iteration 554 : loss : 0.307491, supervised_loss: 0.307476
[20:01:47.075] iteration 555 : loss : 0.296825, supervised_loss: 0.296813
[20:01:47.986] iteration 556 : loss : 0.300927, supervised_loss: 0.300911
[20:01:48.898] iteration 557 : loss : 0.340912, supervised_loss: 0.340895
[20:01:49.811] iteration 558 : loss : 0.311329, supervised_loss: 0.311314
[20:01:50.724] iteration 559 : loss : 0.300352, supervised_loss: 0.300329
[20:01:51.636] iteration 560 : loss : 0.328949, supervised_loss: 0.328918
[20:01:52.546] iteration 561 : loss : 0.308381, supervised_loss: 0.308366
[20:01:53.458] iteration 562 : loss : 0.311193, supervised_loss: 0.311181
[20:01:54.370] iteration 563 : loss : 0.302902, supervised_loss: 0.302888
[20:01:55.283] iteration 564 : loss : 0.308501, supervised_loss: 0.308483
[20:01:56.765] iteration 565 : loss : 0.307324, supervised_loss: 0.307309
[20:01:57.677] iteration 566 : loss : 0.301237, supervised_loss: 0.301215
[20:01:58.588] iteration 567 : loss : 0.306471, supervised_loss: 0.306452
[20:01:59.499] iteration 568 : loss : 0.305086, supervised_loss: 0.305073
[20:02:00.413] iteration 569 : loss : 0.328757, supervised_loss: 0.328743
[20:02:01.325] iteration 570 : loss : 0.326418, supervised_loss: 0.326399
[20:02:02.237] iteration 571 : loss : 0.306534, supervised_loss: 0.306518
[20:02:03.148] iteration 572 : loss : 0.293907, supervised_loss: 0.293878
[20:02:04.060] iteration 573 : loss : 0.307012, supervised_loss: 0.306978
[20:02:04.982] iteration 574 : loss : 0.304360, supervised_loss: 0.304343
[20:02:05.894] iteration 575 : loss : 0.305725, supervised_loss: 0.305709
[20:02:06.807] iteration 576 : loss : 0.296264, supervised_loss: 0.296248
[20:02:08.289] iteration 577 : loss : 0.318993, supervised_loss: 0.318973
[20:02:09.201] iteration 578 : loss : 0.293373, supervised_loss: 0.293352
[20:02:10.113] iteration 579 : loss : 0.304802, supervised_loss: 0.304791
[20:02:11.025] iteration 580 : loss : 0.291581, supervised_loss: 0.291563
[20:02:11.937] iteration 581 : loss : 0.330825, supervised_loss: 0.330806
[20:02:12.850] iteration 582 : loss : 0.312970, supervised_loss: 0.312955
[20:02:13.763] iteration 583 : loss : 0.305111, supervised_loss: 0.305093
[20:02:14.674] iteration 584 : loss : 0.298881, supervised_loss: 0.298863
[20:02:15.586] iteration 585 : loss : 0.301091, supervised_loss: 0.301073
[20:02:16.499] iteration 586 : loss : 0.322148, supervised_loss: 0.322129
[20:02:17.412] iteration 587 : loss : 0.289451, supervised_loss: 0.289432
[20:02:18.324] iteration 588 : loss : 0.303912, supervised_loss: 0.303900
[20:02:19.805] iteration 589 : loss : 0.331087, supervised_loss: 0.331070
[20:02:20.717] iteration 590 : loss : 0.300431, supervised_loss: 0.300413
[20:02:21.630] iteration 591 : loss : 0.318610, supervised_loss: 0.318593
[20:02:22.542] iteration 592 : loss : 0.310195, supervised_loss: 0.310180
[20:02:23.455] iteration 593 : loss : 0.302377, supervised_loss: 0.302362
[20:02:24.369] iteration 594 : loss : 0.314180, supervised_loss: 0.314163
[20:02:25.280] iteration 595 : loss : 0.293294, supervised_loss: 0.293277
[20:02:26.192] iteration 596 : loss : 0.295511, supervised_loss: 0.295495
[20:02:27.104] iteration 597 : loss : 0.313056, supervised_loss: 0.313039
[20:02:28.042] iteration 598 : loss : 0.301124, supervised_loss: 0.301110
[20:02:28.954] iteration 599 : loss : 0.307388, supervised_loss: 0.307366
[20:02:29.865] iteration 600 : loss : 0.297587, supervised_loss: 0.297560
[20:02:31.538] iteration 601 : loss : 0.303621, supervised_loss: 0.303605
[20:02:32.449] iteration 602 : loss : 0.299706, supervised_loss: 0.299688
[20:02:33.361] iteration 603 : loss : 0.305284, supervised_loss: 0.305262
[20:02:34.272] iteration 604 : loss : 0.298387, supervised_loss: 0.298368
[20:02:35.184] iteration 605 : loss : 0.295473, supervised_loss: 0.295458
[20:02:36.098] iteration 606 : loss : 0.294041, supervised_loss: 0.294014
[20:02:37.011] iteration 607 : loss : 0.343858, supervised_loss: 0.343837
[20:02:37.923] iteration 608 : loss : 0.297621, supervised_loss: 0.297600
[20:02:38.835] iteration 609 : loss : 0.324106, supervised_loss: 0.324089
[20:02:39.745] iteration 610 : loss : 0.304769, supervised_loss: 0.304747
[20:02:40.659] iteration 611 : loss : 0.296189, supervised_loss: 0.296162
[20:02:41.573] iteration 612 : loss : 0.294270, supervised_loss: 0.294254
[20:02:43.134] iteration 613 : loss : 0.299698, supervised_loss: 0.299681
[20:02:44.044] iteration 614 : loss : 0.292195, supervised_loss: 0.292175
[20:02:44.955] iteration 615 : loss : 0.315854, supervised_loss: 0.315835
[20:02:45.869] iteration 616 : loss : 0.309398, supervised_loss: 0.309362
[20:02:46.782] iteration 617 : loss : 0.293463, supervised_loss: 0.293448
[20:02:47.694] iteration 618 : loss : 0.299611, supervised_loss: 0.299585
[20:02:48.606] iteration 619 : loss : 0.307449, supervised_loss: 0.307432
[20:02:49.519] iteration 620 : loss : 0.303851, supervised_loss: 0.303828
[20:02:50.432] iteration 621 : loss : 0.296523, supervised_loss: 0.296501
[20:02:51.344] iteration 622 : loss : 0.297461, supervised_loss: 0.297437
[20:02:52.255] iteration 623 : loss : 0.301246, supervised_loss: 0.301229
[20:02:53.168] iteration 624 : loss : 0.315976, supervised_loss: 0.315950
[20:02:54.769] iteration 625 : loss : 0.347766, supervised_loss: 0.347747
[20:02:55.680] iteration 626 : loss : 0.296051, supervised_loss: 0.296023
[20:02:56.593] iteration 627 : loss : 0.294323, supervised_loss: 0.294308
[20:02:57.505] iteration 628 : loss : 0.309455, supervised_loss: 0.309435
[20:02:58.417] iteration 629 : loss : 0.303427, supervised_loss: 0.303409
[20:02:59.328] iteration 630 : loss : 0.307894, supervised_loss: 0.307872
[20:03:00.240] iteration 631 : loss : 0.299302, supervised_loss: 0.299284
[20:03:01.152] iteration 632 : loss : 0.335705, supervised_loss: 0.335643
[20:03:02.064] iteration 633 : loss : 0.297107, supervised_loss: 0.297086
[20:03:02.975] iteration 634 : loss : 0.299546, supervised_loss: 0.299532
[20:03:03.887] iteration 635 : loss : 0.312042, supervised_loss: 0.312013
[20:03:04.800] iteration 636 : loss : 0.303387, supervised_loss: 0.303370
[20:03:06.276] iteration 637 : loss : 0.297151, supervised_loss: 0.297134
[20:03:07.187] iteration 638 : loss : 0.313181, supervised_loss: 0.313160
[20:03:08.098] iteration 639 : loss : 0.294960, supervised_loss: 0.294940
[20:03:09.011] iteration 640 : loss : 0.299044, supervised_loss: 0.299017
[20:03:09.924] iteration 641 : loss : 0.303445, supervised_loss: 0.303427
[20:03:10.835] iteration 642 : loss : 0.296559, supervised_loss: 0.296544
[20:03:11.747] iteration 643 : loss : 0.298099, supervised_loss: 0.298085
[20:03:12.659] iteration 644 : loss : 0.304720, supervised_loss: 0.304697
[20:03:13.573] iteration 645 : loss : 0.300820, supervised_loss: 0.300806
[20:03:14.486] iteration 646 : loss : 0.302206, supervised_loss: 0.302191
[20:03:15.400] iteration 647 : loss : 0.307792, supervised_loss: 0.307773
[20:03:16.311] iteration 648 : loss : 0.297431, supervised_loss: 0.297407
[20:03:17.873] iteration 649 : loss : 0.311479, supervised_loss: 0.311461
[20:03:18.784] iteration 650 : loss : 0.292190, supervised_loss: 0.292171
[20:03:19.698] iteration 651 : loss : 0.340784, supervised_loss: 0.340762
[20:03:20.609] iteration 652 : loss : 0.296237, supervised_loss: 0.296225
[20:03:21.521] iteration 653 : loss : 0.308359, supervised_loss: 0.308339
[20:03:22.432] iteration 654 : loss : 0.297835, supervised_loss: 0.297809
[20:03:23.343] iteration 655 : loss : 0.302807, supervised_loss: 0.302787
[20:03:24.254] iteration 656 : loss : 0.305526, supervised_loss: 0.305503
[20:03:25.165] iteration 657 : loss : 0.302105, supervised_loss: 0.302085
[20:03:26.077] iteration 658 : loss : 0.299113, supervised_loss: 0.299088
[20:03:26.990] iteration 659 : loss : 0.301941, supervised_loss: 0.301919
[20:03:27.902] iteration 660 : loss : 0.344273, supervised_loss: 0.344248
[20:03:29.494] iteration 661 : loss : 0.319370, supervised_loss: 0.319349
[20:03:30.406] iteration 662 : loss : 0.299883, supervised_loss: 0.299867
[20:03:31.317] iteration 663 : loss : 0.310023, supervised_loss: 0.309998
[20:03:32.229] iteration 664 : loss : 0.320138, supervised_loss: 0.320114
[20:03:33.142] iteration 665 : loss : 0.298820, supervised_loss: 0.298783
[20:03:34.055] iteration 666 : loss : 0.307102, supervised_loss: 0.307083
[20:03:34.966] iteration 667 : loss : 0.305429, supervised_loss: 0.305411
[20:03:35.877] iteration 668 : loss : 0.305074, supervised_loss: 0.305054
[20:03:36.789] iteration 669 : loss : 0.309549, supervised_loss: 0.309527
[20:03:37.701] iteration 670 : loss : 0.320568, supervised_loss: 0.320516
[20:03:38.613] iteration 671 : loss : 0.310038, supervised_loss: 0.310018
[20:03:39.525] iteration 672 : loss : 0.316622, supervised_loss: 0.316593
[20:03:41.013] iteration 673 : loss : 0.333802, supervised_loss: 0.333786
[20:03:41.924] iteration 674 : loss : 0.325658, supervised_loss: 0.325632
[20:03:42.837] iteration 675 : loss : 0.367518, supervised_loss: 0.367493
[20:03:43.749] iteration 676 : loss : 0.316104, supervised_loss: 0.316090
[20:03:44.662] iteration 677 : loss : 0.319178, supervised_loss: 0.319152
[20:03:45.573] iteration 678 : loss : 0.294213, supervised_loss: 0.294187
[20:03:46.485] iteration 679 : loss : 0.313790, supervised_loss: 0.313772
[20:03:47.397] iteration 680 : loss : 0.314116, supervised_loss: 0.314098
[20:03:48.310] iteration 681 : loss : 0.306794, supervised_loss: 0.306769
[20:03:49.222] iteration 682 : loss : 0.305588, supervised_loss: 0.305572
[20:03:50.133] iteration 683 : loss : 0.311794, supervised_loss: 0.311779
[20:03:51.046] iteration 684 : loss : 0.303927, supervised_loss: 0.303912
[20:03:52.595] iteration 685 : loss : 0.304106, supervised_loss: 0.304087
[20:03:53.506] iteration 686 : loss : 0.297362, supervised_loss: 0.297345
[20:03:54.418] iteration 687 : loss : 0.308914, supervised_loss: 0.308894
[20:03:55.330] iteration 688 : loss : 0.303095, supervised_loss: 0.303073
[20:03:56.242] iteration 689 : loss : 0.320604, supervised_loss: 0.320577
[20:03:57.153] iteration 690 : loss : 0.317324, supervised_loss: 0.317306
[20:03:58.065] iteration 691 : loss : 0.308370, supervised_loss: 0.308349
[20:03:58.977] iteration 692 : loss : 0.316085, supervised_loss: 0.316066
[20:03:59.889] iteration 693 : loss : 0.300593, supervised_loss: 0.300570
[20:04:00.801] iteration 694 : loss : 0.324126, supervised_loss: 0.324111
[20:04:01.714] iteration 695 : loss : 0.292228, supervised_loss: 0.292207
[20:04:02.626] iteration 696 : loss : 0.297918, supervised_loss: 0.297894
[20:04:04.114] iteration 697 : loss : 0.306979, supervised_loss: 0.306962
[20:04:05.027] iteration 698 : loss : 0.305234, supervised_loss: 0.305215
[20:04:05.940] iteration 699 : loss : 0.298399, supervised_loss: 0.298381
[20:04:06.852] iteration 700 : loss : 0.316799, supervised_loss: 0.316777
[20:04:07.765] iteration 701 : loss : 0.293397, supervised_loss: 0.293377
[20:04:08.677] iteration 702 : loss : 0.293957, supervised_loss: 0.293936
[20:04:09.590] iteration 703 : loss : 0.313434, supervised_loss: 0.313415
[20:04:10.503] iteration 704 : loss : 0.306296, supervised_loss: 0.306280
[20:04:11.416] iteration 705 : loss : 0.294228, supervised_loss: 0.294211
[20:04:12.328] iteration 706 : loss : 0.303160, supervised_loss: 0.303136
[20:04:13.239] iteration 707 : loss : 0.320310, supervised_loss: 0.320288
[20:04:14.152] iteration 708 : loss : 0.304886, supervised_loss: 0.304869
[20:04:15.656] iteration 709 : loss : 0.321850, supervised_loss: 0.321832
[20:04:16.569] iteration 710 : loss : 0.296380, supervised_loss: 0.296356
[20:04:17.481] iteration 711 : loss : 0.301896, supervised_loss: 0.301877
[20:04:18.394] iteration 712 : loss : 0.315221, supervised_loss: 0.315208
[20:04:19.305] iteration 713 : loss : 0.311771, supervised_loss: 0.311752
[20:04:20.218] iteration 714 : loss : 0.304116, supervised_loss: 0.304101
[20:04:21.130] iteration 715 : loss : 0.290783, supervised_loss: 0.290755
[20:04:22.043] iteration 716 : loss : 0.300402, supervised_loss: 0.300376
[20:04:22.955] iteration 717 : loss : 0.307405, supervised_loss: 0.307383
[20:04:23.868] iteration 718 : loss : 0.298970, supervised_loss: 0.298938
[20:04:24.780] iteration 719 : loss : 0.293180, supervised_loss: 0.293153
[20:04:25.693] iteration 720 : loss : 0.309080, supervised_loss: 0.309063
[20:04:27.255] iteration 721 : loss : 0.297623, supervised_loss: 0.297590
[20:04:28.167] iteration 722 : loss : 0.295092, supervised_loss: 0.295078
[20:04:29.079] iteration 723 : loss : 0.305657, supervised_loss: 0.305640
[20:04:29.991] iteration 724 : loss : 0.306041, supervised_loss: 0.306028
[20:04:30.903] iteration 725 : loss : 0.321996, supervised_loss: 0.321979
[20:04:31.816] iteration 726 : loss : 0.293092, supervised_loss: 0.293080
[20:04:32.728] iteration 727 : loss : 0.302742, supervised_loss: 0.302727
[20:04:33.641] iteration 728 : loss : 0.294904, supervised_loss: 0.294887
[20:04:34.552] iteration 729 : loss : 0.296381, supervised_loss: 0.296358
[20:04:35.464] iteration 730 : loss : 0.299251, supervised_loss: 0.299223
[20:04:36.376] iteration 731 : loss : 0.308725, supervised_loss: 0.308710
[20:04:37.289] iteration 732 : loss : 0.305711, supervised_loss: 0.305696
[20:04:38.771] iteration 733 : loss : 0.294780, supervised_loss: 0.294760
[20:04:39.682] iteration 734 : loss : 0.292859, supervised_loss: 0.292839
[20:04:40.592] iteration 735 : loss : 0.293445, supervised_loss: 0.293412
[20:04:41.504] iteration 736 : loss : 0.295382, supervised_loss: 0.295358
[20:04:42.415] iteration 737 : loss : 0.317500, supervised_loss: 0.317485
[20:04:43.327] iteration 738 : loss : 0.303641, supervised_loss: 0.303625
[20:04:44.239] iteration 739 : loss : 0.297885, supervised_loss: 0.297865
[20:04:45.152] iteration 740 : loss : 0.295696, supervised_loss: 0.295678
[20:04:46.063] iteration 741 : loss : 0.289655, supervised_loss: 0.289635
[20:04:46.975] iteration 742 : loss : 0.303023, supervised_loss: 0.303007
[20:04:47.888] iteration 743 : loss : 0.315516, supervised_loss: 0.315488
[20:04:48.802] iteration 744 : loss : 0.293042, supervised_loss: 0.293026
[20:04:50.283] iteration 745 : loss : 0.305532, supervised_loss: 0.305512
[20:04:51.194] iteration 746 : loss : 0.296604, supervised_loss: 0.296588
[20:04:52.106] iteration 747 : loss : 0.307267, supervised_loss: 0.307252
[20:04:53.018] iteration 748 : loss : 0.296728, supervised_loss: 0.296704
[20:04:53.931] iteration 749 : loss : 0.296912, supervised_loss: 0.296894
[20:04:54.844] iteration 750 : loss : 0.297104, supervised_loss: 0.297088
[20:04:55.755] iteration 751 : loss : 0.289153, supervised_loss: 0.289116
[20:04:56.667] iteration 752 : loss : 0.303873, supervised_loss: 0.303837
[20:04:57.580] iteration 753 : loss : 0.318231, supervised_loss: 0.318213
[20:04:58.493] iteration 754 : loss : 0.328643, supervised_loss: 0.328627
[20:04:59.406] iteration 755 : loss : 0.307762, supervised_loss: 0.307744
[20:05:00.319] iteration 756 : loss : 0.292539, supervised_loss: 0.292513
[20:05:01.905] iteration 757 : loss : 0.302383, supervised_loss: 0.302361
[20:05:02.816] iteration 758 : loss : 0.295565, supervised_loss: 0.295543
[20:05:03.728] iteration 759 : loss : 0.301797, supervised_loss: 0.301767
[20:05:04.641] iteration 760 : loss : 0.300633, supervised_loss: 0.300608
[20:05:05.554] iteration 761 : loss : 0.295913, supervised_loss: 0.295881
[20:05:06.467] iteration 762 : loss : 0.290080, supervised_loss: 0.290051
[20:05:07.379] iteration 763 : loss : 0.302144, supervised_loss: 0.302121
[20:05:08.290] iteration 764 : loss : 0.314518, supervised_loss: 0.314497
[20:05:09.203] iteration 765 : loss : 0.293258, supervised_loss: 0.293230
[20:05:10.116] iteration 766 : loss : 0.313218, supervised_loss: 0.313193
[20:05:11.028] iteration 767 : loss : 0.298095, supervised_loss: 0.298071
[20:05:11.940] iteration 768 : loss : 0.292453, supervised_loss: 0.292434
[20:05:13.528] iteration 769 : loss : 0.287379, supervised_loss: 0.287366
[20:05:14.439] iteration 770 : loss : 0.301143, supervised_loss: 0.301128
[20:05:15.351] iteration 771 : loss : 0.295738, supervised_loss: 0.295718
[20:05:16.266] iteration 772 : loss : 0.287444, supervised_loss: 0.287422
[20:05:17.178] iteration 773 : loss : 0.303675, supervised_loss: 0.303655
[20:05:18.090] iteration 774 : loss : 0.299650, supervised_loss: 0.299631
[20:05:19.002] iteration 775 : loss : 0.313823, supervised_loss: 0.313803
[20:05:19.913] iteration 776 : loss : 0.293010, supervised_loss: 0.292986
[20:05:20.826] iteration 777 : loss : 0.296561, supervised_loss: 0.296532
[20:05:21.739] iteration 778 : loss : 0.304379, supervised_loss: 0.304354
[20:05:22.652] iteration 779 : loss : 0.309970, supervised_loss: 0.309942
[20:05:23.564] iteration 780 : loss : 0.296501, supervised_loss: 0.296485
[20:05:25.135] iteration 781 : loss : 0.286879, supervised_loss: 0.286865
[20:05:26.048] iteration 782 : loss : 0.298539, supervised_loss: 0.298516
[20:05:26.958] iteration 783 : loss : 0.296801, supervised_loss: 0.296778
[20:05:27.870] iteration 784 : loss : 0.293676, supervised_loss: 0.293649
[20:05:28.783] iteration 785 : loss : 0.299800, supervised_loss: 0.299773
[20:05:29.696] iteration 786 : loss : 0.294720, supervised_loss: 0.294705
[20:05:30.607] iteration 787 : loss : 0.291976, supervised_loss: 0.291959
[20:05:31.519] iteration 788 : loss : 0.298043, supervised_loss: 0.298028
[20:05:32.431] iteration 789 : loss : 0.294359, supervised_loss: 0.294339
[20:05:33.345] iteration 790 : loss : 0.284765, supervised_loss: 0.284746
[20:05:34.258] iteration 791 : loss : 0.293936, supervised_loss: 0.293914
[20:05:35.170] iteration 792 : loss : 0.291885, supervised_loss: 0.291862
[20:05:36.706] iteration 793 : loss : 0.289884, supervised_loss: 0.289863
[20:05:37.617] iteration 794 : loss : 0.293349, supervised_loss: 0.293327
[20:05:38.529] iteration 795 : loss : 0.302935, supervised_loss: 0.302916
[20:05:39.440] iteration 796 : loss : 0.297151, supervised_loss: 0.297129
[20:05:40.352] iteration 797 : loss : 0.298887, supervised_loss: 0.298862
[20:05:41.265] iteration 798 : loss : 0.306508, supervised_loss: 0.306486
[20:05:42.178] iteration 799 : loss : 0.294450, supervised_loss: 0.294430
[20:05:43.090] iteration 800 : loss : 0.286308, supervised_loss: 0.286285
[20:09:27.224] Namespace(base_lr=0.01, batch_size=4, consistency=0.1, consistency_rampup=40.0, consistency_type='mse', dataset_name='LA', deterministic=1, ema_decay=0.99, exp='vnet', gpu='0', labeled_bs=2, labelnum=25, max_iterations=6000, max_samples=123, model='URPC', root_path='/data/omnisky/postgraduate/Yb/data_set/LASet/data', seed=1337)
[20:09:29.847] 12 iterations per epoch
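
Note on the two logged columns: in this family of semi-supervised baselines the total "loss" is typically the "supervised_loss" plus a consistency term scaled by a weight that ramps up over consistency_rampup epochs (here consistency=0.1, consistency_rampup=40.0, 12 iterations per epoch, per the Namespace above). The sketch below only illustrates that schedule under the common sigmoid ramp-up assumption; the helper names (sigmoid_rampup, consistency_weight) are hypothetical and are not taken from the script that produced this log.

    import numpy as np

    def sigmoid_rampup(current, rampup_length):
        # Exponential (sigmoid-shaped) ramp-up commonly used for consistency
        # weights (Laine & Aila, 2017); returns a value in (0, 1].
        if rampup_length == 0:
            return 1.0
        current = float(np.clip(current, 0.0, rampup_length))
        phase = 1.0 - current / rampup_length
        return float(np.exp(-5.0 * phase * phase))

    def consistency_weight(iteration, iters_per_epoch=12,
                           consistency=0.1, consistency_rampup=40.0):
        # Hypothetical helper: maps an iteration count to the weight that
        # would multiply the unsupervised consistency loss, using the
        # argument values shown in the Namespace line of this log.
        epoch = iteration / iters_per_epoch
        return consistency * sigmoid_rampup(epoch, consistency_rampup)

    if __name__ == "__main__":
        for it in (1, 100, 400, 800):
            print(f"iteration {it:4d}: assumed consistency weight ~ "
                  f"{consistency_weight(it):.5f}")

Under this assumption the weight is close to zero in the first epochs, which is consistent with "loss" and "supervised_loss" differing only in the last decimal places of the early iterations logged here.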
[20:09:31.746] iteration 1 : loss : 0.761397, supervised_loss: 0.761365
[20:09:32.605] iteration 2 : loss : 0.709286, supervised_loss: 0.709248
[20:09:33.468] iteration 3 : loss : 0.669151, supervised_loss: 0.669122
[20:09:34.330] iteration 4 : loss : 0.662708, supervised_loss: 0.662669
[20:09:35.196] iteration 5 : loss : 0.663373, supervised_loss: 0.663325
[20:09:36.061] iteration 6 : loss : 0.619493, supervised_loss: 0.619454
[20:09:36.932] iteration 7 : loss : 0.632124, supervised_loss: 0.632085
[20:09:37.798] iteration 8 : loss : 0.596366, supervised_loss: 0.596328
[20:09:38.667] iteration 9 : loss : 0.574371, supervised_loss: 0.574335
[20:09:39.538] iteration 10 : loss : 0.548224, supervised_loss: 0.548187
[20:09:40.406] iteration 11 : loss : 0.549988, supervised_loss: 0.549954
[20:09:41.276] iteration 12 : loss : 0.518826, supervised_loss: 0.518792
[20:09:42.776] iteration 13 : loss : 0.545668, supervised_loss: 0.545623
[20:09:43.650] iteration 14 : loss : 0.540610, supervised_loss: 0.540577
[20:09:44.525] iteration 15 : loss : 0.533069, supervised_loss: 0.533032
[20:09:45.401] iteration 16 : loss : 0.526565, supervised_loss: 0.526537
[20:09:46.277] iteration 17 : loss : 0.523808, supervised_loss: 0.523784
[20:09:47.155] iteration 18 : loss : 0.493119, supervised_loss: 0.493088
[20:09:48.032] iteration 19 : loss : 0.523907, supervised_loss: 0.523887
[20:09:48.908] iteration 20 : loss : 0.486228, supervised_loss: 0.486204
[20:09:49.787] iteration 21 : loss : 0.484061, supervised_loss: 0.484033
[20:09:50.669] iteration 22 : loss : 0.475708, supervised_loss: 0.475671
[20:09:51.552] iteration 23 : loss : 0.494011, supervised_loss: 0.493977
[20:09:52.435] iteration 24 : loss : 0.456142, supervised_loss: 0.456106
[20:09:53.938] iteration 25 : loss : 0.495768, supervised_loss: 0.495745
[20:09:54.821] iteration 26 : loss : 0.449094, supervised_loss: 0.449063
[20:09:55.707] iteration 27 : loss : 0.446289, supervised_loss: 0.446262
[20:09:56.592] iteration 28 : loss : 0.439178, supervised_loss: 0.439154
[20:09:57.478] iteration 29 : loss : 0.464077, supervised_loss: 0.464047
[20:09:58.366] iteration 30 : loss : 0.444761, supervised_loss: 0.444728
[20:09:59.253] iteration 31 : loss : 0.436114, supervised_loss: 0.436079
[20:10:00.141] iteration 32 : loss : 0.443603, supervised_loss: 0.443571
[20:10:01.031] iteration 33 : loss : 0.444803, supervised_loss: 0.444769
[20:10:01.922] iteration 34 : loss : 0.420913, supervised_loss: 0.420894
[20:10:02.815] iteration 35 : loss : 0.445536, supervised_loss: 0.445517
[20:10:03.709] iteration 36 : loss : 0.469259, supervised_loss: 0.469238
[20:10:05.326] iteration 37 : loss : 0.442870, supervised_loss: 0.442848
[20:10:06.220] iteration 38 : loss : 0.473594, supervised_loss: 0.473573
[20:10:07.115] iteration 39 : loss : 0.428313, supervised_loss: 0.428289
[20:10:08.011] iteration 40 : loss : 0.446099, supervised_loss: 0.446072
[20:10:08.906] iteration 41 : loss : 0.420382, supervised_loss: 0.420364
[20:10:09.804] iteration 42 : loss : 0.428674, supervised_loss: 0.428652
[20:10:10.701] iteration 43 : loss : 0.463789, supervised_loss: 0.463767
[20:10:11.600] iteration 44 : loss : 0.410513, supervised_loss: 0.410493
[20:10:12.497] iteration 45 : loss : 0.443635, supervised_loss: 0.443619
[20:10:13.396] iteration 46 : loss : 0.411431, supervised_loss: 0.411408
[20:10:14.295] iteration 47 : loss : 0.423822, supervised_loss: 0.423799
[20:10:15.195] iteration 48 : loss : 0.418941, supervised_loss: 0.418916
[20:10:16.723] iteration 49 : loss : 0.404946, supervised_loss: 0.404922
[20:10:17.626] iteration 50 : loss : 0.437035, supervised_loss: 0.437012
[20:10:18.530] iteration 51 : loss : 0.395794, supervised_loss: 0.395774
[20:10:19.437] iteration 52 : loss : 0.394726, supervised_loss: 0.394706
[20:10:20.343] iteration 53 : loss : 0.391601, supervised_loss: 0.391582
[20:10:21.249] iteration 54 : loss : 0.401338, supervised_loss: 0.401323
[20:10:22.156] iteration 55 : loss : 0.456767, supervised_loss: 0.456748
[20:10:23.065] iteration 56 : loss : 0.432990, supervised_loss: 0.432969
[20:10:23.974] iteration 57 : loss : 0.420818, supervised_loss: 0.420803
[20:10:24.883] iteration 58 : loss : 0.386807, supervised_loss: 0.386782
[20:10:25.792] iteration 59 : loss : 0.394614, supervised_loss: 0.394597
[20:10:26.702] iteration 60 : loss : 0.405369, supervised_loss: 0.405353
[20:10:28.249] iteration 61 : loss : 0.426742, supervised_loss: 0.426729
[20:10:29.159] iteration 62 : loss : 0.399695, supervised_loss: 0.399682
[20:10:30.070] iteration 63 : loss : 0.413551, supervised_loss: 0.413536
[20:10:30.980] iteration 64 : loss : 0.393503, supervised_loss: 0.393485
[20:10:31.891] iteration 65 : loss : 0.439011, supervised_loss: 0.438989
[20:10:32.802] iteration 66 : loss : 0.368819, supervised_loss: 0.368804
[20:10:33.713] iteration 67 : loss : 0.400054, supervised_loss: 0.400040
[20:10:34.625] iteration 68 : loss : 0.383022, supervised_loss: 0.383008
[20:10:35.537] iteration 69 : loss : 0.389521, supervised_loss: 0.389495
[20:10:36.448] iteration 70 : loss : 0.424913, supervised_loss: 0.424893
[20:10:37.357] iteration 71 : loss : 0.374180, supervised_loss: 0.374164
[20:10:38.271] iteration 72 : loss : 0.411636, supervised_loss: 0.411621
[20:10:39.761] iteration 73 : loss : 0.405011, supervised_loss: 0.404997
[20:10:40.672] iteration 74 : loss : 0.382415, supervised_loss: 0.382396
[20:10:41.583] iteration 75 : loss : 0.367402, supervised_loss: 0.367388
[20:10:42.494] iteration 76 : loss : 0.374525, supervised_loss: 0.374508
[20:10:43.406] iteration 77 : loss : 0.407809, supervised_loss: 0.407789
[20:10:44.316] iteration 78 : loss : 0.367414, supervised_loss: 0.367400
[20:10:45.228] iteration 79 : loss : 0.383087, supervised_loss: 0.383075
[20:10:46.141] iteration 80 : loss : 0.456668, supervised_loss: 0.456657
[20:10:47.053] iteration 81 : loss : 0.376954, supervised_loss: 0.376944
[20:10:47.964] iteration 82 : loss : 0.447434, supervised_loss: 0.447421
[20:10:48.877] iteration 83 : loss : 0.372614, supervised_loss: 0.372601
[20:10:49.789] iteration 84 : loss : 0.374808, supervised_loss: 0.374794
[20:10:51.292] iteration 85 : loss : 0.377503, supervised_loss: 0.377492
[20:10:52.202] iteration 86 : loss : 0.395191, supervised_loss: 0.395173
[20:10:53.113] iteration 87 : loss : 0.436420, supervised_loss: 0.436408
[20:10:54.025] iteration 88 : loss : 0.400941, supervised_loss: 0.400932
[20:10:54.937] iteration 89 : loss : 0.391497, supervised_loss: 0.391482
[20:10:55.957] iteration 90 : loss : 0.381677, supervised_loss: 0.381664
[20:10:56.869] iteration 91 : loss : 0.385803, supervised_loss: 0.385784
[20:10:57.780] iteration 92 : loss : 0.397018, supervised_loss: 0.396998
[20:10:58.691] iteration 93 : loss : 0.364686, supervised_loss: 0.364673
[20:10:59.602] iteration 94 : loss : 0.370169, supervised_loss: 0.370153
[20:11:00.514] iteration 95 : loss : 0.382002, supervised_loss: 0.381989
[20:11:01.427] iteration 96 : loss : 0.375631, supervised_loss: 0.375621
[20:11:02.929] iteration 97 : loss : 0.368515, supervised_loss: 0.368505
[20:11:03.841] iteration 98 : loss : 0.374098, supervised_loss: 0.374082
[20:11:04.752] iteration 99 : loss : 0.393629, supervised_loss: 0.393616
[20:11:05.664] iteration 100 : loss : 0.354709, supervised_loss: 0.354698
[20:11:06.576] iteration 101 : loss : 0.358665, supervised_loss: 0.358652
[20:11:07.488] iteration 102 : loss : 0.369999, supervised_loss: 0.369987
[20:11:08.399] iteration 103 : loss : 0.393690, supervised_loss: 0.393680
[20:11:09.311] iteration 104 : loss : 0.394481, supervised_loss: 0.394471
[20:11:10.223] iteration 105 : loss : 0.413550, supervised_loss: 0.413538
[20:11:11.136] iteration 106 : loss : 0.360433, supervised_loss: 0.360422
[20:11:12.048] iteration 107 : loss : 0.383829, supervised_loss: 0.383817
[20:11:12.961] iteration 108 : loss : 0.373700, supervised_loss: 0.373682
[20:11:14.498] iteration 109 : loss : 0.355406, supervised_loss: 0.355396
[20:11:15.411] iteration 110 : loss : 0.357595, supervised_loss: 0.357583
[20:11:16.322] iteration 111 : loss : 0.389313, supervised_loss: 0.389299
[20:11:17.234] iteration 112 : loss : 0.413442, supervised_loss: 0.413425
[20:11:18.146] iteration 113 : loss : 0.366779, supervised_loss: 0.366768
[20:11:19.057] iteration 114 : loss : 0.390660, supervised_loss: 0.390644
[20:11:19.970] iteration 115 : loss : 0.424070, supervised_loss: 0.424057
[20:11:20.882] iteration 116 : loss : 0.366932, supervised_loss: 0.366920
[20:11:21.794] iteration 117 : loss : 0.388077, supervised_loss: 0.388064
[20:11:22.706] iteration 118 : loss : 0.375345, supervised_loss: 0.375330
[20:11:23.619] iteration 119 : loss : 0.348382, supervised_loss: 0.348372
[20:11:24.532] iteration 120 : loss : 0.375448, supervised_loss: 0.375436
[20:11:26.026] iteration 121 : loss : 0.361787, supervised_loss: 0.361775
[20:11:26.937] iteration 122 : loss : 0.438102, supervised_loss: 0.438091
[20:11:27.850] iteration 123 : loss : 0.405210, supervised_loss: 0.405199
[20:11:28.762] iteration 124 : loss : 0.365630, supervised_loss: 0.365614
[20:11:29.674] iteration 125 : loss : 0.354741, supervised_loss: 0.354732
[20:11:30.587] iteration 126 : loss : 0.382937, supervised_loss: 0.382927
[20:11:31.500] iteration 127 : loss : 0.378927, supervised_loss: 0.378916
[20:11:32.412] iteration 128 : loss : 0.383528, supervised_loss: 0.383514
[20:11:33.324] iteration 129 : loss : 0.341939, supervised_loss: 0.341926
[20:11:34.237] iteration 130 : loss : 0.362581, supervised_loss: 0.362571
[20:11:35.150] iteration 131 : loss : 0.387413, supervised_loss: 0.387397
[20:11:36.063] iteration 132 : loss : 0.358062, supervised_loss: 0.358050
[20:11:37.763] iteration 133 : loss : 0.371713, supervised_loss: 0.371699
[20:11:38.675] iteration 134 : loss : 0.379361, supervised_loss: 0.379354
[20:11:39.586] iteration 135 : loss : 0.382842, supervised_loss: 0.382829
[20:11:40.500] iteration 136 : loss : 0.363075, supervised_loss: 0.363066
[20:11:41.411] iteration 137 : loss : 0.374360, supervised_loss: 0.374348
[20:11:42.323] iteration 138 : loss : 0.356084, supervised_loss: 0.356073
[20:11:43.236] iteration 139 : loss : 0.399362, supervised_loss: 0.399348
[20:11:44.150] iteration 140 : loss : 0.354921, supervised_loss: 0.354912
[20:11:45.062] iteration 141 : loss : 0.349614, supervised_loss: 0.349601
[20:11:45.975] iteration 142 : loss : 0.344764, supervised_loss: 0.344750
[20:11:46.889] iteration 143 : loss : 0.349939, supervised_loss: 0.349929
[20:11:47.803] iteration 144 : loss : 0.333005, supervised_loss: 0.332996
[20:11:49.306] iteration 145 : loss : 0.352553, supervised_loss: 0.352542
[20:11:50.220] iteration 146 : loss : 0.366300, supervised_loss: 0.366287
[20:11:51.132] iteration 147 : loss : 0.391670, supervised_loss: 0.391661
[20:11:52.044] iteration 148 : loss : 0.359734, supervised_loss: 0.359721
[20:11:52.957] iteration 149 : loss : 0.344001, supervised_loss: 0.343988
[20:11:53.870] iteration 150 : loss : 0.336784, supervised_loss: 0.336771
[20:11:54.783] iteration 151 : loss : 0.344059, supervised_loss: 0.344046
[20:11:55.697] iteration 152 : loss : 0.375258, supervised_loss: 0.375246
[20:11:56.610] iteration 153 : loss : 0.353782, supervised_loss: 0.353769
[20:11:57.522] iteration 154 : loss : 0.373322, supervised_loss: 0.373302
[20:11:58.435] iteration 155 : loss : 0.349689, supervised_loss: 0.349675
[20:11:59.350] iteration 156 : loss : 0.366493, supervised_loss: 0.366479
[20:12:00.872] iteration 157 : loss : 0.380947, supervised_loss: 0.380926
[20:12:01.786] iteration 158 : loss : 0.356107, supervised_loss: 0.356090
[20:12:02.698] iteration 159 : loss : 0.346481, supervised_loss: 0.346468
[20:12:03.610] iteration 160 : loss : 0.357890, supervised_loss: 0.357874
[20:12:04.523] iteration 161 : loss : 0.348372, supervised_loss: 0.348358
[20:12:05.435] iteration 162 : loss : 0.340828, supervised_loss: 0.340815
[20:12:06.347] iteration 163 : loss : 0.344558, supervised_loss: 0.344542
[20:12:07.260] iteration 164 : loss : 0.354321, supervised_loss: 0.354305
[20:12:08.173] iteration 165 : loss : 0.337620, supervised_loss: 0.337608
[20:12:09.085] iteration 166 : loss : 0.350779, supervised_loss: 0.350765
[20:12:09.999] iteration 167 : loss : 0.373389, supervised_loss: 0.373377
[20:12:10.911] iteration 168 : loss : 0.380799, supervised_loss: 0.380784
[20:12:12.500] iteration 169 : loss : 0.339633, supervised_loss: 0.339616
[20:12:13.412] iteration 170 : loss : 0.378125, supervised_loss: 0.378109
[20:12:14.324] iteration 171 : loss : 0.340230, supervised_loss: 0.340213
[20:12:15.238] iteration 172 : loss : 0.346139, supervised_loss: 0.346128
[20:12:16.149] iteration 173 : loss : 0.333624, supervised_loss: 0.333611
[20:12:17.062] iteration 174 : loss : 0.331570, supervised_loss: 0.331556
[20:12:17.975] iteration 175 : loss : 0.360223, supervised_loss: 0.360206
[20:12:18.890] iteration 176 : loss : 0.341629, supervised_loss: 0.341614
[20:12:19.803] iteration 177 : loss : 0.333604, supervised_loss: 0.333594
[20:12:20.715] iteration 178 : loss : 0.423916, supervised_loss: 0.423900
[20:12:21.628] iteration 179 : loss : 0.354756, supervised_loss: 0.354736
[20:12:22.539] iteration 180 : loss : 0.340189, supervised_loss: 0.340175
[20:12:24.118] iteration 181 : loss : 0.366779, supervised_loss: 0.366768
[20:12:25.030] iteration 182 : loss : 0.351102, supervised_loss: 0.351087
[20:12:25.944] iteration 183 : loss : 0.355957, supervised_loss: 0.355944
[20:12:26.857] iteration 184 : loss : 0.369134, supervised_loss: 0.369123
[20:12:27.770] iteration 185 : loss : 0.329407, supervised_loss: 0.329397
[20:12:28.682] iteration 186 : loss : 0.357499, supervised_loss: 0.357482
[20:12:29.594] iteration 187 : loss : 0.386009, supervised_loss: 0.385997
[20:12:30.508] iteration 188 : loss : 0.327910, supervised_loss: 0.327898
[20:12:31.421] iteration 189 : loss : 0.340638, supervised_loss: 0.340627
[20:12:32.335] iteration 190 : loss : 0.359522, supervised_loss: 0.359511
[20:12:33.247] iteration 191 : loss : 0.327090, supervised_loss: 0.327075
[20:12:34.159] iteration 192 : loss : 0.357459, supervised_loss: 0.357445
[20:12:35.768] iteration 193 : loss : 0.338308, supervised_loss: 0.338296
[20:12:36.681] iteration 194 : loss : 0.361282, supervised_loss: 0.361272
[20:12:37.594] iteration 195 : loss : 0.387697, supervised_loss: 0.387684
[20:12:38.506] iteration 196 : loss : 0.320517, supervised_loss: 0.320504
[20:12:39.418] iteration 197 : loss : 0.344156, supervised_loss: 0.344142
[20:12:40.331] iteration 198 : loss : 0.340743, supervised_loss: 0.340731
[20:12:41.244] iteration 199 : loss : 0.336048, supervised_loss: 0.336035
[20:12:42.157] iteration 200 : loss : 0.340443, supervised_loss: 0.340429
[20:12:43.070] iteration 201 : loss : 0.355717, supervised_loss: 0.355705
[20:12:43.982] iteration 202 : loss : 0.329138, supervised_loss: 0.329117
[20:12:44.896] iteration 203 : loss : 0.411815, supervised_loss: 0.411803
[20:12:45.809] iteration 204 : loss : 0.333195, supervised_loss: 0.333181
[20:12:47.299] iteration 205 : loss : 0.379416, supervised_loss: 0.379407
[20:12:48.213] iteration 206 : loss : 0.327509, supervised_loss: 0.327493
[20:12:49.125] iteration 207 : loss : 0.356075, supervised_loss: 0.356061
[20:12:50.038] iteration 208 : loss : 0.335373, supervised_loss: 0.335359
[20:12:50.951] iteration 209 : loss : 0.356239, supervised_loss: 0.356228
[20:12:51.864] iteration 210 : loss : 0.334518, supervised_loss: 0.334502
[20:12:52.778] iteration 211 : loss : 0.351046, supervised_loss: 0.351036
[20:12:53.693] iteration 212 : loss : 0.397068, supervised_loss: 0.397058
[20:12:54.607] iteration 213 : loss : 0.327254, supervised_loss: 0.327240
[20:12:55.521] iteration 214 : loss : 0.324120, supervised_loss: 0.324106
[20:12:56.435] iteration 215 : loss : 0.357312, supervised_loss: 0.357300
[20:12:57.349] iteration 216 : loss : 0.346535, supervised_loss: 0.346521
[20:12:58.878] iteration 217 : loss : 0.329762, supervised_loss: 0.329748
[20:12:59.790] iteration 218 : loss : 0.336325, supervised_loss: 0.336314
[20:13:00.702] iteration 219 : loss : 0.346109, supervised_loss: 0.346098
[20:13:01.615] iteration 220 : loss : 0.343628, supervised_loss: 0.343616
[20:13:02.529] iteration 221 : loss : 0.334876, supervised_loss: 0.334863
[20:13:03.441] iteration 222 : loss : 0.348915, supervised_loss: 0.348898
[20:13:04.353] iteration 223 : loss : 0.414369, supervised_loss: 0.414353
[20:13:05.265] iteration 224 : loss : 0.404359, supervised_loss: 0.404351
[20:13:06.178] iteration 225 : loss : 0.334217, supervised_loss: 0.334203
[20:13:07.093] iteration 226 : loss : 0.336720, supervised_loss: 0.336705
[20:13:08.006] iteration 227 : loss : 0.333309, supervised_loss: 0.333298
[20:13:08.921] iteration 228 : loss : 0.337413, supervised_loss: 0.337403
[20:13:10.418] iteration 229 : loss : 0.359601, supervised_loss: 0.359583
[20:13:11.332] iteration 230 : loss : 0.353351, supervised_loss: 0.353340
[20:13:12.245] iteration 231 : loss : 0.351327, supervised_loss: 0.351309
[20:13:13.159] iteration 232 : loss : 0.376837, supervised_loss: 0.376817
[20:13:14.073] iteration 233 : loss : 0.329886, supervised_loss: 0.329870
[20:13:14.986] iteration 234 : loss : 0.342257, supervised_loss: 0.342245
[20:13:15.899] iteration 235 : loss : 0.361013, supervised_loss: 0.361002
[20:13:16.813] iteration 236 : loss : 0.363744, supervised_loss: 0.363732
[20:13:17.726] iteration 237 : loss : 0.335998, supervised_loss: 0.335983
[20:13:18.640] iteration 238 : loss : 0.360575, supervised_loss: 0.360566
[20:13:19.552] iteration 239 : loss : 0.326457, supervised_loss: 0.326448
[20:13:20.467] iteration 240 : loss : 0.394081, supervised_loss: 0.394073
[20:13:21.954] iteration 241 : loss : 0.342629, supervised_loss: 0.342618
[20:13:22.867] iteration 242 : loss : 0.333264, supervised_loss: 0.333257
[20:13:23.779] iteration 243 : loss : 0.334484, supervised_loss: 0.334474
[20:13:24.692] iteration 244 : loss : 0.324770, supervised_loss: 0.324758
[20:13:25.606] iteration 245 : loss : 0.336312, supervised_loss: 0.336303
[20:13:26.519] iteration 246 : loss : 0.392177, supervised_loss: 0.392168
[20:13:27.433] iteration 247 : loss : 0.321700, supervised_loss: 0.321687
[20:13:28.347] iteration 248 : loss : 0.342501, supervised_loss: 0.342489
[20:13:29.261] iteration 249 : loss : 0.325652, supervised_loss: 0.325643
[20:13:30.174] iteration 250 : loss : 0.354287, supervised_loss: 0.354277
[20:13:31.087] iteration 251 : loss : 0.404407, supervised_loss: 0.404397
[20:13:32.000] iteration 252 : loss : 0.341529, supervised_loss: 0.341519
[20:13:33.542] iteration 253 : loss : 0.333114, supervised_loss: 0.333102
[20:13:34.456] iteration 254 : loss : 0.312802, supervised_loss: 0.312790
[20:13:35.370] iteration 255 : loss : 0.327840, supervised_loss: 0.327829
[20:13:36.284] iteration 256 : loss : 0.313923, supervised_loss: 0.313907
[20:13:37.196] iteration 257 : loss : 0.387390, supervised_loss: 0.387382
[20:13:38.108] iteration 258 : loss : 0.335458, supervised_loss: 0.335445
[20:13:39.021] iteration 259 : loss : 0.356531, supervised_loss: 0.356516
[20:13:39.935] iteration 260 : loss : 0.336072, supervised_loss: 0.336063
[20:13:40.849] iteration 261 : loss : 0.327653, supervised_loss: 0.327642
[20:13:41.764] iteration 262 : loss : 0.358401, supervised_loss: 0.358389
[20:13:42.680] iteration 263 : loss : 0.331620, supervised_loss: 0.331609
[20:13:43.592] iteration 264 : loss : 0.345062, supervised_loss: 0.345052
[20:13:45.129] iteration 265 : loss : 0.320380, supervised_loss: 0.320362
[20:13:46.043] iteration 266 : loss : 0.316647, supervised_loss: 0.316634
[20:13:46.956] iteration 267 : loss : 0.323374, supervised_loss: 0.323361
[20:13:47.869] iteration 268 : loss : 0.340265, supervised_loss: 0.340251
[20:13:48.783] iteration 269 : loss : 0.342258, supervised_loss: 0.342246
[20:13:49.696] iteration 270 : loss : 0.336118, supervised_loss: 0.336105
[20:13:50.608] iteration 271 : loss : 0.320892, supervised_loss: 0.320877
[20:13:51.521] iteration 272 : loss : 0.341073, supervised_loss: 0.341061
[20:13:52.435] iteration 273 : loss : 0.336894, supervised_loss: 0.336882
[20:13:53.348] iteration 274 : loss : 0.359140, supervised_loss: 0.359128
[20:13:54.261] iteration 275 : loss : 0.327879, supervised_loss: 0.327868
[20:13:55.176] iteration 276 : loss : 0.349353, supervised_loss: 0.349345
[20:13:56.714] iteration 277 : loss : 0.329809, supervised_loss: 0.329801
[20:13:57.628] iteration 278 : loss : 0.347828, supervised_loss: 0.347817
[20:13:58.540] iteration 279 : loss : 0.314622, supervised_loss: 0.314611
[20:13:59.453] iteration 280 : loss : 0.370804, supervised_loss: 0.370797
[20:14:00.366] iteration 281 : loss : 0.316107, supervised_loss: 0.316095
[20:14:01.279] iteration 282 : loss : 0.312695, supervised_loss: 0.312684
[20:14:02.193] iteration 283 : loss : 0.315135, supervised_loss: 0.315127
[20:14:03.106] iteration 284 : loss : 0.338550, supervised_loss: 0.338537
[20:14:04.019] iteration 285 : loss : 0.357749, supervised_loss: 0.357735
[20:14:04.932] iteration 286 : loss : 0.316095, supervised_loss: 0.316081
[20:14:05.846] iteration 287 : loss : 0.355275, supervised_loss: 0.355265
[20:14:06.759] iteration 288 : loss : 0.337047, supervised_loss: 0.337036
[20:14:08.355] iteration 289 : loss : 0.309521, supervised_loss: 0.309505
[20:14:09.267] iteration 290 : loss : 0.324309, supervised_loss: 0.324299
[20:14:10.180] iteration 291 : loss : 0.319784, supervised_loss: 0.319773
[20:14:11.092] iteration 292 : loss : 0.317710, supervised_loss: 0.317699
[20:14:12.005] iteration 293 : loss : 0.331985, supervised_loss: 0.331976
[20:14:12.918] iteration 294 : loss : 0.314737, supervised_loss: 0.314725
[20:14:13.834] iteration 295 : loss : 0.315434, supervised_loss: 0.315423
[20:14:14.748] iteration 296 : loss : 0.351668, supervised_loss: 0.351657
[20:14:15.662] iteration 297 : loss : 0.331934, supervised_loss: 0.331919
[20:14:16.577] iteration 298 : loss : 0.369543, supervised_loss: 0.369529
[20:14:17.490] iteration 299 : loss : 0.343056, supervised_loss: 0.343044
[20:14:18.403] iteration 300 : loss : 0.317887, supervised_loss: 0.317874
[20:14:20.064] iteration 301 : loss : 0.323923, supervised_loss: 0.323908
[20:14:20.975] iteration 302 : loss : 0.318547, supervised_loss: 0.318526
[20:14:21.889] iteration 303 : loss : 0.316299, supervised_loss: 0.316287
[20:14:22.802] iteration 304 : loss : 0.317262, supervised_loss: 0.317245
[20:14:23.715] iteration 305 : loss : 0.323847, supervised_loss: 0.323834
[20:14:24.629] iteration 306 : loss : 0.318607, supervised_loss: 0.318592
[20:14:25.542] iteration 307 : loss : 0.371638, supervised_loss: 0.371617
[20:14:26.455] iteration 308 : loss : 0.330468, supervised_loss: 0.330451
[20:14:27.369] iteration 309 : loss : 0.314524, supervised_loss: 0.314509
[20:14:28.283] iteration 310 : loss : 0.378065, supervised_loss: 0.378047
[20:14:29.196] iteration 311 : loss : 0.335436, supervised_loss: 0.335419
[20:14:30.110] iteration 312 : loss : 0.339524, supervised_loss: 0.339514
[20:14:31.585] iteration 313 : loss : 0.326271, supervised_loss: 0.326259
[20:14:32.497] iteration 314 : loss : 0.324447, supervised_loss: 0.324436
[20:14:33.410] iteration 315 : loss : 0.317820, supervised_loss: 0.317805
[20:14:34.324] iteration 316 : loss : 0.361398, supervised_loss: 0.361388
[20:14:35.235] iteration 317 : loss : 0.328453, supervised_loss: 0.328439
[20:14:36.148] iteration 318 : loss : 0.327810, supervised_loss: 0.327799
[20:14:37.063] iteration 319 : loss : 0.319918, supervised_loss: 0.319901
[20:14:37.976] iteration 320 : loss : 0.307982, supervised_loss: 0.307972
[20:14:38.889] iteration 321 : loss : 0.316564, supervised_loss: 0.316549
[20:14:39.803] iteration 322 : loss : 0.323073, supervised_loss: 0.323062
[20:14:40.716] iteration 323 : loss : 0.316399, supervised_loss: 0.316382
[20:14:41.629] iteration 324 : loss : 0.320753, supervised_loss: 0.320738
[20:14:43.185] iteration 325 : loss : 0.325760, supervised_loss: 0.325743
[20:14:44.098] iteration 326 : loss : 0.332604, supervised_loss: 0.332587
[20:14:45.011] iteration 327 : loss : 0.320658, supervised_loss: 0.320647
[20:14:45.924] iteration 328 : loss : 0.324846, supervised_loss: 0.324832
[20:14:46.839] iteration 329 : loss : 0.307084, supervised_loss: 0.307070
[20:14:47.752] iteration 330 : loss : 0.319112, supervised_loss: 0.319101
[20:14:48.667] iteration 331 : loss : 0.323397, supervised_loss: 0.323382
[20:14:49.580] iteration 332 : loss : 0.347505, supervised_loss: 0.347489
[20:14:50.493] iteration 333 : loss : 0.319053, supervised_loss: 0.319039
[20:14:51.406] iteration 334 : loss : 0.357176, supervised_loss: 0.357163
[20:14:52.320] iteration 335 : loss : 0.314924, supervised_loss: 0.314911
[20:14:53.233] iteration 336 : loss : 0.337956, supervised_loss: 0.337942
[20:14:54.889] iteration 337 : loss : 0.313502, supervised_loss: 0.313491
[20:14:55.801] iteration 338 : loss : 0.312030, supervised_loss: 0.312015
[20:14:56.713] iteration 339 : loss : 0.313997, supervised_loss: 0.313980
[20:14:57.626] iteration 340 : loss : 0.311254, supervised_loss: 0.311237
[20:14:58.540] iteration 341 : loss : 0.338008, supervised_loss: 0.337995
[20:14:59.452] iteration 342 : loss : 0.310744, supervised_loss: 0.310728
[20:15:00.367] iteration 343 : loss : 0.349174, supervised_loss: 0.349161
[20:15:01.279] iteration 344 : loss : 0.341379, supervised_loss: 0.341368
[20:15:02.192] iteration 345 : loss : 0.315029, supervised_loss: 0.315015
[20:15:03.105] iteration 346 : loss : 0.315063, supervised_loss: 0.315048
[20:15:04.019] iteration 347 : loss : 0.353402, supervised_loss: 0.353392
[20:15:04.933] iteration 348 : loss : 0.311085, supervised_loss: 0.311071
[20:15:06.475] iteration 349 : loss : 0.323599, supervised_loss: 0.323583
[20:15:07.386] iteration 350 : loss : 0.310602, supervised_loss: 0.310590
[20:15:08.300] iteration 351 : loss : 0.309064, supervised_loss: 0.309046
[20:15:09.212] iteration 352 : loss : 0.318801, supervised_loss: 0.318787
[20:15:10.125] iteration 353 : loss : 0.310620, supervised_loss: 0.310605
[20:15:11.037] iteration 354 : loss : 0.355956, supervised_loss: 0.355940
[20:15:11.949] iteration 355 : loss : 0.328768, supervised_loss: 0.328756
[20:15:12.861] iteration 356 : loss : 0.313424, supervised_loss: 0.313407
[20:15:13.774] iteration 357 : loss : 0.332771, supervised_loss: 0.332758
[20:15:14.687] iteration 358 : loss : 0.313863, supervised_loss: 0.313849
[20:15:15.600] iteration 359 : loss : 0.338608, supervised_loss: 0.338593
[20:15:16.513] iteration 360 : loss : 0.311918, supervised_loss: 0.311901
[20:15:18.066] iteration 361 : loss : 0.318680, supervised_loss: 0.318660
[20:15:18.977] iteration 362 : loss : 0.314451, supervised_loss: 0.314440
[20:15:19.889] iteration 363 : loss : 0.329280, supervised_loss: 0.329263
[20:15:20.801] iteration 364 : loss : 0.325402, supervised_loss: 0.325385
[20:15:21.716] iteration 365 : loss : 0.328893, supervised_loss: 0.328874
[20:15:22.631] iteration 366 : loss : 0.335735, supervised_loss: 0.335716
[20:15:23.544] iteration 367 : loss : 0.325595, supervised_loss: 0.325585
[20:15:24.456] iteration 368 : loss : 0.300099, supervised_loss: 0.300085
[20:15:25.368] iteration 369 : loss : 0.326542, supervised_loss: 0.326529
[20:15:26.280] iteration 370 : loss : 0.325266, supervised_loss: 0.325245
[20:15:27.194] iteration 371 : loss : 0.309714, supervised_loss: 0.309702
[20:15:28.107] iteration 372 : loss : 0.311747, supervised_loss: 0.311735
[20:15:29.732] iteration 373 : loss : 0.368860, supervised_loss: 0.368844
[20:15:30.644] iteration 374 : loss : 0.390237, supervised_loss: 0.390226
[20:15:31.557] iteration 375 : loss : 0.323830, supervised_loss: 0.323817
[20:15:32.469] iteration 376 : loss : 0.308908, supervised_loss: 0.308890
[20:15:33.382] iteration 377 : loss : 0.322056, supervised_loss: 0.322043
[20:15:34.296] iteration 378 : loss : 0.312349, supervised_loss: 0.312334
[20:15:35.209] iteration 379 : loss : 0.312985, supervised_loss: 0.312964
[20:15:36.122] iteration 380 : loss : 0.324499, supervised_loss: 0.324487
[20:15:37.034] iteration 381 : loss : 0.316822, supervised_loss: 0.316808
[20:15:37.948] iteration 382 : loss : 0.309395, supervised_loss: 0.309379
[20:15:38.860] iteration 383 : loss : 0.314897, supervised_loss: 0.314882
[20:15:39.773] iteration 384 : loss : 0.320182, supervised_loss: 0.320169
[20:15:41.421] iteration 385 : loss : 0.327985, supervised_loss: 0.327970
[20:15:42.333] iteration 386 : loss : 0.317916, supervised_loss: 0.317902
[20:15:43.246] iteration 387 : loss : 0.325677, supervised_loss: 0.325666
[20:15:44.158] iteration 388 : loss : 0.351916, supervised_loss: 0.351905
[20:15:45.071] iteration 389 : loss : 0.311386, supervised_loss: 0.311372
[20:15:45.984] iteration 390 : loss : 0.307861, supervised_loss: 0.307848
[20:15:46.896] iteration 391 : loss : 0.304763, supervised_loss: 0.304749
[20:15:47.809] iteration 392 : loss : 0.296474, supervised_loss: 0.296462
[20:15:48.720] iteration 393 : loss : 0.313106, supervised_loss: 0.313094
[20:15:49.633] iteration 394 : loss : 0.309683, supervised_loss: 0.309671
[20:15:50.548] iteration 395 : loss : 0.331496, supervised_loss: 0.331485
[20:15:51.462] iteration 396 : loss : 0.312429, supervised_loss: 0.312416
[20:15:52.951] iteration 397 : loss : 0.311169, supervised_loss: 0.311153
[20:15:53.863] iteration 398 : loss : 0.349769, supervised_loss: 0.349752
[20:15:54.776] iteration 399 : loss : 0.325866, supervised_loss: 0.325851
[20:15:55.689] iteration 400 : loss : 0.307824, supervised_loss: 0.307811
[20:15:56.600] iteration 401 : loss : 0.317088, supervised_loss: 0.317072
[20:15:57.514] iteration 402 : loss : 0.322749, supervised_loss: 0.322737
[20:15:58.426] iteration 403 : loss : 0.301533, supervised_loss: 0.301508
[20:15:59.339] iteration 404 : loss : 0.310595, supervised_loss: 0.310583
[20:16:00.251] iteration 405 : loss : 0.308757, supervised_loss: 0.308739
[20:16:01.163] iteration 406 : loss : 0.338526, supervised_loss: 0.338515
[20:16:02.077] iteration 407 : loss : 0.351353, supervised_loss: 0.351341
[20:16:02.990] iteration 408 : loss : 0.324761, supervised_loss: 0.324748
[20:16:04.565] iteration 409 : loss : 0.309122, supervised_loss: 0.309108
[20:16:05.478] iteration 410 : loss : 0.320036, supervised_loss: 0.320022
[20:16:06.392] iteration 411 : loss : 0.312357, supervised_loss: 0.312341
[20:16:07.304] iteration 412 : loss : 0.311473, supervised_loss: 0.311455
[20:16:08.215] iteration 413 : loss : 0.319882, supervised_loss: 0.319862
[20:16:09.127] iteration 414 : loss : 0.336728, supervised_loss: 0.336717
[20:16:10.041] iteration 415 : loss : 0.365309, supervised_loss: 0.365292
[20:16:10.955] iteration 416 : loss : 0.322877, supervised_loss: 0.322865
[20:16:11.868] iteration 417 : loss : 0.314952, supervised_loss: 0.314942
[20:16:12.780] iteration 418 : loss : 0.317552, supervised_loss: 0.317533
[20:16:13.693] iteration 419 : loss : 0.306019, supervised_loss: 0.305999
[20:16:14.606] iteration 420 : loss : 0.322581, supervised_loss: 0.322562
[20:16:16.192] iteration 421 : loss : 0.335390, supervised_loss: 0.335378
[20:16:17.105] iteration 422 : loss : 0.321266, supervised_loss: 0.321251
[20:16:18.020] iteration 423 : loss : 0.319391, supervised_loss: 0.319375
[20:16:18.933] iteration 424 : loss : 0.334856, supervised_loss: 0.334838
[20:16:19.844] iteration 425 : loss : 0.330871, supervised_loss: 0.330859
[20:16:20.756] iteration 426 : loss : 0.307116, supervised_loss: 0.307106
[20:16:21.669] iteration 427 : loss : 0.310054, supervised_loss: 0.310040
[20:16:22.583] iteration 428 : loss : 0.315410, supervised_loss: 0.315397
[20:16:23.497] iteration 429 : loss : 0.293965, supervised_loss: 0.293952
[20:16:24.411] iteration 430 : loss : 0.315197, supervised_loss: 0.315181
[20:16:25.324] iteration 431 : loss : 0.306819, supervised_loss: 0.306800
[20:16:26.238] iteration 432 : loss : 0.339254, supervised_loss: 0.339243
[20:16:27.902] iteration 433 : loss : 0.313705, supervised_loss: 0.313684
[20:16:28.814] iteration 434 : loss : 0.336993, supervised_loss: 0.336978
[20:16:29.727] iteration 435 : loss : 0.317506, supervised_loss: 0.317493
[20:16:30.640] iteration 436 : loss : 0.312492, supervised_loss: 0.312477
[20:16:31.552] iteration 437 : loss : 0.318446, supervised_loss: 0.318432
[20:16:32.464] iteration 438 : loss : 0.310323, supervised_loss: 0.310307
[20:16:33.377] iteration 439 : loss : 0.319548, supervised_loss: 0.319535
[20:16:34.291] iteration 440 : loss : 0.315795, supervised_loss: 0.315783
[20:16:35.205] iteration 441 : loss : 0.306090, supervised_loss: 0.306078
[20:16:36.117] iteration 442 : loss : 0.300795, supervised_loss: 0.300783
[20:16:37.032] iteration 443 : loss : 0.309845, supervised_loss: 0.309827
[20:16:37.945] iteration 444 : loss : 0.337579, supervised_loss: 0.337566
[20:16:39.498] iteration 445 : loss : 0.304696, supervised_loss: 0.304684
[20:16:40.410] iteration 446 : loss : 0.318658, supervised_loss: 0.318642
[20:16:41.323] iteration 447 : loss : 0.354994, supervised_loss: 0.354978
[20:16:42.236] iteration 448 : loss : 0.302012, supervised_loss: 0.301999
[20:16:43.148] iteration 449 : loss : 0.323791, supervised_loss: 0.323778
[20:16:44.061] iteration 450 : loss : 0.318162, supervised_loss: 0.318147
[20:16:44.972] iteration 451 : loss : 0.305381, supervised_loss: 0.305368
[20:16:45.885] iteration 452 : loss : 0.304982, supervised_loss: 0.304965
[20:16:46.799] iteration 453 : loss : 0.303041, supervised_loss: 0.303023
[20:16:47.713] iteration 454 : loss : 0.326990, supervised_loss: 0.326967
[20:16:48.625] iteration 455 : loss : 0.326006, supervised_loss: 0.325982
[20:16:49.537] iteration 456 : loss : 0.306365, supervised_loss: 0.306346
[20:16:51.143] iteration 457 : loss : 0.313229, supervised_loss: 0.313216
[20:16:52.056] iteration 458 : loss : 0.301833, supervised_loss: 0.301818
[20:16:52.969] iteration 459 : loss : 0.304892, supervised_loss: 0.304879
[20:16:53.882] iteration 460 : loss : 0.314566, supervised_loss: 0.314545
[20:16:54.794] iteration 461 : loss : 0.326437, supervised_loss: 0.326415
[20:16:55.706] iteration 462 : loss : 0.312320, supervised_loss: 0.312296
[20:16:56.619] iteration 463 : loss : 0.327489, supervised_loss: 0.327457
[20:16:57.533] iteration 464 : loss : 0.303193, supervised_loss: 0.303176
[20:16:58.446] iteration 465 : loss : 0.301542, supervised_loss: 0.301522
[20:16:59.359] iteration 466 : loss : 0.303436, supervised_loss: 0.303424
[20:17:00.272] iteration 467 : loss : 0.321199, supervised_loss: 0.321183
[20:17:01.185] iteration 468 : loss : 0.304360, supervised_loss: 0.304344
[20:17:02.637] iteration 469 : loss : 0.334791, supervised_loss: 0.334775
[20:17:03.550] iteration 470 : loss : 0.313272, supervised_loss: 0.313261
[20:17:04.462] iteration 471 : loss : 0.307778, supervised_loss: 0.307759
[20:17:05.376] iteration 472 : loss : 0.310235, supervised_loss: 0.310220
[20:17:06.289] iteration 473 : loss : 0.311465, supervised_loss: 0.311451
[20:17:07.200] iteration 474 : loss : 0.332414, supervised_loss: 0.332398
[20:17:08.114] iteration 475 : loss : 0.328384, supervised_loss: 0.328368
[20:17:09.027] iteration 476 : loss : 0.301371, supervised_loss: 0.301357
[20:17:09.940] iteration 477 : loss : 0.302762, supervised_loss: 0.302745
[20:17:10.853] iteration 478 : loss : 0.297040, supervised_loss: 0.297017
[20:17:11.765] iteration 479 : loss : 0.332727, supervised_loss: 0.332711
[20:17:12.677] iteration 480 : loss : 0.300443, supervised_loss: 0.300430
[20:17:14.125] iteration 481 : loss : 0.295812, supervised_loss: 0.295796
[20:17:15.038] iteration 482 : loss : 0.336541, supervised_loss: 0.336526
[20:17:15.952] iteration 483 : loss : 0.328931, supervised_loss: 0.328911
[20:17:16.865] iteration 484 : loss : 0.329935, supervised_loss: 0.329912
[20:17:17.778] iteration 485 : loss : 0.300464, supervised_loss: 0.300453
[20:17:18.692] iteration 486 : loss : 0.301665, supervised_loss: 0.301655
[20:17:19.604] iteration 487 : loss : 0.313230, supervised_loss: 0.313214
[20:17:20.518] iteration 488 : loss : 0.309123, supervised_loss: 0.309108
[20:17:21.431] iteration 489 : loss : 0.319004, supervised_loss: 0.318988
[20:17:22.345] iteration 490 : loss : 0.307719, supervised_loss: 0.307705
[20:17:23.258] iteration 491 : loss : 0.302984, supervised_loss: 0.302962
[20:17:24.172] iteration 492 : loss : 0.310403, supervised_loss: 0.310384
[20:17:25.734] iteration 493 : loss : 0.303697, supervised_loss: 0.303681
[20:17:26.647] iteration 494 : loss : 0.294100, supervised_loss: 0.294084
[20:17:27.559] iteration 495 : loss : 0.316774, supervised_loss: 0.316759
[20:17:28.473] iteration 496 : loss : 0.302221, supervised_loss: 0.302207
[20:17:29.386] iteration 497 : loss : 0.294465, supervised_loss: 0.294450
[20:17:30.301] iteration 498 : loss : 0.307900, supervised_loss: 0.307890
[20:17:31.214] iteration 499 : loss : 0.339685, supervised_loss: 0.339663
[20:17:32.127] iteration 500 : loss : 0.316817, supervised_loss: 0.316803
[20:17:33.041] iteration 501 : loss : 0.316741, supervised_loss: 0.316728
[20:17:33.955] iteration 502 : loss : 0.302822, supervised_loss: 0.302801
[20:17:34.869] iteration 503 : loss : 0.296416, supervised_loss: 0.296400
[20:17:35.783] iteration 504 : loss : 0.309805, supervised_loss: 0.309789
[20:17:37.373] iteration 505 : loss : 0.306414, supervised_loss: 0.306401
[20:17:38.287] iteration 506 : loss : 0.306172, supervised_loss: 0.306159
[20:17:39.200] iteration 507 : loss : 0.309099, supervised_loss: 0.309085
[20:17:40.113] iteration 508 : loss : 0.303265, supervised_loss: 0.303251
[20:17:41.026] iteration 509 : loss : 0.296633, supervised_loss: 0.296614
[20:17:41.939] iteration 510 : loss : 0.303989, supervised_loss: 0.303974
[20:17:42.852] iteration 511 : loss : 0.294434, supervised_loss: 0.294417
[20:17:43.765] iteration 512 : loss : 0.298900, supervised_loss: 0.298887
[20:17:44.678] iteration 513 : loss : 0.307425, supervised_loss: 0.307403
[20:17:45.592] iteration 514 : loss : 0.322724, supervised_loss: 0.322711
[20:17:46.505] iteration 515 : loss : 0.318746, supervised_loss: 0.318735
[20:17:47.418] iteration 516 : loss : 0.297835, supervised_loss: 0.297818
[20:17:48.943] iteration 517 : loss : 0.300425, supervised_loss: 0.300414
[20:17:49.855] iteration 518 : loss : 0.339867, supervised_loss: 0.339854
[20:17:50.769] iteration 519 : loss : 0.292732, supervised_loss: 0.292717
[20:17:51.681] iteration 520 : loss : 0.316354, supervised_loss: 0.316335
[20:17:52.597] iteration 521 : loss : 0.311804, supervised_loss: 0.311789
[20:17:53.510] iteration 522 : loss : 0.338501, supervised_loss: 0.338490
[20:17:54.422] iteration 523 : loss : 0.309140, supervised_loss: 0.309127
[20:17:55.335] iteration 524 : loss : 0.300860, supervised_loss: 0.300841
[20:17:56.248] iteration 525 : loss : 0.319316, supervised_loss: 0.319303
[20:17:57.160] iteration 526 : loss : 0.312857, supervised_loss: 0.312841
[20:17:58.074] iteration 527 : loss : 0.304337, supervised_loss: 0.304322
[20:17:58.988] iteration 528 : loss : 0.320713, supervised_loss: 0.320695
[20:18:00.481] iteration 529 : loss : 0.311800, supervised_loss: 0.311782
[20:18:01.395] iteration 530 : loss : 0.308832, supervised_loss: 0.308811
[20:18:02.308] iteration 531 : loss : 0.301949, supervised_loss: 0.301935
[20:18:03.222] iteration 532 : loss : 0.314621, supervised_loss: 0.314601
[20:18:04.134] iteration 533 : loss : 0.313002, supervised_loss: 0.312989
[20:18:05.047] iteration 534 : loss : 0.305984, supervised_loss: 0.305963
[20:18:05.961] iteration 535 : loss : 0.308225, supervised_loss: 0.308210
[20:18:06.875] iteration 536 : loss : 0.319169, supervised_loss: 0.319155
[20:18:07.789] iteration 537 : loss : 0.302306, supervised_loss: 0.302289
[20:18:08.703] iteration 538 : loss : 0.321082, supervised_loss: 0.321062
[20:18:09.615] iteration 539 : loss : 0.300769, supervised_loss: 0.300754
[20:18:10.528] iteration 540 : loss : 0.293108, supervised_loss: 0.293085
[20:18:12.025] iteration 541 : loss : 0.303847, supervised_loss: 0.303829
[20:18:12.939] iteration 542 : loss : 0.316272, supervised_loss: 0.316260
[20:18:13.855] iteration 543 : loss : 0.313264, supervised_loss: 0.313249
[20:18:14.769] iteration 544 : loss : 0.332899, supervised_loss: 0.332879
[20:18:15.681] iteration 545 : loss : 0.311488, supervised_loss: 0.311472
[20:18:16.594] iteration 546 : loss : 0.306688, supervised_loss: 0.306672
[20:18:17.508] iteration 547 : loss : 0.313010, supervised_loss: 0.312995
[20:18:18.423] iteration 548 : loss : 0.305197, supervised_loss: 0.305179
[20:18:19.337] iteration 549 : loss : 0.304602, supervised_loss: 0.304590
[20:18:20.250] iteration 550 : loss : 0.304572, supervised_loss: 0.304553
[20:18:21.163] iteration 551 : loss : 0.315176, supervised_loss: 0.315163
[20:18:22.076] iteration 552 : loss : 0.292490, supervised_loss: 0.292474
[20:18:23.654] iteration 553 : loss : 0.305845, supervised_loss: 0.305824
[20:18:24.566] iteration 554 : loss : 0.309476, supervised_loss: 0.309460
[20:18:25.479] iteration 555 : loss : 0.298833, supervised_loss: 0.298822
[20:18:26.391] iteration 556 : loss : 0.298587, supervised_loss: 0.298571
[20:18:27.305] iteration 557 : loss : 0.340519, supervised_loss: 0.340500
[20:18:28.219] iteration 558 : loss : 0.312995, supervised_loss: 0.312980
[20:18:29.133] iteration 559 : loss : 0.299526, supervised_loss: 0.299506
[20:18:30.047] iteration 560 : loss : 0.330444, supervised_loss: 0.330415
[20:18:30.959] iteration 561 : loss : 0.305123, supervised_loss: 0.305109
[20:18:31.871] iteration 562 : loss : 0.302768, supervised_loss: 0.302757
[20:18:32.785] iteration 563 : loss : 0.300324, supervised_loss: 0.300311
[20:18:33.698] iteration 564 : loss : 0.305686, supervised_loss: 0.305668
[20:18:35.241] iteration 565 : loss : 0.303374, supervised_loss: 0.303362
[20:18:36.154] iteration 566 : loss : 0.303752, supervised_loss: 0.303727
[20:18:37.068] iteration 567 : loss : 0.308470, supervised_loss: 0.308451
[20:18:37.979] iteration 568 : loss : 0.305074, supervised_loss: 0.305056
[20:18:38.891] iteration 569 : loss : 0.330152, supervised_loss: 0.330135
[20:18:39.805] iteration 570 : loss : 0.327013, supervised_loss: 0.326993
[20:18:40.718] iteration 571 : loss : 0.305295, supervised_loss: 0.305278
[20:18:41.632] iteration 572 : loss : 0.296356, supervised_loss: 0.296323
[20:18:42.544] iteration 573 : loss : 0.307486, supervised_loss: 0.307453
[20:18:43.457] iteration 574 : loss : 0.300741, supervised_loss: 0.300723
[20:18:44.370] iteration 575 : loss : 0.303151, supervised_loss: 0.303134
[20:18:45.284] iteration 576 : loss : 0.300746, supervised_loss: 0.300729
[20:18:46.836] iteration 577 : loss : 0.320883, supervised_loss: 0.320861
[20:18:47.747] iteration 578 : loss : 0.299096, supervised_loss: 0.299077
[20:18:48.659] iteration 579 : loss : 0.305001, supervised_loss: 0.304990
[20:18:49.571] iteration 580 : loss : 0.291001, supervised_loss: 0.290981
[20:18:50.485] iteration 581 : loss : 0.333806, supervised_loss: 0.333786
[20:18:51.399] iteration 582 : loss : 0.311190, supervised_loss: 0.311177
[20:18:52.314] iteration 583 : loss : 0.307662, supervised_loss: 0.307645
[20:18:53.226] iteration 584 : loss : 0.301434, supervised_loss: 0.301418
[20:18:54.139] iteration 585 : loss : 0.298950, supervised_loss: 0.298933
[20:18:55.052] iteration 586 : loss : 0.311880, supervised_loss: 0.311862
[20:18:55.966] iteration 587 : loss : 0.289898, supervised_loss: 0.289878
[20:18:56.880] iteration 588 : loss : 0.310863, supervised_loss: 0.310852
[20:18:58.572] iteration 589 : loss : 0.327013, supervised_loss: 0.326997
[20:18:59.484] iteration 590 : loss : 0.297466, supervised_loss: 0.297450
[20:19:00.397] iteration 591 : loss : 0.318341, supervised_loss: 0.318324
[20:19:01.310] iteration 592 : loss : 0.313259, supervised_loss: 0.313245
[20:19:02.221] iteration 593 : loss : 0.302189, supervised_loss: 0.302176
[20:19:03.134] iteration 594 : loss : 0.310719, supervised_loss: 0.310705
[20:19:04.047] iteration 595 : loss : 0.295892, supervised_loss: 0.295877
[20:19:04.959] iteration 596 : loss : 0.295343, supervised_loss: 0.295328
[20:19:05.871] iteration 597 : loss : 0.311736, supervised_loss: 0.311718
[20:19:06.783] iteration 598 : loss : 0.299192, supervised_loss: 0.299179
[20:19:07.695] iteration 599 : loss : 0.306167, supervised_loss: 0.306146
[20:19:08.607] iteration 600 : loss : 0.297946, supervised_loss: 0.297919
[20:19:10.181] iteration 601 : loss : 0.302022, supervised_loss: 0.302004
[20:19:11.092] iteration 602 : loss : 0.298047, supervised_loss: 0.298031
[20:19:12.005] iteration 603 : loss : 0.304525, supervised_loss: 0.304505
[20:19:12.919] iteration 604 : loss : 0.296797, supervised_loss: 0.296777
[20:19:13.831] iteration 605 : loss : 0.293737, supervised_loss: 0.293723
[20:19:14.744] iteration 606 : loss : 0.295411, supervised_loss: 0.295388
[20:19:15.656] iteration 607 : loss : 0.343492, supervised_loss: 0.343469
[20:19:16.568] iteration 608 : loss : 0.298672, supervised_loss: 0.298652
[20:19:17.482] iteration 609 : loss : 0.322271, supervised_loss: 0.322255
[20:19:18.394] iteration 610 : loss : 0.303651, supervised_loss: 0.303631
[20:19:19.308] iteration 611 : loss : 0.296330, supervised_loss: 0.296305
[20:19:20.221] iteration 612 : loss : 0.294091, supervised_loss: 0.294076
[20:19:21.753] iteration 613 : loss : 0.300056, supervised_loss: 0.300039
[20:19:22.666] iteration 614 : loss : 0.294033, supervised_loss: 0.294011
[20:19:23.579] iteration 615 : loss : 0.313687, supervised_loss: 0.313668
[20:19:24.493] iteration 616 : loss : 0.312553, supervised_loss: 0.312515
[20:19:25.408] iteration 617 : loss : 0.290857, supervised_loss: 0.290841
[20:19:26.320] iteration 618 : loss : 0.300358, supervised_loss: 0.300333
[20:19:27.231] iteration 619 : loss : 0.308489, supervised_loss: 0.308473
[20:19:28.144] iteration 620 : loss : 0.305752, supervised_loss: 0.305732
[20:19:29.057] iteration 621 : loss : 0.296738, supervised_loss: 0.296718
[20:19:29.970] iteration 622 : loss : 0.295369, supervised_loss: 0.295347
[20:19:30.883] iteration 623 : loss : 0.301022, supervised_loss: 0.301005
[20:19:31.795] iteration 624 : loss : 0.315070, supervised_loss: 0.315044
[20:19:33.337] iteration 625 : loss : 0.343726, supervised_loss: 0.343708
[20:19:34.250] iteration 626 : loss : 0.295384, supervised_loss: 0.295358
[20:19:35.164] iteration 627 : loss : 0.293587, supervised_loss: 0.293570
[20:19:36.076] iteration 628 : loss : 0.310545, supervised_loss: 0.310525
[20:19:36.989] iteration 629 : loss : 0.304716, supervised_loss: 0.304699
[20:19:37.901] iteration 630 : loss : 0.308731, supervised_loss: 0.308711
[20:19:38.813] iteration 631 : loss : 0.299283, supervised_loss: 0.299265
[20:19:39.726] iteration 632 : loss : 0.339320, supervised_loss: 0.339260
[20:19:40.638] iteration 633 : loss : 0.298759, supervised_loss: 0.298738
[20:19:41.550] iteration 634 : loss : 0.297263, supervised_loss: 0.297248
[20:19:42.464] iteration 635 : loss : 0.311615, supervised_loss: 0.311586
[20:19:43.378] iteration 636 : loss : 0.302626, supervised_loss: 0.302608
[20:19:44.920] iteration 637 : loss : 0.297915, supervised_loss: 0.297898
[20:19:45.832] iteration 638 : loss : 0.311495, supervised_loss: 0.311474
[20:19:46.744] iteration 639 : loss : 0.297179, supervised_loss: 0.297158
[20:19:47.658] iteration 640 : loss : 0.296809, supervised_loss: 0.296783
[20:19:48.571] iteration 641 : loss : 0.302136, supervised_loss: 0.302116
[20:19:49.483] iteration 642 : loss : 0.294012, supervised_loss: 0.293998
[20:19:50.396] iteration 643 : loss : 0.297260, supervised_loss: 0.297245
[20:19:51.309] iteration 644 : loss : 0.307047, supervised_loss: 0.307024
[20:19:52.222] iteration 645 : loss : 0.301315, supervised_loss: 0.301300
[20:19:53.134] iteration 646 : loss : 0.303718, supervised_loss: 0.303701
[20:19:54.047] iteration 647 : loss : 0.309869, supervised_loss: 0.309851
[20:19:54.959] iteration 648 : loss : 0.298063, supervised_loss: 0.298040
[20:19:56.631] iteration 649 : loss : 0.310975, supervised_loss: 0.310956
[20:19:57.543] iteration 650 : loss : 0.291128, supervised_loss: 0.291110
[20:19:58.457] iteration 651 : loss : 0.329715, supervised_loss: 0.329693
[20:19:59.369] iteration 652 : loss : 0.298057, supervised_loss: 0.298044
[20:20:00.282] iteration 653 : loss : 0.310389, supervised_loss: 0.310368
[20:20:01.206] iteration 654 : loss : 0.299350, supervised_loss: 0.299324
[20:20:02.118] iteration 655 : loss : 0.302218, supervised_loss: 0.302199
[20:20:03.032] iteration 656 : loss : 0.303024, supervised_loss: 0.303003
[20:20:03.945] iteration 657 : loss : 0.302921, supervised_loss: 0.302902
[20:20:04.859] iteration 658 : loss : 0.300500, supervised_loss: 0.300477
[20:20:05.772] iteration 659 : loss : 0.301446, supervised_loss: 0.301426
[20:20:06.685] iteration 660 : loss : 0.350229, supervised_loss: 0.350204
[20:20:08.176] iteration 661 : loss : 0.326300, supervised_loss: 0.326280
[20:20:09.088] iteration 662 : loss : 0.298007, supervised_loss: 0.297989
[20:20:10.001] iteration 663 : loss : 0.313069, supervised_loss: 0.313045
[20:20:10.914] iteration 664 : loss : 0.321411, supervised_loss: 0.321384
[20:20:11.827] iteration 665 : loss : 0.297188, supervised_loss: 0.297150
[20:20:12.741] iteration 666 : loss : 0.303090, supervised_loss: 0.303072
[20:20:13.653] iteration 667 : loss : 0.304905, supervised_loss: 0.304886
[20:20:14.566] iteration 668 : loss : 0.303151, supervised_loss: 0.303128
[20:20:15.478] iteration 669 : loss : 0.307292, supervised_loss: 0.307267
[20:20:16.391] iteration 670 : loss : 0.321778, supervised_loss: 0.321724
[20:20:17.304] iteration 671 : loss : 0.308663, supervised_loss: 0.308644
[20:20:18.218] iteration 672 : loss : 0.312768, supervised_loss: 0.312741
[20:20:19.828] iteration 673 : loss : 0.334494, supervised_loss: 0.334477
[20:20:20.740] iteration 674 : loss : 0.317051, supervised_loss: 0.317028
[20:20:21.653] iteration 675 : loss : 0.367832, supervised_loss: 0.367808
[20:20:22.566] iteration 676 : loss : 0.316542, supervised_loss: 0.316529
[20:20:23.478] iteration 677 : loss : 0.306308, supervised_loss: 0.306281
[20:20:24.390] iteration 678 : loss : 0.292655, supervised_loss: 0.292630
[20:20:25.304] iteration 679 : loss : 0.315998, supervised_loss: 0.315980
[20:20:26.217] iteration 680 : loss : 0.314101, supervised_loss: 0.314082
[20:20:27.129] iteration 681 : loss : 0.310990, supervised_loss: 0.310965
[20:20:28.042] iteration 682 : loss : 0.309508, supervised_loss: 0.309492
[20:20:28.954] iteration 683 : loss : 0.324710, supervised_loss: 0.324695
[20:20:29.867] iteration 684 : loss : 0.304948, supervised_loss: 0.304932
[20:20:31.479] iteration 685 : loss : 0.302704, supervised_loss: 0.302684
[20:20:32.390] iteration 686 : loss : 0.293891, supervised_loss: 0.293875
[20:20:33.303] iteration 687 : loss : 0.311592, supervised_loss: 0.311573
[20:20:34.216] iteration 688 : loss : 0.298369, supervised_loss: 0.298345
[20:20:35.128] iteration 689 : loss : 0.323987, supervised_loss: 0.323961
[20:20:36.043] iteration 690 : loss : 0.316366, supervised_loss: 0.316349
[20:20:36.957] iteration 691 : loss : 0.308266, supervised_loss: 0.308244
[20:20:37.871] iteration 692 : loss : 0.316135, supervised_loss: 0.316116
[20:20:38.782] iteration 693 : loss : 0.298366, supervised_loss: 0.298343
[20:20:39.694] iteration 694 : loss : 0.321431, supervised_loss: 0.321417
[20:20:40.607] iteration 695 : loss : 0.291299, supervised_loss: 0.291280
[20:20:41.520] iteration 696 : loss : 0.296366, supervised_loss: 0.296343
[20:20:43.001] iteration 697 : loss : 0.308298, supervised_loss: 0.308280
[20:20:43.914] iteration 698 : loss : 0.302510, supervised_loss: 0.302493
[20:20:44.827] iteration 699 : loss : 0.294303, supervised_loss: 0.294285
[20:20:45.740] iteration 700 : loss : 0.314832, supervised_loss: 0.314812
[20:20:46.652] iteration 701 : loss : 0.292968, supervised_loss: 0.292950
[20:20:47.565] iteration 702 : loss : 0.293017, supervised_loss: 0.292997
[20:20:48.479] iteration 703 : loss : 0.314938, supervised_loss: 0.314918
[20:20:49.392] iteration 704 : loss : 0.309145, supervised_loss: 0.309130
[20:20:50.306] iteration 705 : loss : 0.292740, supervised_loss: 0.292722
[20:20:51.218] iteration 706 : loss : 0.302507, supervised_loss: 0.302484
[20:20:52.131] iteration 707 : loss : 0.321724, supervised_loss: 0.321703
[20:20:53.045] iteration 708 : loss : 0.302591, supervised_loss: 0.302574
[20:20:54.551] iteration 709 : loss : 0.323236, supervised_loss: 0.323218
[20:20:55.464] iteration 710 : loss : 0.295982, supervised_loss: 0.295959
[20:20:56.377] iteration 711 : loss : 0.301019, supervised_loss: 0.300998
[20:20:57.290] iteration 712 : loss : 0.316065, supervised_loss: 0.316053
[20:20:58.205] iteration 713 : loss : 0.309085, supervised_loss: 0.309066
[20:20:59.118] iteration 714 : loss : 0.305501, supervised_loss: 0.305486
[20:21:00.030] iteration 715 : loss : 0.293245, supervised_loss: 0.293216
[20:21:00.943] iteration 716 : loss : 0.301592, supervised_loss: 0.301567
[20:21:01.856] iteration 717 : loss : 0.306662, supervised_loss: 0.306643
[20:21:02.769] iteration 718 : loss : 0.299225, supervised_loss: 0.299192
[20:21:03.683] iteration 719 : loss : 0.296542, supervised_loss: 0.296511
[20:21:04.598] iteration 720 : loss : 0.308757, supervised_loss: 0.308740
[20:21:06.122] iteration 721 : loss : 0.297015, supervised_loss: 0.296982
[20:21:07.035] iteration 722 : loss : 0.296395, supervised_loss: 0.296380
[20:21:07.947] iteration 723 : loss : 0.306269, supervised_loss: 0.306253
[20:21:08.857] iteration 724 : loss : 0.308900, supervised_loss: 0.308886
[20:21:09.769] iteration 725 : loss : 0.328099, supervised_loss: 0.328081
[20:21:10.682] iteration 726 : loss : 0.293696, supervised_loss: 0.293684
[20:21:11.596] iteration 727 : loss : 0.304034, supervised_loss: 0.304018
[20:21:12.507] iteration 728 : loss : 0.298268, supervised_loss: 0.298250
[20:21:13.420] iteration 729 : loss : 0.300493, supervised_loss: 0.300470
[20:21:14.332] iteration 730 : loss : 0.305863, supervised_loss: 0.305832
[20:21:15.245] iteration 731 : loss : 0.312414, supervised_loss: 0.312397
[20:21:16.160] iteration 732 : loss : 0.310571, supervised_loss: 0.310557
[20:21:17.646] iteration 733 : loss : 0.296623, supervised_loss: 0.296603
[20:21:18.560] iteration 734 : loss : 0.296465, supervised_loss: 0.296444
[20:21:19.473] iteration 735 : loss : 0.293029, supervised_loss: 0.292997
[20:21:20.386] iteration 736 : loss : 0.297132, supervised_loss: 0.297109
[20:21:21.298] iteration 737 : loss : 0.316764, supervised_loss: 0.316748
[20:21:22.211] iteration 738 : loss : 0.304782, supervised_loss: 0.304766
[20:21:23.126] iteration 739 : loss : 0.301443, supervised_loss: 0.301425
[20:21:24.039] iteration 740 : loss : 0.297538, supervised_loss: 0.297521
[20:21:24.953] iteration 741 : loss : 0.290976, supervised_loss: 0.290957
[20:21:25.866] iteration 742 : loss : 0.307916, supervised_loss: 0.307898
[20:21:26.779] iteration 743 : loss : 0.328903, supervised_loss: 0.328874
[20:21:27.692] iteration 744 : loss : 0.295434, supervised_loss: 0.295416
[20:21:29.176] iteration 745 : loss : 0.309730, supervised_loss: 0.309709
[20:21:30.090] iteration 746 : loss : 0.299347, supervised_loss: 0.299330
[20:21:31.003] iteration 747 : loss : 0.305931, supervised_loss: 0.305916
[20:21:31.916] iteration 748 : loss : 0.302151, supervised_loss: 0.302124
[20:21:32.828] iteration 749 : loss : 0.298390, supervised_loss: 0.298372
[20:21:33.741] iteration 750 : loss : 0.298051, supervised_loss: 0.298034
[20:21:34.654] iteration 751 : loss : 0.291870, supervised_loss: 0.291833
[20:21:35.568] iteration 752 : loss : 0.309652, supervised_loss: 0.309616
[20:21:36.480] iteration 753 : loss : 0.323702, supervised_loss: 0.323685
[20:21:37.393] iteration 754 : loss : 0.331741, supervised_loss: 0.331726
[20:21:38.308] iteration 755 : loss : 0.310063, supervised_loss: 0.310044
[20:21:39.221] iteration 756 : loss : 0.293871, supervised_loss: 0.293844
[20:21:40.709] iteration 757 : loss : 0.304155, supervised_loss: 0.304131
[20:21:41.618] iteration 758 : loss : 0.298821, supervised_loss: 0.298796
[20:21:42.530] iteration 759 : loss : 0.303967, supervised_loss: 0.303935
[20:21:43.442] iteration 760 : loss : 0.302595, supervised_loss: 0.302569
[20:21:44.354] iteration 761 : loss : 0.298692, supervised_loss: 0.298661
[20:21:45.267] iteration 762 : loss : 0.292166, supervised_loss: 0.292136
[20:21:46.183] iteration 763 : loss : 0.304984, supervised_loss: 0.304959
[20:21:47.097] iteration 764 : loss : 0.310696, supervised_loss: 0.310675
[20:21:48.011] iteration 765 : loss : 0.294272, supervised_loss: 0.294246
[20:21:48.925] iteration 766 : loss : 0.318722, supervised_loss: 0.318697
[20:21:49.839] iteration 767 : loss : 0.298811, supervised_loss: 0.298787
[20:21:50.751] iteration 768 : loss : 0.295818, supervised_loss: 0.295797
[20:21:52.303] iteration 769 : loss : 0.288978, supervised_loss: 0.288964
[20:21:53.214] iteration 770 : loss : 0.302790, supervised_loss: 0.302773
[20:21:54.125] iteration 771 : loss : 0.296057, supervised_loss: 0.296037
[20:21:55.038] iteration 772 : loss : 0.288932, supervised_loss: 0.288910
[20:21:55.952] iteration 773 : loss : 0.306311, supervised_loss: 0.306291
[20:21:56.866] iteration 774 : loss : 0.299056, supervised_loss: 0.299037
[20:21:57.779] iteration 775 : loss : 0.317403, supervised_loss: 0.317384
[20:21:58.692] iteration 776 : loss : 0.294102, supervised_loss: 0.294078
[20:21:59.605] iteration 777 : loss : 0.296547, supervised_loss: 0.296520
[20:22:00.518] iteration 778 : loss : 0.298184, supervised_loss: 0.298157
[20:22:01.432] iteration 779 : loss : 0.310378, supervised_loss: 0.310351
[20:22:02.345] iteration 780 : loss : 0.299562, supervised_loss: 0.299546
[20:22:03.867] iteration 781 : loss : 0.287287, supervised_loss: 0.287273
[20:22:04.779] iteration 782 : loss : 0.302637, supervised_loss: 0.302614
[20:22:05.691] iteration 783 : loss : 0.297137, supervised_loss: 0.297114
[20:22:06.605] iteration 784 : loss : 0.294018, supervised_loss: 0.293988
[20:22:07.518] iteration 785 : loss : 0.301247, supervised_loss: 0.301221
[20:22:08.431] iteration 786 : loss : 0.293317, supervised_loss: 0.293302
[20:22:09.343] iteration 787 : loss : 0.292079, supervised_loss: 0.292062
[20:22:10.255] iteration 788 : loss : 0.295654, supervised_loss: 0.295638
[20:22:11.168] iteration 789 : loss : 0.295034, supervised_loss: 0.295014
[20:22:12.082] iteration 790 : loss : 0.286537, supervised_loss: 0.286520
[20:22:12.995] iteration 791 : loss : 0.300254, supervised_loss: 0.300234
[20:22:13.909] iteration 792 : loss : 0.293133, supervised_loss: 0.293110
[20:22:15.442] iteration 793 : loss : 0.290382, supervised_loss: 0.290361
[20:22:16.354] iteration 794 : loss : 0.294844, supervised_loss: 0.294821
[20:22:17.267] iteration 795 : loss : 0.306983, supervised_loss: 0.306964
[20:22:18.179] iteration 796 : loss : 0.295419, supervised_loss: 0.295397
[20:22:19.091] iteration 797 : loss : 0.298602, supervised_loss: 0.298577
[20:22:20.003] iteration 798 : loss : 0.304354, supervised_loss: 0.304330
[20:22:20.916] iteration 799 : loss : 0.295896, supervised_loss: 0.295878
[20:22:21.830] iteration 800 : loss : 0.288327, supervised_loss: 0.288303
[20:22:23.867] save best model to model/LA_vnet_25_labeled/URPC/iter_800_dice_0.9172200560569763.pth
[20:22:24.781] iteration 801 : loss : 0.308141, supervised_loss: 0.308119
[20:22:25.693] iteration 802 : loss : 0.305529, supervised_loss: 0.305503
[20:22:26.607] iteration 803 : loss : 0.311068, supervised_loss: 0.311043
[20:22:27.520] iteration 804 : loss : 0.290239, supervised_loss: 0.290214
[20:22:29.149] iteration 805 : loss : 0.304699, supervised_loss: 0.304675
[20:22:30.062] iteration 806 : loss : 0.287497, supervised_loss: 0.287469
[20:22:30.975] iteration 807 : loss : 0.294502, supervised_loss: 0.294476
[20:22:31.888] iteration 808 : loss : 0.285591, supervised_loss: 0.285569
[20:22:32.800] iteration 809 : loss : 0.298469, supervised_loss: 0.298446
[20:22:33.714] iteration 810 : loss : 0.308606, supervised_loss: 0.308587
[20:22:34.627] iteration 811 : loss : 0.313492, supervised_loss: 0.313477
[20:22:35.541] iteration 812 : loss : 0.297213, supervised_loss: 0.297187
[20:22:36.453] iteration 813 : loss : 0.295598, supervised_loss: 0.295574
[20:22:37.365] iteration 814 : loss : 0.295857, supervised_loss: 0.295817
[20:22:38.277] iteration 815 : loss : 0.332923, supervised_loss: 0.332906
[20:22:39.191] iteration 816 : loss : 0.302593, supervised_loss: 0.302570
[20:22:40.758] iteration 817 : loss : 0.293761, supervised_loss: 0.293738
[20:22:41.671] iteration 818 : loss : 0.286330, supervised_loss: 0.286305
[20:22:42.584] iteration 819 : loss : 0.297278, supervised_loss: 0.297255
[20:22:43.497] iteration 820 : loss : 0.294664, supervised_loss: 0.294634
[20:22:44.409] iteration 821 : loss : 0.302966, supervised_loss: 0.302949
[20:22:45.322] iteration 822 : loss : 0.286203, supervised_loss: 0.286179
[20:22:46.236] iteration 823 : loss : 0.314407, supervised_loss: 0.314390
[20:22:47.150] iteration 824 : loss : 0.297342, supervised_loss: 0.297317
[20:22:48.062] iteration 825 : loss : 0.304249, supervised_loss: 0.304230
[20:22:48.974] iteration 826 : loss : 0.313882, supervised_loss: 0.313865
[20:22:49.888] iteration 827 : loss : 0.283065, supervised_loss: 0.283046
[20:22:50.802] iteration 828 : loss : 0.292842, supervised_loss: 0.292820
[20:22:52.373] iteration 829 : loss : 0.294884, supervised_loss: 0.294868
[20:22:53.286] iteration 830 : loss : 0.293504, supervised_loss: 0.293481
[20:22:54.199] iteration 831 : loss : 0.295576, supervised_loss: 0.295556
[20:22:55.112] iteration 832 : loss : 0.291049, supervised_loss: 0.291028
[20:22:56.024] iteration 833 : loss : 0.292286, supervised_loss: 0.292271
[20:22:56.937] iteration 834 : loss : 0.296571, supervised_loss: 0.296550
[20:22:57.851] iteration 835 : loss : 0.291253, supervised_loss: 0.291235
[20:22:58.763] iteration 836 : loss : 0.317808, supervised_loss: 0.317788
[20:22:59.678] iteration 837 : loss : 0.297120, supervised_loss: 0.297099
[20:23:00.592] iteration 838 : loss : 0.302006, supervised_loss: 0.301982
[20:23:01.505] iteration 839 : loss : 0.327682, supervised_loss: 0.327660
[20:23:02.419] iteration 840 : loss : 0.297464, supervised_loss: 0.297446
[20:23:03.979] iteration 841 : loss : 0.295944, supervised_loss: 0.295925
[20:23:04.891] iteration 842 : loss : 0.302840, supervised_loss: 0.302824
[20:23:05.805] iteration 843 : loss : 0.293658, supervised_loss: 0.293643
[20:23:06.717] iteration 844 : loss : 0.285995, supervised_loss: 0.285975
[20:23:07.628] iteration 845 : loss : 0.318424, supervised_loss: 0.318398
[20:23:08.540] iteration 846 : loss : 0.286571, supervised_loss: 0.286546
[20:23:09.453] iteration 847 : loss : 0.285503, supervised_loss: 0.285477
[20:23:10.367] iteration 848 : loss : 0.292875, supervised_loss: 0.292856
[20:23:11.281] iteration 849 : loss : 0.291913, supervised_loss: 0.291883
[20:23:12.192] iteration 850 : loss : 0.294334, supervised_loss: 0.294318
[20:23:13.105] iteration 851 : loss : 0.292104, supervised_loss: 0.292087
[20:23:14.017] iteration 852 : loss : 0.303999, supervised_loss: 0.303980
[20:23:15.510] iteration 853 : loss : 0.310023, supervised_loss: 0.310003
[20:23:16.423] iteration 854 : loss : 0.316326, supervised_loss: 0.316307
[20:23:17.337] iteration 855 : loss : 0.290652, supervised_loss: 0.290627
[20:23:18.251] iteration 856 : loss : 0.300165, supervised_loss: 0.300146
[20:23:19.164] iteration 857 : loss : 0.300186, supervised_loss: 0.300170
[20:23:20.079] iteration 858 : loss : 0.286269, supervised_loss: 0.286238
[20:23:20.992] iteration 859 : loss : 0.291101, supervised_loss: 0.291085
[20:23:21.904] iteration 860 : loss : 0.296717, supervised_loss: 0.296699
[20:23:22.819] iteration 861 : loss : 0.284243, supervised_loss: 0.284222
[20:23:23.732] iteration 862 : loss : 0.289146, supervised_loss: 0.289129
[20:23:24.646] iteration 863 : loss : 0.288877, supervised_loss: 0.288860
[20:23:25.559] iteration 864 : loss : 0.288550, supervised_loss: 0.288517
[20:23:27.146] iteration 865 : loss : 0.298269, supervised_loss: 0.298249
[20:23:28.058] iteration 866 : loss : 0.295770, supervised_loss: 0.295749
[20:23:28.972] iteration 867 : loss : 0.290999, supervised_loss: 0.290981
[20:23:29.885] iteration 868 : loss : 0.303976, supervised_loss: 0.303948
[20:23:30.799] iteration 869 : loss : 0.301141, supervised_loss: 0.301107
[20:23:31.712] iteration 870 : loss : 0.300926, supervised_loss: 0.300902
[20:23:32.625] iteration 871 : loss : 0.302074, supervised_loss: 0.302051
[20:23:33.538] iteration 872 : loss : 0.289525, supervised_loss: 0.289492
[20:23:34.452] iteration 873 : loss : 0.297910, supervised_loss: 0.297893
[20:23:35.367] iteration 874 : loss : 0.302008, supervised_loss: 0.301992
[20:23:36.281] iteration 875 : loss : 0.289293, supervised_loss: 0.289270
[20:23:37.196] iteration 876 : loss : 0.294727, supervised_loss: 0.294703
[20:23:38.783] iteration 877 : loss : 0.289092, supervised_loss: 0.289070
[20:23:39.695] iteration 878 : loss : 0.293078, supervised_loss: 0.293061
[20:23:40.609] iteration 879 : loss : 0.295440, supervised_loss: 0.295424
[20:23:41.522] iteration 880 : loss : 0.323021, supervised_loss: 0.322999
[20:23:42.436] iteration 881 : loss : 0.297068, supervised_loss: 0.297050
[20:23:43.349] iteration 882 : loss : 0.303318, supervised_loss: 0.303289
[20:23:44.262] iteration 883 : loss : 0.297766, supervised_loss: 0.297750
[20:23:45.177] iteration 884 : loss : 0.297765, supervised_loss: 0.297738
[20:23:46.091] iteration 885 : loss : 0.300258, supervised_loss: 0.300239
[20:23:47.003] iteration 886 : loss : 0.293717, supervised_loss: 0.293698
[20:23:47.916] iteration 887 : loss : 0.297689, supervised_loss: 0.297659
[20:23:48.829] iteration 888 : loss : 0.295316, supervised_loss: 0.295300
[20:23:50.345] iteration 889 : loss : 0.293683, supervised_loss: 0.293661
[20:23:51.257] iteration 890 : loss : 0.292503, supervised_loss: 0.292482
[20:23:52.170] iteration 891 : loss : 0.293263, supervised_loss: 0.293237
[20:23:53.084] iteration 892 : loss : 0.314779, supervised_loss: 0.314758
[20:23:53.997] iteration 893 : loss : 0.290336, supervised_loss: 0.290298
[20:23:54.910] iteration 894 : loss : 0.289085, supervised_loss: 0.289055
[20:23:55.823] iteration 895 : loss : 0.287619, supervised_loss: 0.287598
[20:23:56.736] iteration 896 : loss : 0.288989, supervised_loss: 0.288968
[20:23:57.650] iteration 897 : loss : 0.308240, supervised_loss: 0.308213
[20:23:58.564] iteration 898 : loss : 0.295035, supervised_loss: 0.295018
[20:23:59.476] iteration 899 : loss : 0.304742, supervised_loss: 0.304723
[20:24:00.390] iteration 900 : loss : 0.287567, supervised_loss: 0.287542
[20:24:01.961] iteration 901 : loss : 0.294263, supervised_loss: 0.294238
[20:24:02.874] iteration 902 : loss : 0.298552, supervised_loss: 0.298508
[20:24:03.788] iteration 903 : loss : 0.290238, supervised_loss: 0.290207
[20:24:04.701] iteration 904 : loss : 0.303064, supervised_loss: 0.303033
[20:24:05.614] iteration 905 : loss : 0.292801, supervised_loss: 0.292780
[20:24:06.528] iteration 906 : loss : 0.297378, supervised_loss: 0.297356
[20:24:07.442] iteration 907 : loss : 0.290746, supervised_loss: 0.290723
[20:24:08.357] iteration 908 : loss : 0.294561, supervised_loss: 0.294541
[20:24:09.272] iteration 909 : loss : 0.304243, supervised_loss: 0.304219
[20:24:10.188] iteration 910 : loss : 0.285787, supervised_loss: 0.285760
[20:24:11.101] iteration 911 : loss : 0.293611, supervised_loss: 0.293587
[20:24:12.015] iteration 912 : loss : 0.298459, supervised_loss: 0.298430
[20:24:13.674] iteration 913 : loss : 0.289790, supervised_loss: 0.289762
[20:24:14.587] iteration 914 : loss : 0.314710, supervised_loss: 0.314638
[20:24:15.500] iteration 915 : loss : 0.300585, supervised_loss: 0.300555
[20:24:16.413] iteration 916 : loss : 0.303548, supervised_loss: 0.303532
[20:24:17.327] iteration 917 : loss : 0.297197, supervised_loss: 0.297178
[20:24:18.241] iteration 918 : loss : 0.300871, supervised_loss: 0.300842
[20:24:19.153] iteration 919 : loss : 0.297686, supervised_loss: 0.297653
[20:24:20.065] iteration 920 : loss : 0.301139, supervised_loss: 0.301098
[20:24:20.979] iteration 921 : loss : 0.289822, supervised_loss: 0.289797
[20:24:21.892] iteration 922 : loss : 0.285193, supervised_loss: 0.285165
[20:24:22.805] iteration 923 : loss : 0.287493, supervised_loss: 0.287470
[20:24:23.717] iteration 924 : loss : 0.291698, supervised_loss: 0.291670
[20:24:25.195] iteration 925 : loss : 0.289841, supervised_loss: 0.289819
[20:24:26.108] iteration 926 : loss : 0.292022, supervised_loss: 0.291990
[20:24:27.022] iteration 927 : loss : 0.293686, supervised_loss: 0.293666
[20:24:27.935] iteration 928 : loss : 0.289964, supervised_loss: 0.289939
[20:24:28.848] iteration 929 : loss : 0.289815, supervised_loss: 0.289789
[20:24:29.760] iteration 930 : loss : 0.310055, supervised_loss: 0.310034
[20:24:30.674] iteration 931 : loss : 0.314687, supervised_loss: 0.314663
[20:24:31.587] iteration 932 : loss : 0.290834, supervised_loss: 0.290807
[20:24:32.500] iteration 933 : loss : 0.297231, supervised_loss: 0.297203
[20:24:33.413] iteration 934 : loss : 0.294387, supervised_loss: 0.294342
[20:24:34.327] iteration 935 : loss : 0.313822, supervised_loss: 0.313788
[20:24:35.240] iteration 936 : loss : 0.290826, supervised_loss: 0.290798
[20:24:36.768] iteration 937 : loss : 0.297422, supervised_loss: 0.297399
[20:24:37.681] iteration 938 : loss : 0.312212, supervised_loss: 0.312188
[20:24:38.593] iteration 939 : loss : 0.287968, supervised_loss: 0.287929
[20:24:39.506] iteration 940 : loss : 0.284739, supervised_loss: 0.284703
[20:24:40.418] iteration 941 : loss : 0.295825, supervised_loss: 0.295801
[20:24:41.332] iteration 942 : loss : 0.299236, supervised_loss: 0.299215
[20:24:42.246] iteration 943 : loss : 0.288974, supervised_loss: 0.288954
[20:24:43.158] iteration 944 : loss : 0.297815, supervised_loss: 0.297789
[20:24:44.070] iteration 945 : loss : 0.302945, supervised_loss: 0.302908
[20:24:44.983] iteration 946 : loss : 0.291363, supervised_loss: 0.291342
[20:24:45.896] iteration 947 : loss : 0.291822, supervised_loss: 0.291796
[20:24:46.808] iteration 948 : loss : 0.288454, supervised_loss: 0.288418
[20:24:48.323] iteration 949 : loss : 0.295893, supervised_loss: 0.295867
[20:24:49.236] iteration 950 : loss : 0.286294, supervised_loss: 0.286265
[20:24:50.150] iteration 951 : loss : 0.298460, supervised_loss: 0.298439
[20:24:51.063] iteration 952 : loss : 0.298404, supervised_loss: 0.298369
[20:24:51.976] iteration 953 : loss : 0.306425, supervised_loss: 0.306371
[20:24:52.889] iteration 954 : loss : 0.289116, supervised_loss: 0.289094
[20:24:53.803] iteration 955 : loss : 0.304712, supervised_loss: 0.304682
[20:24:54.716] iteration 956 : loss : 0.304847, supervised_loss: 0.304806
[20:24:55.629] iteration 957 : loss : 0.288970, supervised_loss: 0.288939
[20:24:56.543] iteration 958 : loss : 0.288423, supervised_loss: 0.288395
[20:24:57.457] iteration 959 : loss : 0.285871, supervised_loss: 0.285842
[20:24:58.372] iteration 960 : loss : 0.291468, supervised_loss: 0.291443
[20:24:59.904] iteration 961 : loss : 0.292423, supervised_loss: 0.292398
[20:25:00.819] iteration 962 : loss : 0.293905, supervised_loss: 0.293876
[20:25:01.731] iteration 963 : loss : 0.303375, supervised_loss: 0.303354
[20:25:02.645] iteration 964 : loss : 0.297529, supervised_loss: 0.297503
[20:25:03.558] iteration 965 : loss : 0.298105, supervised_loss: 0.298070
[20:25:04.471] iteration 966 : loss : 0.298352, supervised_loss: 0.298316
[20:25:05.385] iteration 967 : loss : 0.294536, supervised_loss: 0.294512
[20:25:06.297] iteration 968 : loss : 0.284032, supervised_loss: 0.284011
[20:25:07.210] iteration 969 : loss : 0.291206, supervised_loss: 0.291179
[20:25:08.123] iteration 970 : loss : 0.283218, supervised_loss: 0.283195
[20:25:09.036] iteration 971 : loss : 0.293441, supervised_loss: 0.293419
[20:25:09.950] iteration 972 : loss : 0.284691, supervised_loss: 0.284655
[20:25:11.497] iteration 973 : loss : 0.292809, supervised_loss: 0.292769
[20:25:12.410] iteration 974 : loss : 0.290552, supervised_loss: 0.290517
[20:25:13.321] iteration 975 : loss : 0.288130, supervised_loss: 0.288103
[20:25:14.234] iteration 976 : loss : 0.283365, supervised_loss: 0.283336
[20:25:15.147] iteration 977 : loss : 0.296393, supervised_loss: 0.296366
[20:25:16.060] iteration 978 : loss : 0.293651, supervised_loss: 0.293630
[20:25:16.974] iteration 979 : loss : 0.286210, supervised_loss: 0.286174
[20:25:17.888] iteration 980 : loss : 0.294474, supervised_loss: 0.294449
[20:25:18.802] iteration 981 : loss : 0.282675, supervised_loss: 0.282657
[20:25:19.716] iteration 982 : loss : 0.288758, supervised_loss: 0.288738
[20:25:20.627] iteration 983 : loss : 0.299377, supervised_loss: 0.299345
[20:25:21.540] iteration 984 : loss : 0.287606, supervised_loss: 0.287581
[20:25:23.194] iteration 985 : loss : 0.289711, supervised_loss: 0.289695
[20:25:24.106] iteration 986 : loss : 0.291372, supervised_loss: 0.291348
[20:25:25.018] iteration 987 : loss : 0.315211, supervised_loss: 0.315186
[20:25:25.932] iteration 988 : loss : 0.286811, supervised_loss: 0.286781
[20:25:26.844] iteration 989 : loss : 0.296875, supervised_loss: 0.296832
[20:25:27.757] iteration 990 : loss : 0.294808, supervised_loss: 0.294784
[20:25:28.671] iteration 991 : loss : 0.292390, supervised_loss: 0.292368
[20:25:29.585] iteration 992 : loss : 0.290810, supervised_loss: 0.290782
[20:25:30.498] iteration 993 : loss : 0.305015, supervised_loss: 0.304995
[20:25:31.411] iteration 994 : loss : 0.286850, supervised_loss: 0.286826
[20:25:32.323] iteration 995 : loss : 0.287724, supervised_loss: 0.287683
[20:25:33.238] iteration 996 : loss : 0.289589, supervised_loss: 0.289546
[20:25:34.843] iteration 997 : loss : 0.293216, supervised_loss: 0.293195
[20:25:35.756] iteration 998 : loss : 0.292852, supervised_loss: 0.292823
[20:25:36.669] iteration 999 : loss : 0.287691, supervised_loss: 0.287670
[20:25:37.582] iteration 1000 : loss : 0.311215, supervised_loss: 0.311194
[20:25:40.440] iteration 1001 : loss : 0.292647, supervised_loss: 0.292620
[20:25:41.353] iteration 1002 : loss : 0.296930, supervised_loss: 0.296907
[20:25:42.265] iteration 1003 : loss : 0.287547, supervised_loss: 0.287510
[20:25:43.178] iteration 1004 : loss : 0.294039, supervised_loss: 0.294002
[20:25:44.091] iteration 1005 : loss : 0.298874, supervised_loss: 0.298851
[20:25:45.003] iteration 1006 : loss : 0.289809, supervised_loss: 0.289782
[20:25:45.916] iteration 1007 : loss : 0.287680, supervised_loss: 0.287659
[20:25:46.829] iteration 1008 : loss : 0.301446, supervised_loss: 0.301414
[20:25:48.421] iteration 1009 : loss : 0.284276, supervised_loss: 0.284255
[20:25:49.334] iteration 1010 : loss : 0.282145, supervised_loss: 0.282122
[20:25:50.246] iteration 1011 : loss : 0.286601, supervised_loss: 0.286581
[20:25:51.160] iteration 1012 : loss : 0.310114, supervised_loss: 0.310082
[20:25:52.071] iteration 1013 : loss : 0.293574, supervised_loss: 0.293555
[20:25:52.983] iteration 1014 : loss : 0.292681, supervised_loss: 0.292666
[20:25:53.896] iteration 1015 : loss : 0.293114, supervised_loss: 0.293085
[20:25:54.810] iteration 1016 : loss : 0.299905, supervised_loss: 0.299885
[20:25:55.723] iteration 1017 : loss : 0.286410, supervised_loss: 0.286387
[20:25:56.637] iteration 1018 : loss : 0.292552, supervised_loss: 0.292519
[20:25:57.551] iteration 1019 : loss : 0.293217, supervised_loss: 0.293186
[20:25:58.464] iteration 1020 : loss : 0.295848, supervised_loss: 0.295823
[20:25:59.961] iteration 1021 : loss : 0.283429, supervised_loss: 0.283404
[20:26:00.872] iteration 1022 : loss : 0.285510, supervised_loss: 0.285485
[20:26:01.785] iteration 1023 : loss : 0.292539, supervised_loss: 0.292510
[20:26:02.696] iteration 1024 : loss : 0.297125, supervised_loss: 0.297105
[20:26:03.609] iteration 1025 : loss : 0.290696, supervised_loss: 0.290676
[20:26:04.523] iteration 1026 : loss : 0.288158, supervised_loss: 0.288131
[20:26:05.436] iteration 1027 : loss : 0.299609, supervised_loss: 0.299585
[20:26:06.349] iteration 1028 : loss : 0.285862, supervised_loss: 0.285832
[20:26:07.262] iteration 1029 : loss : 0.301364, supervised_loss: 0.301339
[20:26:08.176] iteration 1030 : loss : 0.287640, supervised_loss: 0.287617
[20:26:09.090] iteration 1031 : loss : 0.284229, supervised_loss: 0.284204
[20:26:10.004] iteration 1032 : loss : 0.284568, supervised_loss: 0.284550
[20:26:11.512] iteration 1033 : loss : 0.285638, supervised_loss: 0.285597
[20:26:12.423] iteration 1034 : loss : 0.285608, supervised_loss: 0.285580
[20:26:13.337] iteration 1035 : loss : 0.295076, supervised_loss: 0.295058
[20:26:14.250] iteration 1036 : loss : 0.286515, supervised_loss: 0.286485
[20:26:15.162] iteration 1037 : loss : 0.284313, supervised_loss: 0.284293
[20:26:16.075] iteration 1038 : loss : 0.284113, supervised_loss: 0.284090
[20:26:16.988] iteration 1039 : loss : 0.297473, supervised_loss: 0.297453
[20:26:17.900] iteration 1040 : loss : 0.291736, supervised_loss: 0.291716
[20:26:18.813] iteration 1041 : loss : 0.291109, supervised_loss: 0.291082
[20:26:19.725] iteration 1042 : loss : 0.308111, supervised_loss: 0.308082
[20:26:20.639] iteration 1043 : loss : 0.285929, supervised_loss: 0.285909
[20:26:21.552] iteration 1044 : loss : 0.307628, supervised_loss: 0.307608
[20:26:23.136] iteration 1045 : loss : 0.279498, supervised_loss: 0.279480
[20:26:24.047] iteration 1046 : loss : 0.284104, supervised_loss: 0.284086
[20:26:24.960] iteration 1047 : loss : 0.286001, supervised_loss: 0.285978
[20:26:25.873] iteration 1048 : loss : 0.287678, supervised_loss: 0.287647
[20:26:26.786] iteration 1049 : loss : 0.304061, supervised_loss: 0.304015
[20:26:27.700] iteration 1050 : loss : 0.300727, supervised_loss: 0.300697
[20:26:28.613] iteration 1051 : loss : 0.286596, supervised_loss: 0.286570
[20:26:29.525] iteration 1052 : loss : 0.304267, supervised_loss: 0.304229
[20:26:30.438] iteration 1053 : loss : 0.281866, supervised_loss: 0.281832
[20:26:31.350] iteration 1054 : loss : 0.286753, supervised_loss: 0.286725
[20:26:32.264] iteration 1055 : loss : 0.293525, supervised_loss: 0.293467
[20:26:33.177] iteration 1056 : loss : 0.289964, supervised_loss: 0.289937
[20:26:34.809] iteration 1057 : loss : 0.287874, supervised_loss: 0.287852
[20:26:35.722] iteration 1058 : loss : 0.293125, supervised_loss: 0.293105
[20:26:36.635] iteration 1059 : loss : 0.294759, supervised_loss: 0.294728
[20:26:37.547] iteration 1060 : loss : 0.282634, supervised_loss: 0.282608
[20:26:38.460] iteration 1061 : loss : 0.319401, supervised_loss: 0.319380
[20:26:39.373] iteration 1062 : loss : 0.283071, supervised_loss: 0.283044
[20:26:40.286] iteration 1063 : loss : 0.294769, supervised_loss: 0.294724
[20:26:41.199] iteration 1064 : loss : 0.292197, supervised_loss: 0.292166
[20:26:42.111] iteration 1065 : loss : 0.300342, supervised_loss: 0.300319
[20:26:43.025] iteration 1066 : loss : 0.290035, supervised_loss: 0.290008
[20:26:43.938] iteration 1067 : loss : 0.290383, supervised_loss: 0.290345
[20:26:44.851] iteration 1068 : loss : 0.293984, supervised_loss: 0.293960
[20:26:46.402] iteration 1069 : loss : 0.306340, supervised_loss: 0.306321
[20:26:47.314] iteration 1070 : loss : 0.311823, supervised_loss: 0.311787
[20:26:48.226] iteration 1071 : loss : 0.293174, supervised_loss: 0.293136
[20:26:49.140] iteration 1072 : loss : 0.306154, supervised_loss: 0.306123
[20:26:50.052] iteration 1073 : loss : 0.283310, supervised_loss: 0.283272
[20:26:50.964] iteration 1074 : loss : 0.296103, supervised_loss: 0.296061
[20:26:51.876] iteration 1075 : loss : 0.293159, supervised_loss: 0.293117
[20:26:52.789] iteration 1076 : loss : 0.294959, supervised_loss: 0.294933
[20:26:53.703] iteration 1077 : loss : 0.300282, supervised_loss: 0.300227
[20:26:54.616] iteration 1078 : loss : 0.286826, supervised_loss: 0.286788
[20:26:55.528] iteration 1079 : loss : 0.289789, supervised_loss: 0.289772
[20:26:56.441] iteration 1080 : loss : 0.289563, supervised_loss: 0.289530
[20:26:57.991] iteration 1081 : loss : 0.289682, supervised_loss: 0.289655
[20:26:58.903] iteration 1082 : loss : 0.291395, supervised_loss: 0.291338
[20:26:59.815] iteration 1083 : loss : 0.288001, supervised_loss: 0.287967
[20:27:00.728] iteration 1084 : loss : 0.287968, supervised_loss: 0.287935
[20:27:01.642] iteration 1085 : loss : 0.296666, supervised_loss: 0.296629
[20:27:02.555] iteration 1086 : loss : 0.293940, supervised_loss: 0.293905
[20:27:03.468] iteration 1087 : loss : 0.282267, supervised_loss: 0.282222
[20:27:04.380] iteration 1088 : loss : 0.286365, supervised_loss: 0.286342
[20:27:05.293] iteration 1089 : loss : 0.292270, supervised_loss: 0.292243
[20:27:06.206] iteration 1090 : loss : 0.295893, supervised_loss: 0.295865
[20:27:07.120] iteration 1091 : loss : 0.296460, supervised_loss: 0.296425
[20:27:08.034] iteration 1092 : loss : 0.309940, supervised_loss: 0.309901
[20:27:09.567] iteration 1093 : loss : 0.282395, supervised_loss: 0.282362
[20:27:10.480] iteration 1094 : loss : 0.287047, supervised_loss: 0.287012
[20:27:11.392] iteration 1095 : loss : 0.295704, supervised_loss: 0.295660
[20:27:12.306] iteration 1096 : loss : 0.294184, supervised_loss: 0.294162
[20:27:13.219] iteration 1097 : loss : 0.288176, supervised_loss: 0.288145
[20:27:14.131] iteration 1098 : loss : 0.285852, supervised_loss: 0.285818
[20:27:15.043] iteration 1099 : loss : 0.287000, supervised_loss: 0.286973
[20:27:15.958] iteration 1100 : loss : 0.303416, supervised_loss: 0.303384
[20:27:16.870] iteration 1101 : loss : 0.293693, supervised_loss: 0.293636
[20:27:17.783] iteration 1102 : loss : 0.288483, supervised_loss: 0.288444
[20:27:18.697] iteration 1103 : loss : 0.288395, supervised_loss: 0.288370
[20:27:19.609] iteration 1104 : loss : 0.302623, supervised_loss: 0.302597
[20:27:21.117] iteration 1105 : loss : 0.289076, supervised_loss: 0.289046
[20:27:22.030] iteration 1106 : loss : 0.303507, supervised_loss: 0.303473
[20:27:22.943] iteration 1107 : loss : 0.283880, supervised_loss: 0.283850
[20:27:23.856] iteration 1108 : loss : 0.302791, supervised_loss: 0.302732
[20:27:24.769] iteration 1109 : loss : 0.296453, supervised_loss: 0.296421
[20:27:25.681] iteration 1110 : loss : 0.294093, supervised_loss: 0.294054
[20:27:26.593] iteration 1111 : loss : 0.293599, supervised_loss: 0.293573
[20:27:27.507] iteration 1112 : loss : 0.287053, supervised_loss: 0.287004
[20:27:28.418] iteration 1113 : loss : 0.294047, supervised_loss: 0.294010
[20:27:29.330] iteration 1114 : loss : 0.283575, supervised_loss: 0.283549
[20:27:30.243] iteration 1115 : loss : 0.293907, supervised_loss: 0.293877
[20:27:31.157] iteration 1116 : loss : 0.283999, supervised_loss: 0.283976
[20:27:32.657] iteration 1117 : loss : 0.292954, supervised_loss: 0.292922
[20:27:33.569] iteration 1118 : loss : 0.286274, supervised_loss: 0.286220
[20:27:34.481] iteration 1119 : loss : 0.284442, supervised_loss: 0.284416
[20:27:35.393] iteration 1120 : loss : 0.284986, supervised_loss: 0.284967
[20:27:36.306] iteration 1121 : loss : 0.289679, supervised_loss: 0.289635
[20:27:37.220] iteration 1122 : loss : 0.297914, supervised_loss: 0.297875
[20:27:38.134] iteration 1123 : loss : 0.295827, supervised_loss: 0.295780
[20:27:39.047] iteration 1124 : loss : 0.294754, supervised_loss: 0.294698
[20:27:39.960] iteration 1125 : loss : 0.292190, supervised_loss: 0.292149
[20:27:40.873] iteration 1126 : loss : 0.287194, supervised_loss: 0.287159
[20:27:41.785] iteration 1127 : loss : 0.296860, supervised_loss: 0.296839
[20:27:42.698] iteration 1128 : loss : 0.287366, supervised_loss: 0.287333
[20:27:44.370] iteration 1129 : loss : 0.299147, supervised_loss: 0.299119
[20:27:45.283] iteration 1130 : loss : 0.304844, supervised_loss: 0.304802
[20:27:46.194] iteration 1131 : loss : 0.292770, supervised_loss: 0.292747
[20:27:47.107] iteration 1132 : loss : 0.290735, supervised_loss: 0.290713
[20:27:48.020] iteration 1133 : loss : 0.289050, supervised_loss: 0.289018
[20:27:48.934] iteration 1134 : loss : 0.286764, supervised_loss: 0.286730
[20:27:49.846] iteration 1135 : loss : 0.281376, supervised_loss: 0.281332
[20:27:50.758] iteration 1136 : loss : 0.298451, supervised_loss: 0.298424
[20:27:51.671] iteration 1137 : loss : 0.289489, supervised_loss: 0.289461
[20:27:52.583] iteration 1138 : loss : 0.287615, supervised_loss: 0.287585
[20:27:53.497] iteration 1139 : loss : 0.293964, supervised_loss: 0.293929
[20:27:54.412] iteration 1140 : loss : 0.289817, supervised_loss: 0.289791
[20:27:55.918] iteration 1141 : loss : 0.293130, supervised_loss: 0.293107
[20:27:56.831] iteration 1142 : loss : 0.302702, supervised_loss: 0.302675
[20:27:57.743] iteration 1143 : loss : 0.282745, supervised_loss: 0.282712
[20:27:58.658] iteration 1144 : loss : 0.285668, supervised_loss: 0.285635
[20:27:59.571] iteration 1145 : loss : 0.289652, supervised_loss: 0.289631
[20:28:00.485] iteration 1146 : loss : 0.287228, supervised_loss: 0.287191
[20:28:01.397] iteration 1147 : loss : 0.289611, supervised_loss: 0.289570
[20:28:02.309] iteration 1148 : loss : 0.294352, supervised_loss: 0.294325
[20:28:03.221] iteration 1149 : loss : 0.288653, supervised_loss: 0.288627
[20:28:04.135] iteration 1150 : loss : 0.281235, supervised_loss: 0.281194
[20:28:05.049] iteration 1151 : loss : 0.294492, supervised_loss: 0.294453
[20:28:05.963] iteration 1152 : loss : 0.299524, supervised_loss: 0.299479
[20:28:07.507] iteration 1153 : loss : 0.291292, supervised_loss: 0.291252
[20:28:08.418] iteration 1154 : loss : 0.288516, supervised_loss: 0.288487
[20:28:09.332] iteration 1155 : loss : 0.289889, supervised_loss: 0.289853
[20:28:10.244] iteration 1156 : loss : 0.287431, supervised_loss: 0.287397
[20:28:11.157] iteration 1157 : loss : 0.308454, supervised_loss: 0.308409
[20:28:12.070] iteration 1158 : loss : 0.289014, supervised_loss: 0.288969
[20:28:12.983] iteration 1159 : loss : 0.283130, supervised_loss: 0.283096
[20:28:13.895] iteration 1160 : loss : 0.284532, supervised_loss: 0.284496
[20:28:14.807] iteration 1161 : loss : 0.288737, supervised_loss: 0.288699
[20:28:15.721] iteration 1162 : loss : 0.296859, supervised_loss: 0.296825
[20:28:16.633] iteration 1163 : loss : 0.281387, supervised_loss: 0.281363
[20:28:17.546] iteration 1164 : loss : 0.286346, supervised_loss: 0.286313
[20:28:19.116] iteration 1165 : loss : 0.278182, supervised_loss: 0.278150
[20:28:20.029] iteration 1166 : loss : 0.280093, supervised_loss: 0.280067
[20:28:20.941] iteration 1167 : loss : 0.287537, supervised_loss: 0.287503
[20:28:21.853] iteration 1168 : loss : 0.294870, supervised_loss: 0.294834
[20:28:22.765] iteration 1169 : loss : 0.305364, supervised_loss: 0.305320
[20:28:23.679] iteration 1170 : loss : 0.286136, supervised_loss: 0.286117
[20:28:24.592] iteration 1171 : loss : 0.291072, supervised_loss: 0.291041
[20:28:25.505] iteration 1172 : loss : 0.292280, supervised_loss: 0.292257
[20:28:26.418] iteration 1173 : loss : 0.294587, supervised_loss: 0.294540
[20:28:27.330] iteration 1174 : loss : 0.284345, supervised_loss: 0.284285
[20:28:28.243] iteration 1175 : loss : 0.287688, supervised_loss: 0.287656
[20:28:29.158] iteration 1176 : loss : 0.284593, supervised_loss: 0.284563
[20:28:30.774] iteration 1177 : loss : 0.288025, supervised_loss: 0.288004
[20:28:31.687] iteration 1178 : loss : 0.294411, supervised_loss: 0.294389
[20:28:32.600] iteration 1179 : loss : 0.299111, supervised_loss: 0.299075
[20:28:33.513] iteration 1180 : loss : 0.323943, supervised_loss: 0.323912
[20:28:34.425] iteration 1181 : loss : 0.280866, supervised_loss: 0.280835
[20:28:35.337] iteration 1182 : loss : 0.291501, supervised_loss: 0.291480
[20:28:36.249] iteration 1183 : loss : 0.284298, supervised_loss: 0.284269
[20:28:37.162] iteration 1184 : loss : 0.292742, supervised_loss: 0.292705
[20:28:38.075] iteration 1185 : loss : 0.290774, supervised_loss: 0.290745
[20:28:38.988] iteration 1186 : loss : 0.290581, supervised_loss: 0.290526
[20:28:39.901] iteration 1187 : loss : 0.291345, supervised_loss: 0.291298
[20:28:40.814] iteration 1188 : loss : 0.289733, supervised_loss: 0.289691
[20:28:42.305] iteration 1189 : loss : 0.283900, supervised_loss: 0.283864
[20:28:43.219] iteration 1190 : loss : 0.285145, supervised_loss: 0.285104
[20:28:44.130] iteration 1191 : loss : 0.293474, supervised_loss: 0.293442
[20:28:45.043] iteration 1192 : loss : 0.289996, supervised_loss: 0.289967
[20:28:45.956] iteration 1193 : loss : 0.286415, supervised_loss: 0.286395
[20:28:46.869] iteration 1194 : loss : 0.283827, supervised_loss: 0.283793
[20:28:47.783] iteration 1195 : loss : 0.293461, supervised_loss: 0.293439
[20:28:48.698] iteration 1196 : loss : 0.297166, supervised_loss: 0.297144
[20:28:49.610] iteration 1197 : loss : 0.290041, supervised_loss: 0.290003
[20:28:50.523] iteration 1198 : loss : 0.293192, supervised_loss: 0.293169
[20:28:51.435] iteration 1199 : loss : 0.281359, supervised_loss: 0.281334
[20:28:52.349] iteration 1200 : loss : 0.280239, supervised_loss: 0.280194
[20:28:54.414] save best model to model/LA_vnet_25_labeled/URPC/iter_1200_dice_0.9175089001655579.pth
[20:28:55.937] iteration 1201 : loss : 0.299120, supervised_loss: 0.299090
[20:28:56.849] iteration 1202 : loss : 0.290833, supervised_loss: 0.290800
[20:28:57.761] iteration 1203 : loss : 0.288122, supervised_loss: 0.288079
[20:28:58.674] iteration 1204 : loss : 0.301617, supervised_loss: 0.301568
[20:28:59.587] iteration 1205 : loss : 0.305231, supervised_loss: 0.305197
[20:29:00.500] iteration 1206 : loss : 0.291361, supervised_loss: 0.291332
[20:29:01.413] iteration 1207 : loss : 0.291754, supervised_loss: 0.291732
[20:29:02.325] iteration 1208 : loss : 0.290042, supervised_loss: 0.290014
[20:29:03.237] iteration 1209 : loss : 0.295202, supervised_loss: 0.295160
[20:29:04.149] iteration 1210 : loss : 0.289740, supervised_loss: 0.289693
[20:29:05.063] iteration 1211 : loss : 0.286695, supervised_loss: 0.286656
[20:29:05.976] iteration 1212 : loss : 0.287446, supervised_loss: 0.287418
[20:29:07.498] iteration 1213 : loss : 0.280316, supervised_loss: 0.280284
[20:29:08.411] iteration 1214 : loss : 0.285571, supervised_loss: 0.285539
[20:29:09.323] iteration 1215 : loss : 0.290127, supervised_loss: 0.290096
[20:29:10.237] iteration 1216 : loss : 0.288551, supervised_loss: 0.288518
[20:29:11.150] iteration 1217 : loss : 0.283520, supervised_loss: 0.283477
[20:29:12.062] iteration 1218 : loss : 0.281605, supervised_loss: 0.281564
[20:29:12.974] iteration 1219 : loss : 0.287458, supervised_loss: 0.287428
[20:29:13.886] iteration 1220 : loss : 0.281746, supervised_loss: 0.281716
[20:29:14.799] iteration 1221 : loss : 0.287535, supervised_loss: 0.287505
[20:29:15.713] iteration 1222 : loss : 0.289167, supervised_loss: 0.289130
[20:29:16.628] iteration 1223 : loss : 0.326312, supervised_loss: 0.326248
[20:29:17.541] iteration 1224 : loss : 0.282807, supervised_loss: 0.282775
[20:29:19.032] iteration 1225 : loss : 0.281999, supervised_loss: 0.281967
[20:29:19.946] iteration 1226 : loss : 0.291151, supervised_loss: 0.291125
[20:29:20.859] iteration 1227 : loss : 0.289751, supervised_loss: 0.289717
[20:29:21.771] iteration 1228 : loss : 0.285709, supervised_loss: 0.285675
[20:29:22.684] iteration 1229 : loss : 0.281173, supervised_loss: 0.281141
[20:29:23.595] iteration 1230 : loss : 0.291388, supervised_loss: 0.291347
[20:29:24.509] iteration 1231 : loss : 0.283954, supervised_loss: 0.283906
[20:29:25.422] iteration 1232 : loss : 0.285854, supervised_loss: 0.285822
[20:29:26.335] iteration 1233 : loss : 0.304868, supervised_loss: 0.304834
[20:29:27.248] iteration 1234 : loss : 0.290572, supervised_loss: 0.290548
[20:29:28.163] iteration 1235 : loss : 0.292849, supervised_loss: 0.292808
[20:29:29.076] iteration 1236 : loss : 0.290304, supervised_loss: 0.290230
[20:29:30.551] iteration 1237 : loss : 0.294651, supervised_loss: 0.294611
[20:29:31.465] iteration 1238 : loss : 0.282822, supervised_loss: 0.282790
[20:29:32.377] iteration 1239 : loss : 0.284851, supervised_loss: 0.284810
[20:29:33.290] iteration 1240 : loss : 0.298769, supervised_loss: 0.298741
[20:29:34.202] iteration 1241 : loss : 0.278291, supervised_loss: 0.278252
[20:29:35.117] iteration 1242 : loss : 0.285146, supervised_loss: 0.285112
[20:29:36.030] iteration 1243 : loss : 0.282629, supervised_loss: 0.282581
[20:29:36.943] iteration 1244 : loss : 0.284649, supervised_loss: 0.284614
[20:29:37.856] iteration 1245 : loss : 0.294762, supervised_loss: 0.294719
[20:29:38.769] iteration 1246 : loss : 0.291337, supervised_loss: 0.291303
[20:29:39.683] iteration 1247 : loss : 0.287756, supervised_loss: 0.287719
[20:29:40.596] iteration 1248 : loss : 0.286304, supervised_loss: 0.286245
[20:29:42.105] iteration 1249 : loss : 0.286667, supervised_loss: 0.286638
[20:29:43.018] iteration 1250 : loss : 0.279138, supervised_loss: 0.279110
[20:29:43.932] iteration 1251 : loss : 0.304148, supervised_loss: 0.304082
[20:29:44.843] iteration 1252 : loss : 0.286144, supervised_loss: 0.286106
[20:29:45.756] iteration 1253 : loss : 0.291095, supervised_loss: 0.291047
[20:29:46.671] iteration 1254 : loss : 0.297290, supervised_loss: 0.297239
[20:29:47.584] iteration 1255 : loss : 0.286009, supervised_loss: 0.285951
[20:29:48.496] iteration 1256 : loss : 0.295771, supervised_loss: 0.295747
[20:29:49.410] iteration 1257 : loss : 0.288093, supervised_loss: 0.288056
[20:29:50.321] iteration 1258 : loss : 0.283649, supervised_loss: 0.283618
[20:29:51.233] iteration 1259 : loss : 0.286610, supervised_loss: 0.286563
[20:29:52.148] iteration 1260 : loss : 0.289404, supervised_loss: 0.289371
[20:29:53.721] iteration 1261 : loss : 0.289780, supervised_loss: 0.289739
[20:29:54.634] iteration 1262 : loss : 0.280670, supervised_loss: 0.280642
[20:29:55.548] iteration 1263 : loss : 0.285793, supervised_loss: 0.285756
[20:29:56.461] iteration 1264 : loss : 0.292250, supervised_loss: 0.292219
[20:29:57.375] iteration 1265 : loss : 0.287428, supervised_loss: 0.287400
[20:29:58.288] iteration 1266 : loss : 0.282252, supervised_loss: 0.282220
[20:29:59.200] iteration 1267 : loss : 0.286184, supervised_loss: 0.286151
[20:30:00.113] iteration 1268 : loss : 0.308975, supervised_loss: 0.308939
[20:30:01.026] iteration 1269 : loss : 0.287095, supervised_loss: 0.287061
[20:30:01.939] iteration 1270 : loss : 0.290980, supervised_loss: 0.290948
[20:30:02.854] iteration 1271 : loss : 0.289569, supervised_loss: 0.289535
[20:30:03.767] iteration 1272 : loss : 0.299929, supervised_loss: 0.299887
[20:30:05.373] iteration 1273 : loss : 0.296828, supervised_loss: 0.296784
[20:30:06.286] iteration 1274 : loss : 0.282395, supervised_loss: 0.282354
[20:30:07.198] iteration 1275 : loss : 0.283545, supervised_loss: 0.283518
[20:30:08.111] iteration 1276 : loss : 0.281944, supervised_loss: 0.281912
[20:30:09.024] iteration 1277 : loss : 0.281892, supervised_loss: 0.281860
[20:30:09.938] iteration 1278 : loss : 0.286488, supervised_loss: 0.286457
[20:30:10.852] iteration 1279 : loss : 0.288818, supervised_loss: 0.288790
[20:30:11.767] iteration 1280 : loss : 0.285501, supervised_loss: 0.285469
[20:30:12.680] iteration 1281 : loss : 0.319837, supervised_loss: 0.319780
[20:30:13.593] iteration 1282 : loss : 0.289898, supervised_loss: 0.289865
[20:30:14.505] iteration 1283 : loss : 0.292496, supervised_loss: 0.292457
[20:30:15.418] iteration 1284 : loss : 0.276436, supervised_loss: 0.276393
[20:30:16.997] iteration 1285 : loss : 0.294975, supervised_loss: 0.294937
[20:30:17.909] iteration 1286 : loss : 0.291484, supervised_loss: 0.291451
[20:30:18.822] iteration 1287 : loss : 0.310293, supervised_loss: 0.310234
[20:30:19.735] iteration 1288 : loss : 0.294822, supervised_loss: 0.294787
[20:30:20.648] iteration 1289 : loss : 0.287307, supervised_loss: 0.287274
[20:30:21.562] iteration 1290 : loss : 0.296641, supervised_loss: 0.296611
[20:30:22.475] iteration 1291 : loss : 0.296265, supervised_loss: 0.296227
[20:30:23.388] iteration 1292 : loss : 0.297097, supervised_loss: 0.297065
[20:30:24.303] iteration 1293 : loss : 0.291873, supervised_loss: 0.291840
[20:30:25.216] iteration 1294 : loss : 0.292034, supervised_loss: 0.291993
[20:30:26.128] iteration 1295 : loss : 0.289281, supervised_loss: 0.289242
[20:30:27.041] iteration 1296 : loss : 0.290367, supervised_loss: 0.290339
[20:30:28.617] iteration 1297 : loss : 0.287705, supervised_loss: 0.287661
[20:30:29.530] iteration 1298 : loss : 0.282266, supervised_loss: 0.282238
[20:30:30.442] iteration 1299 : loss : 0.290851, supervised_loss: 0.290808
[20:30:31.354] iteration 1300 : loss : 0.311229, supervised_loss: 0.311188
[20:30:32.268] iteration 1301 : loss : 0.289664, supervised_loss: 0.289632
[20:30:33.180] iteration 1302 : loss : 0.290123, supervised_loss: 0.290078
[20:30:34.092] iteration 1303 : loss : 0.315158, supervised_loss: 0.315132
[20:30:35.006] iteration 1304 : loss : 0.291347, supervised_loss: 0.291321
[20:30:35.920] iteration 1305 : loss : 0.293898, supervised_loss: 0.293878
[20:30:36.833] iteration 1306 : loss : 0.290133, supervised_loss: 0.290106
[20:30:37.746] iteration 1307 : loss : 0.298313, supervised_loss: 0.298280
[20:30:38.659] iteration 1308 : loss : 0.298144, supervised_loss: 0.298107
[20:30:40.176] iteration 1309 : loss : 0.286952, supervised_loss: 0.286912
[20:30:41.089] iteration 1310 : loss : 0.303670, supervised_loss: 0.303647
[20:30:42.004] iteration 1311 : loss : 0.291816, supervised_loss: 0.291785
[20:30:42.917] iteration 1312 : loss : 0.283032, supervised_loss: 0.283006
[20:30:43.831] iteration 1313 : loss : 0.282368, supervised_loss: 0.282341
[20:30:44.743] iteration 1314 : loss : 0.279542, supervised_loss: 0.279496
[20:30:45.656] iteration 1315 : loss : 0.279831, supervised_loss: 0.279804
[20:30:46.570] iteration 1316 : loss : 0.295881, supervised_loss: 0.295838
[20:30:47.484] iteration 1317 : loss : 0.280002, supervised_loss: 0.279967
[20:30:48.397] iteration 1318 : loss : 0.287296, supervised_loss: 0.287260
[20:30:49.311] iteration 1319 : loss : 0.288683, supervised_loss: 0.288647
[20:30:50.225] iteration 1320 : loss : 0.286465, supervised_loss: 0.286433
[20:30:51.738] iteration 1321 : loss : 0.290394, supervised_loss: 0.290360
[20:30:52.652] iteration 1322 : loss : 0.302020, supervised_loss: 0.301985
[20:30:53.563] iteration 1323 : loss : 0.285939, supervised_loss: 0.285905
[20:30:54.474] iteration 1324 : loss : 0.284499, supervised_loss: 0.284465
[20:30:55.387] iteration 1325 : loss : 0.295440, supervised_loss: 0.295404
[20:30:56.301] iteration 1326 : loss : 0.291507, supervised_loss: 0.291465
[20:30:57.214] iteration 1327 : loss : 0.285334, supervised_loss: 0.285299
[20:30:58.128] iteration 1328 : loss : 0.283420, supervised_loss: 0.283386
[20:30:59.041] iteration 1329 : loss : 0.278462, supervised_loss: 0.278434
[20:30:59.954] iteration 1330 : loss : 0.281387, supervised_loss: 0.281328
[20:31:00.868] iteration 1331 : loss : 0.279557, supervised_loss: 0.279525
[20:31:01.782] iteration 1332 : loss : 0.290113, supervised_loss: 0.290088
[20:31:03.332] iteration 1333 : loss : 0.285133, supervised_loss: 0.285097
[20:31:04.244] iteration 1334 : loss : 0.280419, supervised_loss: 0.280382
[20:31:05.156] iteration 1335 : loss : 0.281348, supervised_loss: 0.281312
[20:31:06.068] iteration 1336 : loss : 0.285065, supervised_loss: 0.285036
[20:31:06.981] iteration 1337 : loss : 0.292683, supervised_loss: 0.292619
[20:31:07.894] iteration 1338 : loss : 0.297470, supervised_loss: 0.297435
[20:31:08.807] iteration 1339 : loss : 0.280377, supervised_loss: 0.280349
[20:31:09.719] iteration 1340 : loss : 0.281758, supervised_loss: 0.281697
[20:31:10.631] iteration 1341 : loss : 0.287863, supervised_loss: 0.287841
[20:31:11.543] iteration 1342 : loss : 0.294638, supervised_loss: 0.294589
[20:31:12.457] iteration 1343 : loss : 0.282710, supervised_loss: 0.282679
[20:31:13.369] iteration 1344 : loss : 0.282904, supervised_loss: 0.282876
[20:31:14.832] iteration 1345 : loss : 0.285669, supervised_loss: 0.285612
[20:31:15.744] iteration 1346 : loss : 0.291513, supervised_loss: 0.291471
[20:31:16.658] iteration 1347 : loss : 0.289722, supervised_loss: 0.289685
[20:31:17.572] iteration 1348 : loss : 0.289943, supervised_loss: 0.289890
[20:31:18.485] iteration 1349 : loss : 0.290751, supervised_loss: 0.290701
[20:31:19.397] iteration 1350 : loss : 0.288726, supervised_loss: 0.288699
[20:31:20.310] iteration 1351 : loss : 0.286298, supervised_loss: 0.286266
[20:31:21.223] iteration 1352 : loss : 0.287516, supervised_loss: 0.287482
[20:31:22.136] iteration 1353 : loss : 0.285948, supervised_loss: 0.285912
[20:31:23.051] iteration 1354 : loss : 0.288585, supervised_loss: 0.288545
[20:31:23.965] iteration 1355 : loss : 0.298090, supervised_loss: 0.298043
[20:31:24.879] iteration 1356 : loss : 0.296237, supervised_loss: 0.296202
[20:31:26.375] iteration 1357 : loss : 0.294325, supervised_loss: 0.294285
[20:31:27.286] iteration 1358 : loss : 0.288498, supervised_loss: 0.288450
[20:31:28.200] iteration 1359 : loss : 0.288667, supervised_loss: 0.288638
[20:31:29.113] iteration 1360 : loss : 0.285508, supervised_loss: 0.285474
[20:31:30.026] iteration 1361 : loss : 0.284867, supervised_loss: 0.284816
[20:31:30.939] iteration 1362 : loss : 0.292571, supervised_loss: 0.292515
[20:31:31.851] iteration 1363 : loss : 0.295800, supervised_loss: 0.295743
[20:31:32.765] iteration 1364 : loss : 0.285305, supervised_loss: 0.285258
[20:31:33.678] iteration 1365 : loss : 0.286100, supervised_loss: 0.286029
[20:31:34.592] iteration 1366 : loss : 0.292379, supervised_loss: 0.292328
[20:31:35.505] iteration 1367 : loss : 0.295156, supervised_loss: 0.295117
[20:31:36.419] iteration 1368 : loss : 0.303580, supervised_loss: 0.303533
[20:31:38.063] iteration 1369 : loss : 0.291442, supervised_loss: 0.291390
[20:31:38.975] iteration 1370 : loss : 0.279086, supervised_loss: 0.279038
[20:31:39.888] iteration 1371 : loss : 0.289007, supervised_loss: 0.288976
[20:31:40.802] iteration 1372 : loss : 0.287745, supervised_loss: 0.287710
[20:31:41.716] iteration 1373 : loss : 0.281905, supervised_loss: 0.281859
[20:31:42.628] iteration 1374 : loss : 0.284275, supervised_loss: 0.284228
[20:31:43.541] iteration 1375 : loss : 0.288761, supervised_loss: 0.288719
[20:31:44.454] iteration 1376 : loss : 0.289405, supervised_loss: 0.289368
[20:31:45.366] iteration 1377 : loss : 0.302987, supervised_loss: 0.302943
[20:31:46.279] iteration 1378 : loss : 0.283155, supervised_loss: 0.283112
[20:31:47.194] iteration 1379 : loss : 0.282219, supervised_loss: 0.282177
[20:31:48.107] iteration 1380 : loss : 0.289024, supervised_loss: 0.288974
[20:31:49.606] iteration 1381 : loss : 0.289817, supervised_loss: 0.289728
[20:31:50.520] iteration 1382 : loss : 0.281525, supervised_loss: 0.281486
[20:31:51.433] iteration 1383 : loss : 0.291449, supervised_loss: 0.291409
[20:31:52.346] iteration 1384 : loss : 0.288098, supervised_loss: 0.288047
[20:31:53.259] iteration 1385 : loss : 0.297733, supervised_loss: 0.297685
[20:31:54.172] iteration 1386 : loss : 0.297618, supervised_loss: 0.297552
[20:31:55.084] iteration 1387 : loss : 0.290197, supervised_loss: 0.290137
[20:31:55.998] iteration 1388 : loss : 0.288347, supervised_loss: 0.288295
[20:31:56.911] iteration 1389 : loss : 0.287178, supervised_loss: 0.287127
[20:31:57.823] iteration 1390 : loss : 0.289541, supervised_loss: 0.289495
[20:31:58.735] iteration 1391 : loss : 0.284709, supervised_loss: 0.284682
[20:31:59.648] iteration 1392 : loss : 0.280693, supervised_loss: 0.280654
[20:32:01.341] iteration 1393 : loss : 0.288143, supervised_loss: 0.288092
[20:32:02.252] iteration 1394 : loss : 0.286611, supervised_loss: 0.286574
[20:32:03.165] iteration 1395 : loss : 0.294452, supervised_loss: 0.294419
[20:32:04.078] iteration 1396 : loss : 0.299459, supervised_loss: 0.299416
[20:32:04.992] iteration 1397 : loss : 0.286713, supervised_loss: 0.286671
[20:32:05.904] iteration 1398 : loss : 0.283451, supervised_loss: 0.283409
[20:32:06.816] iteration 1399 : loss : 0.291322, supervised_loss: 0.291291
[20:32:07.728] iteration 1400 : loss : 0.284033, supervised_loss: 0.283985
[20:32:10.580] iteration 1401 : loss : 0.277830, supervised_loss: 0.277773
[20:32:11.493] iteration 1402 : loss : 0.290945, supervised_loss: 0.290910
[20:32:12.407] iteration 1403 : loss : 0.289775, supervised_loss: 0.289739
[20:32:13.320] iteration 1404 : loss : 0.285544, supervised_loss: 0.285509
[20:32:14.964] iteration 1405 : loss : 0.286390, supervised_loss: 0.286319
[20:32:15.876] iteration 1406 : loss : 0.291827, supervised_loss: 0.291784
[20:32:16.788] iteration 1407 : loss : 0.293238, supervised_loss: 0.293181
[20:32:17.700] iteration 1408 : loss : 0.287733, supervised_loss: 0.287691
[20:32:18.612] iteration 1409 : loss : 0.281415, supervised_loss: 0.281386
[20:32:19.526] iteration 1410 : loss : 0.294791, supervised_loss: 0.294764
[20:32:20.440] iteration 1411 : loss : 0.305951, supervised_loss: 0.305916
[20:32:21.353] iteration 1412 : loss : 0.287353, supervised_loss: 0.287325
[20:32:22.265] iteration 1413 : loss : 0.304187, supervised_loss: 0.304157
[20:32:23.177] iteration 1414 : loss : 0.290959, supervised_loss: 0.290927
[20:32:24.090] iteration 1415 : loss : 0.287014, supervised_loss: 0.286980
[20:32:25.002] iteration 1416 : loss : 0.298497, supervised_loss: 0.298436
[20:32:26.505] iteration 1417 : loss : 0.278404, supervised_loss: 0.278371
[20:32:27.417] iteration 1418 : loss : 0.287329, supervised_loss: 0.287284
[20:32:28.330] iteration 1419 : loss : 0.285975, supervised_loss: 0.285927
[20:32:29.243] iteration 1420 : loss : 0.279856, supervised_loss: 0.279814
[20:32:30.156] iteration 1421 : loss : 0.284419, supervised_loss: 0.284364
[20:32:31.068] iteration 1422 : loss : 0.284680, supervised_loss: 0.284608
[20:32:31.981] iteration 1423 : loss : 0.280768, supervised_loss: 0.280714
[20:32:32.894] iteration 1424 : loss : 0.288430, supervised_loss: 0.288403
[20:32:33.807] iteration 1425 : loss : 0.290542, supervised_loss: 0.290507
[20:32:34.720] iteration 1426 : loss : 0.291533, supervised_loss: 0.291480
[20:32:35.634] iteration 1427 : loss : 0.286952, supervised_loss: 0.286918
[20:32:36.546] iteration 1428 : loss : 0.300445, supervised_loss: 0.300419
[20:32:38.029] iteration 1429 : loss : 0.289978, supervised_loss: 0.289926
[20:32:38.940] iteration 1430 : loss : 0.288221, supervised_loss: 0.288193
[20:32:39.852] iteration 1431 : loss : 0.289325, supervised_loss: 0.289280
[20:32:40.764] iteration 1432 : loss : 0.284017, supervised_loss: 0.283981
[20:32:41.677] iteration 1433 : loss : 0.294325, supervised_loss: 0.294292
[20:32:42.591] iteration 1434 : loss : 0.290841, supervised_loss: 0.290783
[20:32:43.505] iteration 1435 : loss : 0.284827, supervised_loss: 0.284795
[20:32:44.417] iteration 1436 : loss : 0.280789, supervised_loss: 0.280761
[20:32:45.331] iteration 1437 : loss : 0.288160, supervised_loss: 0.288126
[20:32:46.243] iteration 1438 : loss : 0.293298, supervised_loss: 0.293267
[20:32:47.155] iteration 1439 : loss : 0.297624, supervised_loss: 0.297584
[20:32:48.068] iteration 1440 : loss : 0.288761, supervised_loss: 0.288661
[20:32:49.677] iteration 1441 : loss : 0.283519, supervised_loss: 0.283472
[20:32:50.590] iteration 1442 : loss : 0.289097, supervised_loss: 0.289044
[20:32:51.502] iteration 1443 : loss : 0.283348, supervised_loss: 0.283316
[20:32:52.414] iteration 1444 : loss : 0.288863, supervised_loss: 0.288805
[20:32:53.327] iteration 1445 : loss : 0.285976, supervised_loss: 0.285921
[20:32:54.241] iteration 1446 : loss : 0.301455, supervised_loss: 0.301407
[20:32:55.153] iteration 1447 : loss : 0.297082, supervised_loss: 0.297036
[20:32:56.067] iteration 1448 : loss : 0.282543, supervised_loss: 0.282500
[20:32:56.980] iteration 1449 : loss : 0.290353, supervised_loss: 0.290318
[20:32:57.893] iteration 1450 : loss : 0.289002, supervised_loss: 0.288970
[20:32:58.806] iteration 1451 : loss : 0.291866, supervised_loss: 0.291825
[20:32:59.718] iteration 1452 : loss : 0.295470, supervised_loss: 0.295421
[20:33:01.270] iteration 1453 : loss : 0.290651, supervised_loss: 0.290608
[20:33:02.182] iteration 1454 : loss : 0.291957, supervised_loss: 0.291904
[20:33:03.094] iteration 1455 : loss : 0.280242, supervised_loss: 0.280205
[20:33:04.007] iteration 1456 : loss : 0.293299, supervised_loss: 0.293259
[20:33:04.920] iteration 1457 : loss : 0.300879, supervised_loss: 0.300841
[20:33:05.833] iteration 1458 : loss : 0.287694, supervised_loss: 0.287603
[20:33:06.745] iteration 1459 : loss : 0.284121, supervised_loss: 0.284086
[20:33:07.658] iteration 1460 : loss : 0.278839, supervised_loss: 0.278796
[20:33:08.571] iteration 1461 : loss : 0.293655, supervised_loss: 0.293610
[20:33:09.484] iteration 1462 : loss : 0.300833, supervised_loss: 0.300804
[20:33:10.396] iteration 1463 : loss : 0.277055, supervised_loss: 0.277027
[20:33:11.308] iteration 1464 : loss : 0.289020, supervised_loss: 0.288959
[20:33:12.925] iteration 1465 : loss : 0.287731, supervised_loss: 0.287689
[20:33:13.839] iteration 1466 : loss : 0.299446, supervised_loss: 0.299393
[20:33:14.750] iteration 1467 : loss : 0.295735, supervised_loss: 0.295703
[20:33:15.664] iteration 1468 : loss : 0.288133, supervised_loss: 0.288101
[20:33:16.576] iteration 1469 : loss : 0.285224, supervised_loss: 0.285195
[20:33:17.488] iteration 1470 : loss : 0.283143, supervised_loss: 0.283086
[20:33:18.401] iteration 1471 : loss : 0.293868, supervised_loss: 0.293832
[20:33:19.315] iteration 1472 : loss : 0.285091, supervised_loss: 0.285036
[20:33:20.228] iteration 1473 : loss : 0.289985, supervised_loss: 0.289952
[20:33:21.140] iteration 1474 : loss : 0.287728, supervised_loss: 0.287675
[20:33:22.054] iteration 1475 : loss : 0.284908, supervised_loss: 0.284862
[20:33:22.968] iteration 1476 : loss : 0.284203, supervised_loss: 0.284158
[20:33:24.654] iteration 1477 : loss : 0.287868, supervised_loss: 0.287830
[20:33:25.566] iteration 1478 : loss : 0.299978, supervised_loss: 0.299944
[20:33:26.478] iteration 1479 : loss : 0.286058, supervised_loss: 0.286017
[20:33:27.390] iteration 1480 : loss : 0.287689, supervised_loss: 0.287638
[20:33:28.302] iteration 1481 : loss : 0.295078, supervised_loss: 0.295019
[20:33:29.215] iteration 1482 : loss : 0.289126, supervised_loss: 0.289089
[20:33:30.128] iteration 1483 : loss : 0.290399, supervised_loss: 0.290351
[20:33:31.041] iteration 1484 : loss : 0.286621, supervised_loss: 0.286584
[20:33:31.953] iteration 1485 : loss : 0.281466, supervised_loss: 0.281422
[20:33:32.867] iteration 1486 : loss : 0.279970, supervised_loss: 0.279924
[20:33:33.780] iteration 1487 : loss : 0.278134, supervised_loss: 0.278056
[20:33:34.693] iteration 1488 : loss : 0.286114, supervised_loss: 0.286070
[20:33:36.195] iteration 1489 : loss : 0.284329, supervised_loss: 0.284259
[20:33:37.107] iteration 1490 : loss : 0.294753, supervised_loss: 0.294719
[20:33:38.018] iteration 1491 : loss : 0.285725, supervised_loss: 0.285676
[20:33:38.931] iteration 1492 : loss : 0.287108, supervised_loss: 0.287043
[20:33:39.844] iteration 1493 : loss : 0.280805, supervised_loss: 0.280761
[20:33:40.757] iteration 1494 : loss : 0.294083, supervised_loss: 0.294029
[20:33:41.669] iteration 1495 : loss : 0.293544, supervised_loss: 0.293506
[20:33:42.581] iteration 1496 : loss : 0.289304, supervised_loss: 0.289237
[20:33:43.493] iteration 1497 : loss : 0.291989, supervised_loss: 0.291960
[20:33:44.407] iteration 1498 : loss : 0.286616, supervised_loss: 0.286577
[20:33:45.320] iteration 1499 : loss : 0.285363, supervised_loss: 0.285332
[20:33:46.234] iteration 1500 : loss : 0.303784, supervised_loss: 0.303757
[20:33:47.861] iteration 1501 : loss : 0.281398, supervised_loss: 0.281357
[20:33:48.773] iteration 1502 : loss : 0.289938, supervised_loss: 0.289884
[20:33:49.686] iteration 1503 : loss : 0.282034, supervised_loss: 0.281996
[20:33:50.599] iteration 1504 : loss : 0.285550, supervised_loss: 0.285496
[20:33:51.511] iteration 1505 : loss : 0.294271, supervised_loss: 0.294231
[20:33:52.423] iteration 1506 : loss : 0.288334, supervised_loss: 0.288268
[20:33:53.335] iteration 1507 : loss : 0.288219, supervised_loss: 0.288112
[20:33:54.247] iteration 1508 : loss : 0.296694, supervised_loss: 0.296653
[20:33:55.160] iteration 1509 : loss : 0.283825, supervised_loss: 0.283789
[20:33:56.072] iteration 1510 : loss : 0.289706, supervised_loss: 0.289665
[20:33:56.985] iteration 1511 : loss : 0.284757, supervised_loss: 0.284704
[20:33:57.899] iteration 1512 : loss : 0.287120, supervised_loss: 0.287081
[20:33:59.531] iteration 1513 : loss : 0.286596, supervised_loss: 0.286545
[20:34:00.444] iteration 1514 : loss : 0.302974, supervised_loss: 0.302884
[20:34:01.355] iteration 1515 : loss : 0.283151, supervised_loss: 0.283070
[20:34:02.267] iteration 1516 : loss : 0.279735, supervised_loss: 0.279688
[20:34:03.179] iteration 1517 : loss : 0.285907, supervised_loss: 0.285825
[20:34:04.092] iteration 1518 : loss : 0.303963, supervised_loss: 0.303922
[20:34:05.004] iteration 1519 : loss : 0.292420, supervised_loss: 0.292384
[20:34:05.916] iteration 1520 : loss : 0.287455, supervised_loss: 0.287396
[20:34:06.828] iteration 1521 : loss : 0.277145, supervised_loss: 0.277096
[20:34:07.740] iteration 1522 : loss : 0.284485, supervised_loss: 0.284421
[20:34:08.653] iteration 1523 : loss : 0.292096, supervised_loss: 0.292037
[20:34:09.565] iteration 1524 : loss : 0.287150, supervised_loss: 0.287095
[20:34:11.066] iteration 1525 : loss : 0.300630, supervised_loss: 0.300576
[20:34:11.979] iteration 1526 : loss : 0.291754, supervised_loss: 0.291704
[20:34:12.890] iteration 1527 : loss : 0.291630, supervised_loss: 0.291566
[20:34:13.802] iteration 1528 : loss : 0.285615, supervised_loss: 0.285563
[20:34:14.714] iteration 1529 : loss : 0.292791, supervised_loss: 0.292743
[20:34:15.626] iteration 1530 : loss : 0.289905, supervised_loss: 0.289844
[20:34:16.539] iteration 1531 : loss : 0.284926, supervised_loss: 0.284870
[20:34:17.452] iteration 1532 : loss : 0.286192, supervised_loss: 0.286140
[20:34:18.364] iteration 1533 : loss : 0.288942, supervised_loss: 0.288893
[20:34:19.276] iteration 1534 : loss : 0.281058, supervised_loss: 0.281002
[20:34:20.189] iteration 1535 : loss : 0.283784, supervised_loss: 0.283736
[20:34:21.101] iteration 1536 : loss : 0.291128, supervised_loss: 0.291080
[20:34:22.591] iteration 1537 : loss : 0.284332, supervised_loss: 0.284298
[20:34:23.502] iteration 1538 : loss : 0.291594, supervised_loss: 0.291525
[20:34:24.413] iteration 1539 : loss : 0.276651, supervised_loss: 0.276604
[20:34:25.325] iteration 1540 : loss : 0.287880, supervised_loss: 0.287824
[20:34:26.237] iteration 1541 : loss : 0.284253, supervised_loss: 0.284148
[20:34:27.150] iteration 1542 : loss : 0.284431, supervised_loss: 0.284388
[20:34:28.063] iteration 1543 : loss : 0.281275, supervised_loss: 0.281240
[20:34:28.978] iteration 1544 : loss : 0.290596, supervised_loss: 0.290544
[20:34:29.891] iteration 1545 : loss : 0.283726, supervised_loss: 0.283657
[20:34:30.803] iteration 1546 : loss : 0.314627, supervised_loss: 0.314587
[20:34:31.716] iteration 1547 : loss : 0.287807, supervised_loss: 0.287746
[20:34:32.628] iteration 1548 : loss : 0.289487, supervised_loss: 0.289442
[20:34:34.125] iteration 1549 : loss : 0.301223, supervised_loss: 0.301181
[20:34:35.038] iteration 1550 : loss : 0.281425, supervised_loss: 0.281373
[20:34:35.950] iteration 1551 : loss : 0.289515, supervised_loss: 0.289455
[20:34:36.864] iteration 1552 : loss : 0.279697, supervised_loss: 0.279649
[20:34:37.776] iteration 1553 : loss : 0.285214, supervised_loss: 0.285180
[20:34:38.688] iteration 1554 : loss : 0.285974, supervised_loss: 0.285935
[20:34:39.600] iteration 1555 : loss : 0.279869, supervised_loss: 0.279803
[20:34:40.512] iteration 1556 : loss : 0.290516, supervised_loss: 0.290479
[20:34:41.426] iteration 1557 : loss : 0.296516, supervised_loss: 0.296470
[20:34:42.339] iteration 1558 : loss : 0.276082, supervised_loss: 0.276044
[20:34:43.251] iteration 1559 : loss : 0.287836, supervised_loss: 0.287788
[20:34:44.165] iteration 1560 : loss : 0.292155, supervised_loss: 0.292103
[20:34:45.694] iteration 1561 : loss : 0.288229, supervised_loss: 0.288181
[20:34:46.605] iteration 1562 : loss : 0.279860, supervised_loss: 0.279818
[20:34:47.518] iteration 1563 : loss : 0.287184, supervised_loss: 0.287154
[20:34:48.430] iteration 1564 : loss : 0.281702, supervised_loss: 0.281628
[20:34:49.343] iteration 1565 : loss : 0.285307, supervised_loss: 0.285244
[20:34:50.255] iteration 1566 : loss : 0.288735, supervised_loss: 0.288668
[20:34:51.168] iteration 1567 : loss : 0.289594, supervised_loss: 0.289524
[20:34:52.080] iteration 1568 : loss : 0.287834, supervised_loss: 0.287798
[20:34:52.993] iteration 1569 : loss : 0.277072, supervised_loss: 0.277039
[20:34:53.906] iteration 1570 : loss : 0.302669, supervised_loss: 0.302642
[20:34:54.820] iteration 1571 : loss : 0.290296, supervised_loss: 0.290249
[20:34:55.734] iteration 1572 : loss : 0.294357, supervised_loss: 0.294311
[20:34:57.353] iteration 1573 : loss : 0.281045, supervised_loss: 0.280983
[20:34:58.265] iteration 1574 : loss : 0.288591, supervised_loss: 0.288553
[20:34:59.178] iteration 1575 : loss : 0.290235, supervised_loss: 0.290179
[20:35:00.091] iteration 1576 : loss : 0.287286, supervised_loss: 0.287236
[20:35:01.004] iteration 1577 : loss : 0.287722, supervised_loss: 0.287683
[20:35:01.916] iteration 1578 : loss : 0.290184, supervised_loss: 0.290093
[20:35:02.830] iteration 1579 : loss : 0.282036, supervised_loss: 0.281988
[20:35:03.743] iteration 1580 : loss : 0.291995, supervised_loss: 0.291959
[20:35:04.656] iteration 1581 : loss : 0.280852, supervised_loss: 0.280816
[20:35:05.569] iteration 1582 : loss : 0.294047, supervised_loss: 0.293999
[20:35:06.482] iteration 1583 : loss : 0.274811, supervised_loss: 0.274774
[20:35:07.394] iteration 1584 : loss : 0.280506, supervised_loss: 0.280462
[20:35:08.917] iteration 1585 : loss : 0.281301, supervised_loss: 0.281245
[20:35:09.830] iteration 1586 : loss : 0.281738, supervised_loss: 0.281671
[20:35:10.743] iteration 1587 : loss : 0.277603, supervised_loss: 0.277545
[20:35:11.655] iteration 1588 : loss : 0.295523, supervised_loss: 0.295467
[20:35:12.569] iteration 1589 : loss : 0.283276, supervised_loss: 0.283240
[20:35:13.482] iteration 1590 : loss : 0.287369, supervised_loss: 0.287303
[20:35:14.395] iteration 1591 : loss : 0.289199, supervised_loss: 0.289137
[20:35:15.308] iteration 1592 : loss : 0.280294, supervised_loss: 0.280264
[20:35:16.220] iteration 1593 : loss : 0.280132, supervised_loss: 0.280074
[20:35:17.134] iteration 1594 : loss : 0.278419, supervised_loss: 0.278372
[20:35:18.049] iteration 1595 : loss : 0.299076, supervised_loss: 0.299027
[20:35:18.961] iteration 1596 : loss : 0.282326, supervised_loss: 0.282283
[20:35:20.466] iteration 1597 : loss : 0.286754, supervised_loss: 0.286713
[20:35:21.379] iteration 1598 : loss : 0.284056, supervised_loss: 0.283999
[20:35:22.290] iteration 1599 : loss : 0.297717, supervised_loss: 0.297680
[20:35:23.203] iteration 1600 : loss : 0.293031, supervised_loss: 0.292966
[20:35:25.294] save best model to model/LA_vnet_25_labeled/URPC/iter_1600_dice_0.9208002090454102.pth
[20:35:26.205] iteration 1601 : loss : 0.279388, supervised_loss: 0.279347
[20:35:27.117] iteration 1602 : loss : 0.290486, supervised_loss: 0.290434
[20:35:28.031] iteration 1603 : loss : 0.284800, supervised_loss: 0.284750
[20:35:28.944] iteration 1604 : loss : 0.285519, supervised_loss: 0.285463
[20:35:29.857] iteration 1605 : loss : 0.291438, supervised_loss: 0.291387
[20:35:30.769] iteration 1606 : loss : 0.278374, supervised_loss: 0.278329
[20:35:31.682] iteration 1607 : loss : 0.279135, supervised_loss: 0.279095
[20:35:32.594] iteration 1608 : loss : 0.284229, supervised_loss: 0.284163
[20:35:34.107] iteration 1609 : loss : 0.283304, supervised_loss: 0.283258
[20:35:35.019] iteration 1610 : loss : 0.290261, supervised_loss: 0.290201
[20:35:35.931] iteration 1611 : loss : 0.278537, supervised_loss: 0.278490
[20:35:36.843] iteration 1612 : loss : 0.284285, supervised_loss: 0.284236
[20:35:37.755] iteration 1613 : loss : 0.277282, supervised_loss: 0.277223
[20:35:38.668] iteration 1614 : loss : 0.275672, supervised_loss: 0.275635
[20:35:39.580] iteration 1615 : loss : 0.281341, supervised_loss: 0.281293
[20:35:40.493] iteration 1616 : loss : 0.301088, supervised_loss: 0.301057
[20:35:41.407] iteration 1617 : loss : 0.286914, supervised_loss: 0.286885
[20:35:42.320] iteration 1618 : loss : 0.285458, supervised_loss: 0.285433
[20:35:43.231] iteration 1619 : loss : 0.290395, supervised_loss: 0.290353
[20:35:44.143] iteration 1620 : loss : 0.280680, supervised_loss: 0.280613
[20:35:45.744] iteration 1621 : loss : 0.297023, supervised_loss: 0.296978
[20:35:46.657] iteration 1622 : loss : 0.281596, supervised_loss: 0.281548
[20:35:47.571] iteration 1623 : loss : 0.281331, supervised_loss: 0.281269
[20:35:48.482] iteration 1624 : loss : 0.288019, supervised_loss: 0.287966
[20:35:49.394] iteration 1625 : loss : 0.283436, supervised_loss: 0.283392
[20:35:50.307] iteration 1626 : loss : 0.289356, supervised_loss: 0.289280
[20:35:51.219] iteration 1627 : loss : 0.296601, supervised_loss: 0.296514
[20:35:52.130] iteration 1628 : loss : 0.290966, supervised_loss: 0.290934
[20:35:53.044] iteration 1629 : loss : 0.280348, supervised_loss: 0.280314
[20:35:53.955] iteration 1630 : loss : 0.281026, supervised_loss: 0.280969
[20:35:54.868] iteration 1631 : loss : 0.278316, supervised_loss: 0.278274
[20:35:55.782] iteration 1632 : loss : 0.292532, supervised_loss: 0.292485
[20:35:57.321] iteration 1633 : loss : 0.296912, supervised_loss: 0.296864
[20:35:58.234] iteration 1634 : loss : 0.297457, supervised_loss: 0.297402
[20:35:59.146] iteration 1635 : loss : 0.288396, supervised_loss: 0.288336
[20:36:00.058] iteration 1636 : loss : 0.289994, supervised_loss: 0.289951
[20:36:00.971] iteration 1637 : loss : 0.282001, supervised_loss: 0.281959
[20:36:01.883] iteration 1638 : loss : 0.292108, supervised_loss: 0.292069
[20:36:02.795] iteration 1639 : loss : 0.278100, supervised_loss: 0.278054
[20:36:03.708] iteration 1640 : loss : 0.282874, supervised_loss: 0.282826
[20:36:04.621] iteration 1641 : loss : 0.281793, supervised_loss: 0.281730
[20:36:05.535] iteration 1642 : loss : 0.279029, supervised_loss: 0.278987
[20:36:06.448] iteration 1643 : loss : 0.289548, supervised_loss: 0.289514
[20:36:07.359] iteration 1644 : loss : 0.287600, supervised_loss: 0.287562
[20:36:08.866] iteration 1645 : loss : 0.283637, supervised_loss: 0.283594
[20:36:09.778] iteration 1646 : loss : 0.285441, supervised_loss: 0.285411
[20:36:10.692] iteration 1647 : loss : 0.286162, supervised_loss: 0.286124
[20:36:11.605] iteration 1648 : loss : 0.289908, supervised_loss: 0.289867
[20:36:12.520] iteration 1649 : loss : 0.280969, supervised_loss: 0.280918
[20:36:13.432] iteration 1650 : loss : 0.288739, supervised_loss: 0.288672
[20:36:14.344] iteration 1651 : loss : 0.285757, supervised_loss: 0.285693
[20:36:15.256] iteration 1652 : loss : 0.282457, supervised_loss: 0.282399
[20:36:16.168] iteration 1653 : loss : 0.280883, supervised_loss: 0.280838
[20:36:17.081] iteration 1654 : loss : 0.291967, supervised_loss: 0.291916
[20:36:17.993] iteration 1655 : loss : 0.290809, supervised_loss: 0.290704
[20:36:18.906] iteration 1656 : loss : 0.282814, supervised_loss: 0.282764
[20:36:20.530] iteration 1657 : loss : 0.286149, supervised_loss: 0.286096
[20:36:21.442] iteration 1658 : loss : 0.284927, supervised_loss: 0.284876
[20:36:22.355] iteration 1659 : loss : 0.279554, supervised_loss: 0.279491
[20:36:23.267] iteration 1660 : loss : 0.291677, supervised_loss: 0.291608
[20:36:24.179] iteration 1661 : loss : 0.281345, supervised_loss: 0.281301
[20:36:25.092] iteration 1662 : loss : 0.289023, supervised_loss: 0.288947
[20:36:26.006] iteration 1663 : loss : 0.280421, supervised_loss: 0.280359
[20:36:26.918] iteration 1664 : loss : 0.277192, supervised_loss: 0.277136
[20:36:27.831] iteration 1665 : loss : 0.294827, supervised_loss: 0.294770
[20:36:28.743] iteration 1666 : loss : 0.286669, supervised_loss: 0.286625
[20:36:29.655] iteration 1667 : loss : 0.279557, supervised_loss: 0.279459
[20:36:30.568] iteration 1668 : loss : 0.284948, supervised_loss: 0.284905
[20:36:32.148] iteration 1669 : loss : 0.277108, supervised_loss: 0.277051
[20:36:33.060] iteration 1670 : loss : 0.282299, supervised_loss: 0.282235
[20:36:33.973] iteration 1671 : loss : 0.295924, supervised_loss: 0.295877
[20:36:34.885] iteration 1672 : loss : 0.276459, supervised_loss: 0.276380
[20:36:35.797] iteration 1673 : loss : 0.284057, supervised_loss: 0.284011
[20:36:36.709] iteration 1674 : loss : 0.290421, supervised_loss: 0.290361
[20:36:37.622] iteration 1675 : loss : 0.290078, supervised_loss: 0.290024
[20:36:38.534] iteration 1676 : loss : 0.305144, supervised_loss: 0.305088
[20:36:39.447] iteration 1677 : loss : 0.281213, supervised_loss: 0.281129
[20:36:40.360] iteration 1678 : loss : 0.284023, supervised_loss: 0.283974
[20:36:41.275] iteration 1679 : loss : 0.279978, supervised_loss: 0.279918
[20:36:42.188] iteration 1680 : loss : 0.295847, supervised_loss: 0.295797
[20:36:43.752] iteration 1681 : loss : 0.278714, supervised_loss: 0.278662
[20:36:44.664] iteration 1682 : loss : 0.292564, supervised_loss: 0.292517
[20:36:45.576] iteration 1683 : loss : 0.295176, supervised_loss: 0.295126
[20:36:46.489] iteration 1684 : loss : 0.288903, supervised_loss: 0.288851
[20:36:47.404] iteration 1685 : loss : 0.283596, supervised_loss: 0.283532
[20:36:48.317] iteration 1686 : loss : 0.279819, supervised_loss: 0.279770
[20:36:49.229] iteration 1687 : loss : 0.290923, supervised_loss: 0.290882
[20:36:50.141] iteration 1688 : loss : 0.290458, supervised_loss: 0.290339
[20:36:51.053] iteration 1689 : loss : 0.288261, supervised_loss: 0.288182
[20:36:51.968] iteration 1690 : loss : 0.296200, supervised_loss: 0.296168
[20:36:52.882] iteration 1691 : loss : 0.287536, supervised_loss: 0.287437
[20:36:53.794] iteration 1692 : loss : 0.293346, supervised_loss: 0.293278
[20:36:55.248] iteration 1693 : loss : 0.283138, supervised_loss: 0.283088
[20:36:56.162] iteration 1694 : loss : 0.274162, supervised_loss: 0.274090
[20:36:57.075] iteration 1695 : loss : 0.277924, supervised_loss: 0.277862
[20:36:57.989] iteration 1696 : loss : 0.283353, supervised_loss: 0.283308
[20:36:58.904] iteration 1697 : loss : 0.292637, supervised_loss: 0.292584
[20:36:59.818] iteration 1698 : loss : 0.294567, supervised_loss: 0.294496
[20:37:00.732] iteration 1699 : loss : 0.277916, supervised_loss: 0.277838
[20:37:01.644] iteration 1700 : loss : 0.295590, supervised_loss: 0.295530
[20:37:02.557] iteration 1701 : loss : 0.288562, supervised_loss: 0.288487
[20:37:03.469] iteration 1702 : loss : 0.277772, supervised_loss: 0.277723
[20:37:04.383] iteration 1703 : loss : 0.285002, supervised_loss: 0.284952
[20:37:05.295] iteration 1704 : loss : 0.286566, supervised_loss: 0.286491
[20:37:06.848] iteration 1705 : loss : 0.281025, supervised_loss: 0.280948
[20:37:07.761] iteration 1706 : loss : 0.287763, supervised_loss: 0.287725
[20:37:08.673] iteration 1707 : loss : 0.276215, supervised_loss: 0.276173
[20:37:09.586] iteration 1708 : loss : 0.285578, supervised_loss: 0.285527
[20:37:10.498] iteration 1709 : loss : 0.287869, supervised_loss: 0.287822
[20:37:11.411] iteration 1710 : loss : 0.300401, supervised_loss: 0.300319
[20:37:12.324] iteration 1711 : loss : 0.293530, supervised_loss: 0.293466
[20:37:13.237] iteration 1712 : loss : 0.288571, supervised_loss: 0.288523
[20:37:14.150] iteration 1713 : loss : 0.291046, supervised_loss: 0.291006
[20:37:15.063] iteration 1714 : loss : 0.287893, supervised_loss: 0.287836
[20:37:15.974] iteration 1715 : loss : 0.280477, supervised_loss: 0.280424
[20:37:16.887] iteration 1716 : loss : 0.281272, supervised_loss: 0.281224
[20:37:18.477] iteration 1717 : loss : 0.283199, supervised_loss: 0.283140
[20:37:19.388] iteration 1718 : loss : 0.284996, supervised_loss: 0.284937
[20:37:20.302] iteration 1719 : loss : 0.292574, supervised_loss: 0.292526
[20:37:21.216] iteration 1720 : loss : 0.295345, supervised_loss: 0.295299
[20:37:22.129] iteration 1721 : loss : 0.282645, supervised_loss: 0.282593
[20:37:23.041] iteration 1722 : loss : 0.286249, supervised_loss: 0.286203
[20:37:23.954] iteration 1723 : loss : 0.283712, supervised_loss: 0.283657
[20:37:24.867] iteration 1724 : loss : 0.285230, supervised_loss: 0.285181
[20:37:25.781] iteration 1725 : loss : 0.284528, supervised_loss: 0.284495
[20:37:26.693] iteration 1726 : loss : 0.287543, supervised_loss: 0.287497
[20:37:27.607] iteration 1727 : loss : 0.284930, supervised_loss: 0.284850
[20:37:28.521] iteration 1728 : loss : 0.283911, supervised_loss: 0.283857
[20:37:30.169] iteration 1729 : loss : 0.281373, supervised_loss: 0.281295
[20:37:31.081] iteration 1730 : loss : 0.283192, supervised_loss: 0.283071
[20:37:31.996] iteration 1731 : loss : 0.286667, supervised_loss: 0.286623
[20:37:32.908] iteration 1732 : loss : 0.280972, supervised_loss: 0.280931
[20:37:33.823] iteration 1733 : loss : 0.280738, supervised_loss: 0.280694
[20:37:34.736] iteration 1734 : loss : 0.278152, supervised_loss: 0.278115
[20:37:35.650] iteration 1735 : loss : 0.280008, supervised_loss: 0.279961
[20:37:36.563] iteration 1736 : loss : 0.289343, supervised_loss: 0.289236
[20:37:37.476] iteration 1737 : loss : 0.283014, supervised_loss: 0.282921
[20:37:38.391] iteration 1738 : loss : 0.279843, supervised_loss: 0.279783
[20:37:39.305] iteration 1739 : loss : 0.284473, supervised_loss: 0.284392
[20:37:40.219] iteration 1740 : loss : 0.289716, supervised_loss: 0.289667
[20:37:41.908] iteration 1741 : loss : 0.283009, supervised_loss: 0.282954
[20:37:42.821] iteration 1742 : loss : 0.289585, supervised_loss: 0.289545
[20:37:43.733] iteration 1743 : loss : 0.284650, supervised_loss: 0.284565
[20:37:44.645] iteration 1744 : loss : 0.284035, supervised_loss: 0.283984
[20:37:45.557] iteration 1745 : loss : 0.296012, supervised_loss: 0.295958
[20:37:46.470] iteration 1746 : loss : 0.288749, supervised_loss: 0.288668
[20:37:47.384] iteration 1747 : loss : 0.278845, supervised_loss: 0.278776
[20:37:48.297] iteration 1748 : loss : 0.284243, supervised_loss: 0.284174
[20:37:49.209] iteration 1749 : loss : 0.292708, supervised_loss: 0.292648
[20:37:50.121] iteration 1750 : loss : 0.289472, supervised_loss: 0.289428
[20:37:51.033] iteration 1751 : loss : 0.285698, supervised_loss: 0.285632
[20:37:51.946] iteration 1752 : loss : 0.287736, supervised_loss: 0.287659
[20:37:53.573] iteration 1753 : loss : 0.282856, supervised_loss: 0.282797
[20:37:54.485] iteration 1754 : loss : 0.277347, supervised_loss: 0.277305
[20:37:55.398] iteration 1755 : loss : 0.285284, supervised_loss: 0.285168
[20:37:56.311] iteration 1756 : loss : 0.295447, supervised_loss: 0.295371
[20:37:57.223] iteration 1757 : loss : 0.284832, supervised_loss: 0.284751
[20:37:58.135] iteration 1758 : loss : 0.283575, supervised_loss: 0.283459
[20:37:59.047] iteration 1759 : loss : 0.283204, supervised_loss: 0.283151
[20:37:59.960] iteration 1760 : loss : 0.293121, supervised_loss: 0.293040
[20:38:00.872] iteration 1761 : loss : 0.280419, supervised_loss: 0.280348
[20:38:01.785] iteration 1762 : loss : 0.286574, supervised_loss: 0.286530
[20:38:02.698] iteration 1763 : loss : 0.282562, supervised_loss: 0.282519
[20:38:03.611] iteration 1764 : loss : 0.285573, supervised_loss: 0.285499
[20:38:05.121] iteration 1765 : loss : 0.278275, supervised_loss: 0.278237
[20:38:06.033] iteration 1766 : loss : 0.278496, supervised_loss: 0.278458
[20:38:06.947] iteration 1767 : loss : 0.285283, supervised_loss: 0.285222
[20:38:07.859] iteration 1768 : loss : 0.278477, supervised_loss: 0.278375
[20:38:08.773] iteration 1769 : loss : 0.307469, supervised_loss: 0.307428
[20:38:09.684] iteration 1770 : loss : 0.292198, supervised_loss: 0.292112
[20:38:10.597] iteration 1771 : loss : 0.277484, supervised_loss: 0.277399
[20:38:11.510] iteration 1772 : loss : 0.280958, supervised_loss: 0.280903
[20:38:12.423] iteration 1773 : loss : 0.286506, supervised_loss: 0.286366
[20:38:13.337] iteration 1774 : loss : 0.283795, supervised_loss: 0.283732
[20:38:14.249] iteration 1775 : loss : 0.286548, supervised_loss: 0.286502
[20:38:15.161] iteration 1776 : loss : 0.283878, supervised_loss: 0.283826
[20:38:16.719] iteration 1777 : loss : 0.282912, supervised_loss: 0.282862
[20:38:17.632] iteration 1778 : loss : 0.279492, supervised_loss: 0.279422
[20:38:18.545] iteration 1779 : loss : 0.283767, supervised_loss: 0.283718
[20:38:19.457] iteration 1780 : loss : 0.279446, supervised_loss: 0.279375
[20:38:20.370] iteration 1781 : loss : 0.287364, supervised_loss: 0.287318
[20:38:21.282] iteration 1782 : loss : 0.283850, supervised_loss: 0.283802
[20:38:22.196] iteration 1783 : loss : 0.285885, supervised_loss: 0.285772
[20:38:23.109] iteration 1784 : loss : 0.284303, supervised_loss: 0.284248
[20:38:24.022] iteration 1785 : loss : 0.289972, supervised_loss: 0.289933
[20:38:24.934] iteration 1786 : loss : 0.283117, supervised_loss: 0.283059
[20:38:25.846] iteration 1787 : loss : 0.291162, supervised_loss: 0.291120
[20:38:26.759] iteration 1788 : loss : 0.289312, supervised_loss: 0.289234
[20:38:28.255] iteration 1789 : loss : 0.282382, supervised_loss: 0.282340
[20:38:29.168] iteration 1790 : loss : 0.283338, supervised_loss: 0.283295
[20:38:30.081] iteration 1791 : loss : 0.283062, supervised_loss: 0.282998
[20:38:30.993] iteration 1792 : loss : 0.285717, supervised_loss: 0.285666
[20:38:31.905] iteration 1793 : loss : 0.283779, supervised_loss: 0.283735
[20:38:32.818] iteration 1794 : loss : 0.278302, supervised_loss: 0.278259
[20:38:33.730] iteration 1795 : loss : 0.275094, supervised_loss: 0.275028
[20:38:34.643] iteration 1796 : loss : 0.282570, supervised_loss: 0.282526
[20:38:35.555] iteration 1797 : loss : 0.286288, supervised_loss: 0.286234
[20:38:36.468] iteration 1798 : loss : 0.290595, supervised_loss: 0.290521
[20:38:37.381] iteration 1799 : loss : 0.287723, supervised_loss: 0.287685
[20:38:38.295] iteration 1800 : loss : 0.307839, supervised_loss: 0.307751
[20:38:41.843] iteration 1801 : loss : 0.290230, supervised_loss: 0.290171
[20:38:42.761] iteration 1802 : loss : 0.285380, supervised_loss: 0.285319
[20:38:43.672] iteration 1803 : loss : 0.293342, supervised_loss: 0.293267
[20:38:44.584] iteration 1804 : loss : 0.282560, supervised_loss: 0.282496
[20:38:45.496] iteration 1805 : loss : 0.283043, supervised_loss: 0.282972
[20:38:46.408] iteration 1806 : loss : 0.291916, supervised_loss: 0.291858
[20:38:47.321] iteration 1807 : loss : 0.291144, supervised_loss: 0.291072
[20:38:48.233] iteration 1808 : loss : 0.288947, supervised_loss: 0.288854
[20:38:49.146] iteration 1809 : loss : 0.288410, supervised_loss: 0.288362
[20:38:50.058] iteration 1810 : loss : 0.292933, supervised_loss: 0.292865
[20:38:50.970] iteration 1811 : loss : 0.279755, supervised_loss: 0.279689
[20:38:51.883] iteration 1812 : loss : 0.282860, supervised_loss: 0.282801
[20:38:53.369] iteration 1813 : loss : 0.288393, supervised_loss: 0.288331
[20:38:54.281] iteration 1814 : loss : 0.284709, supervised_loss: 0.284621
[20:38:55.194] iteration 1815 : loss : 0.288787, supervised_loss: 0.288717
[20:38:56.107] iteration 1816 : loss : 0.304365, supervised_loss: 0.304316
[20:38:57.020] iteration 1817 : loss : 0.281482, supervised_loss: 0.281433
[20:38:57.933] iteration 1818 : loss : 0.282066, supervised_loss: 0.282018
[20:38:58.845] iteration 1819 : loss : 0.277685, supervised_loss: 0.277627
[20:38:59.758] iteration 1820 : loss : 0.282699, supervised_loss: 0.282617
[20:39:00.670] iteration 1821 : loss : 0.289217, supervised_loss: 0.289136
[20:39:01.582] iteration 1822 : loss : 0.293960, supervised_loss: 0.293865
[20:39:02.495] iteration 1823 : loss : 0.279660, supervised_loss: 0.279611
[20:39:03.406] iteration 1824 : loss : 0.280288, supervised_loss: 0.280202
[20:39:04.917] iteration 1825 : loss : 0.283638, supervised_loss: 0.283593
[20:39:05.829] iteration 1826 : loss : 0.281001, supervised_loss: 0.280917
[20:39:06.741] iteration 1827 : loss : 0.276038, supervised_loss: 0.275982
[20:39:07.654] iteration 1828 : loss : 0.279769, supervised_loss: 0.279692
[20:39:08.565] iteration 1829 : loss : 0.288572, supervised_loss: 0.288516
[20:39:09.478] iteration 1830 : loss : 0.276938, supervised_loss: 0.276879
[20:39:10.390] iteration 1831 : loss : 0.278650, supervised_loss: 0.278597
[20:39:11.301] iteration 1832 : loss : 0.285740, supervised_loss: 0.285679
[20:39:12.213] iteration 1833 : loss : 0.280647, supervised_loss: 0.280599
[20:39:13.125] iteration 1834 : loss : 0.285331, supervised_loss: 0.285275
[20:39:14.039] iteration 1835 : loss : 0.286630, supervised_loss: 0.286519
[20:39:14.953] iteration 1836 : loss : 0.278894, supervised_loss: 0.278832
[20:39:16.516] iteration 1837 : loss : 0.286402, supervised_loss: 0.286357
[20:39:17.430] iteration 1838 : loss : 0.284791, supervised_loss: 0.284716
[20:39:18.341] iteration 1839 : loss : 0.280095, supervised_loss: 0.280034
[20:39:19.252] iteration 1840 : loss : 0.290003, supervised_loss: 0.289945
[20:39:20.165] iteration 1841 : loss : 0.295891, supervised_loss: 0.295786
[20:39:21.079] iteration 1842 : loss : 0.288416, supervised_loss: 0.288331
[20:39:21.992] iteration 1843 : loss : 0.280047, supervised_loss: 0.279980
[20:39:22.904] iteration 1844 : loss : 0.298563, supervised_loss: 0.298523
[20:39:23.816] iteration 1845 : loss : 0.283688, supervised_loss: 0.283604
[20:39:24.728] iteration 1846 : loss : 0.284705, supervised_loss: 0.284632
[20:39:25.641] iteration 1847 : loss : 0.276571, supervised_loss: 0.276507
[20:39:26.554] iteration 1848 : loss : 0.279814, supervised_loss: 0.279730
[20:39:28.114] iteration 1849 : loss : 0.281719, supervised_loss: 0.281669
[20:39:29.025] iteration 1850 : loss : 0.290440, supervised_loss: 0.290406
[20:39:29.937] iteration 1851 : loss : 0.278827, supervised_loss: 0.278731
[20:39:30.850] iteration 1852 : loss : 0.287740, supervised_loss: 0.287691
[20:39:31.763] iteration 1853 : loss : 0.280509, supervised_loss: 0.280431
[20:39:32.676] iteration 1854 : loss : 0.282695, supervised_loss: 0.282629
[20:39:33.586] iteration 1855 : loss : 0.285913, supervised_loss: 0.285870
[20:39:34.498] iteration 1856 : loss : 0.289821, supervised_loss: 0.289734
[20:39:35.411] iteration 1857 : loss : 0.279770, supervised_loss: 0.279691
[20:39:36.324] iteration 1858 : loss : 0.275620, supervised_loss: 0.275575
[20:39:37.237] iteration 1859 : loss : 0.281163, supervised_loss: 0.281091
[20:39:38.151] iteration 1860 : loss : 0.280795, supervised_loss: 0.280702
[20:39:39.735] iteration 1861 : loss : 0.282479, supervised_loss: 0.282403
[20:39:40.650] iteration 1862 : loss : 0.281836, supervised_loss: 0.281790
[20:39:41.562] iteration 1863 : loss : 0.279688, supervised_loss: 0.279609
[20:39:42.475] iteration 1864 : loss : 0.279879, supervised_loss: 0.279821
[20:39:43.387] iteration 1865 : loss : 0.281056, supervised_loss: 0.280992
[20:39:44.300] iteration 1866 : loss : 0.282734, supervised_loss: 0.282680
[20:39:45.213] iteration 1867 : loss : 0.281830, supervised_loss: 0.281781
[20:39:46.126] iteration 1868 : loss : 0.282106, supervised_loss: 0.282026
[20:39:47.037] iteration 1869 : loss : 0.283725, supervised_loss: 0.283663
[20:39:47.950] iteration 1870 : loss : 0.294489, supervised_loss: 0.294421
[20:39:48.863] iteration 1871 : loss : 0.286435, supervised_loss: 0.286369
[20:39:49.776] iteration 1872 : loss : 0.279971, supervised_loss: 0.279899
[20:39:51.380] iteration 1873 : loss : 0.276066, supervised_loss: 0.275972
[20:39:52.292] iteration 1874 : loss : 0.279981, supervised_loss: 0.279922
[20:39:53.205] iteration 1875 : loss : 0.278603, supervised_loss: 0.278547
[20:39:54.119] iteration 1876 : loss : 0.279679, supervised_loss: 0.279604
[20:39:55.032] iteration 1877 : loss : 0.280034, supervised_loss: 0.279974
[20:39:55.945] iteration 1878 : loss : 0.281166, supervised_loss: 0.281105
[20:39:56.856] iteration 1879 : loss : 0.280746, supervised_loss: 0.280691
[20:39:57.768] iteration 1880 : loss : 0.280897, supervised_loss: 0.280850
[20:39:58.682] iteration 1881 : loss : 0.279661, supervised_loss: 0.279607
[20:39:59.594] iteration 1882 : loss : 0.278268, supervised_loss: 0.278210
[20:40:00.507] iteration 1883 : loss : 0.287639, supervised_loss: 0.287590
[20:40:01.421] iteration 1884 : loss : 0.303850, supervised_loss: 0.303773
[20:40:02.951] iteration 1885 : loss : 0.279975, supervised_loss: 0.279823
[20:40:03.864] iteration 1886 : loss : 0.271142, supervised_loss: 0.271030
[20:40:04.777] iteration 1887 : loss : 0.278074, supervised_loss: 0.278019
[20:40:05.689] iteration 1888 : loss : 0.286650, supervised_loss: 0.286570
[20:40:06.600] iteration 1889 : loss : 0.282141, supervised_loss: 0.282068
[20:40:07.513] iteration 1890 : loss : 0.284405, supervised_loss: 0.284358
[20:40:08.427] iteration 1891 : loss : 0.283921, supervised_loss: 0.283880
[20:40:09.340] iteration 1892 : loss : 0.292618, supervised_loss: 0.292560
[20:40:10.252] iteration 1893 : loss : 0.277501, supervised_loss: 0.277420
[20:40:11.164] iteration 1894 : loss : 0.290024, supervised_loss: 0.289962
[20:40:12.078] iteration 1895 : loss : 0.283055, supervised_loss: 0.282996
[20:40:12.991] iteration 1896 : loss : 0.285176, supervised_loss: 0.285091
[20:40:14.549] iteration 1897 : loss : 0.286880, supervised_loss: 0.286801
[20:40:15.461] iteration 1898 : loss : 0.282870, supervised_loss: 0.282806
[20:40:16.374] iteration 1899 : loss : 0.280699, supervised_loss: 0.280587
[20:40:17.287] iteration 1900 : loss : 0.293820, supervised_loss: 0.293750
[20:40:18.200] iteration 1901 : loss : 0.281442, supervised_loss: 0.281377
[20:40:19.114] iteration 1902 : loss : 0.289266, supervised_loss: 0.289187
[20:40:20.027] iteration 1903 : loss : 0.279874, supervised_loss: 0.279795
[20:40:20.938] iteration 1904 : loss : 0.279174, supervised_loss: 0.279132
[20:40:21.851] iteration 1905 : loss : 0.278478, supervised_loss: 0.278418
[20:40:22.764] iteration 1906 : loss : 0.292385, supervised_loss: 0.292327
[20:40:23.676] iteration 1907 : loss : 0.286672, supervised_loss: 0.286612
[20:40:24.590] iteration 1908 : loss : 0.282891, supervised_loss: 0.282807
[20:40:26.203] iteration 1909 : loss : 0.283979, supervised_loss: 0.283908
[20:40:27.117] iteration 1910 : loss : 0.277456, supervised_loss: 0.277406
[20:40:28.030] iteration 1911 : loss : 0.291901, supervised_loss: 0.291827
[20:40:28.945] iteration 1912 : loss : 0.284678, supervised_loss: 0.284626
[20:40:29.858] iteration 1913 : loss : 0.281331, supervised_loss: 0.281275
[20:40:30.772] iteration 1914 : loss : 0.282846, supervised_loss: 0.282734
[20:40:31.685] iteration 1915 : loss : 0.284489, supervised_loss: 0.284434
[20:40:32.598] iteration 1916 : loss : 0.285986, supervised_loss: 0.285917
[20:40:33.511] iteration 1917 : loss : 0.281350, supervised_loss: 0.281299
[20:40:34.426] iteration 1918 : loss : 0.277719, supervised_loss: 0.277634
[20:40:35.339] iteration 1919 : loss : 0.285266, supervised_loss: 0.285187
[20:40:36.253] iteration 1920 : loss : 0.275788, supervised_loss: 0.275730
[20:40:37.818] iteration 1921 : loss : 0.274031, supervised_loss: 0.273971
[20:40:38.730] iteration 1922 : loss : 0.287107, supervised_loss: 0.287017
[20:40:39.643] iteration 1923 : loss : 0.279444, supervised_loss: 0.279372
[20:40:40.555] iteration 1924 : loss : 0.281155, supervised_loss: 0.281076
[20:40:41.469] iteration 1925 : loss : 0.280394, supervised_loss: 0.280322
[20:40:42.384] iteration 1926 : loss : 0.279129, supervised_loss: 0.279070
[20:40:43.297] iteration 1927 : loss : 0.292323, supervised_loss: 0.292246
[20:40:44.212] iteration 1928 : loss : 0.280346, supervised_loss: 0.280279
[20:40:45.124] iteration 1929 : loss : 0.285410, supervised_loss: 0.285352
[20:40:46.036] iteration 1930 : loss : 0.283294, supervised_loss: 0.283250
[20:40:46.950] iteration 1931 : loss : 0.283138, supervised_loss: 0.283034
[20:40:47.862] iteration 1932 : loss : 0.278966, supervised_loss: 0.278903
[20:40:49.432] iteration 1933 : loss : 0.282367, supervised_loss: 0.282323
[20:40:50.345] iteration 1934 : loss : 0.273942, supervised_loss: 0.273863
[20:40:51.259] iteration 1935 : loss : 0.281738, supervised_loss: 0.281691
[20:40:52.174] iteration 1936 : loss : 0.278103, supervised_loss: 0.278037
[20:40:53.086] iteration 1937 : loss : 0.280840, supervised_loss: 0.280793
[20:40:53.999] iteration 1938 : loss : 0.294016, supervised_loss: 0.293940
[20:40:54.911] iteration 1939 : loss : 0.277853, supervised_loss: 0.277776
[20:40:55.825] iteration 1940 : loss : 0.278618, supervised_loss: 0.278561
[20:40:56.739] iteration 1941 : loss : 0.280110, supervised_loss: 0.280047
[20:40:57.652] iteration 1942 : loss : 0.274586, supervised_loss: 0.274498
[20:40:58.565] iteration 1943 : loss : 0.281315, supervised_loss: 0.281239
[20:40:59.477] iteration 1944 : loss : 0.299224, supervised_loss: 0.299153
[20:41:01.055] iteration 1945 : loss : 0.283862, supervised_loss: 0.283798
[20:41:01.969] iteration 1946 : loss : 0.276638, supervised_loss: 0.276589
[20:41:02.882] iteration 1947 : loss : 0.278819, supervised_loss: 0.278760
[20:41:03.795] iteration 1948 : loss : 0.299738, supervised_loss: 0.299657
[20:41:04.707] iteration 1949 : loss : 0.278391, supervised_loss: 0.278326
[20:41:05.620] iteration 1950 : loss : 0.276989, supervised_loss: 0.276891
[20:41:06.533] iteration 1951 : loss : 0.280457, supervised_loss: 0.280368
[20:41:07.447] iteration 1952 : loss : 0.281432, supervised_loss: 0.281280
[20:41:08.358] iteration 1953 : loss : 0.276633, supervised_loss: 0.276530
[20:41:09.270] iteration 1954 : loss : 0.285268, supervised_loss: 0.285192
[20:41:10.208] iteration 1955 : loss : 0.285055, supervised_loss: 0.284995
[20:41:11.120] iteration 1956 : loss : 0.280246, supervised_loss: 0.280164
[20:41:12.732] iteration 1957 : loss : 0.285380, supervised_loss: 0.285318
[20:41:13.644] iteration 1958 : loss : 0.293115, supervised_loss: 0.293027
[20:41:14.557] iteration 1959 : loss : 0.281983, supervised_loss: 0.281921
[20:41:15.469] iteration 1960 : loss : 0.284310, supervised_loss: 0.284258
[20:41:16.383] iteration 1961 : loss : 0.277047, supervised_loss: 0.276979
[20:41:17.296] iteration 1962 : loss : 0.279689, supervised_loss: 0.279606
[20:41:18.208] iteration 1963 : loss : 0.284198, supervised_loss: 0.284131
[20:41:19.121] iteration 1964 : loss : 0.285653, supervised_loss: 0.285556
[20:41:20.035] iteration 1965 : loss : 0.295790, supervised_loss: 0.295695
[20:41:20.948] iteration 1966 : loss : 0.295085, supervised_loss: 0.295014
[20:41:21.862] iteration 1967 : loss : 0.285990, supervised_loss: 0.285912
[20:41:22.774] iteration 1968 : loss : 0.276343, supervised_loss: 0.276213
[20:41:24.300] iteration 1969 : loss : 0.280392, supervised_loss: 0.280317
[20:41:25.213] iteration 1970 : loss : 0.277894, supervised_loss: 0.277809
[20:41:26.125] iteration 1971 : loss : 0.277702, supervised_loss: 0.277634
[20:41:27.036] iteration 1972 : loss : 0.286561, supervised_loss: 0.286502
[20:41:27.973] iteration 1973 : loss : 0.283769, supervised_loss: 0.283700
[20:41:28.886] iteration 1974 : loss : 0.278855, supervised_loss: 0.278768
[20:41:29.800] iteration 1975 : loss : 0.292336, supervised_loss: 0.292271
[20:41:30.712] iteration 1976 : loss : 0.282640, supervised_loss: 0.282573
[20:41:31.624] iteration 1977 : loss : 0.282283, supervised_loss: 0.282206
[20:41:32.537] iteration 1978 : loss : 0.280976, supervised_loss: 0.280933
[20:41:33.451] iteration 1979 : loss : 0.276563, supervised_loss: 0.276440
[20:41:34.364] iteration 1980 : loss : 0.290906, supervised_loss: 0.290830
[20:41:35.864] iteration 1981 : loss : 0.283891, supervised_loss: 0.283821
[20:41:36.777] iteration 1982 : loss : 0.289900, supervised_loss: 0.289839
[20:41:37.690] iteration 1983 : loss : 0.281383, supervised_loss: 0.281311
[20:41:38.603] iteration 1984 : loss : 0.301127, supervised_loss: 0.301046
[20:41:39.515] iteration 1985 : loss : 0.275436, supervised_loss: 0.275364
[20:41:40.429] iteration 1986 : loss : 0.277023, supervised_loss: 0.276912
[20:41:41.342] iteration 1987 : loss : 0.281135, supervised_loss: 0.281069
[20:41:42.256] iteration 1988 : loss : 0.289010, supervised_loss: 0.288931
[20:41:43.169] iteration 1989 : loss : 0.282878, supervised_loss: 0.282780
[20:41:44.082] iteration 1990 : loss : 0.284816, supervised_loss: 0.284736
[20:41:45.009] iteration 1991 : loss : 0.274711, supervised_loss: 0.274652
[20:41:45.922] iteration 1992 : loss : 0.278972, supervised_loss: 0.278922
[20:41:47.560] iteration 1993 : loss : 0.279567, supervised_loss: 0.279494
[20:41:48.472] iteration 1994 : loss : 0.282907, supervised_loss: 0.282817
[20:41:49.386] iteration 1995 : loss : 0.276634, supervised_loss: 0.276570
[20:41:50.298] iteration 1996 : loss : 0.280890, supervised_loss: 0.280782
[20:41:51.212] iteration 1997 : loss : 0.285503, supervised_loss: 0.285410
[20:41:52.125] iteration 1998 : loss : 0.286690, supervised_loss: 0.286636
[20:41:53.037] iteration 1999 : loss : 0.281855, supervised_loss: 0.281773
[20:41:53.950] iteration 2000 : loss : 0.279390, supervised_loss: 0.279285
[20:41:56.813] iteration 2001 : loss : 0.288254, supervised_loss: 0.288205
[20:41:57.726] iteration 2002 : loss : 0.273492, supervised_loss: 0.273416
[20:41:58.639] iteration 2003 : loss : 0.291031, supervised_loss: 0.290960
[20:41:59.551] iteration 2004 : loss : 0.285111, supervised_loss: 0.284998
[20:42:01.187] iteration 2005 : loss : 0.281222, supervised_loss: 0.281166
[20:42:02.098] iteration 2006 : loss : 0.281734, supervised_loss: 0.281603
[20:42:03.011] iteration 2007 : loss : 0.291129, supervised_loss: 0.291061
[20:42:03.923] iteration 2008 : loss : 0.285574, supervised_loss: 0.285488
[20:42:04.837] iteration 2009 : loss : 0.286589, supervised_loss: 0.286514
[20:42:05.750] iteration 2010 : loss : 0.277812, supervised_loss: 0.277714
[20:42:06.662] iteration 2011 : loss : 0.281781, supervised_loss: 0.281728
[20:42:07.574] iteration 2012 : loss : 0.281449, supervised_loss: 0.281395
[20:42:08.488] iteration 2013 : loss : 0.295789, supervised_loss: 0.295697
[20:42:09.401] iteration 2014 : loss : 0.290423, supervised_loss: 0.290336
[20:42:10.314] iteration 2015 : loss : 0.280771, supervised_loss: 0.280685
[20:42:11.226] iteration 2016 : loss : 0.281475, supervised_loss: 0.281379
[20:42:12.877] iteration 2017 : loss : 0.282891, supervised_loss: 0.282807
[20:42:13.788] iteration 2018 : loss : 0.278130, supervised_loss: 0.278055
[20:42:14.700] iteration 2019 : loss : 0.300014, supervised_loss: 0.299904
[20:42:15.612] iteration 2020 : loss : 0.290077, supervised_loss: 0.289999
[20:42:16.526] iteration 2021 : loss : 0.288158, supervised_loss: 0.288068
[20:42:17.439] iteration 2022 : loss : 0.284348, supervised_loss: 0.284260
[20:42:18.353] iteration 2023 : loss : 0.278831, supervised_loss: 0.278784
[20:42:19.266] iteration 2024 : loss : 0.280005, supervised_loss: 0.279932
[20:42:20.178] iteration 2025 : loss : 0.282289, supervised_loss: 0.282235
[20:42:21.111] iteration 2026 : loss : 0.275373, supervised_loss: 0.275222
[20:42:22.023] iteration 2027 : loss : 0.279513, supervised_loss: 0.279390
[20:42:22.937] iteration 2028 : loss : 0.281484, supervised_loss: 0.281397
[20:42:24.451] iteration 2029 : loss : 0.277918, supervised_loss: 0.277859
[20:42:25.362] iteration 2030 : loss : 0.293334, supervised_loss: 0.293204
[20:42:26.275] iteration 2031 : loss : 0.284273, supervised_loss: 0.284189
[20:42:27.189] iteration 2032 : loss : 0.278368, supervised_loss: 0.278261
[20:42:28.102] iteration 2033 : loss : 0.276908, supervised_loss: 0.276847
[20:42:29.015] iteration 2034 : loss : 0.286593, supervised_loss: 0.286537
[20:42:29.927] iteration 2035 : loss : 0.281825, supervised_loss: 0.281670
[20:42:30.840] iteration 2036 : loss : 0.284470, supervised_loss: 0.284409
[20:42:31.752] iteration 2037 : loss : 0.303480, supervised_loss: 0.303416
[20:42:32.666] iteration 2038 : loss : 0.287007, supervised_loss: 0.286950
[20:42:33.580] iteration 2039 : loss : 0.284813, supervised_loss: 0.284744
[20:42:34.493] iteration 2040 : loss : 0.285702, supervised_loss: 0.285621
[20:42:36.045] iteration 2041 : loss : 0.283727, supervised_loss: 0.283641
[20:42:36.958] iteration 2042 : loss : 0.280197, supervised_loss: 0.280120
[20:42:37.870] iteration 2043 : loss : 0.278308, supervised_loss: 0.278244
[20:42:38.783] iteration 2044 : loss : 0.276049, supervised_loss: 0.275978
[20:42:39.695] iteration 2045 : loss : 0.277556, supervised_loss: 0.277489
[20:42:40.609] iteration 2046 : loss : 0.281168, supervised_loss: 0.281083
[20:42:41.522] iteration 2047 : loss : 0.292124, supervised_loss: 0.292050
[20:42:42.435] iteration 2048 : loss : 0.283678, supervised_loss: 0.283601
[20:42:43.349] iteration 2049 : loss : 0.277592, supervised_loss: 0.277446
[20:42:44.262] iteration 2050 : loss : 0.285218, supervised_loss: 0.285142
[20:42:45.174] iteration 2051 : loss : 0.291712, supervised_loss: 0.291638
[20:42:46.086] iteration 2052 : loss : 0.284617, supervised_loss: 0.284548
[20:42:47.600] iteration 2053 : loss : 0.279353, supervised_loss: 0.279286
[20:42:48.512] iteration 2054 : loss : 0.276827, supervised_loss: 0.276739
[20:42:49.425] iteration 2055 : loss : 0.290406, supervised_loss: 0.290309
[20:42:50.339] iteration 2056 : loss : 0.280923, supervised_loss: 0.280827
[20:42:51.253] iteration 2057 : loss : 0.279737, supervised_loss: 0.279601
[20:42:52.166] iteration 2058 : loss : 0.280496, supervised_loss: 0.280415
[20:42:53.080] iteration 2059 : loss : 0.275946, supervised_loss: 0.275847
[20:42:53.992] iteration 2060 : loss : 0.280102, supervised_loss: 0.280029
[20:42:54.954] iteration 2061 : loss : 0.288676, supervised_loss: 0.288599
[20:42:55.866] iteration 2062 : loss : 0.280098, supervised_loss: 0.280003
[20:42:56.779] iteration 2063 : loss : 0.281608, supervised_loss: 0.281514
[20:42:57.692] iteration 2064 : loss : 0.280380, supervised_loss: 0.280318
[20:42:59.271] iteration 2065 : loss : 0.278003, supervised_loss: 0.277920
[20:43:00.184] iteration 2066 : loss : 0.283558, supervised_loss: 0.283504
[20:43:01.096] iteration 2067 : loss : 0.276563, supervised_loss: 0.276461
[20:43:02.008] iteration 2068 : loss : 0.292474, supervised_loss: 0.292398
[20:43:02.920] iteration 2069 : loss : 0.283394, supervised_loss: 0.283305
[20:43:03.834] iteration 2070 : loss : 0.283412, supervised_loss: 0.283341
[20:43:04.745] iteration 2071 : loss : 0.279306, supervised_loss: 0.279215
[20:43:05.658] iteration 2072 : loss : 0.284298, supervised_loss: 0.284209
[20:43:06.570] iteration 2073 : loss : 0.283011, supervised_loss: 0.282932
[20:43:07.483] iteration 2074 : loss : 0.283055, supervised_loss: 0.282966
[20:43:08.396] iteration 2075 : loss : 0.282248, supervised_loss: 0.282191
[20:43:09.310] iteration 2076 : loss : 0.289829, supervised_loss: 0.289775
[20:43:10.925] iteration 2077 : loss : 0.278293, supervised_loss: 0.278171
[20:43:11.837] iteration 2078 : loss : 0.278736, supervised_loss: 0.278612
[20:43:12.748] iteration 2079 : loss : 0.282484, supervised_loss: 0.282406
[20:43:13.661] iteration 2080 : loss : 0.281953, supervised_loss: 0.281902
[20:43:14.574] iteration 2081 : loss : 0.280504, supervised_loss: 0.280431
[20:43:15.487] iteration 2082 : loss : 0.282144, supervised_loss: 0.282067
[20:43:16.400] iteration 2083 : loss : 0.281027, supervised_loss: 0.280956
[20:43:17.312] iteration 2084 : loss : 0.292676, supervised_loss: 0.292624
[20:43:18.226] iteration 2085 : loss : 0.286731, supervised_loss: 0.286687
[20:43:19.138] iteration 2086 : loss : 0.280610, supervised_loss: 0.280541
[20:43:20.051] iteration 2087 : loss : 0.282677, supervised_loss: 0.282589
[20:43:20.964] iteration 2088 : loss : 0.285430, supervised_loss: 0.285358
[20:43:22.501] iteration 2089 : loss : 0.288631, supervised_loss: 0.288561
[20:43:23.411] iteration 2090 : loss : 0.286578, supervised_loss: 0.286523
[20:43:24.324] iteration 2091 : loss : 0.280664, supervised_loss: 0.280557
[20:43:25.239] iteration 2092 : loss : 0.284352, supervised_loss: 0.284276
[20:43:26.152] iteration 2093 : loss : 0.290848, supervised_loss: 0.290761
[20:43:27.065] iteration 2094 : loss : 0.277698, supervised_loss: 0.277628
[20:43:27.978] iteration 2095 : loss : 0.287692, supervised_loss: 0.287631
[20:43:28.891] iteration 2096 : loss : 0.277001, supervised_loss: 0.276941
[20:43:29.805] iteration 2097 : loss : 0.288361, supervised_loss: 0.288294
[20:43:30.717] iteration 2098 : loss : 0.288036, supervised_loss: 0.287955
[20:43:31.631] iteration 2099 : loss : 0.287061, supervised_loss: 0.286994
[20:43:32.543] iteration 2100 : loss : 0.279509, supervised_loss: 0.279414
[20:43:34.114] iteration 2101 : loss : 0.287673, supervised_loss: 0.287583
[20:43:35.024] iteration 2102 : loss : 0.280528, supervised_loss: 0.280436
[20:43:35.936] iteration 2103 : loss : 0.286649, supervised_loss: 0.286538
[20:43:36.848] iteration 2104 : loss : 0.279003, supervised_loss: 0.278893
[20:43:37.760] iteration 2105 : loss : 0.280563, supervised_loss: 0.280480
[20:43:38.675] iteration 2106 : loss : 0.279172, supervised_loss: 0.279102
[20:43:39.587] iteration 2107 : loss : 0.282832, supervised_loss: 0.282749
[20:43:40.500] iteration 2108 : loss : 0.279748, supervised_loss: 0.279658
[20:43:41.412] iteration 2109 : loss : 0.280682, supervised_loss: 0.280616
[20:43:42.325] iteration 2110 : loss : 0.287853, supervised_loss: 0.287772
[20:43:43.239] iteration 2111 : loss : 0.281802, supervised_loss: 0.281721
[20:43:44.152] iteration 2112 : loss : 0.277724, supervised_loss: 0.277618
[20:43:45.782] iteration 2113 : loss : 0.279652, supervised_loss: 0.279548
[20:43:46.696] iteration 2114 : loss : 0.295744, supervised_loss: 0.295671
[20:43:47.607] iteration 2115 : loss : 0.284042, supervised_loss: 0.283979
[20:43:48.519] iteration 2116 : loss : 0.276626, supervised_loss: 0.276551
[20:43:49.432] iteration 2117 : loss : 0.282749, supervised_loss: 0.282637
[20:43:50.344] iteration 2118 : loss : 0.277306, supervised_loss: 0.277172
[20:43:51.257] iteration 2119 : loss : 0.278097, supervised_loss: 0.277998
[20:43:52.170] iteration 2120 : loss : 0.296686, supervised_loss: 0.296584
[20:43:53.085] iteration 2121 : loss : 0.292545, supervised_loss: 0.292441
[20:43:53.998] iteration 2122 : loss : 0.284773, supervised_loss: 0.284665
[20:43:54.911] iteration 2123 : loss : 0.301672, supervised_loss: 0.301571
[20:43:55.823] iteration 2124 : loss : 0.291166, supervised_loss: 0.291061
[20:43:57.384] iteration 2125 : loss : 0.276537, supervised_loss: 0.276449
[20:43:58.297] iteration 2126 : loss : 0.283436, supervised_loss: 0.283266
[20:43:59.211] iteration 2127 : loss : 0.283085, supervised_loss: 0.283004
[20:44:00.123] iteration 2128 : loss : 0.291739, supervised_loss: 0.291639
[20:44:01.036] iteration 2129 : loss : 0.285118, supervised_loss: 0.285029
[20:44:01.949] iteration 2130 : loss : 0.285371, supervised_loss: 0.285282
[20:44:02.864] iteration 2131 : loss : 0.279427, supervised_loss: 0.279349
[20:44:03.778] iteration 2132 : loss : 0.281736, supervised_loss: 0.281628
[20:44:04.691] iteration 2133 : loss : 0.276837, supervised_loss: 0.276732
[20:44:05.609] iteration 2134 : loss : 0.280243, supervised_loss: 0.280174
[20:44:06.523] iteration 2135 : loss : 0.286220, supervised_loss: 0.286135
[20:44:07.435] iteration 2136 : loss : 0.284109, supervised_loss: 0.283999
[20:44:09.132] iteration 2137 : loss : 0.274920, supervised_loss: 0.274773
[20:44:10.044] iteration 2138 : loss : 0.279955, supervised_loss: 0.279855
[20:44:10.956] iteration 2139 : loss : 0.279485, supervised_loss: 0.279303
[20:44:11.870] iteration 2140 : loss : 0.283697, supervised_loss: 0.283554
[20:44:12.783] iteration 2141 : loss : 0.295063, supervised_loss: 0.294976
[20:44:13.695] iteration 2142 : loss : 0.278888, supervised_loss: 0.278779
[20:44:14.610] iteration 2143 : loss : 0.283339, supervised_loss: 0.283279
[20:44:15.525] iteration 2144 : loss : 0.283643, supervised_loss: 0.283488
[20:44:16.437] iteration 2145 : loss : 0.282061, supervised_loss: 0.281950
[20:44:17.349] iteration 2146 : loss : 0.293989, supervised_loss: 0.293912
[20:44:18.263] iteration 2147 : loss : 0.285565, supervised_loss: 0.285479
[20:44:19.177] iteration 2148 : loss : 0.277711, supervised_loss: 0.277622
[20:44:20.717] iteration 2149 : loss : 0.287361, supervised_loss: 0.287253
[20:44:21.630] iteration 2150 : loss : 0.285824, supervised_loss: 0.285726
[20:44:22.543] iteration 2151 : loss : 0.280288, supervised_loss: 0.280108
[20:44:23.456] iteration 2152 : loss : 0.280010, supervised_loss: 0.279892
[20:44:24.369] iteration 2153 : loss : 0.280914, supervised_loss: 0.280833
[20:44:25.284] iteration 2154 : loss : 0.289237, supervised_loss: 0.289171
[20:44:26.197] iteration 2155 : loss : 0.283545, supervised_loss: 0.283390
[20:44:27.110] iteration 2156 : loss : 0.281732, supervised_loss: 0.281659
[20:44:28.022] iteration 2157 : loss : 0.286471, supervised_loss: 0.286369
[20:44:28.935] iteration 2158 : loss : 0.293030, supervised_loss: 0.292965
[20:44:29.847] iteration 2159 : loss : 0.287359, supervised_loss: 0.287300
[20:44:30.760] iteration 2160 : loss : 0.280743, supervised_loss: 0.280678
[20:44:32.343] iteration 2161 : loss : 0.281962, supervised_loss: 0.281886
[20:44:33.257] iteration 2162 : loss : 0.275338, supervised_loss: 0.275241
[20:44:34.170] iteration 2163 : loss : 0.282710, supervised_loss: 0.282610
[20:44:35.083] iteration 2164 : loss : 0.283252, supervised_loss: 0.283088
[20:44:35.996] iteration 2165 : loss : 0.286466, supervised_loss: 0.286389
[20:44:36.908] iteration 2166 : loss : 0.282733, supervised_loss: 0.282635
[20:44:37.821] iteration 2167 : loss : 0.274010, supervised_loss: 0.273910
[20:44:38.735] iteration 2168 : loss : 0.281936, supervised_loss: 0.281824
[20:44:39.647] iteration 2169 : loss : 0.276523, supervised_loss: 0.276436
[20:44:40.560] iteration 2170 : loss : 0.287577, supervised_loss: 0.287466
[20:44:41.473] iteration 2171 : loss : 0.280910, supervised_loss: 0.280786
[20:44:42.386] iteration 2172 : loss : 0.278083, supervised_loss: 0.278005
[20:44:43.912] iteration 2173 : loss : 0.279937, supervised_loss: 0.279834
[20:44:44.824] iteration 2174 : loss : 0.286726, supervised_loss: 0.286644
[20:44:45.737] iteration 2175 : loss : 0.280441, supervised_loss: 0.280349
[20:44:46.650] iteration 2176 : loss : 0.288986, supervised_loss: 0.288930
[20:44:47.563] iteration 2177 : loss : 0.280401, supervised_loss: 0.280337
[20:44:48.477] iteration 2178 : loss : 0.284144, supervised_loss: 0.284077
[20:44:49.390] iteration 2179 : loss : 0.279376, supervised_loss: 0.279313
[20:44:50.302] iteration 2180 : loss : 0.279173, supervised_loss: 0.279059
[20:44:51.215] iteration 2181 : loss : 0.279030, supervised_loss: 0.278940
[20:44:52.127] iteration 2182 : loss : 0.280265, supervised_loss: 0.280143
[20:44:53.040] iteration 2183 : loss : 0.283001, supervised_loss: 0.282927
[20:44:53.954] iteration 2184 : loss : 0.290831, supervised_loss: 0.290685
[20:44:55.576] iteration 2185 : loss : 0.283663, supervised_loss: 0.283597
[20:44:56.490] iteration 2186 : loss : 0.279475, supervised_loss: 0.279320
[20:44:57.402] iteration 2187 : loss : 0.281238, supervised_loss: 0.281157
[20:44:58.316] iteration 2188 : loss : 0.278529, supervised_loss: 0.278450
[20:44:59.230] iteration 2189 : loss : 0.282449, supervised_loss: 0.282339
[20:45:00.144] iteration 2190 : loss : 0.280059, supervised_loss: 0.279966
[20:45:01.056] iteration 2191 : loss : 0.276928, supervised_loss: 0.276726
[20:45:01.969] iteration 2192 : loss : 0.276968, supervised_loss: 0.276856
[20:45:02.882] iteration 2193 : loss : 0.287028, supervised_loss: 0.286952
[20:45:03.796] iteration 2194 : loss : 0.273805, supervised_loss: 0.273666
[20:45:04.709] iteration 2195 : loss : 0.282010, supervised_loss: 0.281947
[20:45:05.624] iteration 2196 : loss : 0.282971, supervised_loss: 0.282880
[20:45:07.281] iteration 2197 : loss : 0.276277, supervised_loss: 0.276164
[20:45:08.195] iteration 2198 : loss : 0.282880, supervised_loss: 0.282777
[20:45:09.108] iteration 2199 : loss : 0.278522, supervised_loss: 0.278384
[20:45:10.020] iteration 2200 : loss : 0.275097, supervised_loss: 0.275000
[20:45:12.090] save best model to model/LA_vnet_25_labeled/URPC/iter_2200_dice_0.9212624430656433.pth
[20:45:13.002] iteration 2201 : loss : 0.279484, supervised_loss: 0.279363
[20:45:13.915] iteration 2202 : loss : 0.282477, supervised_loss: 0.282389
[20:45:14.828] iteration 2203 : loss : 0.281681, supervised_loss: 0.281503
[20:45:15.739] iteration 2204 : loss : 0.281968, supervised_loss: 0.281900
[20:45:16.651] iteration 2205 : loss : 0.279849, supervised_loss: 0.279768
[20:45:17.563] iteration 2206 : loss : 0.285021, supervised_loss: 0.284930
[20:45:18.477] iteration 2207 : loss : 0.284094, supervised_loss: 0.284041
[20:45:19.390] iteration 2208 : loss : 0.276072, supervised_loss: 0.275969
[20:45:20.887] iteration 2209 : loss : 0.277422, supervised_loss: 0.277341
[20:45:21.800] iteration 2210 : loss : 0.280955, supervised_loss: 0.280810
[20:45:22.712] iteration 2211 : loss : 0.281518, supervised_loss: 0.281451
[20:45:23.625] iteration 2212 : loss : 0.279905, supervised_loss: 0.279759
[20:45:24.537] iteration 2213 : loss : 0.282006, supervised_loss: 0.281932
[20:45:25.449] iteration 2214 : loss : 0.274830, supervised_loss: 0.274675
[20:45:26.362] iteration 2215 : loss : 0.285384, supervised_loss: 0.285333
[20:45:27.275] iteration 2216 : loss : 0.281911, supervised_loss: 0.281808
[20:45:28.187] iteration 2217 : loss : 0.281985, supervised_loss: 0.281924
[20:45:29.101] iteration 2218 : loss : 0.278188, supervised_loss: 0.278120
[20:45:30.015] iteration 2219 : loss : 0.282465, supervised_loss: 0.282380
[20:45:30.927] iteration 2220 : loss : 0.291668, supervised_loss: 0.291519
[20:45:32.498] iteration 2221 : loss : 0.275568, supervised_loss: 0.275461
[20:45:33.412] iteration 2222 : loss : 0.282183, supervised_loss: 0.282115
[20:45:34.324] iteration 2223 : loss : 0.278465, supervised_loss: 0.278399
[20:45:35.237] iteration 2224 : loss : 0.280858, supervised_loss: 0.280745
[20:45:36.150] iteration 2225 : loss : 0.276588, supervised_loss: 0.276507
[20:45:37.063] iteration 2226 : loss : 0.276647, supervised_loss: 0.276582
[20:45:37.976] iteration 2227 : loss : 0.277972, supervised_loss: 0.277912
[20:45:38.889] iteration 2228 : loss : 0.281503, supervised_loss: 0.281394
[20:45:39.801] iteration 2229 : loss : 0.285129, supervised_loss: 0.285060
[20:45:40.713] iteration 2230 : loss : 0.294128, supervised_loss: 0.294009
[20:45:41.627] iteration 2231 : loss : 0.280751, supervised_loss: 0.280658
[20:45:42.540] iteration 2232 : loss : 0.290123, supervised_loss: 0.290041
[20:45:44.043] iteration 2233 : loss : 0.289261, supervised_loss: 0.289170
[20:45:44.956] iteration 2234 : loss : 0.276071, supervised_loss: 0.275991
[20:45:45.870] iteration 2235 : loss : 0.275942, supervised_loss: 0.275854
[20:45:46.782] iteration 2236 : loss : 0.274049, supervised_loss: 0.273991
[20:45:47.695] iteration 2237 : loss : 0.278753, supervised_loss: 0.278675
[20:45:48.606] iteration 2238 : loss : 0.281697, supervised_loss: 0.281615
[20:45:49.520] iteration 2239 : loss : 0.274018, supervised_loss: 0.273902
[20:45:50.432] iteration 2240 : loss : 0.279364, supervised_loss: 0.279288
[20:45:51.346] iteration 2241 : loss : 0.277023, supervised_loss: 0.276937
[20:45:52.258] iteration 2242 : loss : 0.291811, supervised_loss: 0.291742
[20:45:53.171] iteration 2243 : loss : 0.280951, supervised_loss: 0.280889
[20:45:54.083] iteration 2244 : loss : 0.288945, supervised_loss: 0.288892
[20:45:55.653] iteration 2245 : loss : 0.284060, supervised_loss: 0.283984
[20:45:56.565] iteration 2246 : loss : 0.277067, supervised_loss: 0.276959
[20:45:57.477] iteration 2247 : loss : 0.285266, supervised_loss: 0.285198
[20:45:58.390] iteration 2248 : loss : 0.278802, supervised_loss: 0.278684
[20:45:59.304] iteration 2249 : loss : 0.279732, supervised_loss: 0.279618
[20:46:00.217] iteration 2250 : loss : 0.296061, supervised_loss: 0.295861
[20:46:01.131] iteration 2251 : loss : 0.280253, supervised_loss: 0.280133
[20:46:02.043] iteration 2252 : loss : 0.274451, supervised_loss: 0.274313
[20:46:02.955] iteration 2253 : loss : 0.282057, supervised_loss: 0.281947
[20:46:03.867] iteration 2254 : loss : 0.277907, supervised_loss: 0.277813
[20:46:04.780] iteration 2255 : loss : 0.288964, supervised_loss: 0.288876
[20:46:05.694] iteration 2256 : loss : 0.280598, supervised_loss: 0.280517
[20:46:07.196] iteration 2257 : loss : 0.282064, supervised_loss: 0.281979
[20:46:08.109] iteration 2258 : loss : 0.283974, supervised_loss: 0.283852
[20:46:09.022] iteration 2259 : loss : 0.285261, supervised_loss: 0.285201
[20:46:09.934] iteration 2260 : loss : 0.278930, supervised_loss: 0.278785
[20:46:10.846] iteration 2261 : loss : 0.299636, supervised_loss: 0.299572
[20:46:11.759] iteration 2262 : loss : 0.282359, supervised_loss: 0.282289
[20:46:12.671] iteration 2263 : loss : 0.290040, supervised_loss: 0.289898
[20:46:13.584] iteration 2264 : loss : 0.275364, supervised_loss: 0.275280
[20:46:14.496] iteration 2265 : loss : 0.277149, supervised_loss: 0.277042
[20:46:15.410] iteration 2266 : loss : 0.276385, supervised_loss: 0.276268
[20:46:16.322] iteration 2267 : loss : 0.283216, supervised_loss: 0.283118
[20:46:17.235] iteration 2268 : loss : 0.285408, supervised_loss: 0.285334
[20:46:18.761] iteration 2269 : loss : 0.305825, supervised_loss: 0.305750
[20:46:19.672] iteration 2270 : loss : 0.284810, supervised_loss: 0.284695
[20:46:20.585] iteration 2271 : loss : 0.286787, supervised_loss: 0.286703
[20:46:21.499] iteration 2272 : loss : 0.280672, supervised_loss: 0.280575
[20:46:22.413] iteration 2273 : loss : 0.275258, supervised_loss: 0.275090
[20:46:23.325] iteration 2274 : loss : 0.275141, supervised_loss: 0.275051
[20:46:24.237] iteration 2275 : loss : 0.283018, supervised_loss: 0.282946
[20:46:25.149] iteration 2276 : loss : 0.280211, supervised_loss: 0.280092
[20:46:26.062] iteration 2277 : loss : 0.279487, supervised_loss: 0.279395
[20:46:26.977] iteration 2278 : loss : 0.280940, supervised_loss: 0.280850
[20:46:27.891] iteration 2279 : loss : 0.282041, supervised_loss: 0.281872
[20:46:28.805] iteration 2280 : loss : 0.279466, supervised_loss: 0.279369
[20:46:30.304] iteration 2281 : loss : 0.282091, supervised_loss: 0.281984
[20:46:31.217] iteration 2282 : loss : 0.279921, supervised_loss: 0.279811
[20:46:32.130] iteration 2283 : loss : 0.274101, supervised_loss: 0.273947
[20:46:33.043] iteration 2284 : loss : 0.279889, supervised_loss: 0.279818
[20:46:33.956] iteration 2285 : loss : 0.276799, supervised_loss: 0.276713
[20:46:34.868] iteration 2286 : loss : 0.276611, supervised_loss: 0.276533
[20:46:35.781] iteration 2287 : loss : 0.277464, supervised_loss: 0.277240
[20:46:36.694] iteration 2288 : loss : 0.277384, supervised_loss: 0.277303
[20:46:37.608] iteration 2289 : loss : 0.277624, supervised_loss: 0.277538
[20:46:38.522] iteration 2290 : loss : 0.281333, supervised_loss: 0.281258
[20:46:39.435] iteration 2291 : loss : 0.285160, supervised_loss: 0.285003
[20:46:40.348] iteration 2292 : loss : 0.289328, supervised_loss: 0.289222
[20:46:41.893] iteration 2293 : loss : 0.278369, supervised_loss: 0.278268
[20:46:42.807] iteration 2294 : loss : 0.282758, supervised_loss: 0.282686
[20:46:43.720] iteration 2295 : loss : 0.278049, supervised_loss: 0.277953
[20:46:44.633] iteration 2296 : loss : 0.278599, supervised_loss: 0.278524
[20:46:45.544] iteration 2297 : loss : 0.273920, supervised_loss: 0.273837
[20:46:46.458] iteration 2298 : loss : 0.277707, supervised_loss: 0.277577
[20:46:47.370] iteration 2299 : loss : 0.279135, supervised_loss: 0.279056
[20:46:48.282] iteration 2300 : loss : 0.288375, supervised_loss: 0.288273
[20:46:49.195] iteration 2301 : loss : 0.294134, supervised_loss: 0.294014
[20:46:50.108] iteration 2302 : loss : 0.280360, supervised_loss: 0.280295
[20:46:51.023] iteration 2303 : loss : 0.276702, supervised_loss: 0.276608
[20:46:51.937] iteration 2304 : loss : 0.274066, supervised_loss: 0.273969
[20:46:53.464] iteration 2305 : loss : 0.279304, supervised_loss: 0.279159
[20:46:54.377] iteration 2306 : loss : 0.285015, supervised_loss: 0.284924
[20:46:55.290] iteration 2307 : loss : 0.284750, supervised_loss: 0.284613
[20:46:56.202] iteration 2308 : loss : 0.275621, supervised_loss: 0.275503
[20:46:57.115] iteration 2309 : loss : 0.281184, supervised_loss: 0.281079
[20:46:58.026] iteration 2310 : loss : 0.287303, supervised_loss: 0.287209
[20:46:58.939] iteration 2311 : loss : 0.281499, supervised_loss: 0.281394
[20:46:59.852] iteration 2312 : loss : 0.286796, supervised_loss: 0.286708
[20:47:00.764] iteration 2313 : loss : 0.281976, supervised_loss: 0.281876
[20:47:01.676] iteration 2314 : loss : 0.286120, supervised_loss: 0.286051
[20:47:02.590] iteration 2315 : loss : 0.275903, supervised_loss: 0.275798
[20:47:03.503] iteration 2316 : loss : 0.284115, supervised_loss: 0.284031
[20:47:05.015] iteration 2317 : loss : 0.292787, supervised_loss: 0.292662
[20:47:05.926] iteration 2318 : loss : 0.278473, supervised_loss: 0.278322
[20:47:06.838] iteration 2319 : loss : 0.284257, supervised_loss: 0.284145
[20:47:07.752] iteration 2320 : loss : 0.293557, supervised_loss: 0.293470
[20:47:08.665] iteration 2321 : loss : 0.283542, supervised_loss: 0.283408
[20:47:09.578] iteration 2322 : loss : 0.282891, supervised_loss: 0.282782
[20:47:10.490] iteration 2323 : loss : 0.275774, supervised_loss: 0.275675
[20:47:11.402] iteration 2324 : loss : 0.274290, supervised_loss: 0.274179
[20:47:12.315] iteration 2325 : loss : 0.279621, supervised_loss: 0.279509
[20:47:13.229] iteration 2326 : loss : 0.277484, supervised_loss: 0.277406
[20:47:14.142] iteration 2327 : loss : 0.290293, supervised_loss: 0.290207
[20:47:15.054] iteration 2328 : loss : 0.278551, supervised_loss: 0.278469
[20:47:16.555] iteration 2329 : loss : 0.273011, supervised_loss: 0.272889
[20:47:17.468] iteration 2330 : loss : 0.283780, supervised_loss: 0.283720
[20:47:18.381] iteration 2331 : loss : 0.280630, supervised_loss: 0.280565
[20:47:19.293] iteration 2332 : loss : 0.275783, supervised_loss: 0.275659
[20:47:20.206] iteration 2333 : loss : 0.277706, supervised_loss: 0.277609
[20:47:21.119] iteration 2334 : loss : 0.280563, supervised_loss: 0.280456
[20:47:22.031] iteration 2335 : loss : 0.273995, supervised_loss: 0.273863
[20:47:22.943] iteration 2336 : loss : 0.284004, supervised_loss: 0.283904
[20:47:23.856] iteration 2337 : loss : 0.287631, supervised_loss: 0.287545
[20:47:24.769] iteration 2338 : loss : 0.280805, supervised_loss: 0.280742
[20:47:25.683] iteration 2339 : loss : 0.278156, supervised_loss: 0.278080
[20:47:26.596] iteration 2340 : loss : 0.274368, supervised_loss: 0.274316
[20:47:28.233] iteration 2341 : loss : 0.273217, supervised_loss: 0.273145
[20:47:29.144] iteration 2342 : loss : 0.280511, supervised_loss: 0.280416
[20:47:30.058] iteration 2343 : loss : 0.285225, supervised_loss: 0.285139
[20:47:30.971] iteration 2344 : loss : 0.282778, supervised_loss: 0.282683
[20:47:31.885] iteration 2345 : loss : 0.287311, supervised_loss: 0.287204
[20:47:32.798] iteration 2346 : loss : 0.288532, supervised_loss: 0.288394
[20:47:33.712] iteration 2347 : loss : 0.275480, supervised_loss: 0.275396
[20:47:34.624] iteration 2348 : loss : 0.278175, supervised_loss: 0.278084
[20:47:35.537] iteration 2349 : loss : 0.273835, supervised_loss: 0.273688
[20:47:36.451] iteration 2350 : loss : 0.283952, supervised_loss: 0.283868
[20:47:37.366] iteration 2351 : loss : 0.281023, supervised_loss: 0.280931
[20:47:38.281] iteration 2352 : loss : 0.277305, supervised_loss: 0.277211
[20:47:39.893] iteration 2353 : loss : 0.278577, supervised_loss: 0.278491
[20:47:40.807] iteration 2354 : loss : 0.276584, supervised_loss: 0.276503
[20:47:41.719] iteration 2355 : loss : 0.279569, supervised_loss: 0.279499
[20:47:42.632] iteration 2356 : loss : 0.279081, supervised_loss: 0.278943
[20:47:43.545] iteration 2357 : loss : 0.283731, supervised_loss: 0.283474
[20:47:44.460] iteration 2358 : loss : 0.280987, supervised_loss: 0.280915
[20:47:45.373] iteration 2359 : loss : 0.281243, supervised_loss: 0.281127
[20:47:46.286] iteration 2360 : loss : 0.279584, supervised_loss: 0.279436
[20:47:47.200] iteration 2361 : loss : 0.280322, supervised_loss: 0.280209
[20:47:48.113] iteration 2362 : loss : 0.276198, supervised_loss: 0.276102
[20:47:49.027] iteration 2363 : loss : 0.281768, supervised_loss: 0.281685
[20:47:49.941] iteration 2364 : loss : 0.278702, supervised_loss: 0.278575
[20:47:51.418] iteration 2365 : loss : 0.281914, supervised_loss: 0.281827
[20:47:52.330] iteration 2366 : loss : 0.273932, supervised_loss: 0.273848
[20:47:53.243] iteration 2367 : loss : 0.280449, supervised_loss: 0.280370
[20:47:54.156] iteration 2368 : loss : 0.281713, supervised_loss: 0.281558
[20:47:55.068] iteration 2369 : loss : 0.279180, supervised_loss: 0.279017
[20:47:55.980] iteration 2370 : loss : 0.281815, supervised_loss: 0.281733
[20:47:56.894] iteration 2371 : loss : 0.284918, supervised_loss: 0.284804
[20:47:57.808] iteration 2372 : loss : 0.277285, supervised_loss: 0.277148
[20:47:58.723] iteration 2373 : loss : 0.277637, supervised_loss: 0.277507
[20:47:59.637] iteration 2374 : loss : 0.289646, supervised_loss: 0.289548
[20:48:00.550] iteration 2375 : loss : 0.273654, supervised_loss: 0.273572
[20:48:01.463] iteration 2376 : loss : 0.277985, supervised_loss: 0.277915
[20:48:02.992] iteration 2377 : loss : 0.275423, supervised_loss: 0.275341
[20:48:03.904] iteration 2378 : loss : 0.278617, supervised_loss: 0.278539
[20:48:04.816] iteration 2379 : loss : 0.274871, supervised_loss: 0.274723
[20:48:05.729] iteration 2380 : loss : 0.277159, supervised_loss: 0.277035
[20:48:06.643] iteration 2381 : loss : 0.284275, supervised_loss: 0.284187
[20:48:07.557] iteration 2382 : loss : 0.282722, supervised_loss: 0.282579
[20:48:08.471] iteration 2383 : loss : 0.276979, supervised_loss: 0.276872
[20:48:09.385] iteration 2384 : loss : 0.287128, supervised_loss: 0.287047
[20:48:10.300] iteration 2385 : loss : 0.273381, supervised_loss: 0.273303
[20:48:11.213] iteration 2386 : loss : 0.272940, supervised_loss: 0.272678
[20:48:12.125] iteration 2387 : loss : 0.277456, supervised_loss: 0.277268
[20:48:13.038] iteration 2388 : loss : 0.284120, supervised_loss: 0.284040
[20:48:14.549] iteration 2389 : loss : 0.282566, supervised_loss: 0.282483
[20:48:15.462] iteration 2390 : loss : 0.283274, supervised_loss: 0.283170
[20:48:16.375] iteration 2391 : loss : 0.287586, supervised_loss: 0.287483
[20:48:17.297] iteration 2392 : loss : 0.280280, supervised_loss: 0.280185
[20:48:18.211] iteration 2393 : loss : 0.278456, supervised_loss: 0.278329
[20:48:19.124] iteration 2394 : loss : 0.280880, supervised_loss: 0.280763
[20:48:20.038] iteration 2395 : loss : 0.278509, supervised_loss: 0.278418
[20:48:20.951] iteration 2396 : loss : 0.273543, supervised_loss: 0.273397
[20:48:21.866] iteration 2397 : loss : 0.278965, supervised_loss: 0.278888
[20:48:22.778] iteration 2398 : loss : 0.282026, supervised_loss: 0.281942
[20:48:23.692] iteration 2399 : loss : 0.287198, supervised_loss: 0.287098
[20:48:24.606] iteration 2400 : loss : 0.276629, supervised_loss: 0.276543
[20:48:28.037] iteration 2401 : loss : 0.288737, supervised_loss: 0.288629
[20:48:28.951] iteration 2402 : loss : 0.280322, supervised_loss: 0.280128
[20:48:29.864] iteration 2403 : loss : 0.285337, supervised_loss: 0.285223
[20:48:30.778] iteration 2404 : loss : 0.283860, supervised_loss: 0.283742
[20:48:31.691] iteration 2405 : loss : 0.274741, supervised_loss: 0.274639
[20:48:32.604] iteration 2406 : loss : 0.281242, supervised_loss: 0.281063
[20:48:33.518] iteration 2407 : loss : 0.277490, supervised_loss: 0.277404
[20:48:34.431] iteration 2408 : loss : 0.275231, supervised_loss: 0.275107
[20:48:35.345] iteration 2409 : loss : 0.279393, supervised_loss: 0.279276
[20:48:36.259] iteration 2410 : loss : 0.285340, supervised_loss: 0.285144
[20:48:37.202] iteration 2411 : loss : 0.281471, supervised_loss: 0.281332
[20:48:38.116] iteration 2412 : loss : 0.283983, supervised_loss: 0.283856
[20:48:39.639] iteration 2413 : loss : 0.281664, supervised_loss: 0.281576
[20:48:40.552] iteration 2414 : loss : 0.283836, supervised_loss: 0.283699
[20:48:41.465] iteration 2415 : loss : 0.270374, supervised_loss: 0.270269
[20:48:42.379] iteration 2416 : loss : 0.287015, supervised_loss: 0.286892
[20:48:43.293] iteration 2417 : loss : 0.275690, supervised_loss: 0.275563
[20:48:44.208] iteration 2418 : loss : 0.289186, supervised_loss: 0.288998
[20:48:45.121] iteration 2419 : loss : 0.272488, supervised_loss: 0.272388
[20:48:46.034] iteration 2420 : loss : 0.283806, supervised_loss: 0.283727
[20:48:46.947] iteration 2421 : loss : 0.280603, supervised_loss: 0.280513
[20:48:47.861] iteration 2422 : loss : 0.287945, supervised_loss: 0.287850
[20:48:48.777] iteration 2423 : loss : 0.278824, supervised_loss: 0.278653
[20:48:49.691] iteration 2424 : loss : 0.281017, supervised_loss: 0.280922
[20:48:51.219] iteration 2425 : loss : 0.277463, supervised_loss: 0.277214
[20:48:52.132] iteration 2426 : loss : 0.278964, supervised_loss: 0.278827
[20:48:53.045] iteration 2427 : loss : 0.284487, supervised_loss: 0.284405
[20:48:53.960] iteration 2428 : loss : 0.274965, supervised_loss: 0.274817
[20:48:54.872] iteration 2429 : loss : 0.284849, supervised_loss: 0.284724
[20:48:55.787] iteration 2430 : loss : 0.291799, supervised_loss: 0.291674
[20:48:56.700] iteration 2431 : loss : 0.282713, supervised_loss: 0.282633
[20:48:57.616] iteration 2432 : loss : 0.279322, supervised_loss: 0.279139
[20:48:58.532] iteration 2433 : loss : 0.288375, supervised_loss: 0.288277
[20:48:59.446] iteration 2434 : loss : 0.278836, supervised_loss: 0.278724
[20:49:00.361] iteration 2435 : loss : 0.277999, supervised_loss: 0.277838
[20:49:01.276] iteration 2436 : loss : 0.283076, supervised_loss: 0.282956
[20:49:02.769] iteration 2437 : loss : 0.290256, supervised_loss: 0.290166
[20:49:03.682] iteration 2438 : loss : 0.276342, supervised_loss: 0.276245
[20:49:04.596] iteration 2439 : loss : 0.293908, supervised_loss: 0.293822
[20:49:05.510] iteration 2440 : loss : 0.285741, supervised_loss: 0.285593
[20:49:06.424] iteration 2441 : loss : 0.284067, supervised_loss: 0.283965
[20:49:07.339] iteration 2442 : loss : 0.279537, supervised_loss: 0.279386
[20:49:08.253] iteration 2443 : loss : 0.278054, supervised_loss: 0.277976
[20:49:09.169] iteration 2444 : loss : 0.273578, supervised_loss: 0.273478
[20:49:10.083] iteration 2445 : loss : 0.280626, supervised_loss: 0.280524
[20:49:10.997] iteration 2446 : loss : 0.278978, supervised_loss: 0.278798
[20:49:11.912] iteration 2447 : loss : 0.276727, supervised_loss: 0.276640
[20:49:12.827] iteration 2448 : loss : 0.280335, supervised_loss: 0.280260
[20:49:14.338] iteration 2449 : loss : 0.286258, supervised_loss: 0.286126
[20:49:15.254] iteration 2450 : loss : 0.273596, supervised_loss: 0.273468
[20:49:16.169] iteration 2451 : loss : 0.277980, supervised_loss: 0.277880
[20:49:17.084] iteration 2452 : loss : 0.292612, supervised_loss: 0.292501
[20:49:17.998] iteration 2453 : loss : 0.278797, supervised_loss: 0.278710
[20:49:18.913] iteration 2454 : loss : 0.281371, supervised_loss: 0.281243
[20:49:19.828] iteration 2455 : loss : 0.281277, supervised_loss: 0.281165
[20:49:20.742] iteration 2456 : loss : 0.281388, supervised_loss: 0.281277
[20:49:21.657] iteration 2457 : loss : 0.274038, supervised_loss: 0.273890
[20:49:22.572] iteration 2458 : loss : 0.283754, supervised_loss: 0.283654
[20:49:23.487] iteration 2459 : loss : 0.281283, supervised_loss: 0.281152
[20:49:24.401] iteration 2460 : loss : 0.280624, supervised_loss: 0.280522
[20:49:26.001] iteration 2461 : loss : 0.282284, supervised_loss: 0.282087
[20:49:26.916] iteration 2462 : loss : 0.288216, supervised_loss: 0.288074
[20:49:27.832] iteration 2463 : loss : 0.284457, supervised_loss: 0.284357
[20:49:28.747] iteration 2464 : loss : 0.275986, supervised_loss: 0.275877
[20:49:29.662] iteration 2465 : loss : 0.274186, supervised_loss: 0.274062
[20:49:30.579] iteration 2466 : loss : 0.280719, supervised_loss: 0.280622
[20:49:31.494] iteration 2467 : loss : 0.277589, supervised_loss: 0.277478
[20:49:32.410] iteration 2468 : loss : 0.281234, supervised_loss: 0.281147
[20:49:33.326] iteration 2469 : loss : 0.281870, supervised_loss: 0.281775
[20:49:34.243] iteration 2470 : loss : 0.280295, supervised_loss: 0.280180
[20:49:35.158] iteration 2471 : loss : 0.274847, supervised_loss: 0.274700
[20:49:36.074] iteration 2472 : loss : 0.283174, supervised_loss: 0.283021
[20:49:37.642] iteration 2473 : loss : 0.274024, supervised_loss: 0.273942
[20:49:38.554] iteration 2474 : loss : 0.283048, supervised_loss: 0.282952
[20:49:39.470] iteration 2475 : loss : 0.277409, supervised_loss: 0.277306
[20:49:40.386] iteration 2476 : loss : 0.277831, supervised_loss: 0.277707
[20:49:41.302] iteration 2477 : loss : 0.284077, supervised_loss: 0.283945
[20:49:42.219] iteration 2478 : loss : 0.277437, supervised_loss: 0.277263
[20:49:43.135] iteration 2479 : loss : 0.273880, supervised_loss: 0.273793
[20:49:44.051] iteration 2480 : loss : 0.274185, supervised_loss: 0.273940
[20:49:44.967] iteration 2481 : loss : 0.289823, supervised_loss: 0.289688
[20:49:45.883] iteration 2482 : loss : 0.278773, supervised_loss: 0.278666
[20:49:46.798] iteration 2483 : loss : 0.276860, supervised_loss: 0.276706
[20:49:47.715] iteration 2484 : loss : 0.274503, supervised_loss: 0.274384
[20:49:49.330] iteration 2485 : loss : 0.282954, supervised_loss: 0.282751
[20:49:50.246] iteration 2486 : loss : 0.279174, supervised_loss: 0.279078
[20:49:51.161] iteration 2487 : loss : 0.282103, supervised_loss: 0.281951
[20:49:52.077] iteration 2488 : loss : 0.280465, supervised_loss: 0.280337
[20:49:52.992] iteration 2489 : loss : 0.286975, supervised_loss: 0.286878
[20:49:53.908] iteration 2490 : loss : 0.280958, supervised_loss: 0.280793
[20:49:54.824] iteration 2491 : loss : 0.282092, supervised_loss: 0.282005
[20:49:55.740] iteration 2492 : loss : 0.281829, supervised_loss: 0.281736
[20:49:56.657] iteration 2493 : loss : 0.279217, supervised_loss: 0.279133
[20:49:57.573] iteration 2494 : loss : 0.283911, supervised_loss: 0.283794
[20:49:58.489] iteration 2495 : loss : 0.296921, supervised_loss: 0.296753
[20:49:59.406] iteration 2496 : loss : 0.285892, supervised_loss: 0.285729
[20:50:00.901] iteration 2497 : loss : 0.281060, supervised_loss: 0.280963
[20:50:01.816] iteration 2498 : loss : 0.280592, supervised_loss: 0.280427
[20:50:02.731] iteration 2499 : loss : 0.275595, supervised_loss: 0.275485
[20:50:03.646] iteration 2500 : loss : 0.283807, supervised_loss: 0.283725
[20:50:04.561] iteration 2501 : loss : 0.277121, supervised_loss: 0.276911
[20:50:05.477] iteration 2502 : loss : 0.277326, supervised_loss: 0.277189
[20:50:06.393] iteration 2503 : loss : 0.283636, supervised_loss: 0.283502
[20:50:07.309] iteration 2504 : loss : 0.271719, supervised_loss: 0.271476
[20:50:08.224] iteration 2505 : loss : 0.279502, supervised_loss: 0.279399
[20:50:09.140] iteration 2506 : loss : 0.279094, supervised_loss: 0.278983
[20:50:10.056] iteration 2507 : loss : 0.278333, supervised_loss: 0.278160
[20:50:10.972] iteration 2508 : loss : 0.286775, supervised_loss: 0.286631
[20:50:12.612] iteration 2509 : loss : 0.281842, supervised_loss: 0.281630
[20:50:13.527] iteration 2510 : loss : 0.281527, supervised_loss: 0.281367
[20:50:14.443] iteration 2511 : loss : 0.283081, supervised_loss: 0.282980
[20:50:15.358] iteration 2512 : loss : 0.285227, supervised_loss: 0.285092
[20:50:16.274] iteration 2513 : loss : 0.280986, supervised_loss: 0.280861
[20:50:17.190] iteration 2514 : loss : 0.283386, supervised_loss: 0.283292
[20:50:18.107] iteration 2515 : loss : 0.278584, supervised_loss: 0.278350
[20:50:19.024] iteration 2516 : loss : 0.275373, supervised_loss: 0.275271
[20:50:19.941] iteration 2517 : loss : 0.276501, supervised_loss: 0.276393
[20:50:20.858] iteration 2518 : loss : 0.284834, supervised_loss: 0.284745
[20:50:21.774] iteration 2519 : loss : 0.276210, supervised_loss: 0.276116
[20:50:22.692] iteration 2520 : loss : 0.279754, supervised_loss: 0.279681
[20:50:24.206] iteration 2521 : loss : 0.282582, supervised_loss: 0.282479
[20:50:25.121] iteration 2522 : loss : 0.277828, supervised_loss: 0.277710
[20:50:26.038] iteration 2523 : loss : 0.282853, supervised_loss: 0.282720
[20:50:26.954] iteration 2524 : loss : 0.278679, supervised_loss: 0.278553
[20:50:27.871] iteration 2525 : loss : 0.271506, supervised_loss: 0.271415
[20:50:28.787] iteration 2526 : loss : 0.275434, supervised_loss: 0.275356
[20:50:29.707] iteration 2527 : loss : 0.282973, supervised_loss: 0.282828
[20:50:30.623] iteration 2528 : loss : 0.276546, supervised_loss: 0.276297
[20:50:31.539] iteration 2529 : loss : 0.284502, supervised_loss: 0.284405
[20:50:32.454] iteration 2530 : loss : 0.284008, supervised_loss: 0.283836
[20:50:33.372] iteration 2531 : loss : 0.284266, supervised_loss: 0.284172
[20:50:34.290] iteration 2532 : loss : 0.285353, supervised_loss: 0.285265
[20:50:35.891] iteration 2533 : loss : 0.283667, supervised_loss: 0.283445
[20:50:36.805] iteration 2534 : loss : 0.282348, supervised_loss: 0.282211
[20:50:37.721] iteration 2535 : loss : 0.283491, supervised_loss: 0.283401
[20:50:38.638] iteration 2536 : loss : 0.277806, supervised_loss: 0.277702
[20:50:39.553] iteration 2537 : loss : 0.281726, supervised_loss: 0.281530
[20:50:40.469] iteration 2538 : loss : 0.280057, supervised_loss: 0.279925
[20:50:41.386] iteration 2539 : loss : 0.280002, supervised_loss: 0.279935
[20:50:42.303] iteration 2540 : loss : 0.278691, supervised_loss: 0.278580
[20:50:43.218] iteration 2541 : loss : 0.276343, supervised_loss: 0.276097
[20:50:44.135] iteration 2542 : loss : 0.276275, supervised_loss: 0.276179
[20:50:45.052] iteration 2543 : loss : 0.278714, supervised_loss: 0.278533
[20:50:45.969] iteration 2544 : loss : 0.276034, supervised_loss: 0.275951
[20:50:47.481] iteration 2545 : loss : 0.272086, supervised_loss: 0.271952
[20:50:48.397] iteration 2546 : loss : 0.273385, supervised_loss: 0.273258
[20:50:49.314] iteration 2547 : loss : 0.276184, supervised_loss: 0.276044
[20:50:50.230] iteration 2548 : loss : 0.279482, supervised_loss: 0.279362
[20:50:51.147] iteration 2549 : loss : 0.286354, supervised_loss: 0.286245
[20:50:52.062] iteration 2550 : loss : 0.282818, supervised_loss: 0.282716
[20:50:52.978] iteration 2551 : loss : 0.288993, supervised_loss: 0.288849
[20:50:53.896] iteration 2552 : loss : 0.281965, supervised_loss: 0.281857
[20:50:54.812] iteration 2553 : loss : 0.276429, supervised_loss: 0.276212
[20:50:55.728] iteration 2554 : loss : 0.286010, supervised_loss: 0.285883
[20:50:56.644] iteration 2555 : loss : 0.279288, supervised_loss: 0.279175
[20:50:57.561] iteration 2556 : loss : 0.278599, supervised_loss: 0.278455
[20:50:59.076] iteration 2557 : loss : 0.274912, supervised_loss: 0.274802
[20:50:59.990] iteration 2558 : loss : 0.274772, supervised_loss: 0.274694
[20:51:00.906] iteration 2559 : loss : 0.277520, supervised_loss: 0.277374
[20:51:01.821] iteration 2560 : loss : 0.281419, supervised_loss: 0.281288
[20:51:02.737] iteration 2561 : loss : 0.298073, supervised_loss: 0.297984
[20:51:03.653] iteration 2562 : loss : 0.286664, supervised_loss: 0.286570
[20:51:04.569] iteration 2563 : loss : 0.279965, supervised_loss: 0.279773
[20:51:05.486] iteration 2564 : loss : 0.280729, supervised_loss: 0.280624
[20:51:06.401] iteration 2565 : loss : 0.277812, supervised_loss: 0.277705
[20:51:07.316] iteration 2566 : loss : 0.278431, supervised_loss: 0.278296
[20:51:08.234] iteration 2567 : loss : 0.279567, supervised_loss: 0.279483
[20:51:09.151] iteration 2568 : loss : 0.274587, supervised_loss: 0.274494
[20:51:10.707] iteration 2569 : loss : 0.273383, supervised_loss: 0.273265
[20:51:11.621] iteration 2570 : loss : 0.276316, supervised_loss: 0.276200
[20:51:12.535] iteration 2571 : loss : 0.275649, supervised_loss: 0.275492
[20:51:13.450] iteration 2572 : loss : 0.281926, supervised_loss: 0.281818
[20:51:14.365] iteration 2573 : loss : 0.282339, supervised_loss: 0.282230
[20:51:15.280] iteration 2574 : loss : 0.277796, supervised_loss: 0.277656
[20:51:16.195] iteration 2575 : loss : 0.281101, supervised_loss: 0.280942
[20:51:17.110] iteration 2576 : loss : 0.281741, supervised_loss: 0.281534
[20:51:18.025] iteration 2577 : loss : 0.282900, supervised_loss: 0.282821
[20:51:18.940] iteration 2578 : loss : 0.286880, supervised_loss: 0.286679
[20:51:19.856] iteration 2579 : loss : 0.284399, supervised_loss: 0.284295
[20:51:20.771] iteration 2580 : loss : 0.288082, supervised_loss: 0.287890
[20:51:22.326] iteration 2581 : loss : 0.277977, supervised_loss: 0.277878
[20:51:23.240] iteration 2582 : loss : 0.285914, supervised_loss: 0.285777
[20:51:24.154] iteration 2583 : loss : 0.284933, supervised_loss: 0.284832
[20:51:25.068] iteration 2584 : loss : 0.286018, supervised_loss: 0.285868
[20:51:25.984] iteration 2585 : loss : 0.281978, supervised_loss: 0.281827
[20:51:26.900] iteration 2586 : loss : 0.276315, supervised_loss: 0.276217
[20:51:27.816] iteration 2587 : loss : 0.278032, supervised_loss: 0.277764
[20:51:28.731] iteration 2588 : loss : 0.277761, supervised_loss: 0.277636
[20:51:29.648] iteration 2589 : loss : 0.278616, supervised_loss: 0.278380
[20:51:30.564] iteration 2590 : loss : 0.278694, supervised_loss: 0.278329
[20:51:31.479] iteration 2591 : loss : 0.274520, supervised_loss: 0.274367
[20:51:32.393] iteration 2592 : loss : 0.275604, supervised_loss: 0.275456
[20:51:33.886] iteration 2593 : loss : 0.288919, supervised_loss: 0.288824
[20:51:34.801] iteration 2594 : loss : 0.283195, supervised_loss: 0.283065
[20:51:35.715] iteration 2595 : loss : 0.290567, supervised_loss: 0.290432
[20:51:36.630] iteration 2596 : loss : 0.277254, supervised_loss: 0.277118
[20:51:37.544] iteration 2597 : loss : 0.275211, supervised_loss: 0.275122
[20:51:38.458] iteration 2598 : loss : 0.286236, supervised_loss: 0.286133
[20:51:39.372] iteration 2599 : loss : 0.279257, supervised_loss: 0.279102
[20:51:40.286] iteration 2600 : loss : 0.274887, supervised_loss: 0.274704
[20:51:42.357] save best model to model/LA_vnet_25_labeled/URPC/iter_2600_dice_0.9213985800743103.pth
[20:51:43.272] iteration 2601 : loss : 0.279256, supervised_loss: 0.279126
[20:51:44.185] iteration 2602 : loss : 0.278657, supervised_loss: 0.278541
[20:51:45.099] iteration 2603 : loss : 0.280074, supervised_loss: 0.279961
[20:51:46.013] iteration 2604 : loss : 0.287578, supervised_loss: 0.287483
[20:51:47.504] iteration 2605 : loss : 0.273583, supervised_loss: 0.273411
[20:51:48.419] iteration 2606 : loss : 0.280504, supervised_loss: 0.280366
[20:51:49.331] iteration 2607 : loss : 0.274113, supervised_loss: 0.273981
[20:51:50.245] iteration 2608 : loss : 0.281634, supervised_loss: 0.281408
[20:51:51.159] iteration 2609 : loss : 0.272244, supervised_loss: 0.272076
[20:51:52.075] iteration 2610 : loss : 0.276050, supervised_loss: 0.275941
[20:51:52.988] iteration 2611 : loss : 0.272412, supervised_loss: 0.272264
[20:51:53.903] iteration 2612 : loss : 0.284586, supervised_loss: 0.284407
[20:51:54.818] iteration 2613 : loss : 0.276591, supervised_loss: 0.276504
[20:51:55.731] iteration 2614 : loss : 0.294637, supervised_loss: 0.294527
[20:51:56.645] iteration 2615 : loss : 0.274337, supervised_loss: 0.274181
[20:51:57.560] iteration 2616 : loss : 0.279092, supervised_loss: 0.278916
[20:51:59.194] iteration 2617 : loss : 0.274784, supervised_loss: 0.274576
[20:52:00.107] iteration 2618 : loss : 0.278337, supervised_loss: 0.278211
[20:52:01.020] iteration 2619 : loss : 0.278985, supervised_loss: 0.278774
[20:52:01.934] iteration 2620 : loss : 0.279186, supervised_loss: 0.279022
[20:52:02.848] iteration 2621 : loss : 0.278041, supervised_loss: 0.277900
[20:52:03.762] iteration 2622 : loss : 0.275507, supervised_loss: 0.275376
[20:52:04.676] iteration 2623 : loss : 0.280283, supervised_loss: 0.280185
[20:52:05.590] iteration 2624 : loss : 0.289503, supervised_loss: 0.289371
[20:52:06.504] iteration 2625 : loss : 0.274928, supervised_loss: 0.274812
[20:52:07.418] iteration 2626 : loss : 0.280727, supervised_loss: 0.280547
[20:52:08.332] iteration 2627 : loss : 0.284176, supervised_loss: 0.284073
[20:52:09.246] iteration 2628 : loss : 0.289376, supervised_loss: 0.289251
[20:52:10.762] iteration 2629 : loss : 0.281856, supervised_loss: 0.281748
[20:52:11.675] iteration 2630 : loss : 0.274936, supervised_loss: 0.274770
[20:52:12.589] iteration 2631 : loss : 0.281234, supervised_loss: 0.280988
[20:52:13.503] iteration 2632 : loss : 0.278468, supervised_loss: 0.278375
[20:52:14.418] iteration 2633 : loss : 0.275934, supervised_loss: 0.275786
[20:52:15.333] iteration 2634 : loss : 0.285694, supervised_loss: 0.285502
[20:52:16.248] iteration 2635 : loss : 0.276409, supervised_loss: 0.276298
[20:52:17.161] iteration 2636 : loss : 0.281471, supervised_loss: 0.281357
[20:52:18.074] iteration 2637 : loss : 0.281403, supervised_loss: 0.281283
[20:52:18.988] iteration 2638 : loss : 0.277938, supervised_loss: 0.277839
[20:52:19.902] iteration 2639 : loss : 0.282881, supervised_loss: 0.282738
[20:52:20.816] iteration 2640 : loss : 0.280615, supervised_loss: 0.280432
[20:52:22.377] iteration 2641 : loss : 0.285720, supervised_loss: 0.285544
[20:52:23.291] iteration 2642 : loss : 0.286686, supervised_loss: 0.286552
[20:52:24.206] iteration 2643 : loss : 0.282218, supervised_loss: 0.282042
[20:52:25.123] iteration 2644 : loss : 0.281242, supervised_loss: 0.281167
[20:52:26.037] iteration 2645 : loss : 0.271859, supervised_loss: 0.271686
[20:52:26.951] iteration 2646 : loss : 0.293027, supervised_loss: 0.292909
[20:52:27.864] iteration 2647 : loss : 0.275617, supervised_loss: 0.275505
[20:52:28.777] iteration 2648 : loss : 0.286381, supervised_loss: 0.286229
[20:52:29.690] iteration 2649 : loss : 0.270611, supervised_loss: 0.270424
[20:52:30.605] iteration 2650 : loss : 0.282709, supervised_loss: 0.282610
[20:52:31.519] iteration 2651 : loss : 0.274515, supervised_loss: 0.274353
[20:52:32.434] iteration 2652 : loss : 0.278230, supervised_loss: 0.278137
[20:52:33.936] iteration 2653 : loss : 0.280175, supervised_loss: 0.280019
[20:52:34.849] iteration 2654 : loss : 0.283447, supervised_loss: 0.283322
[20:52:35.762] iteration 2655 : loss : 0.279328, supervised_loss: 0.279188
[20:52:36.675] iteration 2656 : loss : 0.280262, supervised_loss: 0.280108
[20:52:37.589] iteration 2657 : loss : 0.280223, supervised_loss: 0.280094
[20:52:38.503] iteration 2658 : loss : 0.276990, supervised_loss: 0.276870
[20:52:39.418] iteration 2659 : loss : 0.291404, supervised_loss: 0.291201
[20:52:40.332] iteration 2660 : loss : 0.281032, supervised_loss: 0.280913
[20:52:41.246] iteration 2661 : loss : 0.280947, supervised_loss: 0.280795
[20:52:42.159] iteration 2662 : loss : 0.280394, supervised_loss: 0.280191
[20:52:43.072] iteration 2663 : loss : 0.280440, supervised_loss: 0.280234
[20:52:43.985] iteration 2664 : loss : 0.273394, supervised_loss: 0.273225
[20:52:45.564] iteration 2665 : loss : 0.275823, supervised_loss: 0.275664
[20:52:46.477] iteration 2666 : loss : 0.274638, supervised_loss: 0.274517
[20:52:47.389] iteration 2667 : loss : 0.275986, supervised_loss: 0.275864
[20:52:48.303] iteration 2668 : loss : 0.286256, supervised_loss: 0.286167
[20:52:49.215] iteration 2669 : loss : 0.281787, supervised_loss: 0.281662
[20:52:50.129] iteration 2670 : loss : 0.276093, supervised_loss: 0.275974
[20:52:51.043] iteration 2671 : loss : 0.283903, supervised_loss: 0.283775
[20:52:51.956] iteration 2672 : loss : 0.282385, supervised_loss: 0.282274
[20:52:52.870] iteration 2673 : loss : 0.277030, supervised_loss: 0.276881
[20:52:53.782] iteration 2674 : loss : 0.278924, supervised_loss: 0.278788
[20:52:54.696] iteration 2675 : loss : 0.287284, supervised_loss: 0.287143
[20:52:55.608] iteration 2676 : loss : 0.279231, supervised_loss: 0.279118
[20:52:57.268] iteration 2677 : loss : 0.273909, supervised_loss: 0.273724
[20:52:58.183] iteration 2678 : loss : 0.275812, supervised_loss: 0.275697
[20:52:59.096] iteration 2679 : loss : 0.274165, supervised_loss: 0.274000
[20:53:00.008] iteration 2680 : loss : 0.272328, supervised_loss: 0.272129
[20:53:00.922] iteration 2681 : loss : 0.281221, supervised_loss: 0.281054
[20:53:01.835] iteration 2682 : loss : 0.277269, supervised_loss: 0.277134
[20:53:02.749] iteration 2683 : loss : 0.294624, supervised_loss: 0.294427
[20:53:03.661] iteration 2684 : loss : 0.281736, supervised_loss: 0.281567
[20:53:04.574] iteration 2685 : loss : 0.285277, supervised_loss: 0.285155
[20:53:05.488] iteration 2686 : loss : 0.282892, supervised_loss: 0.282784
[20:53:06.402] iteration 2687 : loss : 0.279074, supervised_loss: 0.278993
[20:53:07.316] iteration 2688 : loss : 0.278868, supervised_loss: 0.278706
[20:53:08.882] iteration 2689 : loss : 0.274222, supervised_loss: 0.274090
[20:53:09.795] iteration 2690 : loss : 0.276039, supervised_loss: 0.275880
[20:53:10.707] iteration 2691 : loss : 0.287644, supervised_loss: 0.287542
[20:53:11.621] iteration 2692 : loss : 0.279178, supervised_loss: 0.279034
[20:53:12.533] iteration 2693 : loss : 0.280394, supervised_loss: 0.280225
[20:53:13.446] iteration 2694 : loss : 0.279686, supervised_loss: 0.279513
[20:53:14.360] iteration 2695 : loss : 0.275813, supervised_loss: 0.275706
[20:53:15.273] iteration 2696 : loss : 0.278339, supervised_loss: 0.278255
[20:53:16.186] iteration 2697 : loss : 0.287739, supervised_loss: 0.287649
[20:53:17.100] iteration 2698 : loss : 0.273647, supervised_loss: 0.273523
[20:53:18.013] iteration 2699 : loss : 0.276474, supervised_loss: 0.276317
[20:53:18.926] iteration 2700 : loss : 0.284556, supervised_loss: 0.284443
[20:53:20.561] iteration 2701 : loss : 0.280867, supervised_loss: 0.280636
[20:53:21.473] iteration 2702 : loss : 0.278640, supervised_loss: 0.278343
[20:53:22.386] iteration 2703 : loss : 0.282289, supervised_loss: 0.282079
[20:53:23.299] iteration 2704 : loss : 0.275191, supervised_loss: 0.275067
[20:53:24.212] iteration 2705 : loss : 0.276207, supervised_loss: 0.276029
[20:53:25.125] iteration 2706 : loss : 0.284468, supervised_loss: 0.284158
[20:53:26.037] iteration 2707 : loss : 0.276022, supervised_loss: 0.275811
[20:53:26.949] iteration 2708 : loss : 0.291508, supervised_loss: 0.291374
[20:53:27.862] iteration 2709 : loss : 0.284604, supervised_loss: 0.284424
[20:53:28.775] iteration 2710 : loss : 0.274068, supervised_loss: 0.273943
[20:53:29.690] iteration 2711 : loss : 0.275044, supervised_loss: 0.274947
[20:53:30.604] iteration 2712 : loss : 0.277390, supervised_loss: 0.277242
[20:53:32.100] iteration 2713 : loss : 0.285810, supervised_loss: 0.285585
[20:53:33.012] iteration 2714 : loss : 0.274522, supervised_loss: 0.274375
[20:53:33.925] iteration 2715 : loss : 0.281090, supervised_loss: 0.280875
[20:53:34.839] iteration 2716 : loss : 0.296777, supervised_loss: 0.296660
[20:53:35.752] iteration 2717 : loss : 0.282357, supervised_loss: 0.282162
[20:53:36.666] iteration 2718 : loss : 0.275037, supervised_loss: 0.274863
[20:53:37.579] iteration 2719 : loss : 0.283687, supervised_loss: 0.283519
[20:53:38.493] iteration 2720 : loss : 0.278275, supervised_loss: 0.278136
[20:53:39.406] iteration 2721 : loss : 0.279694, supervised_loss: 0.279547
[20:53:40.318] iteration 2722 : loss : 0.282689, supervised_loss: 0.282464
[20:53:41.230] iteration 2723 : loss : 0.278434, supervised_loss: 0.278295
[20:53:42.144] iteration 2724 : loss : 0.275977, supervised_loss: 0.275869
[20:53:43.727] iteration 2725 : loss : 0.281136, supervised_loss: 0.281015
[20:53:44.639] iteration 2726 : loss : 0.279063, supervised_loss: 0.278958
[20:53:45.553] iteration 2727 : loss : 0.280949, supervised_loss: 0.280838
[20:53:46.465] iteration 2728 : loss : 0.293853, supervised_loss: 0.293691
[20:53:47.378] iteration 2729 : loss : 0.283417, supervised_loss: 0.283192
[20:53:48.291] iteration 2730 : loss : 0.279161, supervised_loss: 0.279009
[20:53:49.205] iteration 2731 : loss : 0.276230, supervised_loss: 0.276020
[20:53:50.117] iteration 2732 : loss : 0.279535, supervised_loss: 0.279436
[20:53:51.030] iteration 2733 : loss : 0.279603, supervised_loss: 0.279485
[20:53:51.943] iteration 2734 : loss : 0.277300, supervised_loss: 0.277074
[20:53:52.855] iteration 2735 : loss : 0.280145, supervised_loss: 0.279991
[20:53:53.770] iteration 2736 : loss : 0.272172, supervised_loss: 0.272033
[20:53:55.341] iteration 2737 : loss : 0.278616, supervised_loss: 0.278451
[20:53:56.254] iteration 2738 : loss : 0.281465, supervised_loss: 0.281113
[20:53:57.166] iteration 2739 : loss : 0.279405, supervised_loss: 0.279264
[20:53:58.079] iteration 2740 : loss : 0.277680, supervised_loss: 0.277437
[20:53:58.992] iteration 2741 : loss : 0.304248, supervised_loss: 0.304053
[20:53:59.904] iteration 2742 : loss : 0.284272, supervised_loss: 0.284090
[20:54:00.817] iteration 2743 : loss : 0.277709, supervised_loss: 0.277543
[20:54:01.730] iteration 2744 : loss : 0.278196, supervised_loss: 0.277875
[20:54:02.644] iteration 2745 : loss : 0.283858, supervised_loss: 0.283677
[20:54:03.559] iteration 2746 : loss : 0.276581, supervised_loss: 0.276435
[20:54:04.471] iteration 2747 : loss : 0.278961, supervised_loss: 0.278829
[20:54:05.384] iteration 2748 : loss : 0.286514, supervised_loss: 0.286391
[20:54:06.988] iteration 2749 : loss : 0.292762, supervised_loss: 0.292603
[20:54:07.903] iteration 2750 : loss : 0.277628, supervised_loss: 0.277245
[20:54:08.816] iteration 2751 : loss : 0.278475, supervised_loss: 0.278318
[20:54:09.730] iteration 2752 : loss : 0.275012, supervised_loss: 0.274916
[20:54:10.644] iteration 2753 : loss : 0.284067, supervised_loss: 0.283880
[20:54:11.557] iteration 2754 : loss : 0.278627, supervised_loss: 0.278481
[20:54:12.469] iteration 2755 : loss : 0.279171, supervised_loss: 0.279055
[20:54:13.382] iteration 2756 : loss : 0.281905, supervised_loss: 0.281677
[20:54:14.295] iteration 2757 : loss : 0.276839, supervised_loss: 0.276637
[20:54:15.208] iteration 2758 : loss : 0.285902, supervised_loss: 0.285796
[20:54:16.122] iteration 2759 : loss : 0.279722, supervised_loss: 0.279589
[20:54:17.036] iteration 2760 : loss : 0.280225, supervised_loss: 0.280107
[20:54:18.729] iteration 2761 : loss : 0.281585, supervised_loss: 0.281457
[20:54:19.638] iteration 2762 : loss : 0.276474, supervised_loss: 0.276316
[20:54:20.551] iteration 2763 : loss : 0.277715, supervised_loss: 0.277583
[20:54:21.463] iteration 2764 : loss : 0.282080, supervised_loss: 0.281930
[20:54:22.375] iteration 2765 : loss : 0.285475, supervised_loss: 0.285361
[20:54:23.288] iteration 2766 : loss : 0.278459, supervised_loss: 0.278352
[20:54:24.202] iteration 2767 : loss : 0.288533, supervised_loss: 0.288382
[20:54:25.114] iteration 2768 : loss : 0.278596, supervised_loss: 0.278437
[20:54:26.027] iteration 2769 : loss : 0.285310, supervised_loss: 0.285197
[20:54:26.940] iteration 2770 : loss : 0.274762, supervised_loss: 0.274627
[20:54:27.853] iteration 2771 : loss : 0.279451, supervised_loss: 0.279319
[20:54:28.767] iteration 2772 : loss : 0.277547, supervised_loss: 0.277378
[20:54:30.299] iteration 2773 : loss : 0.292444, supervised_loss: 0.292283
[20:54:31.212] iteration 2774 : loss : 0.275427, supervised_loss: 0.275281
[20:54:32.126] iteration 2775 : loss : 0.288868, supervised_loss: 0.288728
[20:54:33.038] iteration 2776 : loss : 0.290050, supervised_loss: 0.289930
[20:54:33.951] iteration 2777 : loss : 0.281708, supervised_loss: 0.281546
[20:54:34.864] iteration 2778 : loss : 0.272440, supervised_loss: 0.272223
[20:54:35.777] iteration 2779 : loss : 0.282596, supervised_loss: 0.281952
[20:54:36.689] iteration 2780 : loss : 0.277616, supervised_loss: 0.277484
[20:54:37.600] iteration 2781 : loss : 0.280819, supervised_loss: 0.280704
[20:54:38.513] iteration 2782 : loss : 0.281059, supervised_loss: 0.280838
[20:54:39.427] iteration 2783 : loss : 0.295323, supervised_loss: 0.295145
[20:54:40.340] iteration 2784 : loss : 0.286291, supervised_loss: 0.286144
[20:54:41.953] iteration 2785 : loss : 0.285623, supervised_loss: 0.285267
[20:54:42.866] iteration 2786 : loss : 0.272875, supervised_loss: 0.272655
[20:54:43.778] iteration 2787 : loss : 0.288792, supervised_loss: 0.288653
[20:54:44.691] iteration 2788 : loss : 0.285767, supervised_loss: 0.285653
[20:54:45.604] iteration 2789 : loss : 0.277045, supervised_loss: 0.276884
[20:54:46.516] iteration 2790 : loss : 0.277141, supervised_loss: 0.277035
[20:54:47.429] iteration 2791 : loss : 0.273319, supervised_loss: 0.273124
[20:54:48.343] iteration 2792 : loss : 0.276082, supervised_loss: 0.275975
[20:54:49.257] iteration 2793 : loss : 0.287169, supervised_loss: 0.286901
[20:54:50.171] iteration 2794 : loss : 0.276801, supervised_loss: 0.276664
[20:54:51.083] iteration 2795 : loss : 0.280979, supervised_loss: 0.280844
[20:54:51.996] iteration 2796 : loss : 0.276726, supervised_loss: 0.276479
[20:54:53.633] iteration 2797 : loss : 0.280634, supervised_loss: 0.280529
[20:54:54.545] iteration 2798 : loss : 0.274305, supervised_loss: 0.274213
[20:54:55.457] iteration 2799 : loss : 0.275907, supervised_loss: 0.275810
[20:54:56.369] iteration 2800 : loss : 0.280330, supervised_loss: 0.280035
[20:54:58.415] save best model to model/LA_vnet_25_labeled/URPC/iter_2800_dice_0.9219400882720947.pth
[20:54:59.326] iteration 2801 : loss : 0.277462, supervised_loss: 0.277363
[20:55:00.238] iteration 2802 : loss : 0.280651, supervised_loss: 0.280421
[20:55:01.148] iteration 2803 : loss : 0.281981, supervised_loss: 0.281770
[20:55:02.060] iteration 2804 : loss : 0.278971, supervised_loss: 0.278826
[20:55:02.973] iteration 2805 : loss : 0.279613, supervised_loss: 0.279508
[20:55:03.885] iteration 2806 : loss : 0.279106, supervised_loss: 0.278956
[20:55:04.797] iteration 2807 : loss : 0.283868, supervised_loss: 0.283715
[20:55:05.713] iteration 2808 : loss : 0.285376, supervised_loss: 0.285213
[20:55:07.244] iteration 2809 : loss : 0.280302, supervised_loss: 0.280158
[20:55:08.156] iteration 2810 : loss : 0.275307, supervised_loss: 0.275135
[20:55:09.068] iteration 2811 : loss : 0.280676, supervised_loss: 0.280544
[20:55:09.981] iteration 2812 : loss : 0.293260, supervised_loss: 0.293042
[20:55:10.892] iteration 2813 : loss : 0.278641, supervised_loss: 0.278512
[20:55:11.804] iteration 2814 : loss : 0.279962, supervised_loss: 0.279738
[20:55:12.718] iteration 2815 : loss : 0.281575, supervised_loss: 0.281463
[20:55:13.630] iteration 2816 : loss : 0.282743, supervised_loss: 0.282560
[20:55:14.543] iteration 2817 : loss : 0.271365, supervised_loss: 0.271123
[20:55:15.456] iteration 2818 : loss : 0.293936, supervised_loss: 0.293721
[20:55:16.370] iteration 2819 : loss : 0.277667, supervised_loss: 0.277528
[20:55:17.284] iteration 2820 : loss : 0.280967, supervised_loss: 0.280774
[20:55:18.852] iteration 2821 : loss : 0.280857, supervised_loss: 0.280754
[20:55:19.763] iteration 2822 : loss : 0.278351, supervised_loss: 0.278214
[20:55:20.676] iteration 2823 : loss : 0.277904, supervised_loss: 0.277816
[20:55:21.587] iteration 2824 : loss : 0.286032, supervised_loss: 0.285867
[20:55:22.500] iteration 2825 : loss : 0.278075, supervised_loss: 0.277923
[20:55:23.414] iteration 2826 : loss : 0.276311, supervised_loss: 0.276070
[20:55:24.329] iteration 2827 : loss : 0.282372, supervised_loss: 0.282197
[20:55:25.242] iteration 2828 : loss : 0.278428, supervised_loss: 0.278174
[20:55:26.155] iteration 2829 : loss : 0.276393, supervised_loss: 0.276258
[20:55:27.066] iteration 2830 : loss : 0.279895, supervised_loss: 0.279784
[20:55:27.980] iteration 2831 : loss : 0.273236, supervised_loss: 0.272946
[20:55:28.893] iteration 2832 : loss : 0.280630, supervised_loss: 0.280536
[20:55:30.516] iteration 2833 : loss : 0.281525, supervised_loss: 0.281395
[20:55:31.429] iteration 2834 : loss : 0.278526, supervised_loss: 0.278337
[20:55:32.341] iteration 2835 : loss : 0.281298, supervised_loss: 0.281094
[20:55:33.255] iteration 2836 : loss : 0.274300, supervised_loss: 0.274203
[20:55:34.168] iteration 2837 : loss : 0.275516, supervised_loss: 0.275260
[20:55:35.081] iteration 2838 : loss : 0.274305, supervised_loss: 0.273952
[20:55:35.994] iteration 2839 : loss : 0.275749, supervised_loss: 0.275522
[20:55:36.905] iteration 2840 : loss : 0.278761, supervised_loss: 0.278534
[20:55:37.817] iteration 2841 : loss : 0.273069, supervised_loss: 0.272967
[20:55:38.730] iteration 2842 : loss : 0.274805, supervised_loss: 0.274620
[20:55:39.642] iteration 2843 : loss : 0.276171, supervised_loss: 0.276034
[20:55:40.554] iteration 2844 : loss : 0.277374, supervised_loss: 0.277245
[20:55:42.151] iteration 2845 : loss : 0.277818, supervised_loss: 0.277671
[20:55:43.062] iteration 2846 : loss : 0.279182, supervised_loss: 0.279060
[20:55:43.974] iteration 2847 : loss : 0.281934, supervised_loss: 0.281808
[20:55:44.887] iteration 2848 : loss : 0.281220, supervised_loss: 0.280987
[20:55:45.800] iteration 2849 : loss : 0.284103, supervised_loss: 0.283937
[20:55:46.712] iteration 2850 : loss : 0.270826, supervised_loss: 0.270662
[20:55:47.626] iteration 2851 : loss : 0.277272, supervised_loss: 0.277116
[20:55:48.539] iteration 2852 : loss : 0.274648, supervised_loss: 0.274502
[20:55:49.451] iteration 2853 : loss : 0.274257, supervised_loss: 0.274028
[20:55:50.366] iteration 2854 : loss : 0.273496, supervised_loss: 0.273287
[20:55:51.279] iteration 2855 : loss : 0.283884, supervised_loss: 0.283724
[20:55:52.191] iteration 2856 : loss : 0.276543, supervised_loss: 0.276317
[20:55:53.834] iteration 2857 : loss : 0.279112, supervised_loss: 0.278971
[20:55:54.747] iteration 2858 : loss : 0.286500, supervised_loss: 0.286338
[20:55:55.659] iteration 2859 : loss : 0.279253, supervised_loss: 0.279113
[20:55:56.571] iteration 2860 : loss : 0.275318, supervised_loss: 0.275163
[20:55:57.484] iteration 2861 : loss : 0.273663, supervised_loss: 0.273535
[20:55:58.396] iteration 2862 : loss : 0.273766, supervised_loss: 0.273583
[20:55:59.309] iteration 2863 : loss : 0.277557, supervised_loss: 0.277277
[20:56:00.221] iteration 2864 : loss : 0.281281, supervised_loss: 0.280800
[20:56:01.134] iteration 2865 : loss : 0.288133, supervised_loss: 0.287993
[20:56:02.046] iteration 2866 : loss : 0.273818, supervised_loss: 0.273657
[20:56:02.958] iteration 2867 : loss : 0.275510, supervised_loss: 0.275361
[20:56:03.872] iteration 2868 : loss : 0.275566, supervised_loss: 0.275431
[20:56:05.518] iteration 2869 : loss : 0.286135, supervised_loss: 0.285973
[20:56:06.431] iteration 2870 : loss : 0.274889, supervised_loss: 0.274707
[20:56:07.344] iteration 2871 : loss : 0.280411, supervised_loss: 0.280237
[20:56:08.257] iteration 2872 : loss : 0.274582, supervised_loss: 0.274426
[20:56:09.169] iteration 2873 : loss : 0.278191, supervised_loss: 0.278031
[20:56:10.081] iteration 2874 : loss : 0.287722, supervised_loss: 0.287512
[20:56:10.996] iteration 2875 : loss : 0.281023, supervised_loss: 0.280834
[20:56:11.909] iteration 2876 : loss : 0.273021, supervised_loss: 0.272769
[20:56:12.822] iteration 2877 : loss : 0.278849, supervised_loss: 0.278681
[20:56:13.733] iteration 2878 : loss : 0.278678, supervised_loss: 0.278545
[20:56:14.647] iteration 2879 : loss : 0.276032, supervised_loss: 0.275843
[20:56:15.560] iteration 2880 : loss : 0.274870, supervised_loss: 0.274719
[20:56:17.114] iteration 2881 : loss : 0.278125, supervised_loss: 0.277962
[20:56:18.026] iteration 2882 : loss : 0.279198, supervised_loss: 0.279047
[20:56:18.938] iteration 2883 : loss : 0.276839, supervised_loss: 0.276731
[20:56:19.851] iteration 2884 : loss : 0.289658, supervised_loss: 0.289475
[20:56:20.765] iteration 2885 : loss : 0.282582, supervised_loss: 0.282501
[20:56:21.678] iteration 2886 : loss : 0.282050, supervised_loss: 0.281884
[20:56:22.590] iteration 2887 : loss : 0.280991, supervised_loss: 0.280891
[20:56:23.502] iteration 2888 : loss : 0.286047, supervised_loss: 0.285902
[20:56:24.415] iteration 2889 : loss : 0.279012, supervised_loss: 0.278730
[20:56:25.327] iteration 2890 : loss : 0.284408, supervised_loss: 0.284218
[20:56:26.240] iteration 2891 : loss : 0.286951, supervised_loss: 0.286766
[20:56:27.153] iteration 2892 : loss : 0.281219, supervised_loss: 0.281013
[20:56:28.733] iteration 2893 : loss : 0.278589, supervised_loss: 0.278397
[20:56:29.645] iteration 2894 : loss : 0.282532, supervised_loss: 0.282377
[20:56:30.558] iteration 2895 : loss : 0.287513, supervised_loss: 0.287354
[20:56:31.471] iteration 2896 : loss : 0.277727, supervised_loss: 0.277458
[20:56:32.384] iteration 2897 : loss : 0.276978, supervised_loss: 0.276786
[20:56:33.296] iteration 2898 : loss : 0.275598, supervised_loss: 0.275373
[20:56:34.208] iteration 2899 : loss : 0.275994, supervised_loss: 0.275779
[20:56:35.121] iteration 2900 : loss : 0.284789, supervised_loss: 0.284501
[20:56:36.035] iteration 2901 : loss : 0.275971, supervised_loss: 0.275833
[20:56:36.949] iteration 2902 : loss : 0.283232, supervised_loss: 0.283097
[20:56:37.862] iteration 2903 : loss : 0.276619, supervised_loss: 0.276421
[20:56:38.775] iteration 2904 : loss : 0.277158, supervised_loss: 0.276932
[20:56:40.339] iteration 2905 : loss : 0.282437, supervised_loss: 0.282324
[20:56:41.252] iteration 2906 : loss : 0.278906, supervised_loss: 0.278709
[20:56:42.164] iteration 2907 : loss : 0.283854, supervised_loss: 0.283681
[20:56:43.077] iteration 2908 : loss : 0.272887, supervised_loss: 0.272678
[20:56:43.989] iteration 2909 : loss : 0.283842, supervised_loss: 0.283426
[20:56:44.901] iteration 2910 : loss : 0.278325, supervised_loss: 0.278200
[20:56:45.814] iteration 2911 : loss : 0.279824, supervised_loss: 0.279591
[20:56:46.728] iteration 2912 : loss : 0.277470, supervised_loss: 0.277153
[20:56:47.641] iteration 2913 : loss : 0.273548, supervised_loss: 0.273381
[20:56:48.552] iteration 2914 : loss : 0.287015, supervised_loss: 0.286683
[20:56:49.466] iteration 2915 : loss : 0.277078, supervised_loss: 0.276908
[20:56:50.379] iteration 2916 : loss : 0.276883, supervised_loss: 0.276677
[20:56:51.986] iteration 2917 : loss : 0.279668, supervised_loss: 0.279503
[20:56:52.897] iteration 2918 : loss : 0.278483, supervised_loss: 0.278328
[20:56:53.809] iteration 2919 : loss : 0.286184, supervised_loss: 0.285956
[20:56:54.721] iteration 2920 : loss : 0.278336, supervised_loss: 0.278146
[20:56:55.633] iteration 2921 : loss : 0.276096, supervised_loss: 0.275964
[20:56:56.546] iteration 2922 : loss : 0.276075, supervised_loss: 0.275921
[20:56:57.459] iteration 2923 : loss : 0.273647, supervised_loss: 0.273445
[20:56:58.371] iteration 2924 : loss : 0.281291, supervised_loss: 0.281157
[20:56:59.283] iteration 2925 : loss : 0.275150, supervised_loss: 0.275025
[20:57:00.196] iteration 2926 : loss : 0.277861, supervised_loss: 0.277750
[20:57:01.109] iteration 2927 : loss : 0.282506, supervised_loss: 0.282258
[20:57:02.023] iteration 2928 : loss : 0.275392, supervised_loss: 0.275134
[20:57:03.554] iteration 2929 : loss : 0.276258, supervised_loss: 0.276109
[20:57:04.468] iteration 2930 : loss : 0.273472, supervised_loss: 0.273311
[20:57:05.382] iteration 2931 : loss : 0.272485, supervised_loss: 0.272386
[20:57:06.295] iteration 2932 : loss : 0.279401, supervised_loss: 0.279206
[20:57:07.207] iteration 2933 : loss : 0.280417, supervised_loss: 0.280235
[20:57:08.118] iteration 2934 : loss : 0.277812, supervised_loss: 0.277640
[20:57:09.031] iteration 2935 : loss : 0.277147, supervised_loss: 0.276918
[20:57:09.943] iteration 2936 : loss : 0.272825, supervised_loss: 0.272710
[20:57:10.855] iteration 2937 : loss : 0.277882, supervised_loss: 0.277746
[20:57:11.768] iteration 2938 : loss : 0.278102, supervised_loss: 0.277960
[20:57:12.680] iteration 2939 : loss : 0.281505, supervised_loss: 0.281328
[20:57:13.593] iteration 2940 : loss : 0.292267, supervised_loss: 0.292118
[20:57:15.287] iteration 2941 : loss : 0.274793, supervised_loss: 0.274637
[20:57:16.198] iteration 2942 : loss : 0.279795, supervised_loss: 0.279633
[20:57:17.111] iteration 2943 : loss : 0.277935, supervised_loss: 0.277707
[20:57:18.023] iteration 2944 : loss : 0.279785, supervised_loss: 0.279680
[20:57:18.936] iteration 2945 : loss : 0.273550, supervised_loss: 0.273362
[20:57:19.850] iteration 2946 : loss : 0.281195, supervised_loss: 0.281071
[20:57:20.763] iteration 2947 : loss : 0.278941, supervised_loss: 0.278827
[20:57:21.676] iteration 2948 : loss : 0.282552, supervised_loss: 0.282419
[20:57:22.588] iteration 2949 : loss : 0.272418, supervised_loss: 0.272296
[20:57:23.501] iteration 2950 : loss : 0.283678, supervised_loss: 0.283552
[20:57:24.414] iteration 2951 : loss : 0.277232, supervised_loss: 0.277091
[20:57:25.327] iteration 2952 : loss : 0.278243, supervised_loss: 0.278117
[20:57:26.888] iteration 2953 : loss : 0.277998, supervised_loss: 0.277834
[20:57:27.799] iteration 2954 : loss : 0.274880, supervised_loss: 0.274744
[20:57:28.712] iteration 2955 : loss : 0.276428, supervised_loss: 0.276286
[20:57:29.625] iteration 2956 : loss : 0.277314, supervised_loss: 0.277125
[20:57:30.537] iteration 2957 : loss : 0.274651, supervised_loss: 0.274517
[20:57:31.450] iteration 2958 : loss : 0.276278, supervised_loss: 0.276082
[20:57:32.363] iteration 2959 : loss : 0.293164, supervised_loss: 0.293043
[20:57:33.276] iteration 2960 : loss : 0.275730, supervised_loss: 0.275589
[20:57:34.188] iteration 2961 : loss : 0.277451, supervised_loss: 0.277298
[20:57:35.101] iteration 2962 : loss : 0.274978, supervised_loss: 0.274834
[20:57:36.014] iteration 2963 : loss : 0.275124, supervised_loss: 0.274957
[20:57:36.928] iteration 2964 : loss : 0.276316, supervised_loss: 0.276175
[20:57:38.423] iteration 2965 : loss : 0.281661, supervised_loss: 0.281520
[20:57:39.336] iteration 2966 : loss : 0.275792, supervised_loss: 0.275529
[20:57:40.249] iteration 2967 : loss : 0.270141, supervised_loss: 0.269946
[20:57:41.162] iteration 2968 : loss : 0.278169, supervised_loss: 0.278017
[20:57:42.074] iteration 2969 : loss : 0.274005, supervised_loss: 0.273817
[20:57:42.987] iteration 2970 : loss : 0.284071, supervised_loss: 0.283743
[20:57:43.899] iteration 2971 : loss : 0.277544, supervised_loss: 0.277341
[20:57:44.813] iteration 2972 : loss : 0.286402, supervised_loss: 0.286276
[20:57:45.725] iteration 2973 : loss : 0.282330, supervised_loss: 0.282126
[20:57:46.638] iteration 2974 : loss : 0.275754, supervised_loss: 0.275574
[20:57:47.550] iteration 2975 : loss : 0.275913, supervised_loss: 0.275765
[20:57:48.464] iteration 2976 : loss : 0.271526, supervised_loss: 0.271268
[20:57:50.026] iteration 2977 : loss : 0.279312, supervised_loss: 0.279100
[20:57:50.938] iteration 2978 : loss : 0.279142, supervised_loss: 0.278951
[20:57:51.850] iteration 2979 : loss : 0.275092, supervised_loss: 0.274914
[20:57:52.764] iteration 2980 : loss : 0.278386, supervised_loss: 0.278246
[20:57:53.678] iteration 2981 : loss : 0.275791, supervised_loss: 0.275606
[20:57:54.590] iteration 2982 : loss : 0.278754, supervised_loss: 0.278573
[20:57:55.503] iteration 2983 : loss : 0.273374, supervised_loss: 0.273235
[20:57:56.415] iteration 2984 : loss : 0.290251, supervised_loss: 0.290053
[20:57:57.328] iteration 2985 : loss : 0.287603, supervised_loss: 0.287463
[20:57:58.240] iteration 2986 : loss : 0.272128, supervised_loss: 0.271984
[20:57:59.153] iteration 2987 : loss : 0.272968, supervised_loss: 0.272841
[20:58:00.066] iteration 2988 : loss : 0.277238, supervised_loss: 0.277042
[20:58:01.641] iteration 2989 : loss : 0.275611, supervised_loss: 0.275401
[20:58:02.553] iteration 2990 : loss : 0.278240, supervised_loss: 0.277990
[20:58:03.465] iteration 2991 : loss : 0.282478, supervised_loss: 0.282381
[20:58:04.377] iteration 2992 : loss : 0.278875, supervised_loss: 0.278725
[20:58:05.290] iteration 2993 : loss : 0.274511, supervised_loss: 0.274328
[20:58:06.201] iteration 2994 : loss : 0.276991, supervised_loss: 0.276750
[20:58:07.115] iteration 2995 : loss : 0.275409, supervised_loss: 0.275255
[20:58:08.027] iteration 2996 : loss : 0.279865, supervised_loss: 0.279688
[20:58:08.939] iteration 2997 : loss : 0.284038, supervised_loss: 0.283822
[20:58:09.852] iteration 2998 : loss : 0.276605, supervised_loss: 0.276485
[20:58:10.765] iteration 2999 : loss : 0.272205, supervised_loss: 0.271991
[20:58:11.679] iteration 3000 : loss : 0.290398, supervised_loss: 0.290303
[20:58:15.123] iteration 3001 : loss : 0.276000, supervised_loss: 0.275814
[20:58:16.034] iteration 3002 : loss : 0.277953, supervised_loss: 0.277730
[20:58:16.947] iteration 3003 : loss : 0.284652, supervised_loss: 0.284472
[20:58:17.859] iteration 3004 : loss : 0.286715, supervised_loss: 0.286563
[20:58:18.772] iteration 3005 : loss : 0.280416, supervised_loss: 0.280264
[20:58:19.686] iteration 3006 : loss : 0.272984, supervised_loss: 0.272802
[20:58:20.599] iteration 3007 : loss : 0.277425, supervised_loss: 0.277175
[20:58:21.512] iteration 3008 : loss : 0.288544, supervised_loss: 0.288290
[20:58:22.424] iteration 3009 : loss : 0.272671, supervised_loss: 0.272518
[20:58:23.338] iteration 3010 : loss : 0.274856, supervised_loss: 0.274687
[20:58:24.251] iteration 3011 : loss : 0.274994, supervised_loss: 0.274705
[20:58:25.165] iteration 3012 : loss : 0.276559, supervised_loss: 0.276385
[20:58:26.784] iteration 3013 : loss : 0.282059, supervised_loss: 0.281898
[20:58:27.697] iteration 3014 : loss : 0.277881, supervised_loss: 0.277695
[20:58:28.609] iteration 3015 : loss : 0.273940, supervised_loss: 0.273805
[20:58:29.521] iteration 3016 : loss : 0.279598, supervised_loss: 0.279433
[20:58:30.434] iteration 3017 : loss : 0.280441, supervised_loss: 0.280271
[20:58:31.346] iteration 3018 : loss : 0.274980, supervised_loss: 0.274718
[20:58:32.258] iteration 3019 : loss : 0.278546, supervised_loss: 0.278351
[20:58:33.169] iteration 3020 : loss : 0.282829, supervised_loss: 0.282544
[20:58:34.082] iteration 3021 : loss : 0.276373, supervised_loss: 0.276115
[20:58:34.995] iteration 3022 : loss : 0.276359, supervised_loss: 0.276110
[20:58:35.908] iteration 3023 : loss : 0.271917, supervised_loss: 0.271683
[20:58:36.820] iteration 3024 : loss : 0.279419, supervised_loss: 0.279274
[20:58:38.504] iteration 3025 : loss : 0.275464, supervised_loss: 0.275187
[20:58:39.415] iteration 3026 : loss : 0.284787, supervised_loss: 0.284593
[20:58:40.328] iteration 3027 : loss : 0.284329, supervised_loss: 0.284184
[20:58:41.243] iteration 3028 : loss : 0.275783, supervised_loss: 0.275638
[20:58:42.155] iteration 3029 : loss : 0.272551, supervised_loss: 0.272356
[20:58:43.066] iteration 3030 : loss : 0.276170, supervised_loss: 0.275965
[20:58:43.979] iteration 3031 : loss : 0.284263, supervised_loss: 0.284000
[20:58:44.892] iteration 3032 : loss : 0.275796, supervised_loss: 0.275617
[20:58:45.803] iteration 3033 : loss : 0.281276, supervised_loss: 0.281156
[20:58:46.715] iteration 3034 : loss : 0.284521, supervised_loss: 0.284278
[20:58:47.628] iteration 3035 : loss : 0.287534, supervised_loss: 0.287342
[20:58:48.540] iteration 3036 : loss : 0.277780, supervised_loss: 0.277633
[20:58:50.104] iteration 3037 : loss : 0.279920, supervised_loss: 0.279752
[20:58:51.018] iteration 3038 : loss : 0.272376, supervised_loss: 0.272104
[20:58:51.929] iteration 3039 : loss : 0.280038, supervised_loss: 0.279595
[20:58:52.842] iteration 3040 : loss : 0.279094, supervised_loss: 0.278925
[20:58:53.753] iteration 3041 : loss : 0.290009, supervised_loss: 0.289779
[20:58:54.666] iteration 3042 : loss : 0.275444, supervised_loss: 0.275291
[20:58:55.579] iteration 3043 : loss : 0.281449, supervised_loss: 0.281335
[20:58:56.493] iteration 3044 : loss : 0.273982, supervised_loss: 0.273564
[20:58:57.405] iteration 3045 : loss : 0.273984, supervised_loss: 0.273847
[20:58:58.317] iteration 3046 : loss : 0.298997, supervised_loss: 0.298784
[20:58:59.230] iteration 3047 : loss : 0.278784, supervised_loss: 0.278621
[20:59:00.143] iteration 3048 : loss : 0.282158, supervised_loss: 0.281920
[20:59:01.748] iteration 3049 : loss : 0.287932, supervised_loss: 0.287746
[20:59:02.660] iteration 3050 : loss : 0.279438, supervised_loss: 0.279312
[20:59:03.572] iteration 3051 : loss : 0.282824, supervised_loss: 0.282649
[20:59:04.485] iteration 3052 : loss : 0.282696, supervised_loss: 0.282522
[20:59:05.398] iteration 3053 : loss : 0.272632, supervised_loss: 0.272448
[20:59:06.311] iteration 3054 : loss : 0.274172, supervised_loss: 0.273913
[20:59:07.223] iteration 3055 : loss : 0.275231, supervised_loss: 0.275051
[20:59:08.135] iteration 3056 : loss : 0.274969, supervised_loss: 0.274799
[20:59:09.048] iteration 3057 : loss : 0.282146, supervised_loss: 0.281986
[20:59:09.961] iteration 3058 : loss : 0.275253, supervised_loss: 0.275050
[20:59:10.874] iteration 3059 : loss : 0.282866, supervised_loss: 0.282731
[20:59:11.788] iteration 3060 : loss : 0.269716, supervised_loss: 0.269457
[20:59:13.276] iteration 3061 : loss : 0.279259, supervised_loss: 0.279127
[20:59:14.188] iteration 3062 : loss : 0.275940, supervised_loss: 0.275810
[20:59:15.102] iteration 3063 : loss : 0.279590, supervised_loss: 0.279301
[20:59:16.014] iteration 3064 : loss : 0.280119, supervised_loss: 0.279890
[20:59:16.962] iteration 3065 : loss : 0.283821, supervised_loss: 0.283644
[20:59:17.876] iteration 3066 : loss : 0.274846, supervised_loss: 0.274635
[20:59:18.787] iteration 3067 : loss : 0.280366, supervised_loss: 0.279988
[20:59:19.699] iteration 3068 : loss : 0.275050, supervised_loss: 0.274844
[20:59:20.611] iteration 3069 : loss : 0.276139, supervised_loss: 0.275954
[20:59:21.525] iteration 3070 : loss : 0.277612, supervised_loss: 0.277311
[20:59:22.438] iteration 3071 : loss : 0.274803, supervised_loss: 0.274631
[20:59:23.352] iteration 3072 : loss : 0.278166, supervised_loss: 0.278012
[20:59:24.934] iteration 3073 : loss : 0.287216, supervised_loss: 0.286843
[20:59:25.846] iteration 3074 : loss : 0.274925, supervised_loss: 0.274649
[20:59:26.758] iteration 3075 : loss : 0.275523, supervised_loss: 0.275392
[20:59:27.671] iteration 3076 : loss : 0.268493, supervised_loss: 0.268246
[20:59:28.584] iteration 3077 : loss : 0.286070, supervised_loss: 0.285884
[20:59:29.498] iteration 3078 : loss : 0.278014, supervised_loss: 0.277860
[20:59:30.412] iteration 3079 : loss : 0.274513, supervised_loss: 0.274319
[20:59:31.324] iteration 3080 : loss : 0.283588, supervised_loss: 0.283450
[20:59:32.237] iteration 3081 : loss : 0.290192, supervised_loss: 0.289999
[20:59:33.150] iteration 3082 : loss : 0.275959, supervised_loss: 0.275614
[20:59:34.071] iteration 3083 : loss : 0.279700, supervised_loss: 0.279553
[20:59:34.983] iteration 3084 : loss : 0.277992, supervised_loss: 0.277668
[20:59:36.631] iteration 3085 : loss : 0.274184, supervised_loss: 0.274019
[20:59:37.543] iteration 3086 : loss : 0.283349, supervised_loss: 0.283225
[20:59:38.455] iteration 3087 : loss : 0.274487, supervised_loss: 0.274324
[20:59:39.368] iteration 3088 : loss : 0.287804, supervised_loss: 0.287655
[20:59:40.281] iteration 3089 : loss : 0.274381, supervised_loss: 0.274261
[20:59:41.193] iteration 3090 : loss : 0.273881, supervised_loss: 0.273739
[20:59:42.104] iteration 3091 : loss : 0.275491, supervised_loss: 0.275362
[20:59:43.016] iteration 3092 : loss : 0.279877, supervised_loss: 0.279565
[20:59:43.929] iteration 3093 : loss : 0.270007, supervised_loss: 0.269768
[20:59:44.842] iteration 3094 : loss : 0.273786, supervised_loss: 0.273668
[20:59:45.755] iteration 3095 : loss : 0.284833, supervised_loss: 0.284611
[20:59:46.666] iteration 3096 : loss : 0.277402, supervised_loss: 0.277267
[20:59:48.190] iteration 3097 : loss : 0.289776, supervised_loss: 0.289593
[20:59:49.103] iteration 3098 : loss : 0.284699, supervised_loss: 0.284523
[20:59:50.015] iteration 3099 : loss : 0.271470, supervised_loss: 0.271279
[20:59:50.927] iteration 3100 : loss : 0.278176, supervised_loss: 0.278010
[20:59:51.840] iteration 3101 : loss : 0.278320, supervised_loss: 0.278210
[20:59:52.753] iteration 3102 : loss : 0.279055, supervised_loss: 0.278849
[20:59:53.666] iteration 3103 : loss : 0.274276, supervised_loss: 0.274157
[20:59:54.578] iteration 3104 : loss : 0.279416, supervised_loss: 0.279116
[20:59:55.490] iteration 3105 : loss : 0.275936, supervised_loss: 0.275731
[20:59:56.402] iteration 3106 : loss : 0.279525, supervised_loss: 0.279360
[20:59:57.314] iteration 3107 : loss : 0.277657, supervised_loss: 0.277519
[20:59:58.227] iteration 3108 : loss : 0.291724, supervised_loss: 0.291501
[20:59:59.714] iteration 3109 : loss : 0.277683, supervised_loss: 0.277457
[21:00:00.626] iteration 3110 : loss : 0.295517, supervised_loss: 0.295359
[21:00:01.540] iteration 3111 : loss : 0.277803, supervised_loss: 0.277604
[21:00:02.452] iteration 3112 : loss : 0.283758, supervised_loss: 0.283530
[21:00:03.363] iteration 3113 : loss : 0.276991, supervised_loss: 0.276796
[21:00:04.277] iteration 3114 : loss : 0.269773, supervised_loss: 0.269407
[21:00:05.190] iteration 3115 : loss : 0.274907, supervised_loss: 0.274624
[21:00:06.102] iteration 3116 : loss : 0.282216, supervised_loss: 0.282061
[21:00:07.015] iteration 3117 : loss : 0.280405, supervised_loss: 0.280163
[21:00:07.926] iteration 3118 : loss : 0.269734, supervised_loss: 0.269584
[21:00:08.841] iteration 3119 : loss : 0.280977, supervised_loss: 0.280842
[21:00:09.753] iteration 3120 : loss : 0.282034, supervised_loss: 0.281779
[21:00:11.258] iteration 3121 : loss : 0.274269, supervised_loss: 0.274040
[21:00:12.170] iteration 3122 : loss : 0.278846, supervised_loss: 0.278655
[21:00:13.082] iteration 3123 : loss : 0.276616, supervised_loss: 0.276495
[21:00:13.995] iteration 3124 : loss : 0.271686, supervised_loss: 0.271438
[21:00:14.908] iteration 3125 : loss : 0.276051, supervised_loss: 0.275919
[21:00:15.821] iteration 3126 : loss : 0.279978, supervised_loss: 0.279765
[21:00:16.733] iteration 3127 : loss : 0.277779, supervised_loss: 0.277534
[21:00:17.646] iteration 3128 : loss : 0.293540, supervised_loss: 0.293293
[21:00:18.558] iteration 3129 : loss : 0.277270, supervised_loss: 0.277107
[21:00:19.470] iteration 3130 : loss : 0.281375, supervised_loss: 0.280995
[21:00:20.383] iteration 3131 : loss : 0.282572, supervised_loss: 0.282345
[21:00:21.297] iteration 3132 : loss : 0.276356, supervised_loss: 0.276130
[21:00:22.862] iteration 3133 : loss : 0.283424, supervised_loss: 0.283282
[21:00:23.774] iteration 3134 : loss : 0.274952, supervised_loss: 0.274726
[21:00:24.686] iteration 3135 : loss : 0.269432, supervised_loss: 0.268963
[21:00:25.598] iteration 3136 : loss : 0.273775, supervised_loss: 0.273563
[21:00:26.511] iteration 3137 : loss : 0.279875, supervised_loss: 0.279673
[21:00:27.423] iteration 3138 : loss : 0.274647, supervised_loss: 0.274416
[21:00:28.335] iteration 3139 : loss : 0.281128, supervised_loss: 0.280934
[21:00:29.248] iteration 3140 : loss : 0.271781, supervised_loss: 0.271656
[21:00:30.161] iteration 3141 : loss : 0.274416, supervised_loss: 0.274262
[21:00:31.073] iteration 3142 : loss : 0.279860, supervised_loss: 0.279529
[21:00:31.988] iteration 3143 : loss : 0.274043, supervised_loss: 0.273881
[21:00:32.900] iteration 3144 : loss : 0.276942, supervised_loss: 0.276767
[21:00:34.388] iteration 3145 : loss : 0.274960, supervised_loss: 0.274777
[21:00:35.300] iteration 3146 : loss : 0.276156, supervised_loss: 0.275950
[21:00:36.213] iteration 3147 : loss : 0.292849, supervised_loss: 0.292657
[21:00:37.125] iteration 3148 : loss : 0.288967, supervised_loss: 0.288744
[21:00:38.038] iteration 3149 : loss : 0.280220, supervised_loss: 0.279858
[21:00:38.951] iteration 3150 : loss : 0.274289, supervised_loss: 0.274080
[21:00:39.865] iteration 3151 : loss : 0.272618, supervised_loss: 0.272507
[21:00:40.779] iteration 3152 : loss : 0.278423, supervised_loss: 0.278204
[21:00:41.692] iteration 3153 : loss : 0.275354, supervised_loss: 0.275167
[21:00:42.605] iteration 3154 : loss : 0.281040, supervised_loss: 0.280846
[21:00:43.519] iteration 3155 : loss : 0.277393, supervised_loss: 0.277195
[21:00:44.432] iteration 3156 : loss : 0.280716, supervised_loss: 0.280477
[21:00:46.005] iteration 3157 : loss : 0.277045, supervised_loss: 0.276859
[21:00:46.918] iteration 3158 : loss : 0.275786, supervised_loss: 0.275616
[21:00:47.830] iteration 3159 : loss : 0.282578, supervised_loss: 0.282369
[21:00:48.743] iteration 3160 : loss : 0.283261, supervised_loss: 0.283073
[21:00:49.657] iteration 3161 : loss : 0.278367, supervised_loss: 0.278055
[21:00:50.570] iteration 3162 : loss : 0.282635, supervised_loss: 0.282261
[21:00:51.482] iteration 3163 : loss : 0.282372, supervised_loss: 0.282054
[21:00:52.394] iteration 3164 : loss : 0.279107, supervised_loss: 0.278754
[21:00:53.306] iteration 3165 : loss : 0.276807, supervised_loss: 0.276524
[21:00:54.219] iteration 3166 : loss : 0.270384, supervised_loss: 0.270216
[21:00:55.133] iteration 3167 : loss : 0.274415, supervised_loss: 0.274290
[21:00:56.046] iteration 3168 : loss : 0.270869, supervised_loss: 0.270620
[21:00:57.548] iteration 3169 : loss : 0.278425, supervised_loss: 0.278278
[21:00:58.458] iteration 3170 : loss : 0.287597, supervised_loss: 0.287364
[21:00:59.371] iteration 3171 : loss : 0.279449, supervised_loss: 0.279272
[21:01:00.286] iteration 3172 : loss : 0.277682, supervised_loss: 0.277479
[21:01:01.198] iteration 3173 : loss : 0.275490, supervised_loss: 0.275193
[21:01:02.110] iteration 3174 : loss : 0.278446, supervised_loss: 0.278219
[21:01:03.022] iteration 3175 : loss : 0.273169, supervised_loss: 0.272911
[21:01:03.935] iteration 3176 : loss : 0.282664, supervised_loss: 0.282427
[21:01:04.849] iteration 3177 : loss : 0.274971, supervised_loss: 0.274787
[21:01:05.762] iteration 3178 : loss : 0.278601, supervised_loss: 0.278388
[21:01:06.675] iteration 3179 : loss : 0.276383, supervised_loss: 0.275973
[21:01:07.588] iteration 3180 : loss : 0.273526, supervised_loss: 0.273333
[21:01:09.166] iteration 3181 : loss : 0.282818, supervised_loss: 0.282553
[21:01:10.079] iteration 3182 : loss : 0.276741, supervised_loss: 0.276447
[21:01:10.992] iteration 3183 : loss : 0.276098, supervised_loss: 0.275923
[21:01:11.904] iteration 3184 : loss : 0.278018, supervised_loss: 0.277763
[21:01:12.817] iteration 3185 : loss : 0.274684, supervised_loss: 0.274516
[21:01:13.730] iteration 3186 : loss : 0.279350, supervised_loss: 0.278974
[21:01:14.643] iteration 3187 : loss : 0.274960, supervised_loss: 0.274719
[21:01:15.555] iteration 3188 : loss : 0.273042, supervised_loss: 0.272635
[21:01:16.468] iteration 3189 : loss : 0.280032, supervised_loss: 0.279897
[21:01:17.381] iteration 3190 : loss : 0.276729, supervised_loss: 0.276267
[21:01:18.293] iteration 3191 : loss : 0.282746, supervised_loss: 0.282548
[21:01:19.206] iteration 3192 : loss : 0.280554, supervised_loss: 0.280411
[21:01:20.757] iteration 3193 : loss : 0.277192, supervised_loss: 0.277016
[21:01:21.667] iteration 3194 : loss : 0.277238, supervised_loss: 0.277089
[21:01:22.579] iteration 3195 : loss : 0.277278, supervised_loss: 0.277122
[21:01:23.493] iteration 3196 : loss : 0.282646, supervised_loss: 0.282303
[21:01:24.406] iteration 3197 : loss : 0.277689, supervised_loss: 0.277532
[21:01:25.319] iteration 3198 : loss : 0.271699, supervised_loss: 0.271523
[21:01:26.232] iteration 3199 : loss : 0.279055, supervised_loss: 0.278704
[21:01:27.144] iteration 3200 : loss : 0.270801, supervised_loss: 0.270607
[21:01:30.003] iteration 3201 : loss : 0.286921, supervised_loss: 0.286682
[21:01:30.916] iteration 3202 : loss : 0.280876, supervised_loss: 0.280579
[21:01:31.827] iteration 3203 : loss : 0.276037, supervised_loss: 0.275804
[21:01:32.741] iteration 3204 : loss : 0.275652, supervised_loss: 0.275424
[21:01:34.250] iteration 3205 : loss : 0.272025, supervised_loss: 0.271703
[21:01:35.160] iteration 3206 : loss : 0.280911, supervised_loss: 0.280715
[21:01:36.072] iteration 3207 : loss : 0.275536, supervised_loss: 0.275344
[21:01:36.986] iteration 3208 : loss : 0.270980, supervised_loss: 0.270775
[21:01:37.898] iteration 3209 : loss : 0.274081, supervised_loss: 0.273918
[21:01:38.811] iteration 3210 : loss : 0.281103, supervised_loss: 0.280841
[21:01:39.724] iteration 3211 : loss : 0.282926, supervised_loss: 0.282798
[21:01:40.636] iteration 3212 : loss : 0.273377, supervised_loss: 0.273238
[21:01:41.549] iteration 3213 : loss : 0.279299, supervised_loss: 0.279102
[21:01:42.463] iteration 3214 : loss : 0.277552, supervised_loss: 0.277378
[21:01:43.376] iteration 3215 : loss : 0.279644, supervised_loss: 0.279207
[21:01:44.289] iteration 3216 : loss : 0.279581, supervised_loss: 0.279278
[21:01:45.796] iteration 3217 : loss : 0.279858, supervised_loss: 0.279631
[21:01:46.707] iteration 3218 : loss : 0.273842, supervised_loss: 0.273453
[21:01:47.619] iteration 3219 : loss : 0.274445, supervised_loss: 0.274274
[21:01:48.530] iteration 3220 : loss : 0.278486, supervised_loss: 0.278330
[21:01:49.444] iteration 3221 : loss : 0.275872, supervised_loss: 0.275628
[21:01:50.356] iteration 3222 : loss : 0.280891, supervised_loss: 0.280758
[21:01:51.269] iteration 3223 : loss : 0.275427, supervised_loss: 0.275245
[21:01:52.183] iteration 3224 : loss : 0.279821, supervised_loss: 0.279579
[21:01:53.095] iteration 3225 : loss : 0.272702, supervised_loss: 0.272581
[21:01:54.007] iteration 3226 : loss : 0.273650, supervised_loss: 0.273443
[21:01:54.920] iteration 3227 : loss : 0.280303, supervised_loss: 0.280115
[21:01:55.832] iteration 3228 : loss : 0.278027, supervised_loss: 0.277857
[21:01:57.414] iteration 3229 : loss : 0.277164, supervised_loss: 0.276959
[21:01:58.327] iteration 3230 : loss : 0.278268, supervised_loss: 0.278038
[21:01:59.240] iteration 3231 : loss : 0.274243, supervised_loss: 0.274066
[21:02:00.152] iteration 3232 : loss : 0.279415, supervised_loss: 0.279283
[21:02:01.065] iteration 3233 : loss : 0.284386, supervised_loss: 0.284066
[21:02:01.977] iteration 3234 : loss : 0.282549, supervised_loss: 0.282411
[21:02:02.889] iteration 3235 : loss : 0.277092, supervised_loss: 0.276867
[21:02:03.801] iteration 3236 : loss : 0.276876, supervised_loss: 0.276619
[21:02:04.713] iteration 3237 : loss : 0.273473, supervised_loss: 0.273287
[21:02:05.627] iteration 3238 : loss : 0.277639, supervised_loss: 0.277463
[21:02:06.540] iteration 3239 : loss : 0.275450, supervised_loss: 0.275279
[21:02:07.452] iteration 3240 : loss : 0.281530, supervised_loss: 0.281387
[21:02:09.072] iteration 3241 : loss : 0.279777, supervised_loss: 0.279601
[21:02:09.985] iteration 3242 : loss : 0.275380, supervised_loss: 0.275221
[21:02:10.898] iteration 3243 : loss : 0.273268, supervised_loss: 0.273114
[21:02:11.809] iteration 3244 : loss : 0.277193, supervised_loss: 0.277022
[21:02:12.721] iteration 3245 : loss : 0.275078, supervised_loss: 0.274815
[21:02:13.633] iteration 3246 : loss : 0.273914, supervised_loss: 0.273729
[21:02:14.545] iteration 3247 : loss : 0.274819, supervised_loss: 0.274618
[21:02:15.456] iteration 3248 : loss : 0.271933, supervised_loss: 0.271646
[21:02:16.368] iteration 3249 : loss : 0.273218, supervised_loss: 0.273045
[21:02:17.281] iteration 3250 : loss : 0.288461, supervised_loss: 0.288215
[21:02:18.195] iteration 3251 : loss : 0.278190, supervised_loss: 0.278032
[21:02:19.109] iteration 3252 : loss : 0.269490, supervised_loss: 0.269294
[21:02:20.679] iteration 3253 : loss : 0.270373, supervised_loss: 0.270226
[21:02:21.593] iteration 3254 : loss : 0.278788, supervised_loss: 0.278608
[21:02:22.505] iteration 3255 : loss : 0.270442, supervised_loss: 0.270115
[21:02:23.417] iteration 3256 : loss : 0.278682, supervised_loss: 0.278434
[21:02:24.330] iteration 3257 : loss : 0.270837, supervised_loss: 0.270645
[21:02:25.244] iteration 3258 : loss : 0.305979, supervised_loss: 0.305625
[21:02:26.157] iteration 3259 : loss : 0.274085, supervised_loss: 0.273838
[21:02:27.070] iteration 3260 : loss : 0.278817, supervised_loss: 0.278612
[21:02:27.983] iteration 3261 : loss : 0.270574, supervised_loss: 0.270367
[21:02:28.895] iteration 3262 : loss : 0.273480, supervised_loss: 0.273264
[21:02:29.808] iteration 3263 : loss : 0.274780, supervised_loss: 0.274554
[21:02:30.721] iteration 3264 : loss : 0.274899, supervised_loss: 0.274734
[21:02:32.323] iteration 3265 : loss : 0.273218, supervised_loss: 0.272928
[21:02:33.236] iteration 3266 : loss : 0.277425, supervised_loss: 0.277229
[21:02:34.148] iteration 3267 : loss : 0.273407, supervised_loss: 0.273164
[21:02:35.063] iteration 3268 : loss : 0.280708, supervised_loss: 0.280419
[21:02:35.976] iteration 3269 : loss : 0.273195, supervised_loss: 0.272968
[21:02:36.888] iteration 3270 : loss : 0.278908, supervised_loss: 0.278711
[21:02:37.802] iteration 3271 : loss : 0.273027, supervised_loss: 0.272809
[21:02:38.714] iteration 3272 : loss : 0.282762, supervised_loss: 0.282536
[21:02:39.626] iteration 3273 : loss : 0.274697, supervised_loss: 0.274498
[21:02:40.538] iteration 3274 : loss : 0.271771, supervised_loss: 0.271563
[21:02:41.451] iteration 3275 : loss : 0.282615, supervised_loss: 0.282434
[21:02:42.365] iteration 3276 : loss : 0.282100, supervised_loss: 0.281880
[21:02:43.926] iteration 3277 : loss : 0.275703, supervised_loss: 0.275445
[21:02:44.840] iteration 3278 : loss : 0.267887, supervised_loss: 0.267682
[21:02:45.753] iteration 3279 : loss : 0.273126, supervised_loss: 0.272965
[21:02:46.665] iteration 3280 : loss : 0.272049, supervised_loss: 0.271570
[21:02:47.578] iteration 3281 : loss : 0.280054, supervised_loss: 0.279820
[21:02:48.490] iteration 3282 : loss : 0.279523, supervised_loss: 0.279362
[21:02:49.404] iteration 3283 : loss : 0.288380, supervised_loss: 0.288028
[21:02:50.317] iteration 3284 : loss : 0.277371, supervised_loss: 0.277181
[21:02:51.228] iteration 3285 : loss : 0.292816, supervised_loss: 0.292448
[21:02:52.141] iteration 3286 : loss : 0.279402, supervised_loss: 0.279195
[21:02:53.054] iteration 3287 : loss : 0.275578, supervised_loss: 0.275359
[21:02:53.967] iteration 3288 : loss : 0.273540, supervised_loss: 0.273347
[21:02:55.479] iteration 3289 : loss : 0.283399, supervised_loss: 0.283194
[21:02:56.390] iteration 3290 : loss : 0.275074, supervised_loss: 0.274608
[21:02:57.303] iteration 3291 : loss : 0.278949, supervised_loss: 0.278731
[21:02:58.216] iteration 3292 : loss : 0.276037, supervised_loss: 0.275840
[21:02:59.128] iteration 3293 : loss : 0.275487, supervised_loss: 0.275269
[21:03:00.040] iteration 3294 : loss : 0.273885, supervised_loss: 0.273714
[21:03:00.953] iteration 3295 : loss : 0.296172, supervised_loss: 0.296023
[21:03:01.866] iteration 3296 : loss : 0.278140, supervised_loss: 0.277928
[21:03:02.780] iteration 3297 : loss : 0.279281, supervised_loss: 0.279125
[21:03:03.693] iteration 3298 : loss : 0.290578, supervised_loss: 0.290431
[21:03:04.606] iteration 3299 : loss : 0.276313, supervised_loss: 0.276161
[21:03:05.519] iteration 3300 : loss : 0.277337, supervised_loss: 0.277069
[21:03:07.021] iteration 3301 : loss : 0.282957, supervised_loss: 0.282648
[21:03:07.934] iteration 3302 : loss : 0.276398, supervised_loss: 0.276135
[21:03:08.847] iteration 3303 : loss : 0.277723, supervised_loss: 0.277415
[21:03:09.759] iteration 3304 : loss : 0.279428, supervised_loss: 0.279202
[21:03:10.672] iteration 3305 : loss : 0.282941, supervised_loss: 0.282710
[21:03:11.586] iteration 3306 : loss : 0.272041, supervised_loss: 0.271867
[21:03:12.499] iteration 3307 : loss : 0.275750, supervised_loss: 0.275558
[21:03:13.413] iteration 3308 : loss : 0.272007, supervised_loss: 0.270296
[21:03:14.326] iteration 3309 : loss : 0.291929, supervised_loss: 0.291658
[21:03:15.239] iteration 3310 : loss : 0.287765, supervised_loss: 0.287538
[21:03:16.152] iteration 3311 : loss : 0.276683, supervised_loss: 0.276500
[21:03:17.064] iteration 3312 : loss : 0.281161, supervised_loss: 0.280964
[21:03:18.538] iteration 3313 : loss : 0.277157, supervised_loss: 0.276968
[21:03:19.451] iteration 3314 : loss : 0.273342, supervised_loss: 0.273006
[21:03:20.364] iteration 3315 : loss : 0.276418, supervised_loss: 0.276188
[21:03:21.277] iteration 3316 : loss : 0.280689, supervised_loss: 0.280521
[21:03:22.190] iteration 3317 : loss : 0.280114, supervised_loss: 0.279929
[21:03:23.104] iteration 3318 : loss : 0.282310, supervised_loss: 0.282036
[21:03:24.017] iteration 3319 : loss : 0.285669, supervised_loss: 0.285485
[21:03:24.930] iteration 3320 : loss : 0.277977, supervised_loss: 0.277805
[21:03:25.843] iteration 3321 : loss : 0.273162, supervised_loss: 0.272895
[21:03:26.756] iteration 3322 : loss : 0.278599, supervised_loss: 0.278394
[21:03:27.695] iteration 3323 : loss : 0.281828, supervised_loss: 0.281536
[21:03:28.609] iteration 3324 : loss : 0.274686, supervised_loss: 0.274514
[21:03:30.104] iteration 3325 : loss : 0.282192, supervised_loss: 0.281937
[21:03:31.017] iteration 3326 : loss : 0.280649, supervised_loss: 0.280408
[21:03:31.931] iteration 3327 : loss : 0.291034, supervised_loss: 0.290819
[21:03:32.844] iteration 3328 : loss : 0.288333, supervised_loss: 0.288197
[21:03:33.758] iteration 3329 : loss : 0.272978, supervised_loss: 0.272691
[21:03:34.671] iteration 3330 : loss : 0.274158, supervised_loss: 0.273713
[21:03:35.585] iteration 3331 : loss : 0.281314, supervised_loss: 0.281041
[21:03:36.498] iteration 3332 : loss : 0.274784, supervised_loss: 0.274595
[21:03:37.410] iteration 3333 : loss : 0.281061, supervised_loss: 0.280793
[21:03:38.324] iteration 3334 : loss : 0.282953, supervised_loss: 0.282684
[21:03:39.237] iteration 3335 : loss : 0.274534, supervised_loss: 0.274345
[21:03:40.150] iteration 3336 : loss : 0.273087, supervised_loss: 0.272933
[21:03:41.632] iteration 3337 : loss : 0.281262, supervised_loss: 0.281069
[21:03:42.544] iteration 3338 : loss : 0.277704, supervised_loss: 0.277484
[21:03:43.458] iteration 3339 : loss : 0.279372, supervised_loss: 0.279123
[21:03:44.371] iteration 3340 : loss : 0.281895, supervised_loss: 0.281667
[21:03:45.284] iteration 3341 : loss : 0.281493, supervised_loss: 0.281190
[21:03:46.197] iteration 3342 : loss : 0.284907, supervised_loss: 0.284664
[21:03:47.110] iteration 3343 : loss : 0.277127, supervised_loss: 0.276757
[21:03:48.022] iteration 3344 : loss : 0.279907, supervised_loss: 0.279682
[21:03:48.936] iteration 3345 : loss : 0.279235, supervised_loss: 0.278681
[21:03:49.849] iteration 3346 : loss : 0.274928, supervised_loss: 0.274585
[21:03:50.764] iteration 3347 : loss : 0.280107, supervised_loss: 0.279777
[21:03:51.677] iteration 3348 : loss : 0.276917, supervised_loss: 0.276695
[21:03:53.246] iteration 3349 : loss : 0.282404, supervised_loss: 0.282109
[21:03:54.159] iteration 3350 : loss : 0.294575, supervised_loss: 0.294326
[21:03:55.071] iteration 3351 : loss : 0.275977, supervised_loss: 0.275738
[21:03:55.983] iteration 3352 : loss : 0.287677, supervised_loss: 0.287471
[21:03:56.895] iteration 3353 : loss : 0.276907, supervised_loss: 0.276574
[21:03:57.809] iteration 3354 : loss : 0.281262, supervised_loss: 0.280918
[21:03:58.721] iteration 3355 : loss : 0.270623, supervised_loss: 0.270371
[21:03:59.635] iteration 3356 : loss : 0.271489, supervised_loss: 0.271219
[21:04:00.548] iteration 3357 : loss : 0.274167, supervised_loss: 0.274011
[21:04:01.460] iteration 3358 : loss : 0.277129, supervised_loss: 0.276848
[21:04:02.374] iteration 3359 : loss : 0.281915, supervised_loss: 0.281152
[21:04:03.288] iteration 3360 : loss : 0.275660, supervised_loss: 0.275295
[21:04:04.835] iteration 3361 : loss : 0.274792, supervised_loss: 0.274579
[21:04:05.748] iteration 3362 : loss : 0.272775, supervised_loss: 0.272596
[21:04:06.662] iteration 3363 : loss : 0.278968, supervised_loss: 0.278697
[21:04:07.576] iteration 3364 : loss : 0.273738, supervised_loss: 0.273535
[21:04:08.488] iteration 3365 : loss : 0.270536, supervised_loss: 0.270299
[21:04:09.401] iteration 3366 : loss : 0.273245, supervised_loss: 0.273046
[21:04:10.314] iteration 3367 : loss : 0.283715, supervised_loss: 0.283505
[21:04:11.226] iteration 3368 : loss : 0.276163, supervised_loss: 0.276004
[21:04:12.139] iteration 3369 : loss : 0.278904, supervised_loss: 0.278740
[21:04:13.053] iteration 3370 : loss : 0.285167, supervised_loss: 0.284805
[21:04:13.967] iteration 3371 : loss : 0.277860, supervised_loss: 0.277624
[21:04:14.879] iteration 3372 : loss : 0.275564, supervised_loss: 0.275305
[21:04:16.509] iteration 3373 : loss : 0.276816, supervised_loss: 0.276596
[21:04:17.421] iteration 3374 : loss : 0.275362, supervised_loss: 0.275173
[21:04:18.333] iteration 3375 : loss : 0.279199, supervised_loss: 0.279012
[21:04:19.246] iteration 3376 : loss : 0.278050, supervised_loss: 0.277813
[21:04:20.160] iteration 3377 : loss : 0.274547, supervised_loss: 0.274371
[21:04:21.073] iteration 3378 : loss : 0.275005, supervised_loss: 0.274673
[21:04:21.988] iteration 3379 : loss : 0.279022, supervised_loss: 0.278697
[21:04:22.901] iteration 3380 : loss : 0.280937, supervised_loss: 0.280793
[21:04:23.813] iteration 3381 : loss : 0.280158, supervised_loss: 0.279914
[21:04:24.727] iteration 3382 : loss : 0.275676, supervised_loss: 0.275330
[21:04:25.642] iteration 3383 : loss : 0.275481, supervised_loss: 0.275206
[21:04:26.557] iteration 3384 : loss : 0.275874, supervised_loss: 0.275583
[21:04:28.064] iteration 3385 : loss : 0.282147, supervised_loss: 0.281861
[21:04:28.977] iteration 3386 : loss : 0.274896, supervised_loss: 0.274765
[21:04:29.889] iteration 3387 : loss : 0.283130, supervised_loss: 0.282842
[21:04:30.801] iteration 3388 : loss : 0.272830, supervised_loss: 0.272638
[21:04:31.714] iteration 3389 : loss : 0.277831, supervised_loss: 0.277491
[21:04:32.628] iteration 3390 : loss : 0.278587, supervised_loss: 0.278425
[21:04:33.540] iteration 3391 : loss : 0.275288, supervised_loss: 0.275033
[21:04:34.453] iteration 3392 : loss : 0.276771, supervised_loss: 0.276545
[21:04:35.366] iteration 3393 : loss : 0.275505, supervised_loss: 0.275313
[21:04:36.278] iteration 3394 : loss : 0.286610, supervised_loss: 0.286388
[21:04:37.192] iteration 3395 : loss : 0.281162, supervised_loss: 0.280912
[21:04:38.105] iteration 3396 : loss : 0.277145, supervised_loss: 0.276878
[21:04:39.722] iteration 3397 : loss : 0.278093, supervised_loss: 0.277920
[21:04:40.634] iteration 3398 : loss : 0.279016, supervised_loss: 0.278489
[21:04:41.546] iteration 3399 : loss : 0.278170, supervised_loss: 0.277938
[21:04:42.460] iteration 3400 : loss : 0.275131, supervised_loss: 0.274651
[21:04:45.313] iteration 3401 : loss : 0.279682, supervised_loss: 0.279370
[21:04:46.226] iteration 3402 : loss : 0.278979, supervised_loss: 0.278763
[21:04:47.138] iteration 3403 : loss : 0.271832, supervised_loss: 0.271678
[21:04:48.051] iteration 3404 : loss : 0.274203, supervised_loss: 0.273972
[21:04:48.965] iteration 3405 : loss : 0.279295, supervised_loss: 0.279009
[21:04:49.879] iteration 3406 : loss : 0.276935, supervised_loss: 0.276653
[21:04:50.792] iteration 3407 : loss : 0.278370, supervised_loss: 0.278179
[21:04:51.706] iteration 3408 : loss : 0.282563, supervised_loss: 0.282316
[21:04:53.302] iteration 3409 : loss : 0.288719, supervised_loss: 0.288509
[21:04:54.215] iteration 3410 : loss : 0.274796, supervised_loss: 0.274581
[21:04:55.128] iteration 3411 : loss : 0.276769, supervised_loss: 0.276603
[21:04:56.040] iteration 3412 : loss : 0.275319, supervised_loss: 0.275026
[21:04:56.953] iteration 3413 : loss : 0.284027, supervised_loss: 0.283755
[21:04:57.866] iteration 3414 : loss : 0.277873, supervised_loss: 0.277603
[21:04:58.779] iteration 3415 : loss : 0.275892, supervised_loss: 0.275623
[21:04:59.691] iteration 3416 : loss : 0.275295, supervised_loss: 0.275015
[21:05:00.605] iteration 3417 : loss : 0.277999, supervised_loss: 0.277653
[21:05:01.519] iteration 3418 : loss : 0.282533, supervised_loss: 0.282347
[21:05:02.432] iteration 3419 : loss : 0.276622, supervised_loss: 0.276434
[21:05:03.345] iteration 3420 : loss : 0.273660, supervised_loss: 0.273428
[21:05:04.973] iteration 3421 : loss : 0.284386, supervised_loss: 0.284166
[21:05:05.886] iteration 3422 : loss : 0.274668, supervised_loss: 0.274493
[21:05:06.798] iteration 3423 : loss : 0.277938, supervised_loss: 0.277625
[21:05:07.710] iteration 3424 : loss : 0.281505, supervised_loss: 0.281338
[21:05:08.624] iteration 3425 : loss : 0.280517, supervised_loss: 0.280276
[21:05:09.538] iteration 3426 : loss : 0.277786, supervised_loss: 0.277568
[21:05:10.452] iteration 3427 : loss : 0.278866, supervised_loss: 0.278570
[21:05:11.365] iteration 3428 : loss : 0.272628, supervised_loss: 0.272440
[21:05:12.276] iteration 3429 : loss : 0.277988, supervised_loss: 0.277810
[21:05:13.190] iteration 3430 : loss : 0.274528, supervised_loss: 0.274279
[21:05:14.102] iteration 3431 : loss : 0.278592, supervised_loss: 0.278439
[21:05:15.015] iteration 3432 : loss : 0.281605, supervised_loss: 0.281474
[21:05:16.701] iteration 3433 : loss : 0.276035, supervised_loss: 0.275698
[21:05:17.612] iteration 3434 : loss : 0.272462, supervised_loss: 0.272315
[21:05:18.525] iteration 3435 : loss : 0.273911, supervised_loss: 0.273680
[21:05:19.438] iteration 3436 : loss : 0.275361, supervised_loss: 0.275081
[21:05:20.351] iteration 3437 : loss : 0.285024, supervised_loss: 0.284578
[21:05:21.266] iteration 3438 : loss : 0.280727, supervised_loss: 0.280458
[21:05:22.180] iteration 3439 : loss : 0.278562, supervised_loss: 0.278230
[21:05:23.093] iteration 3440 : loss : 0.276061, supervised_loss: 0.275859
[21:05:24.005] iteration 3441 : loss : 0.282717, supervised_loss: 0.282530
[21:05:24.918] iteration 3442 : loss : 0.270504, supervised_loss: 0.270031
[21:05:25.833] iteration 3443 : loss : 0.276677, supervised_loss: 0.276184
[21:05:26.746] iteration 3444 : loss : 0.279047, supervised_loss: 0.278721
[21:05:28.231] iteration 3445 : loss : 0.274483, supervised_loss: 0.274265
[21:05:29.144] iteration 3446 : loss : 0.289130, supervised_loss: 0.288871
[21:05:30.057] iteration 3447 : loss : 0.273643, supervised_loss: 0.273494
[21:05:30.971] iteration 3448 : loss : 0.283273, supervised_loss: 0.282853
[21:05:31.885] iteration 3449 : loss : 0.278890, supervised_loss: 0.278684
[21:05:32.798] iteration 3450 : loss : 0.273528, supervised_loss: 0.273106
[21:05:33.711] iteration 3451 : loss : 0.274037, supervised_loss: 0.273499
[21:05:34.623] iteration 3452 : loss : 0.283566, supervised_loss: 0.283165
[21:05:35.537] iteration 3453 : loss : 0.274999, supervised_loss: 0.274721
[21:05:36.451] iteration 3454 : loss : 0.280385, supervised_loss: 0.280165
[21:05:37.365] iteration 3455 : loss : 0.284790, supervised_loss: 0.284453
[21:05:38.277] iteration 3456 : loss : 0.272755, supervised_loss: 0.272561
[21:05:39.771] iteration 3457 : loss : 0.275097, supervised_loss: 0.274871
[21:05:40.684] iteration 3458 : loss : 0.284529, supervised_loss: 0.284325
[21:05:41.596] iteration 3459 : loss : 0.278191, supervised_loss: 0.277979
[21:05:42.508] iteration 3460 : loss : 0.272632, supervised_loss: 0.272431
[21:05:43.420] iteration 3461 : loss : 0.280868, supervised_loss: 0.280663
[21:05:44.333] iteration 3462 : loss : 0.278550, supervised_loss: 0.278388
[21:05:45.246] iteration 3463 : loss : 0.272903, supervised_loss: 0.272715
[21:05:46.158] iteration 3464 : loss : 0.287508, supervised_loss: 0.287303
[21:05:47.093] iteration 3465 : loss : 0.277467, supervised_loss: 0.277258
[21:05:48.005] iteration 3466 : loss : 0.282292, supervised_loss: 0.282050
[21:05:48.920] iteration 3467 : loss : 0.285899, supervised_loss: 0.285715
[21:05:49.834] iteration 3468 : loss : 0.275674, supervised_loss: 0.275367
[21:05:51.464] iteration 3469 : loss : 0.276982, supervised_loss: 0.276736
[21:05:52.378] iteration 3470 : loss : 0.278287, supervised_loss: 0.277997
[21:05:53.291] iteration 3471 : loss : 0.273172, supervised_loss: 0.272982
[21:05:54.203] iteration 3472 : loss : 0.272508, supervised_loss: 0.272217
[21:05:55.115] iteration 3473 : loss : 0.278583, supervised_loss: 0.278322
[21:05:56.027] iteration 3474 : loss : 0.284551, supervised_loss: 0.284085
[21:05:56.941] iteration 3475 : loss : 0.279137, supervised_loss: 0.278946
[21:05:57.853] iteration 3476 : loss : 0.272980, supervised_loss: 0.272765
[21:05:58.766] iteration 3477 : loss : 0.280433, supervised_loss: 0.280081
[21:05:59.678] iteration 3478 : loss : 0.280966, supervised_loss: 0.280751
[21:06:00.592] iteration 3479 : loss : 0.278899, supervised_loss: 0.278751
[21:06:01.505] iteration 3480 : loss : 0.274913, supervised_loss: 0.274644
[21:06:03.074] iteration 3481 : loss : 0.273114, supervised_loss: 0.272737
[21:06:03.987] iteration 3482 : loss : 0.283382, supervised_loss: 0.283081
[21:06:04.899] iteration 3483 : loss : 0.275173, supervised_loss: 0.274854
[21:06:05.812] iteration 3484 : loss : 0.275903, supervised_loss: 0.275502
[21:06:06.724] iteration 3485 : loss : 0.275950, supervised_loss: 0.275653
[21:06:07.638] iteration 3486 : loss : 0.275985, supervised_loss: 0.275690
[21:06:08.551] iteration 3487 : loss : 0.279714, supervised_loss: 0.279450
[21:06:09.464] iteration 3488 : loss : 0.271564, supervised_loss: 0.271142
[21:06:10.378] iteration 3489 : loss : 0.275076, supervised_loss: 0.274883
[21:06:11.291] iteration 3490 : loss : 0.273620, supervised_loss: 0.273092
[21:06:12.204] iteration 3491 : loss : 0.283364, supervised_loss: 0.283084
[21:06:13.117] iteration 3492 : loss : 0.274613, supervised_loss: 0.274415
[21:06:14.816] iteration 3493 : loss : 0.276938, supervised_loss: 0.276627
[21:06:15.729] iteration 3494 : loss : 0.281646, supervised_loss: 0.281221
[21:06:16.640] iteration 3495 : loss : 0.269496, supervised_loss: 0.269303
[21:06:17.552] iteration 3496 : loss : 0.277229, supervised_loss: 0.276803
[21:06:18.463] iteration 3497 : loss : 0.284675, supervised_loss: 0.284471
[21:06:19.376] iteration 3498 : loss : 0.285591, supervised_loss: 0.285408
[21:06:20.288] iteration 3499 : loss : 0.279150, supervised_loss: 0.278947
[21:06:21.202] iteration 3500 : loss : 0.277609, supervised_loss: 0.277349
[21:06:22.114] iteration 3501 : loss : 0.275176, supervised_loss: 0.274702
[21:06:23.026] iteration 3502 : loss : 0.278567, supervised_loss: 0.278334
[21:06:23.939] iteration 3503 : loss : 0.282032, supervised_loss: 0.281649
[21:06:24.852] iteration 3504 : loss : 0.278024, supervised_loss: 0.277800
[21:06:26.373] iteration 3505 : loss : 0.272421, supervised_loss: 0.272085
[21:06:27.285] iteration 3506 : loss : 0.276640, supervised_loss: 0.276349
[21:06:28.197] iteration 3507 : loss : 0.278959, supervised_loss: 0.278341
[21:06:29.110] iteration 3508 : loss : 0.273353, supervised_loss: 0.273146
[21:06:30.023] iteration 3509 : loss : 0.274352, supervised_loss: 0.274136
[21:06:30.938] iteration 3510 : loss : 0.276595, supervised_loss: 0.276403
[21:06:31.851] iteration 3511 : loss : 0.276640, supervised_loss: 0.276453
[21:06:32.763] iteration 3512 : loss : 0.280179, supervised_loss: 0.279606
[21:06:33.676] iteration 3513 : loss : 0.281168, supervised_loss: 0.280881
[21:06:34.590] iteration 3514 : loss : 0.284053, supervised_loss: 0.283769
[21:06:35.503] iteration 3515 : loss : 0.279628, supervised_loss: 0.279379
[21:06:36.418] iteration 3516 : loss : 0.273487, supervised_loss: 0.273359
[21:06:37.923] iteration 3517 : loss : 0.282988, supervised_loss: 0.282703
[21:06:38.836] iteration 3518 : loss : 0.273437, supervised_loss: 0.273236
[21:06:39.748] iteration 3519 : loss : 0.273551, supervised_loss: 0.273326
[21:06:40.661] iteration 3520 : loss : 0.275348, supervised_loss: 0.275144
[21:06:41.573] iteration 3521 : loss : 0.277361, supervised_loss: 0.276885
[21:06:42.487] iteration 3522 : loss : 0.275959, supervised_loss: 0.275745
[21:06:43.400] iteration 3523 : loss : 0.273795, supervised_loss: 0.273509
[21:06:44.311] iteration 3524 : loss : 0.282710, supervised_loss: 0.282493
[21:06:45.225] iteration 3525 : loss : 0.276715, supervised_loss: 0.276557
[21:06:46.139] iteration 3526 : loss : 0.275159, supervised_loss: 0.274930
[21:06:47.051] iteration 3527 : loss : 0.277673, supervised_loss: 0.277472
[21:06:47.964] iteration 3528 : loss : 0.272334, supervised_loss: 0.272029
[21:06:49.527] iteration 3529 : loss : 0.275893, supervised_loss: 0.275712
[21:06:50.440] iteration 3530 : loss : 0.274924, supervised_loss: 0.274622
[21:06:51.353] iteration 3531 : loss : 0.274177, supervised_loss: 0.273989
[21:06:52.266] iteration 3532 : loss : 0.275582, supervised_loss: 0.275343
[21:06:53.178] iteration 3533 : loss : 0.278610, supervised_loss: 0.278430
[21:06:54.092] iteration 3534 : loss : 0.276414, supervised_loss: 0.276167
[21:06:55.006] iteration 3535 : loss : 0.276987, supervised_loss: 0.276631
[21:06:55.920] iteration 3536 : loss : 0.269673, supervised_loss: 0.269460
[21:06:56.834] iteration 3537 : loss : 0.279741, supervised_loss: 0.279547
[21:06:57.748] iteration 3538 : loss : 0.269836, supervised_loss: 0.269596
[21:06:58.662] iteration 3539 : loss : 0.279899, supervised_loss: 0.279663
[21:06:59.575] iteration 3540 : loss : 0.277604, supervised_loss: 0.277390
[21:07:01.151] iteration 3541 : loss : 0.272651, supervised_loss: 0.271989
[21:07:02.066] iteration 3542 : loss : 0.278926, supervised_loss: 0.278718
[21:07:02.980] iteration 3543 : loss : 0.280518, supervised_loss: 0.280354
[21:07:03.893] iteration 3544 : loss : 0.286955, supervised_loss: 0.286581
[21:07:04.806] iteration 3545 : loss : 0.271029, supervised_loss: 0.270670
[21:07:05.719] iteration 3546 : loss : 0.276476, supervised_loss: 0.276310
[21:07:06.631] iteration 3547 : loss : 0.284687, supervised_loss: 0.284520
[21:07:07.544] iteration 3548 : loss : 0.282920, supervised_loss: 0.282712
[21:07:08.457] iteration 3549 : loss : 0.275015, supervised_loss: 0.274806
[21:07:09.372] iteration 3550 : loss : 0.281740, supervised_loss: 0.281169
[21:07:10.284] iteration 3551 : loss : 0.275532, supervised_loss: 0.275281
[21:07:11.197] iteration 3552 : loss : 0.275581, supervised_loss: 0.275378
[21:07:12.731] iteration 3553 : loss : 0.290343, supervised_loss: 0.290177
[21:07:13.644] iteration 3554 : loss : 0.273367, supervised_loss: 0.273119
[21:07:14.558] iteration 3555 : loss : 0.280831, supervised_loss: 0.280468
[21:07:15.472] iteration 3556 : loss : 0.272312, supervised_loss: 0.272025
[21:07:16.384] iteration 3557 : loss : 0.284282, supervised_loss: 0.284051
[21:07:17.298] iteration 3558 : loss : 0.271381, supervised_loss: 0.271209
[21:07:18.210] iteration 3559 : loss : 0.275542, supervised_loss: 0.275235
[21:07:19.123] iteration 3560 : loss : 0.275723, supervised_loss: 0.275424
[21:07:20.037] iteration 3561 : loss : 0.281055, supervised_loss: 0.280791
[21:07:20.950] iteration 3562 : loss : 0.272648, supervised_loss: 0.272417
[21:07:21.863] iteration 3563 : loss : 0.276865, supervised_loss: 0.276685
[21:07:22.776] iteration 3564 : loss : 0.277495, supervised_loss: 0.277268
[21:07:24.360] iteration 3565 : loss : 0.274409, supervised_loss: 0.273945
[21:07:25.272] iteration 3566 : loss : 0.268999, supervised_loss: 0.268781
[21:07:26.185] iteration 3567 : loss : 0.272398, supervised_loss: 0.272008
[21:07:27.099] iteration 3568 : loss : 0.274884, supervised_loss: 0.274597
[21:07:28.011] iteration 3569 : loss : 0.280072, supervised_loss: 0.279770
[21:07:28.925] iteration 3570 : loss : 0.280216, supervised_loss: 0.280008
[21:07:29.839] iteration 3571 : loss : 0.270889, supervised_loss: 0.270625
[21:07:30.752] iteration 3572 : loss : 0.275407, supervised_loss: 0.275103
[21:07:31.664] iteration 3573 : loss : 0.277660, supervised_loss: 0.277404
[21:07:32.577] iteration 3574 : loss : 0.280457, supervised_loss: 0.280180
[21:07:33.490] iteration 3575 : loss : 0.273092, supervised_loss: 0.272858
[21:07:34.402] iteration 3576 : loss : 0.277806, supervised_loss: 0.277535
[21:07:36.027] iteration 3577 : loss : 0.278049, supervised_loss: 0.277795
[21:07:36.939] iteration 3578 : loss : 0.282409, supervised_loss: 0.282235
[21:07:37.851] iteration 3579 : loss : 0.277385, supervised_loss: 0.277109
[21:07:38.765] iteration 3580 : loss : 0.283261, supervised_loss: 0.282962
[21:07:39.678] iteration 3581 : loss : 0.280598, supervised_loss: 0.280222
[21:07:40.589] iteration 3582 : loss : 0.278982, supervised_loss: 0.278750
[21:07:41.502] iteration 3583 : loss : 0.274284, supervised_loss: 0.274047
[21:07:42.414] iteration 3584 : loss : 0.282212, supervised_loss: 0.281951
[21:07:43.329] iteration 3585 : loss : 0.272497, supervised_loss: 0.272280
[21:07:44.243] iteration 3586 : loss : 0.277699, supervised_loss: 0.277510
[21:07:45.158] iteration 3587 : loss : 0.279439, supervised_loss: 0.279253
[21:07:46.071] iteration 3588 : loss : 0.285540, supervised_loss: 0.285197
[21:07:47.571] iteration 3589 : loss : 0.276343, supervised_loss: 0.276154
[21:07:48.483] iteration 3590 : loss : 0.272796, supervised_loss: 0.272633
[21:07:49.396] iteration 3591 : loss : 0.272654, supervised_loss: 0.272380
[21:07:50.309] iteration 3592 : loss : 0.274372, supervised_loss: 0.274123
[21:07:51.223] iteration 3593 : loss : 0.274451, supervised_loss: 0.274266
[21:07:52.136] iteration 3594 : loss : 0.273856, supervised_loss: 0.273668
[21:07:53.049] iteration 3595 : loss : 0.276834, supervised_loss: 0.276610
[21:07:53.963] iteration 3596 : loss : 0.275819, supervised_loss: 0.275629
[21:07:54.874] iteration 3597 : loss : 0.273781, supervised_loss: 0.273594
[21:07:55.787] iteration 3598 : loss : 0.283685, supervised_loss: 0.283337
[21:07:56.701] iteration 3599 : loss : 0.279727, supervised_loss: 0.279532
[21:07:57.616] iteration 3600 : loss : 0.277555, supervised_loss: 0.277118
[21:08:01.057] iteration 3601 : loss : 0.274482, supervised_loss: 0.274269
[21:08:01.969] iteration 3602 : loss : 0.272330, supervised_loss: 0.272084
[21:08:02.883] iteration 3603 : loss : 0.283097, supervised_loss: 0.282607
[21:08:03.796] iteration 3604 : loss : 0.277359, supervised_loss: 0.277192
[21:08:04.709] iteration 3605 : loss : 0.277394, supervised_loss: 0.277102
[21:08:05.621] iteration 3606 : loss : 0.287055, supervised_loss: 0.286824
[21:08:06.534] iteration 3607 : loss : 0.273726, supervised_loss: 0.273461
[21:08:07.448] iteration 3608 : loss : 0.278813, supervised_loss: 0.278593
[21:08:08.360] iteration 3609 : loss : 0.278271, supervised_loss: 0.278090
[21:08:09.273] iteration 3610 : loss : 0.273429, supervised_loss: 0.272982
[21:08:10.185] iteration 3611 : loss : 0.285144, supervised_loss: 0.284966
[21:08:11.099] iteration 3612 : loss : 0.276162, supervised_loss: 0.275887
[21:08:12.674] iteration 3613 : loss : 0.284626, supervised_loss: 0.284339
[21:08:13.586] iteration 3614 : loss : 0.279040, supervised_loss: 0.278781
[21:08:14.497] iteration 3615 : loss : 0.275974, supervised_loss: 0.275725
[21:08:15.411] iteration 3616 : loss : 0.281854, supervised_loss: 0.281558
[21:08:16.324] iteration 3617 : loss : 0.270331, supervised_loss: 0.270000
[21:08:17.236] iteration 3618 : loss : 0.282769, supervised_loss: 0.282573
[21:08:18.149] iteration 3619 : loss : 0.281299, supervised_loss: 0.281026
[21:08:19.062] iteration 3620 : loss : 0.272139, supervised_loss: 0.271844
[21:08:19.975] iteration 3621 : loss : 0.273137, supervised_loss: 0.272810
[21:08:20.889] iteration 3622 : loss : 0.274441, supervised_loss: 0.274277
[21:08:21.803] iteration 3623 : loss : 0.276340, supervised_loss: 0.275872
[21:08:22.719] iteration 3624 : loss : 0.281857, supervised_loss: 0.281605
[21:08:24.417] iteration 3625 : loss : 0.286104, supervised_loss: 0.285804
[21:08:25.329] iteration 3626 : loss : 0.281856, supervised_loss: 0.281505
[21:08:26.241] iteration 3627 : loss : 0.277129, supervised_loss: 0.276876
[21:08:27.154] iteration 3628 : loss : 0.276404, supervised_loss: 0.276107
[21:08:28.065] iteration 3629 : loss : 0.270885, supervised_loss: 0.270623
[21:08:28.977] iteration 3630 : loss : 0.276535, supervised_loss: 0.276336
[21:08:29.890] iteration 3631 : loss : 0.274936, supervised_loss: 0.274623
[21:08:30.803] iteration 3632 : loss : 0.274325, supervised_loss: 0.274105
[21:08:31.716] iteration 3633 : loss : 0.284853, supervised_loss: 0.284649
[21:08:32.630] iteration 3634 : loss : 0.286255, supervised_loss: 0.285998
[21:08:33.543] iteration 3635 : loss : 0.278660, supervised_loss: 0.278340
[21:08:34.455] iteration 3636 : loss : 0.276904, supervised_loss: 0.276648
[21:08:35.938] iteration 3637 : loss : 0.272860, supervised_loss: 0.272434
[21:08:36.851] iteration 3638 : loss : 0.280675, supervised_loss: 0.280448
[21:08:37.764] iteration 3639 : loss : 0.280767, supervised_loss: 0.280450
[21:08:38.678] iteration 3640 : loss : 0.272307, supervised_loss: 0.271963
[21:08:39.589] iteration 3641 : loss : 0.273354, supervised_loss: 0.273127
[21:08:40.503] iteration 3642 : loss : 0.287107, supervised_loss: 0.286757
[21:08:41.415] iteration 3643 : loss : 0.287042, supervised_loss: 0.286776
[21:08:42.328] iteration 3644 : loss : 0.273428, supervised_loss: 0.273213
[21:08:43.241] iteration 3645 : loss : 0.273574, supervised_loss: 0.273398
[21:08:44.154] iteration 3646 : loss : 0.282143, supervised_loss: 0.281734
[21:08:45.067] iteration 3647 : loss : 0.273663, supervised_loss: 0.273314
[21:08:45.980] iteration 3648 : loss : 0.282853, supervised_loss: 0.282632
[21:08:47.480] iteration 3649 : loss : 0.272732, supervised_loss: 0.272483
[21:08:48.392] iteration 3650 : loss : 0.279255, supervised_loss: 0.278813
[21:08:49.305] iteration 3651 : loss : 0.277670, supervised_loss: 0.277331
[21:08:50.217] iteration 3652 : loss : 0.274374, supervised_loss: 0.273890
[21:08:51.129] iteration 3653 : loss : 0.276768, supervised_loss: 0.276517
[21:08:52.042] iteration 3654 : loss : 0.277681, supervised_loss: 0.277295
[21:08:52.954] iteration 3655 : loss : 0.277590, supervised_loss: 0.277282
[21:08:53.867] iteration 3656 : loss : 0.276833, supervised_loss: 0.276587
[21:08:54.779] iteration 3657 : loss : 0.278449, supervised_loss: 0.278257
[21:08:55.691] iteration 3658 : loss : 0.280949, supervised_loss: 0.280717
[21:08:56.628] iteration 3659 : loss : 0.280213, supervised_loss: 0.279833
[21:08:57.540] iteration 3660 : loss : 0.277743, supervised_loss: 0.277328
[21:08:59.098] iteration 3661 : loss : 0.280366, supervised_loss: 0.279953
[21:09:00.011] iteration 3662 : loss : 0.277478, supervised_loss: 0.277249
[21:09:00.924] iteration 3663 : loss : 0.282612, supervised_loss: 0.282293
[21:09:01.837] iteration 3664 : loss : 0.289223, supervised_loss: 0.288880
[21:09:02.750] iteration 3665 : loss : 0.272969, supervised_loss: 0.272800
[21:09:03.662] iteration 3666 : loss : 0.269316, supervised_loss: 0.269093
[21:09:04.575] iteration 3667 : loss : 0.268919, supervised_loss: 0.268604
[21:09:05.488] iteration 3668 : loss : 0.281574, supervised_loss: 0.281252
[21:09:06.402] iteration 3669 : loss : 0.273972, supervised_loss: 0.273682
[21:09:07.314] iteration 3670 : loss : 0.277830, supervised_loss: 0.277273
[21:09:08.226] iteration 3671 : loss : 0.274741, supervised_loss: 0.274478
[21:09:09.139] iteration 3672 : loss : 0.277595, supervised_loss: 0.277297
[21:09:10.732] iteration 3673 : loss : 0.271491, supervised_loss: 0.271090
[21:09:11.645] iteration 3674 : loss : 0.279816, supervised_loss: 0.279503
[21:09:12.556] iteration 3675 : loss : 0.280695, supervised_loss: 0.280344
[21:09:13.470] iteration 3676 : loss : 0.274930, supervised_loss: 0.274621
[21:09:14.383] iteration 3677 : loss : 0.272044, supervised_loss: 0.271701
[21:09:15.295] iteration 3678 : loss : 0.275152, supervised_loss: 0.274895
[21:09:16.208] iteration 3679 : loss : 0.272806, supervised_loss: 0.272524
[21:09:17.121] iteration 3680 : loss : 0.274736, supervised_loss: 0.274548
[21:09:18.033] iteration 3681 : loss : 0.289361, supervised_loss: 0.289051
[21:09:18.946] iteration 3682 : loss : 0.274562, supervised_loss: 0.274308
[21:09:19.860] iteration 3683 : loss : 0.275120, supervised_loss: 0.274620
[21:09:20.774] iteration 3684 : loss : 0.275555, supervised_loss: 0.275362
[21:09:22.340] iteration 3685 : loss : 0.276810, supervised_loss: 0.276370
[21:09:23.251] iteration 3686 : loss : 0.272524, supervised_loss: 0.272323
[21:09:24.163] iteration 3687 : loss : 0.276596, supervised_loss: 0.276231
[21:09:25.076] iteration 3688 : loss : 0.270319, supervised_loss: 0.270092
[21:09:25.988] iteration 3689 : loss : 0.277407, supervised_loss: 0.277111
[21:09:26.901] iteration 3690 : loss : 0.273804, supervised_loss: 0.273576
[21:09:27.814] iteration 3691 : loss : 0.275684, supervised_loss: 0.275437
[21:09:28.726] iteration 3692 : loss : 0.279742, supervised_loss: 0.279535
[21:09:29.639] iteration 3693 : loss : 0.273720, supervised_loss: 0.273451
[21:09:30.552] iteration 3694 : loss : 0.286197, supervised_loss: 0.285940
[21:09:31.465] iteration 3695 : loss : 0.274062, supervised_loss: 0.273819
[21:09:32.380] iteration 3696 : loss : 0.280877, supervised_loss: 0.280603
[21:09:33.894] iteration 3697 : loss : 0.277474, supervised_loss: 0.277218
[21:09:34.806] iteration 3698 : loss : 0.274180, supervised_loss: 0.273927
[21:09:35.719] iteration 3699 : loss : 0.273832, supervised_loss: 0.273482
[21:09:36.634] iteration 3700 : loss : 0.279213, supervised_loss: 0.278966
[21:09:37.547] iteration 3701 : loss : 0.288543, supervised_loss: 0.288309
[21:09:38.460] iteration 3702 : loss : 0.274727, supervised_loss: 0.274385
[21:09:39.372] iteration 3703 : loss : 0.274384, supervised_loss: 0.274105
[21:09:40.285] iteration 3704 : loss : 0.283503, supervised_loss: 0.283216
[21:09:41.199] iteration 3705 : loss : 0.272782, supervised_loss: 0.272607
[21:09:42.114] iteration 3706 : loss : 0.271378, supervised_loss: 0.271097
[21:09:43.027] iteration 3707 : loss : 0.275828, supervised_loss: 0.275584
[21:09:43.939] iteration 3708 : loss : 0.268675, supervised_loss: 0.268425
[21:09:45.452] iteration 3709 : loss : 0.273228, supervised_loss: 0.273012
[21:09:46.365] iteration 3710 : loss : 0.271619, supervised_loss: 0.271218
[21:09:47.278] iteration 3711 : loss : 0.274918, supervised_loss: 0.274669
[21:09:48.190] iteration 3712 : loss : 0.274915, supervised_loss: 0.274673
[21:09:49.102] iteration 3713 : loss : 0.276347, supervised_loss: 0.276057
[21:09:50.015] iteration 3714 : loss : 0.272930, supervised_loss: 0.272429
[21:09:50.929] iteration 3715 : loss : 0.278909, supervised_loss: 0.278642
[21:09:51.841] iteration 3716 : loss : 0.275619, supervised_loss: 0.275108
[21:09:52.754] iteration 3717 : loss : 0.283296, supervised_loss: 0.282990
[21:09:53.667] iteration 3718 : loss : 0.273942, supervised_loss: 0.273725
[21:09:54.581] iteration 3719 : loss : 0.272408, supervised_loss: 0.272047
[21:09:55.494] iteration 3720 : loss : 0.279986, supervised_loss: 0.279519
[21:09:57.114] iteration 3721 : loss : 0.275648, supervised_loss: 0.275344
[21:09:58.025] iteration 3722 : loss : 0.273719, supervised_loss: 0.273502
[21:09:58.938] iteration 3723 : loss : 0.275914, supervised_loss: 0.275650
[21:09:59.850] iteration 3724 : loss : 0.279609, supervised_loss: 0.279397
[21:10:00.763] iteration 3725 : loss : 0.275562, supervised_loss: 0.275165
[21:10:01.676] iteration 3726 : loss : 0.276292, supervised_loss: 0.276075
[21:10:02.588] iteration 3727 : loss : 0.281885, supervised_loss: 0.281538
[21:10:03.500] iteration 3728 : loss : 0.270143, supervised_loss: 0.269611
[21:10:04.415] iteration 3729 : loss : 0.273021, supervised_loss: 0.272857
[21:10:05.328] iteration 3730 : loss : 0.275043, supervised_loss: 0.274784
[21:10:06.241] iteration 3731 : loss : 0.270525, supervised_loss: 0.270277
[21:10:07.156] iteration 3732 : loss : 0.277123, supervised_loss: 0.276835
[21:10:08.742] iteration 3733 : loss : 0.280400, supervised_loss: 0.280211
[21:10:09.655] iteration 3734 : loss : 0.279286, supervised_loss: 0.279060
[21:10:10.568] iteration 3735 : loss : 0.274889, supervised_loss: 0.274648
[21:10:11.481] iteration 3736 : loss : 0.279244, supervised_loss: 0.278963
[21:10:12.395] iteration 3737 : loss : 0.274496, supervised_loss: 0.274328
[21:10:13.308] iteration 3738 : loss : 0.282402, supervised_loss: 0.281882
[21:10:14.221] iteration 3739 : loss : 0.274099, supervised_loss: 0.273810
[21:10:15.133] iteration 3740 : loss : 0.276002, supervised_loss: 0.275708
[21:10:16.047] iteration 3741 : loss : 0.272226, supervised_loss: 0.271941
[21:10:16.961] iteration 3742 : loss : 0.274039, supervised_loss: 0.273606
[21:10:17.874] iteration 3743 : loss : 0.280791, supervised_loss: 0.280548
[21:10:18.787] iteration 3744 : loss : 0.272264, supervised_loss: 0.271928
[21:10:20.461] iteration 3745 : loss : 0.274635, supervised_loss: 0.274356
[21:10:21.373] iteration 3746 : loss : 0.274849, supervised_loss: 0.274674
[21:10:22.285] iteration 3747 : loss : 0.275195, supervised_loss: 0.274927
[21:10:23.198] iteration 3748 : loss : 0.275477, supervised_loss: 0.275165
[21:10:24.111] iteration 3749 : loss : 0.276254, supervised_loss: 0.276025
[21:10:25.024] iteration 3750 : loss : 0.273997, supervised_loss: 0.273607
[21:10:25.936] iteration 3751 : loss : 0.276724, supervised_loss: 0.276504
[21:10:26.849] iteration 3752 : loss : 0.280155, supervised_loss: 0.279982
[21:10:27.761] iteration 3753 : loss : 0.272002, supervised_loss: 0.271728
[21:10:28.675] iteration 3754 : loss : 0.272308, supervised_loss: 0.272053
[21:10:29.588] iteration 3755 : loss : 0.283593, supervised_loss: 0.283373
[21:10:30.501] iteration 3756 : loss : 0.275693, supervised_loss: 0.275261
[21:10:32.029] iteration 3757 : loss : 0.267450, supervised_loss: 0.267181
[21:10:32.941] iteration 3758 : loss : 0.273579, supervised_loss: 0.273184
[21:10:33.853] iteration 3759 : loss : 0.269792, supervised_loss: 0.269448
[21:10:34.765] iteration 3760 : loss : 0.280947, supervised_loss: 0.280619
[21:10:35.678] iteration 3761 : loss : 0.279049, supervised_loss: 0.278829
[21:10:36.591] iteration 3762 : loss : 0.276379, supervised_loss: 0.276005
[21:10:37.504] iteration 3763 : loss : 0.268481, supervised_loss: 0.268200
[21:10:38.416] iteration 3764 : loss : 0.287285, supervised_loss: 0.287052
[21:10:39.329] iteration 3765 : loss : 0.275616, supervised_loss: 0.275289
[21:10:40.241] iteration 3766 : loss : 0.280998, supervised_loss: 0.280757
[21:10:41.154] iteration 3767 : loss : 0.285759, supervised_loss: 0.285558
[21:10:42.068] iteration 3768 : loss : 0.274866, supervised_loss: 0.274578
[21:10:43.701] iteration 3769 : loss : 0.269967, supervised_loss: 0.269743
[21:10:44.613] iteration 3770 : loss : 0.279747, supervised_loss: 0.279271
[21:10:45.526] iteration 3771 : loss : 0.270869, supervised_loss: 0.270561
[21:10:46.440] iteration 3772 : loss : 0.268919, supervised_loss: 0.268685
[21:10:47.352] iteration 3773 : loss : 0.276836, supervised_loss: 0.276425
[21:10:48.265] iteration 3774 : loss : 0.280720, supervised_loss: 0.280177
[21:10:49.177] iteration 3775 : loss : 0.275810, supervised_loss: 0.275589
[21:10:50.091] iteration 3776 : loss : 0.277393, supervised_loss: 0.277101
[21:10:51.004] iteration 3777 : loss : 0.277020, supervised_loss: 0.276627
[21:10:51.916] iteration 3778 : loss : 0.274570, supervised_loss: 0.274389
[21:10:52.830] iteration 3779 : loss : 0.277966, supervised_loss: 0.277716
[21:10:53.743] iteration 3780 : loss : 0.280509, supervised_loss: 0.280303
[21:10:55.301] iteration 3781 : loss : 0.271076, supervised_loss: 0.270818
[21:10:56.214] iteration 3782 : loss : 0.273629, supervised_loss: 0.273267
[21:10:57.126] iteration 3783 : loss : 0.275468, supervised_loss: 0.275129
[21:10:58.039] iteration 3784 : loss : 0.278327, supervised_loss: 0.278078
[21:10:58.952] iteration 3785 : loss : 0.273980, supervised_loss: 0.273723
[21:10:59.864] iteration 3786 : loss : 0.281117, supervised_loss: 0.280858
[21:11:00.777] iteration 3787 : loss : 0.288137, supervised_loss: 0.287804
[21:11:01.690] iteration 3788 : loss : 0.274256, supervised_loss: 0.274043
[21:11:02.603] iteration 3789 : loss : 0.275803, supervised_loss: 0.275624
[21:11:03.515] iteration 3790 : loss : 0.277916, supervised_loss: 0.277665
[21:11:04.427] iteration 3791 : loss : 0.278412, supervised_loss: 0.278162
[21:11:05.339] iteration 3792 : loss : 0.278985, supervised_loss: 0.278794
[21:11:06.932] iteration 3793 : loss : 0.278464, supervised_loss: 0.278253
[21:11:07.844] iteration 3794 : loss : 0.274617, supervised_loss: 0.274383
[21:11:08.756] iteration 3795 : loss : 0.274274, supervised_loss: 0.274031
[21:11:09.669] iteration 3796 : loss : 0.274631, supervised_loss: 0.274311
[21:11:10.580] iteration 3797 : loss : 0.272047, supervised_loss: 0.271834
[21:11:11.492] iteration 3798 : loss : 0.278038, supervised_loss: 0.277784
[21:11:12.407] iteration 3799 : loss : 0.277520, supervised_loss: 0.277268
[21:11:13.321] iteration 3800 : loss : 0.276293, supervised_loss: 0.275875
[21:11:15.368] save best model to model/LA_vnet_25_labeled/URPC/iter_3800_dice_0.9233959317207336.pth
[21:11:16.281] iteration 3801 : loss : 0.271552, supervised_loss: 0.271284
[21:11:17.193] iteration 3802 : loss : 0.281271, supervised_loss: 0.281018
[21:11:18.107] iteration 3803 : loss : 0.278387, supervised_loss: 0.277950
[21:11:19.020] iteration 3804 : loss : 0.277602, supervised_loss: 0.277182
[21:11:20.564] iteration 3805 : loss : 0.274843, supervised_loss: 0.274529
[21:11:21.478] iteration 3806 : loss : 0.273590, supervised_loss: 0.273322
[21:11:22.390] iteration 3807 : loss : 0.280426, supervised_loss: 0.280209
[21:11:23.303] iteration 3808 : loss : 0.270186, supervised_loss: 0.269860
[21:11:24.216] iteration 3809 : loss : 0.269662, supervised_loss: 0.269442
[21:11:25.128] iteration 3810 : loss : 0.281819, supervised_loss: 0.281653
[21:11:26.040] iteration 3811 : loss : 0.272681, supervised_loss: 0.272444
[21:11:26.952] iteration 3812 : loss : 0.283721, supervised_loss: 0.283354
[21:11:27.864] iteration 3813 : loss : 0.278809, supervised_loss: 0.278383
[21:11:28.778] iteration 3814 : loss : 0.280167, supervised_loss: 0.279897
[21:11:29.691] iteration 3815 : loss : 0.279273, supervised_loss: 0.278978
[21:11:30.604] iteration 3816 : loss : 0.272756, supervised_loss: 0.272491
[21:11:32.152] iteration 3817 : loss : 0.275641, supervised_loss: 0.275410
[21:11:33.065] iteration 3818 : loss : 0.271412, supervised_loss: 0.270975
[21:11:33.977] iteration 3819 : loss : 0.272112, supervised_loss: 0.271849
[21:11:34.890] iteration 3820 : loss : 0.278436, supervised_loss: 0.278150
[21:11:35.803] iteration 3821 : loss : 0.277101, supervised_loss: 0.276773
[21:11:36.715] iteration 3822 : loss : 0.270823, supervised_loss: 0.270520
[21:11:37.628] iteration 3823 : loss : 0.274300, supervised_loss: 0.273953
[21:11:38.540] iteration 3824 : loss : 0.296253, supervised_loss: 0.296044
[21:11:39.453] iteration 3825 : loss : 0.281940, supervised_loss: 0.281682
[21:11:40.368] iteration 3826 : loss : 0.279527, supervised_loss: 0.279267
[21:11:41.282] iteration 3827 : loss : 0.281384, supervised_loss: 0.280979
[21:11:42.195] iteration 3828 : loss : 0.270946, supervised_loss: 0.270688
[21:11:43.726] iteration 3829 : loss : 0.273630, supervised_loss: 0.273262
[21:11:44.637] iteration 3830 : loss : 0.280038, supervised_loss: 0.279751
[21:11:45.550] iteration 3831 : loss : 0.272026, supervised_loss: 0.271657
[21:11:46.463] iteration 3832 : loss : 0.273913, supervised_loss: 0.273399
[21:11:47.376] iteration 3833 : loss : 0.272591, supervised_loss: 0.272215
[21:11:48.288] iteration 3834 : loss : 0.276769, supervised_loss: 0.276572
[21:11:49.201] iteration 3835 : loss : 0.279407, supervised_loss: 0.278737
[21:11:50.115] iteration 3836 : loss : 0.275555, supervised_loss: 0.275095
[21:11:51.029] iteration 3837 : loss : 0.272894, supervised_loss: 0.272613
[21:11:51.942] iteration 3838 : loss : 0.271150, supervised_loss: 0.270860
[21:11:52.854] iteration 3839 : loss : 0.273363, supervised_loss: 0.273112
[21:11:53.767] iteration 3840 : loss : 0.281605, supervised_loss: 0.281172
[21:11:55.419] iteration 3841 : loss : 0.279711, supervised_loss: 0.279293
[21:11:56.332] iteration 3842 : loss : 0.275254, supervised_loss: 0.274935
[21:11:57.244] iteration 3843 : loss : 0.270580, supervised_loss: 0.270257
[21:11:58.157] iteration 3844 : loss : 0.277231, supervised_loss: 0.276447
[21:11:59.071] iteration 3845 : loss : 0.273577, supervised_loss: 0.273065
[21:11:59.984] iteration 3846 : loss : 0.280109, supervised_loss: 0.279884
[21:12:00.897] iteration 3847 : loss : 0.272757, supervised_loss: 0.272391
[21:12:01.810] iteration 3848 : loss : 0.288940, supervised_loss: 0.288661
[21:12:02.723] iteration 3849 : loss : 0.273183, supervised_loss: 0.272959
[21:12:03.635] iteration 3850 : loss : 0.269987, supervised_loss: 0.269645
[21:12:04.549] iteration 3851 : loss : 0.271652, supervised_loss: 0.271468
[21:12:05.463] iteration 3852 : loss : 0.274583, supervised_loss: 0.274289
[21:12:07.030] iteration 3853 : loss : 0.276967, supervised_loss: 0.276646
[21:12:07.942] iteration 3854 : loss : 0.275117, supervised_loss: 0.274804
[21:12:08.855] iteration 3855 : loss : 0.270183, supervised_loss: 0.269756
[21:12:09.769] iteration 3856 : loss : 0.271033, supervised_loss: 0.270709
[21:12:10.682] iteration 3857 : loss : 0.281628, supervised_loss: 0.281396
[21:12:11.594] iteration 3858 : loss : 0.274596, supervised_loss: 0.274276
[21:12:12.507] iteration 3859 : loss : 0.274307, supervised_loss: 0.274038
[21:12:13.421] iteration 3860 : loss : 0.281717, supervised_loss: 0.281445
[21:12:14.334] iteration 3861 : loss : 0.275884, supervised_loss: 0.275483
[21:12:15.247] iteration 3862 : loss : 0.268577, supervised_loss: 0.268174
[21:12:16.184] iteration 3863 : loss : 0.275564, supervised_loss: 0.275334
[21:12:17.098] iteration 3864 : loss : 0.276462, supervised_loss: 0.276293
[21:12:18.658] iteration 3865 : loss : 0.275770, supervised_loss: 0.275439
[21:12:19.572] iteration 3866 : loss : 0.276415, supervised_loss: 0.276133
[21:12:20.484] iteration 3867 : loss : 0.274172, supervised_loss: 0.273791
[21:12:21.396] iteration 3868 : loss : 0.270197, supervised_loss: 0.269936
[21:12:22.309] iteration 3869 : loss : 0.271916, supervised_loss: 0.271509
[21:12:23.221] iteration 3870 : loss : 0.278061, supervised_loss: 0.277764
[21:12:24.133] iteration 3871 : loss : 0.275351, supervised_loss: 0.275052
[21:12:25.045] iteration 3872 : loss : 0.273369, supervised_loss: 0.273085
[21:12:25.957] iteration 3873 : loss : 0.280261, supervised_loss: 0.280010
[21:12:26.870] iteration 3874 : loss : 0.279472, supervised_loss: 0.279261
[21:12:27.782] iteration 3875 : loss : 0.273366, supervised_loss: 0.273131
[21:12:28.694] iteration 3876 : loss : 0.279482, supervised_loss: 0.279152
[21:12:30.286] iteration 3877 : loss : 0.273014, supervised_loss: 0.272657
[21:12:31.198] iteration 3878 : loss : 0.281357, supervised_loss: 0.281144
[21:12:32.110] iteration 3879 : loss : 0.273913, supervised_loss: 0.273684
[21:12:33.025] iteration 3880 : loss : 0.284736, supervised_loss: 0.284498
[21:12:33.939] iteration 3881 : loss : 0.268868, supervised_loss: 0.268498
[21:12:34.851] iteration 3882 : loss : 0.279237, supervised_loss: 0.278890
[21:12:35.762] iteration 3883 : loss : 0.273702, supervised_loss: 0.273446
[21:12:36.676] iteration 3884 : loss : 0.278476, supervised_loss: 0.278213
[21:12:37.588] iteration 3885 : loss : 0.275104, supervised_loss: 0.274843
[21:12:38.536] iteration 3886 : loss : 0.279436, supervised_loss: 0.279110
[21:12:39.449] iteration 3887 : loss : 0.278702, supervised_loss: 0.278190
[21:12:40.362] iteration 3888 : loss : 0.273211, supervised_loss: 0.273040
[21:12:41.901] iteration 3889 : loss : 0.273427, supervised_loss: 0.273066
[21:12:42.813] iteration 3890 : loss : 0.273490, supervised_loss: 0.273199
[21:12:43.725] iteration 3891 : loss : 0.280007, supervised_loss: 0.279548
[21:12:44.638] iteration 3892 : loss : 0.274106, supervised_loss: 0.273747
[21:12:45.551] iteration 3893 : loss : 0.273286, supervised_loss: 0.273036
[21:12:46.464] iteration 3894 : loss : 0.275579, supervised_loss: 0.275376
[21:12:47.376] iteration 3895 : loss : 0.272363, supervised_loss: 0.272186
[21:12:48.288] iteration 3896 : loss : 0.272289, supervised_loss: 0.271845
[21:12:49.203] iteration 3897 : loss : 0.274345, supervised_loss: 0.274056
[21:12:50.116] iteration 3898 : loss : 0.271199, supervised_loss: 0.270941
[21:12:51.029] iteration 3899 : loss : 0.275340, supervised_loss: 0.275129
[21:12:51.942] iteration 3900 : loss : 0.280107, supervised_loss: 0.279806
[21:12:53.442] iteration 3901 : loss : 0.271128, supervised_loss: 0.270624
[21:12:54.356] iteration 3902 : loss : 0.269807, supervised_loss: 0.269371
[21:12:55.269] iteration 3903 : loss : 0.273186, supervised_loss: 0.272938
[21:12:56.181] iteration 3904 : loss : 0.278127, supervised_loss: 0.277868
[21:12:57.092] iteration 3905 : loss : 0.274178, supervised_loss: 0.273940
[21:12:58.004] iteration 3906 : loss : 0.273731, supervised_loss: 0.273456
[21:12:58.918] iteration 3907 : loss : 0.277281, supervised_loss: 0.276935
[21:12:59.833] iteration 3908 : loss : 0.292477, supervised_loss: 0.292102
[21:13:00.746] iteration 3909 : loss : 0.282640, supervised_loss: 0.282410
[21:13:01.659] iteration 3910 : loss : 0.273908, supervised_loss: 0.273375
[21:13:02.572] iteration 3911 : loss : 0.280332, supervised_loss: 0.280069
[21:13:03.485] iteration 3912 : loss : 0.271758, supervised_loss: 0.271365
[21:13:05.044] iteration 3913 : loss : 0.273678, supervised_loss: 0.273329
[21:13:05.956] iteration 3914 : loss : 0.273598, supervised_loss: 0.273282
[21:13:06.870] iteration 3915 : loss : 0.276742, supervised_loss: 0.276452
[21:13:07.783] iteration 3916 : loss : 0.271309, supervised_loss: 0.271055
[21:13:08.695] iteration 3917 : loss : 0.275213, supervised_loss: 0.274876
[21:13:09.607] iteration 3918 : loss : 0.272998, supervised_loss: 0.272659
[21:13:10.521] iteration 3919 : loss : 0.270954, supervised_loss: 0.270495
[21:13:11.435] iteration 3920 : loss : 0.277499, supervised_loss: 0.277110
[21:13:12.348] iteration 3921 : loss : 0.277430, supervised_loss: 0.277091
[21:13:13.261] iteration 3922 : loss : 0.272154, supervised_loss: 0.271947
[21:13:14.173] iteration 3923 : loss : 0.272124, supervised_loss: 0.271841
[21:13:15.085] iteration 3924 : loss : 0.276886, supervised_loss: 0.276460
[21:13:16.634] iteration 3925 : loss : 0.282281, supervised_loss: 0.282037
[21:13:17.547] iteration 3926 : loss : 0.275472, supervised_loss: 0.274538
[21:13:18.461] iteration 3927 : loss : 0.273313, supervised_loss: 0.272937
[21:13:19.373] iteration 3928 : loss : 0.274646, supervised_loss: 0.274149
[21:13:20.287] iteration 3929 : loss : 0.275378, supervised_loss: 0.274997
[21:13:21.199] iteration 3930 : loss : 0.268588, supervised_loss: 0.268265
[21:13:22.113] iteration 3931 : loss : 0.276850, supervised_loss: 0.276526
[21:13:23.026] iteration 3932 : loss : 0.281132, supervised_loss: 0.280858
[21:13:23.939] iteration 3933 : loss : 0.274807, supervised_loss: 0.274492
[21:13:24.854] iteration 3934 : loss : 0.284463, supervised_loss: 0.283908
[21:13:25.766] iteration 3935 : loss : 0.276172, supervised_loss: 0.275910
[21:13:26.682] iteration 3936 : loss : 0.272781, supervised_loss: 0.272533
[21:13:28.188] iteration 3937 : loss : 0.276842, supervised_loss: 0.276486
[21:13:29.100] iteration 3938 : loss : 0.277195, supervised_loss: 0.276771
[21:13:30.012] iteration 3939 : loss : 0.287905, supervised_loss: 0.287680
[21:13:30.924] iteration 3940 : loss : 0.269164, supervised_loss: 0.268758
[21:13:31.837] iteration 3941 : loss : 0.279280, supervised_loss: 0.279030
[21:13:32.749] iteration 3942 : loss : 0.283050, supervised_loss: 0.282763
[21:13:33.661] iteration 3943 : loss : 0.279837, supervised_loss: 0.279479
[21:13:34.574] iteration 3944 : loss : 0.274359, supervised_loss: 0.274054
[21:13:35.487] iteration 3945 : loss : 0.271834, supervised_loss: 0.271477
[21:13:36.400] iteration 3946 : loss : 0.268353, supervised_loss: 0.268126
[21:13:37.312] iteration 3947 : loss : 0.276431, supervised_loss: 0.276053
[21:13:38.225] iteration 3948 : loss : 0.271556, supervised_loss: 0.271286
[21:13:39.759] iteration 3949 : loss : 0.275701, supervised_loss: 0.275217
[21:13:40.672] iteration 3950 : loss : 0.272642, supervised_loss: 0.272388
[21:13:41.585] iteration 3951 : loss : 0.273735, supervised_loss: 0.273475
[21:13:42.498] iteration 3952 : loss : 0.279461, supervised_loss: 0.279220
[21:13:43.410] iteration 3953 : loss : 0.269751, supervised_loss: 0.269336
[21:13:44.323] iteration 3954 : loss : 0.275839, supervised_loss: 0.275553
[21:13:45.236] iteration 3955 : loss : 0.274591, supervised_loss: 0.274386
[21:13:46.149] iteration 3956 : loss : 0.281110, supervised_loss: 0.280809
[21:13:47.099] iteration 3957 : loss : 0.271880, supervised_loss: 0.271683
[21:13:48.012] iteration 3958 : loss : 0.285068, supervised_loss: 0.284326
[21:13:48.924] iteration 3959 : loss : 0.284172, supervised_loss: 0.283887
[21:13:49.837] iteration 3960 : loss : 0.271616, supervised_loss: 0.271377
[21:13:51.417] iteration 3961 : loss : 0.271796, supervised_loss: 0.271362
[21:13:52.329] iteration 3962 : loss : 0.269779, supervised_loss: 0.269482
[21:13:53.241] iteration 3963 : loss : 0.268594, supervised_loss: 0.268224
[21:13:54.153] iteration 3964 : loss : 0.274246, supervised_loss: 0.273971
[21:13:55.065] iteration 3965 : loss : 0.285447, supervised_loss: 0.285181
[21:13:55.977] iteration 3966 : loss : 0.278010, supervised_loss: 0.277785
[21:13:56.891] iteration 3967 : loss : 0.284743, supervised_loss: 0.284461
[21:13:57.804] iteration 3968 : loss : 0.284727, supervised_loss: 0.284492
[21:13:58.716] iteration 3969 : loss : 0.277617, supervised_loss: 0.277231
[21:13:59.629] iteration 3970 : loss : 0.279916, supervised_loss: 0.279455
[21:14:00.541] iteration 3971 : loss : 0.269908, supervised_loss: 0.269595
[21:14:01.454] iteration 3972 : loss : 0.267502, supervised_loss: 0.267096
[21:14:03.098] iteration 3973 : loss : 0.276833, supervised_loss: 0.276387
[21:14:04.011] iteration 3974 : loss : 0.278942, supervised_loss: 0.278617
[21:14:04.922] iteration 3975 : loss : 0.278658, supervised_loss: 0.278277
[21:14:05.834] iteration 3976 : loss : 0.275690, supervised_loss: 0.275227
[21:14:06.747] iteration 3977 : loss : 0.280897, supervised_loss: 0.280596
[21:14:07.661] iteration 3978 : loss : 0.270340, supervised_loss: 0.269998
[21:14:08.574] iteration 3979 : loss : 0.278931, supervised_loss: 0.278707
[21:14:09.486] iteration 3980 : loss : 0.279126, supervised_loss: 0.278801
[21:14:10.398] iteration 3981 : loss : 0.278187, supervised_loss: 0.277832
[21:14:11.310] iteration 3982 : loss : 0.279351, supervised_loss: 0.279125
[21:14:12.224] iteration 3983 : loss : 0.276381, supervised_loss: 0.275770
[21:14:13.137] iteration 3984 : loss : 0.281480, supervised_loss: 0.281061
[21:14:14.650] iteration 3985 : loss : 0.276002, supervised_loss: 0.275719
[21:14:15.563] iteration 3986 : loss : 0.278403, supervised_loss: 0.278083
[21:14:16.475] iteration 3987 : loss : 0.282083, supervised_loss: 0.281704
[21:14:17.388] iteration 3988 : loss : 0.279897, supervised_loss: 0.279611
[21:14:18.299] iteration 3989 : loss : 0.274369, supervised_loss: 0.274094
[21:14:19.211] iteration 3990 : loss : 0.276483, supervised_loss: 0.276190
[21:14:20.124] iteration 3991 : loss : 0.280811, supervised_loss: 0.280464
[21:14:21.037] iteration 3992 : loss : 0.273799, supervised_loss: 0.273590
[21:14:21.950] iteration 3993 : loss : 0.273063, supervised_loss: 0.272792
[21:14:22.863] iteration 3994 : loss : 0.280594, supervised_loss: 0.280131
[21:14:23.775] iteration 3995 : loss : 0.273907, supervised_loss: 0.273677
[21:14:24.688] iteration 3996 : loss : 0.276191, supervised_loss: 0.275757
[21:14:26.206] iteration 3997 : loss : 0.274007, supervised_loss: 0.273751
[21:14:27.120] iteration 3998 : loss : 0.272893, supervised_loss: 0.272614
[21:14:28.032] iteration 3999 : loss : 0.274193, supervised_loss: 0.273902
[21:14:28.945] iteration 4000 : loss : 0.274344, supervised_loss: 0.273727
[21:14:31.042] save best model to model/LA_vnet_25_labeled/URPC/iter_4000_dice_0.9241928458213806.pth
[21:14:31.954] iteration 4001 : loss : 0.273458, supervised_loss: 0.273223
[21:14:32.867] iteration 4002 : loss : 0.276684, supervised_loss: 0.276123
[21:14:33.780] iteration 4003 : loss : 0.279683, supervised_loss: 0.279157
[21:14:34.694] iteration 4004 : loss : 0.278377, supervised_loss: 0.278061
[21:14:35.607] iteration 4005 : loss : 0.288081, supervised_loss: 0.287829
[21:14:36.519] iteration 4006 : loss : 0.275516, supervised_loss: 0.275236
[21:14:37.431] iteration 4007 : loss : 0.272039, supervised_loss: 0.271628
[21:14:38.344] iteration 4008 : loss : 0.278513, supervised_loss: 0.278305
[21:14:39.903] iteration 4009 : loss : 0.279474, supervised_loss: 0.279096
[21:14:40.814] iteration 4010 : loss : 0.275217, supervised_loss: 0.274626
[21:14:41.727] iteration 4011 : loss : 0.272255, supervised_loss: 0.271884
[21:14:42.641] iteration 4012 : loss : 0.285773, supervised_loss: 0.285290
[21:14:43.555] iteration 4013 : loss : 0.273636, supervised_loss: 0.273071
[21:14:44.467] iteration 4014 : loss : 0.270138, supervised_loss: 0.269891
[21:14:45.380] iteration 4015 : loss : 0.274043, supervised_loss: 0.273589
[21:14:46.292] iteration 4016 : loss : 0.272213, supervised_loss: 0.271905
[21:14:47.204] iteration 4017 : loss : 0.279004, supervised_loss: 0.278723
[21:14:48.117] iteration 4018 : loss : 0.276827, supervised_loss: 0.275827
[21:14:49.030] iteration 4019 : loss : 0.274828, supervised_loss: 0.274603
[21:14:49.942] iteration 4020 : loss : 0.272738, supervised_loss: 0.272131
[21:14:51.512] iteration 4021 : loss : 0.267510, supervised_loss: 0.267266
[21:14:52.425] iteration 4022 : loss : 0.271222, supervised_loss: 0.270937
[21:14:53.336] iteration 4023 : loss : 0.269916, supervised_loss: 0.269658
[21:14:54.249] iteration 4024 : loss : 0.271279, supervised_loss: 0.270960
[21:14:55.161] iteration 4025 : loss : 0.277687, supervised_loss: 0.277327
[21:14:56.073] iteration 4026 : loss : 0.273695, supervised_loss: 0.273419
[21:14:56.986] iteration 4027 : loss : 0.286109, supervised_loss: 0.285666
[21:14:57.900] iteration 4028 : loss : 0.277719, supervised_loss: 0.277173
[21:14:58.813] iteration 4029 : loss : 0.283440, supervised_loss: 0.282956
[21:14:59.726] iteration 4030 : loss : 0.271223, supervised_loss: 0.270966
[21:15:00.639] iteration 4031 : loss : 0.275350, supervised_loss: 0.275111
[21:15:01.552] iteration 4032 : loss : 0.277219, supervised_loss: 0.276760
[21:15:03.139] iteration 4033 : loss : 0.280252, supervised_loss: 0.279823
[21:15:04.050] iteration 4034 : loss : 0.276264, supervised_loss: 0.275989
[21:15:04.962] iteration 4035 : loss : 0.272182, supervised_loss: 0.271907
[21:15:05.875] iteration 4036 : loss : 0.274262, supervised_loss: 0.273481
[21:15:06.786] iteration 4037 : loss : 0.276834, supervised_loss: 0.276563
[21:15:07.697] iteration 4038 : loss : 0.278051, supervised_loss: 0.277431
[21:15:08.610] iteration 4039 : loss : 0.274129, supervised_loss: 0.273836
[21:15:09.523] iteration 4040 : loss : 0.277269, supervised_loss: 0.276936
[21:15:10.436] iteration 4041 : loss : 0.275174, supervised_loss: 0.274985
[21:15:11.348] iteration 4042 : loss : 0.277705, supervised_loss: 0.277221
[21:15:12.261] iteration 4043 : loss : 0.285087, supervised_loss: 0.284872
[21:15:13.174] iteration 4044 : loss : 0.282966, supervised_loss: 0.282653
[21:15:14.725] iteration 4045 : loss : 0.276183, supervised_loss: 0.275963
[21:15:15.638] iteration 4046 : loss : 0.275306, supervised_loss: 0.274945
[21:15:16.551] iteration 4047 : loss : 0.277727, supervised_loss: 0.277473
[21:15:17.464] iteration 4048 : loss : 0.272290, supervised_loss: 0.272035
[21:15:18.378] iteration 4049 : loss : 0.285098, supervised_loss: 0.284743
[21:15:19.290] iteration 4050 : loss : 0.272907, supervised_loss: 0.272667
[21:15:20.202] iteration 4051 : loss : 0.271410, supervised_loss: 0.271014
[21:15:21.115] iteration 4052 : loss : 0.280779, supervised_loss: 0.280203
[21:15:22.027] iteration 4053 : loss : 0.278977, supervised_loss: 0.278456
[21:15:22.941] iteration 4054 : loss : 0.276229, supervised_loss: 0.275827
[21:15:23.854] iteration 4055 : loss : 0.272226, supervised_loss: 0.271988
[21:15:24.766] iteration 4056 : loss : 0.276673, supervised_loss: 0.276366
[21:15:26.359] iteration 4057 : loss : 0.276875, supervised_loss: 0.276537
[21:15:27.272] iteration 4058 : loss : 0.277775, supervised_loss: 0.277219
[21:15:28.186] iteration 4059 : loss : 0.275460, supervised_loss: 0.275053
[21:15:29.099] iteration 4060 : loss : 0.277678, supervised_loss: 0.277378
[21:15:30.012] iteration 4061 : loss : 0.274112, supervised_loss: 0.273748
[21:15:30.924] iteration 4062 : loss : 0.273503, supervised_loss: 0.273210
[21:15:31.836] iteration 4063 : loss : 0.274239, supervised_loss: 0.273767
[21:15:32.748] iteration 4064 : loss : 0.275983, supervised_loss: 0.275714
[21:15:33.661] iteration 4065 : loss : 0.276669, supervised_loss: 0.276140
[21:15:34.573] iteration 4066 : loss : 0.278656, supervised_loss: 0.278186
[21:15:35.486] iteration 4067 : loss : 0.276828, supervised_loss: 0.276396
[21:15:36.399] iteration 4068 : loss : 0.275372, supervised_loss: 0.275152
[21:15:38.025] iteration 4069 : loss : 0.277761, supervised_loss: 0.277454
[21:15:38.938] iteration 4070 : loss : 0.273948, supervised_loss: 0.273573
[21:15:39.849] iteration 4071 : loss : 0.283754, supervised_loss: 0.283486
[21:15:40.762] iteration 4072 : loss : 0.271167, supervised_loss: 0.270857
[21:15:41.674] iteration 4073 : loss : 0.278192, supervised_loss: 0.277551
[21:15:42.587] iteration 4074 : loss : 0.282820, supervised_loss: 0.282522
[21:15:43.500] iteration 4075 : loss : 0.277084, supervised_loss: 0.276710
[21:15:44.412] iteration 4076 : loss : 0.271533, supervised_loss: 0.271141
[21:15:45.325] iteration 4077 : loss : 0.274382, supervised_loss: 0.274133
[21:15:46.239] iteration 4078 : loss : 0.279096, supervised_loss: 0.278777
[21:15:47.151] iteration 4079 : loss : 0.273437, supervised_loss: 0.273074
[21:15:48.064] iteration 4080 : loss : 0.282692, supervised_loss: 0.282163
[21:15:49.596] iteration 4081 : loss : 0.279368, supervised_loss: 0.278885
[21:15:50.508] iteration 4082 : loss : 0.274896, supervised_loss: 0.274639
[21:15:51.420] iteration 4083 : loss : 0.276692, supervised_loss: 0.276283
[21:15:52.334] iteration 4084 : loss : 0.283198, supervised_loss: 0.282750
[21:15:53.247] iteration 4085 : loss : 0.275203, supervised_loss: 0.274885
[21:15:54.160] iteration 4086 : loss : 0.275778, supervised_loss: 0.275431
[21:15:55.074] iteration 4087 : loss : 0.279861, supervised_loss: 0.279553
[21:15:55.987] iteration 4088 : loss : 0.279333, supervised_loss: 0.278703
[21:15:56.901] iteration 4089 : loss : 0.276082, supervised_loss: 0.275776
[21:15:57.815] iteration 4090 : loss : 0.275993, supervised_loss: 0.275564
[21:15:58.728] iteration 4091 : loss : 0.284399, supervised_loss: 0.283925
[21:15:59.641] iteration 4092 : loss : 0.277737, supervised_loss: 0.277249
[21:16:01.284] iteration 4093 : loss : 0.281320, supervised_loss: 0.280976
[21:16:02.196] iteration 4094 : loss : 0.278703, supervised_loss: 0.278441
[21:16:03.110] iteration 4095 : loss : 0.272263, supervised_loss: 0.272028
[21:16:04.023] iteration 4096 : loss : 0.278488, supervised_loss: 0.278277
[21:16:04.935] iteration 4097 : loss : 0.274190, supervised_loss: 0.273921
[21:16:05.848] iteration 4098 : loss : 0.274641, supervised_loss: 0.274258
[21:16:06.759] iteration 4099 : loss : 0.271071, supervised_loss: 0.270865
[21:16:07.672] iteration 4100 : loss : 0.280002, supervised_loss: 0.279763
[21:16:08.585] iteration 4101 : loss : 0.268760, supervised_loss: 0.268576
[21:16:09.499] iteration 4102 : loss : 0.273929, supervised_loss: 0.273587
[21:16:10.412] iteration 4103 : loss : 0.279521, supervised_loss: 0.279193
[21:16:11.327] iteration 4104 : loss : 0.273842, supervised_loss: 0.273448
[21:16:12.980] iteration 4105 : loss : 0.275408, supervised_loss: 0.274847
[21:16:13.893] iteration 4106 : loss : 0.274591, supervised_loss: 0.274085
[21:16:14.807] iteration 4107 : loss : 0.275444, supervised_loss: 0.275132
[21:16:15.720] iteration 4108 : loss : 0.273726, supervised_loss: 0.273319
[21:16:16.634] iteration 4109 : loss : 0.285149, supervised_loss: 0.284782
[21:16:17.546] iteration 4110 : loss : 0.275129, supervised_loss: 0.274857
[21:16:18.459] iteration 4111 : loss : 0.278236, supervised_loss: 0.277960
[21:16:19.371] iteration 4112 : loss : 0.275254, supervised_loss: 0.275031
[21:16:20.286] iteration 4113 : loss : 0.277964, supervised_loss: 0.277642
[21:16:21.200] iteration 4114 : loss : 0.278196, supervised_loss: 0.277830
[21:16:22.114] iteration 4115 : loss : 0.283790, supervised_loss: 0.283471
[21:16:23.027] iteration 4116 : loss : 0.276542, supervised_loss: 0.276271
[21:16:24.652] iteration 4117 : loss : 0.270850, supervised_loss: 0.270507
[21:16:25.564] iteration 4118 : loss : 0.271837, supervised_loss: 0.271350
[21:16:26.477] iteration 4119 : loss : 0.274690, supervised_loss: 0.274420
[21:16:27.390] iteration 4120 : loss : 0.272052, supervised_loss: 0.271640
[21:16:28.305] iteration 4121 : loss : 0.270731, supervised_loss: 0.270232
[21:16:29.217] iteration 4122 : loss : 0.278826, supervised_loss: 0.278475
[21:16:30.130] iteration 4123 : loss : 0.276023, supervised_loss: 0.275593
[21:16:31.044] iteration 4124 : loss : 0.280334, supervised_loss: 0.279900
[21:16:31.957] iteration 4125 : loss : 0.277235, supervised_loss: 0.276963
[21:16:32.869] iteration 4126 : loss : 0.273990, supervised_loss: 0.273661
[21:16:33.782] iteration 4127 : loss : 0.274423, supervised_loss: 0.274124
[21:16:34.696] iteration 4128 : loss : 0.274128, supervised_loss: 0.273802
[21:16:36.197] iteration 4129 : loss : 0.276050, supervised_loss: 0.275742
[21:16:37.111] iteration 4130 : loss : 0.277716, supervised_loss: 0.277434
[21:16:38.023] iteration 4131 : loss : 0.289437, supervised_loss: 0.289157
[21:16:38.934] iteration 4132 : loss : 0.272847, supervised_loss: 0.272463
[21:16:39.847] iteration 4133 : loss : 0.274169, supervised_loss: 0.273930
[21:16:40.759] iteration 4134 : loss : 0.275577, supervised_loss: 0.275268
[21:16:41.671] iteration 4135 : loss : 0.272847, supervised_loss: 0.272537
[21:16:42.583] iteration 4136 : loss : 0.273149, supervised_loss: 0.272837
[21:16:43.496] iteration 4137 : loss : 0.281573, supervised_loss: 0.281184
[21:16:44.409] iteration 4138 : loss : 0.280873, supervised_loss: 0.279902
[21:16:45.321] iteration 4139 : loss : 0.276106, supervised_loss: 0.275577
[21:16:46.234] iteration 4140 : loss : 0.272714, supervised_loss: 0.272440
[21:16:47.797] iteration 4141 : loss : 0.273280, supervised_loss: 0.272997
[21:16:48.709] iteration 4142 : loss : 0.281517, supervised_loss: 0.281241
[21:16:49.623] iteration 4143 : loss : 0.284561, supervised_loss: 0.284237
[21:16:50.536] iteration 4144 : loss : 0.280927, supervised_loss: 0.280652
[21:16:51.448] iteration 4145 : loss : 0.271273, supervised_loss: 0.270907
[21:16:52.360] iteration 4146 : loss : 0.274922, supervised_loss: 0.273763
[21:16:53.273] iteration 4147 : loss : 0.275171, supervised_loss: 0.274826
[21:16:54.186] iteration 4148 : loss : 0.279890, supervised_loss: 0.279449
[21:16:55.100] iteration 4149 : loss : 0.271743, supervised_loss: 0.271383
[21:16:56.012] iteration 4150 : loss : 0.277040, supervised_loss: 0.276663
[21:16:56.925] iteration 4151 : loss : 0.271560, supervised_loss: 0.271248
[21:16:57.837] iteration 4152 : loss : 0.276070, supervised_loss: 0.275536
[21:16:59.404] iteration 4153 : loss : 0.274226, supervised_loss: 0.273878
[21:17:00.317] iteration 4154 : loss : 0.269936, supervised_loss: 0.269608
[21:17:01.229] iteration 4155 : loss : 0.273455, supervised_loss: 0.273088
[21:17:02.143] iteration 4156 : loss : 0.273219, supervised_loss: 0.272746
[21:17:03.056] iteration 4157 : loss : 0.272991, supervised_loss: 0.272702
[21:17:03.968] iteration 4158 : loss : 0.277127, supervised_loss: 0.276877
[21:17:04.880] iteration 4159 : loss : 0.282953, supervised_loss: 0.282733
[21:17:05.794] iteration 4160 : loss : 0.278995, supervised_loss: 0.278785
[21:17:06.706] iteration 4161 : loss : 0.277132, supervised_loss: 0.276844
[21:17:07.619] iteration 4162 : loss : 0.276405, supervised_loss: 0.275953
[21:17:08.532] iteration 4163 : loss : 0.273585, supervised_loss: 0.273306
[21:17:09.445] iteration 4164 : loss : 0.278744, supervised_loss: 0.278371
[21:17:10.960] iteration 4165 : loss : 0.283501, supervised_loss: 0.283172
[21:17:11.874] iteration 4166 : loss : 0.270227, supervised_loss: 0.269932
[21:17:12.786] iteration 4167 : loss : 0.274050, supervised_loss: 0.273758
[21:17:13.700] iteration 4168 : loss : 0.272423, supervised_loss: 0.272051
[21:17:14.613] iteration 4169 : loss : 0.272377, supervised_loss: 0.271975
[21:17:15.525] iteration 4170 : loss : 0.267198, supervised_loss: 0.266820
[21:17:16.437] iteration 4171 : loss : 0.279862, supervised_loss: 0.279593
[21:17:17.350] iteration 4172 : loss : 0.280041, supervised_loss: 0.279537
[21:17:18.264] iteration 4173 : loss : 0.275105, supervised_loss: 0.274737
[21:17:19.177] iteration 4174 : loss : 0.279812, supervised_loss: 0.279422
[21:17:20.089] iteration 4175 : loss : 0.275132, supervised_loss: 0.274844
[21:17:21.001] iteration 4176 : loss : 0.270170, supervised_loss: 0.269947
[21:17:22.521] iteration 4177 : loss : 0.272217, supervised_loss: 0.271951
[21:17:23.435] iteration 4178 : loss : 0.276316, supervised_loss: 0.275653
[21:17:24.347] iteration 4179 : loss : 0.267567, supervised_loss: 0.267101
[21:17:25.259] iteration 4180 : loss : 0.273490, supervised_loss: 0.273165
[21:17:26.171] iteration 4181 : loss : 0.277863, supervised_loss: 0.277550
[21:17:27.083] iteration 4182 : loss : 0.277892, supervised_loss: 0.277481
[21:17:28.017] iteration 4183 : loss : 0.272425, supervised_loss: 0.272091
[21:17:28.931] iteration 4184 : loss : 0.274111, supervised_loss: 0.273683
[21:17:29.844] iteration 4185 : loss : 0.272644, supervised_loss: 0.272302
[21:17:30.758] iteration 4186 : loss : 0.276767, supervised_loss: 0.276514
[21:17:31.670] iteration 4187 : loss : 0.279430, supervised_loss: 0.279125
[21:17:32.583] iteration 4188 : loss : 0.277392, supervised_loss: 0.277097
[21:17:34.075] iteration 4189 : loss : 0.276261, supervised_loss: 0.275872
[21:17:34.988] iteration 4190 : loss : 0.274032, supervised_loss: 0.273722
[21:17:35.901] iteration 4191 : loss : 0.269876, supervised_loss: 0.269683
[21:17:36.813] iteration 4192 : loss : 0.266319, supervised_loss: 0.265969
[21:17:37.724] iteration 4193 : loss : 0.277918, supervised_loss: 0.277563
[21:17:38.637] iteration 4194 : loss : 0.277062, supervised_loss: 0.276719
[21:17:39.550] iteration 4195 : loss : 0.273125, supervised_loss: 0.272824
[21:17:40.462] iteration 4196 : loss : 0.276570, supervised_loss: 0.276234
[21:17:41.376] iteration 4197 : loss : 0.271306, supervised_loss: 0.270934
[21:17:42.288] iteration 4198 : loss : 0.272499, supervised_loss: 0.272063
[21:17:43.200] iteration 4199 : loss : 0.270219, supervised_loss: 0.269986
[21:17:44.115] iteration 4200 : loss : 0.276220, supervised_loss: 0.275928
[21:17:47.712] iteration 4201 : loss : 0.281847, supervised_loss: 0.281520
[21:17:48.624] iteration 4202 : loss : 0.276120, supervised_loss: 0.275691
[21:17:49.537] iteration 4203 : loss : 0.275870, supervised_loss: 0.275576
[21:17:50.449] iteration 4204 : loss : 0.277782, supervised_loss: 0.277471
[21:17:51.362] iteration 4205 : loss : 0.274447, supervised_loss: 0.274028
[21:17:52.274] iteration 4206 : loss : 0.274781, supervised_loss: 0.274387
[21:17:53.186] iteration 4207 : loss : 0.273372, supervised_loss: 0.273031
[21:17:54.099] iteration 4208 : loss : 0.280018, supervised_loss: 0.279248
[21:17:55.012] iteration 4209 : loss : 0.273878, supervised_loss: 0.273433
[21:17:55.925] iteration 4210 : loss : 0.283796, supervised_loss: 0.283561
[21:17:56.839] iteration 4211 : loss : 0.273807, supervised_loss: 0.273429
[21:17:57.752] iteration 4212 : loss : 0.271062, supervised_loss: 0.270620
[21:17:59.310] iteration 4213 : loss : 0.269813, supervised_loss: 0.269355
[21:18:00.221] iteration 4214 : loss : 0.269633, supervised_loss: 0.269283
[21:18:01.135] iteration 4215 : loss : 0.273067, supervised_loss: 0.272565
[21:18:02.048] iteration 4216 : loss : 0.276550, supervised_loss: 0.276253
[21:18:02.962] iteration 4217 : loss : 0.270143, supervised_loss: 0.269905
[21:18:03.873] iteration 4218 : loss : 0.278111, supervised_loss: 0.277610
[21:18:04.785] iteration 4219 : loss : 0.276256, supervised_loss: 0.275874
[21:18:05.697] iteration 4220 : loss : 0.278455, supervised_loss: 0.278068
[21:18:06.611] iteration 4221 : loss : 0.272864, supervised_loss: 0.272570
[21:18:07.524] iteration 4222 : loss : 0.279464, supervised_loss: 0.279130
[21:18:08.438] iteration 4223 : loss : 0.278272, supervised_loss: 0.277899
[21:18:09.350] iteration 4224 : loss : 0.284432, supervised_loss: 0.283880
[21:18:10.964] iteration 4225 : loss : 0.274041, supervised_loss: 0.273689
[21:18:11.874] iteration 4226 : loss : 0.277376, supervised_loss: 0.277041
[21:18:12.787] iteration 4227 : loss : 0.282391, supervised_loss: 0.282093
[21:18:13.700] iteration 4228 : loss : 0.284059, supervised_loss: 0.283636
[21:18:14.612] iteration 4229 : loss : 0.279359, supervised_loss: 0.279031
[21:18:15.525] iteration 4230 : loss : 0.276818, supervised_loss: 0.276226
[21:18:16.436] iteration 4231 : loss : 0.277549, supervised_loss: 0.277265
[21:18:17.349] iteration 4232 : loss : 0.274830, supervised_loss: 0.273500
[21:18:18.262] iteration 4233 : loss : 0.275492, supervised_loss: 0.275089
[21:18:19.176] iteration 4234 : loss : 0.277015, supervised_loss: 0.276569
[21:18:20.088] iteration 4235 : loss : 0.270226, supervised_loss: 0.269912
[21:18:21.000] iteration 4236 : loss : 0.283872, supervised_loss: 0.283529
[21:18:22.526] iteration 4237 : loss : 0.282825, supervised_loss: 0.282300
[21:18:23.439] iteration 4238 : loss : 0.276559, supervised_loss: 0.276064
[21:18:24.352] iteration 4239 : loss : 0.286872, supervised_loss: 0.286348
[21:18:25.266] iteration 4240 : loss : 0.276072, supervised_loss: 0.275759
[21:18:26.180] iteration 4241 : loss : 0.279696, supervised_loss: 0.279289
[21:18:27.094] iteration 4242 : loss : 0.271852, supervised_loss: 0.271489
[21:18:28.006] iteration 4243 : loss : 0.271943, supervised_loss: 0.271494
[21:18:28.928] iteration 4244 : loss : 0.270444, supervised_loss: 0.270062
[21:18:29.841] iteration 4245 : loss : 0.277726, supervised_loss: 0.277401
[21:18:30.753] iteration 4246 : loss : 0.273240, supervised_loss: 0.272863
[21:18:31.665] iteration 4247 : loss : 0.274787, supervised_loss: 0.274489
[21:18:32.578] iteration 4248 : loss : 0.272169, supervised_loss: 0.271642
[21:18:34.178] iteration 4249 : loss : 0.274814, supervised_loss: 0.274342
[21:18:35.091] iteration 4250 : loss : 0.276998, supervised_loss: 0.276621
[21:18:36.003] iteration 4251 : loss : 0.278138, supervised_loss: 0.277811
[21:18:36.914] iteration 4252 : loss : 0.274942, supervised_loss: 0.274485
[21:18:37.827] iteration 4253 : loss : 0.278604, supervised_loss: 0.278324
[21:18:38.740] iteration 4254 : loss : 0.277212, supervised_loss: 0.276721
[21:18:39.654] iteration 4255 : loss : 0.275258, supervised_loss: 0.274911
[21:18:40.567] iteration 4256 : loss : 0.273190, supervised_loss: 0.272781
[21:18:41.479] iteration 4257 : loss : 0.273266, supervised_loss: 0.272972
[21:18:42.391] iteration 4258 : loss : 0.273862, supervised_loss: 0.273510
[21:18:43.305] iteration 4259 : loss : 0.274051, supervised_loss: 0.273760
[21:18:44.219] iteration 4260 : loss : 0.273552, supervised_loss: 0.273221
[21:18:45.801] iteration 4261 : loss : 0.272531, supervised_loss: 0.271929
[21:18:46.714] iteration 4262 : loss : 0.276536, supervised_loss: 0.276281
[21:18:47.625] iteration 4263 : loss : 0.273641, supervised_loss: 0.273359
[21:18:48.538] iteration 4264 : loss : 0.268270, supervised_loss: 0.267811
[21:18:49.451] iteration 4265 : loss : 0.275955, supervised_loss: 0.275249
[21:18:50.364] iteration 4266 : loss : 0.279811, supervised_loss: 0.279484
[21:18:51.276] iteration 4267 : loss : 0.275848, supervised_loss: 0.275658
[21:18:52.189] iteration 4268 : loss : 0.274093, supervised_loss: 0.273755
[21:18:53.102] iteration 4269 : loss : 0.281242, supervised_loss: 0.280844
[21:18:54.016] iteration 4270 : loss : 0.272561, supervised_loss: 0.272333
[21:18:54.929] iteration 4271 : loss : 0.269078, supervised_loss: 0.268804
[21:18:55.842] iteration 4272 : loss : 0.270526, supervised_loss: 0.270152
[21:18:57.392] iteration 4273 : loss : 0.271117, supervised_loss: 0.270791
[21:18:58.304] iteration 4274 : loss : 0.277417, supervised_loss: 0.277034
[21:18:59.216] iteration 4275 : loss : 0.272525, supervised_loss: 0.272232
[21:19:00.130] iteration 4276 : loss : 0.275680, supervised_loss: 0.275353
[21:19:01.043] iteration 4277 : loss : 0.275833, supervised_loss: 0.275304
[21:19:01.955] iteration 4278 : loss : 0.275295, supervised_loss: 0.274936
[21:19:02.868] iteration 4279 : loss : 0.275285, supervised_loss: 0.274905
[21:19:03.783] iteration 4280 : loss : 0.272773, supervised_loss: 0.272421
[21:19:04.698] iteration 4281 : loss : 0.276407, supervised_loss: 0.275596
[21:19:05.611] iteration 4282 : loss : 0.273973, supervised_loss: 0.273476
[21:19:06.522] iteration 4283 : loss : 0.271485, supervised_loss: 0.270953
[21:19:07.435] iteration 4284 : loss : 0.283989, supervised_loss: 0.283544
[21:19:09.009] iteration 4285 : loss : 0.273213, supervised_loss: 0.272903
[21:19:09.920] iteration 4286 : loss : 0.282566, supervised_loss: 0.282140
[21:19:10.834] iteration 4287 : loss : 0.281224, supervised_loss: 0.280878
[21:19:11.746] iteration 4288 : loss : 0.274640, supervised_loss: 0.274321
[21:19:12.659] iteration 4289 : loss : 0.272087, supervised_loss: 0.271591
[21:19:13.570] iteration 4290 : loss : 0.273900, supervised_loss: 0.273275
[21:19:14.482] iteration 4291 : loss : 0.285574, supervised_loss: 0.285315
[21:19:15.394] iteration 4292 : loss : 0.274487, supervised_loss: 0.274149
[21:19:16.307] iteration 4293 : loss : 0.274868, supervised_loss: 0.274398
[21:19:17.218] iteration 4294 : loss : 0.270440, supervised_loss: 0.270101
[21:19:18.132] iteration 4295 : loss : 0.279827, supervised_loss: 0.279529
[21:19:19.044] iteration 4296 : loss : 0.267870, supervised_loss: 0.267461
[21:19:20.548] iteration 4297 : loss : 0.279145, supervised_loss: 0.278174
[21:19:21.460] iteration 4298 : loss : 0.275016, supervised_loss: 0.274716
[21:19:22.373] iteration 4299 : loss : 0.272251, supervised_loss: 0.271851
[21:19:23.285] iteration 4300 : loss : 0.276091, supervised_loss: 0.275809
[21:19:24.197] iteration 4301 : loss : 0.274117, supervised_loss: 0.273706
[21:19:25.110] iteration 4302 : loss : 0.273519, supervised_loss: 0.273196
[21:19:26.022] iteration 4303 : loss : 0.269647, supervised_loss: 0.269040
[21:19:26.935] iteration 4304 : loss : 0.271886, supervised_loss: 0.271108
[21:19:27.847] iteration 4305 : loss : 0.272414, supervised_loss: 0.271882
[21:19:28.759] iteration 4306 : loss : 0.280091, supervised_loss: 0.279616
[21:19:29.673] iteration 4307 : loss : 0.275676, supervised_loss: 0.275298
[21:19:30.587] iteration 4308 : loss : 0.273470, supervised_loss: 0.273004
[21:19:32.163] iteration 4309 : loss : 0.268609, supervised_loss: 0.268140
[21:19:33.074] iteration 4310 : loss : 0.273101, supervised_loss: 0.272786
[21:19:33.987] iteration 4311 : loss : 0.273152, supervised_loss: 0.272658
[21:19:34.901] iteration 4312 : loss : 0.276508, supervised_loss: 0.276118
[21:19:35.812] iteration 4313 : loss : 0.272479, supervised_loss: 0.272109
[21:19:36.725] iteration 4314 : loss : 0.270649, supervised_loss: 0.270370
[21:19:37.637] iteration 4315 : loss : 0.271854, supervised_loss: 0.271531
[21:19:38.550] iteration 4316 : loss : 0.269718, supervised_loss: 0.269475
[21:19:39.465] iteration 4317 : loss : 0.269697, supervised_loss: 0.268904
[21:19:40.379] iteration 4318 : loss : 0.272045, supervised_loss: 0.271537
[21:19:41.292] iteration 4319 : loss : 0.272651, supervised_loss: 0.272389
[21:19:42.205] iteration 4320 : loss : 0.278198, supervised_loss: 0.277677
[21:19:43.880] iteration 4321 : loss : 0.271496, supervised_loss: 0.271152
[21:19:44.793] iteration 4322 : loss : 0.274722, supervised_loss: 0.274492
[21:19:45.706] iteration 4323 : loss : 0.279963, supervised_loss: 0.279605
[21:19:46.618] iteration 4324 : loss : 0.284927, supervised_loss: 0.284663
[21:19:47.531] iteration 4325 : loss : 0.273404, supervised_loss: 0.273060
[21:19:48.443] iteration 4326 : loss : 0.273135, supervised_loss: 0.272525
[21:19:49.357] iteration 4327 : loss : 0.275324, supervised_loss: 0.274981
[21:19:50.271] iteration 4328 : loss : 0.278799, supervised_loss: 0.278458
[21:19:51.185] iteration 4329 : loss : 0.272948, supervised_loss: 0.272648
[21:19:52.097] iteration 4330 : loss : 0.271345, supervised_loss: 0.270581
[21:19:53.010] iteration 4331 : loss : 0.270933, supervised_loss: 0.270585
[21:19:53.922] iteration 4332 : loss : 0.270481, supervised_loss: 0.270001
[21:19:55.409] iteration 4333 : loss : 0.276143, supervised_loss: 0.275662
[21:19:56.321] iteration 4334 : loss : 0.273904, supervised_loss: 0.273478
[21:19:57.234] iteration 4335 : loss : 0.273120, supervised_loss: 0.272797
[21:19:58.148] iteration 4336 : loss : 0.274202, supervised_loss: 0.273767
[21:19:59.061] iteration 4337 : loss : 0.272400, supervised_loss: 0.271939
[21:19:59.975] iteration 4338 : loss : 0.276439, supervised_loss: 0.276201
[21:20:00.887] iteration 4339 : loss : 0.272296, supervised_loss: 0.271670
[21:20:01.799] iteration 4340 : loss : 0.272813, supervised_loss: 0.272020
[21:20:02.712] iteration 4341 : loss : 0.278373, supervised_loss: 0.277937
[21:20:03.625] iteration 4342 : loss : 0.273292, supervised_loss: 0.272792
[21:20:04.538] iteration 4343 : loss : 0.276308, supervised_loss: 0.275896
[21:20:05.451] iteration 4344 : loss : 0.276491, supervised_loss: 0.275969
[21:20:06.953] iteration 4345 : loss : 0.275070, supervised_loss: 0.274739
[21:20:07.866] iteration 4346 : loss : 0.270880, supervised_loss: 0.270606
[21:20:08.781] iteration 4347 : loss : 0.268689, supervised_loss: 0.268029
[21:20:09.696] iteration 4348 : loss : 0.279809, supervised_loss: 0.279325
[21:20:10.608] iteration 4349 : loss : 0.279867, supervised_loss: 0.279371
[21:20:11.521] iteration 4350 : loss : 0.277350, supervised_loss: 0.277081
[21:20:12.433] iteration 4351 : loss : 0.276679, supervised_loss: 0.276071
[21:20:13.346] iteration 4352 : loss : 0.275037, supervised_loss: 0.274571
[21:20:14.260] iteration 4353 : loss : 0.283616, supervised_loss: 0.283303
[21:20:15.173] iteration 4354 : loss : 0.274460, supervised_loss: 0.274120
[21:20:16.084] iteration 4355 : loss : 0.272081, supervised_loss: 0.271789
[21:20:16.997] iteration 4356 : loss : 0.272873, supervised_loss: 0.272516
[21:20:18.550] iteration 4357 : loss : 0.273190, supervised_loss: 0.272582
[21:20:19.462] iteration 4358 : loss : 0.275491, supervised_loss: 0.275023
[21:20:20.375] iteration 4359 : loss : 0.279471, supervised_loss: 0.279176
[21:20:21.286] iteration 4360 : loss : 0.273743, supervised_loss: 0.273089
[21:20:22.198] iteration 4361 : loss : 0.272344, supervised_loss: 0.271895
[21:20:23.111] iteration 4362 : loss : 0.279118, supervised_loss: 0.278547
[21:20:24.023] iteration 4363 : loss : 0.273616, supervised_loss: 0.273256
[21:20:24.936] iteration 4364 : loss : 0.283983, supervised_loss: 0.283510
[21:20:25.850] iteration 4365 : loss : 0.270222, supervised_loss: 0.269670
[21:20:26.762] iteration 4366 : loss : 0.277444, supervised_loss: 0.277022
[21:20:27.675] iteration 4367 : loss : 0.279858, supervised_loss: 0.279581
[21:20:28.586] iteration 4368 : loss : 0.270356, supervised_loss: 0.269749
[21:20:30.171] iteration 4369 : loss : 0.284075, supervised_loss: 0.283767
[21:20:31.084] iteration 4370 : loss : 0.271017, supervised_loss: 0.270553
[21:20:31.995] iteration 4371 : loss : 0.275567, supervised_loss: 0.275267
[21:20:32.907] iteration 4372 : loss : 0.274673, supervised_loss: 0.274342
[21:20:33.819] iteration 4373 : loss : 0.281183, supervised_loss: 0.280833
[21:20:34.733] iteration 4374 : loss : 0.273625, supervised_loss: 0.273313
[21:20:35.647] iteration 4375 : loss : 0.272004, supervised_loss: 0.271593
[21:20:36.560] iteration 4376 : loss : 0.274306, supervised_loss: 0.273906
[21:20:37.473] iteration 4377 : loss : 0.271806, supervised_loss: 0.271553
[21:20:38.385] iteration 4378 : loss : 0.275447, supervised_loss: 0.275116
[21:20:39.297] iteration 4379 : loss : 0.272439, supervised_loss: 0.271963
[21:20:40.210] iteration 4380 : loss : 0.273148, supervised_loss: 0.272810
[21:20:41.727] iteration 4381 : loss : 0.274124, supervised_loss: 0.273847
[21:20:42.640] iteration 4382 : loss : 0.286861, supervised_loss: 0.286596
[21:20:43.552] iteration 4383 : loss : 0.277856, supervised_loss: 0.277620
[21:20:44.464] iteration 4384 : loss : 0.276539, supervised_loss: 0.276024
[21:20:45.378] iteration 4385 : loss : 0.272809, supervised_loss: 0.272381
[21:20:46.290] iteration 4386 : loss : 0.267457, supervised_loss: 0.267164
[21:20:47.204] iteration 4387 : loss : 0.279064, supervised_loss: 0.278368
[21:20:48.116] iteration 4388 : loss : 0.272539, supervised_loss: 0.272077
[21:20:49.029] iteration 4389 : loss : 0.271520, supervised_loss: 0.271260
[21:20:49.941] iteration 4390 : loss : 0.270877, supervised_loss: 0.270509
[21:20:50.854] iteration 4391 : loss : 0.281655, supervised_loss: 0.281386
[21:20:51.768] iteration 4392 : loss : 0.269459, supervised_loss: 0.269135
[21:20:53.439] iteration 4393 : loss : 0.273454, supervised_loss: 0.272922
[21:20:54.351] iteration 4394 : loss : 0.273895, supervised_loss: 0.273618
[21:20:55.265] iteration 4395 : loss : 0.277686, supervised_loss: 0.277388
[21:20:56.178] iteration 4396 : loss : 0.275497, supervised_loss: 0.275070
[21:20:57.091] iteration 4397 : loss : 0.270944, supervised_loss: 0.270631
[21:20:58.003] iteration 4398 : loss : 0.276221, supervised_loss: 0.275802
[21:20:58.915] iteration 4399 : loss : 0.278196, supervised_loss: 0.277901
[21:20:59.828] iteration 4400 : loss : 0.275116, supervised_loss: 0.274874
[21:21:02.689] iteration 4401 : loss : 0.277940, supervised_loss: 0.277481
[21:21:03.602] iteration 4402 : loss : 0.280740, supervised_loss: 0.280402
[21:21:04.515] iteration 4403 : loss : 0.277985, supervised_loss: 0.277544
[21:21:05.426] iteration 4404 : loss : 0.272712, supervised_loss: 0.272246
[21:21:06.920] iteration 4405 : loss : 0.281279, supervised_loss: 0.280966
[21:21:07.834] iteration 4406 : loss : 0.283516, supervised_loss: 0.282988
[21:21:08.745] iteration 4407 : loss : 0.273760, supervised_loss: 0.273266
[21:21:09.658] iteration 4408 : loss : 0.281832, supervised_loss: 0.281068
[21:21:10.571] iteration 4409 : loss : 0.272485, supervised_loss: 0.272198
[21:21:11.483] iteration 4410 : loss : 0.276323, supervised_loss: 0.275770
[21:21:12.396] iteration 4411 : loss : 0.274337, supervised_loss: 0.273917
[21:21:13.309] iteration 4412 : loss : 0.274378, supervised_loss: 0.274132
[21:21:14.222] iteration 4413 : loss : 0.288321, supervised_loss: 0.288030
[21:21:15.135] iteration 4414 : loss : 0.280496, supervised_loss: 0.280207
[21:21:16.049] iteration 4415 : loss : 0.275702, supervised_loss: 0.274782
[21:21:16.963] iteration 4416 : loss : 0.279062, supervised_loss: 0.278671
[21:21:18.652] iteration 4417 : loss : 0.273181, supervised_loss: 0.272431
[21:21:19.564] iteration 4418 : loss : 0.270797, supervised_loss: 0.270261
[21:21:20.477] iteration 4419 : loss : 0.276536, supervised_loss: 0.276259
[21:21:21.390] iteration 4420 : loss : 0.281328, supervised_loss: 0.280963
[21:21:22.302] iteration 4421 : loss : 0.273745, supervised_loss: 0.273334
[21:21:23.215] iteration 4422 : loss : 0.278678, supervised_loss: 0.278411
[21:21:24.127] iteration 4423 : loss : 0.274114, supervised_loss: 0.273495
[21:21:25.041] iteration 4424 : loss : 0.275603, supervised_loss: 0.275120
[21:21:25.953] iteration 4425 : loss : 0.271106, supervised_loss: 0.270711
[21:21:26.865] iteration 4426 : loss : 0.270213, supervised_loss: 0.269867
[21:21:27.777] iteration 4427 : loss : 0.275680, supervised_loss: 0.275422
[21:21:28.691] iteration 4428 : loss : 0.269011, supervised_loss: 0.268481
[21:21:30.286] iteration 4429 : loss : 0.268806, supervised_loss: 0.268410
[21:21:31.197] iteration 4430 : loss : 0.280982, supervised_loss: 0.280149
[21:21:32.110] iteration 4431 : loss : 0.272827, supervised_loss: 0.272542
[21:21:33.023] iteration 4432 : loss : 0.272506, supervised_loss: 0.272208
[21:21:33.935] iteration 4433 : loss : 0.283529, supervised_loss: 0.283219
[21:21:34.848] iteration 4434 : loss : 0.270495, supervised_loss: 0.270163
[21:21:35.760] iteration 4435 : loss : 0.271217, supervised_loss: 0.270643
[21:21:36.672] iteration 4436 : loss : 0.271781, supervised_loss: 0.271458
[21:21:37.585] iteration 4437 : loss : 0.272206, supervised_loss: 0.271596
[21:21:38.498] iteration 4438 : loss : 0.274125, supervised_loss: 0.273709
[21:21:39.411] iteration 4439 : loss : 0.270539, supervised_loss: 0.269973
[21:21:40.324] iteration 4440 : loss : 0.276101, supervised_loss: 0.275622
[21:21:41.880] iteration 4441 : loss : 0.275392, supervised_loss: 0.274768
[21:21:42.794] iteration 4442 : loss : 0.288827, supervised_loss: 0.288230
[21:21:43.707] iteration 4443 : loss : 0.274899, supervised_loss: 0.274560
[21:21:44.619] iteration 4444 : loss : 0.273103, supervised_loss: 0.272439
[21:21:45.533] iteration 4445 : loss : 0.279272, supervised_loss: 0.278873
[21:21:46.444] iteration 4446 : loss : 0.269144, supervised_loss: 0.268809
[21:21:47.357] iteration 4447 : loss : 0.274521, supervised_loss: 0.274287
[21:21:48.271] iteration 4448 : loss : 0.278065, supervised_loss: 0.277548
[21:21:49.185] iteration 4449 : loss : 0.277358, supervised_loss: 0.276987
[21:21:50.097] iteration 4450 : loss : 0.270671, supervised_loss: 0.270177
[21:21:51.010] iteration 4451 : loss : 0.276754, supervised_loss: 0.276457
[21:21:51.923] iteration 4452 : loss : 0.274294, supervised_loss: 0.273974
[21:21:53.551] iteration 4453 : loss : 0.268931, supervised_loss: 0.268419
[21:21:54.463] iteration 4454 : loss : 0.278753, supervised_loss: 0.278361
[21:21:55.376] iteration 4455 : loss : 0.272775, supervised_loss: 0.272305
[21:21:56.288] iteration 4456 : loss : 0.275854, supervised_loss: 0.275529
[21:21:57.201] iteration 4457 : loss : 0.270215, supervised_loss: 0.269703
[21:21:58.114] iteration 4458 : loss : 0.279611, supervised_loss: 0.279102
[21:21:59.026] iteration 4459 : loss : 0.278737, supervised_loss: 0.278310
[21:21:59.939] iteration 4460 : loss : 0.274469, supervised_loss: 0.274092
[21:22:00.852] iteration 4461 : loss : 0.272990, supervised_loss: 0.272636
[21:22:01.765] iteration 4462 : loss : 0.271782, supervised_loss: 0.271275
[21:22:02.678] iteration 4463 : loss : 0.270458, supervised_loss: 0.269876
[21:22:03.592] iteration 4464 : loss : 0.272490, supervised_loss: 0.271849
[21:22:05.109] iteration 4465 : loss : 0.274202, supervised_loss: 0.273733
[21:22:06.021] iteration 4466 : loss : 0.276076, supervised_loss: 0.275720
[21:22:06.933] iteration 4467 : loss : 0.276710, supervised_loss: 0.276358
[21:22:07.845] iteration 4468 : loss : 0.274608, supervised_loss: 0.274216
[21:22:08.758] iteration 4469 : loss : 0.286963, supervised_loss: 0.286456
[21:22:09.671] iteration 4470 : loss : 0.272309, supervised_loss: 0.271967
[21:22:10.583] iteration 4471 : loss : 0.276202, supervised_loss: 0.275809
[21:22:11.496] iteration 4472 : loss : 0.279392, supervised_loss: 0.278935
[21:22:12.409] iteration 4473 : loss : 0.274228, supervised_loss: 0.273862
[21:22:13.323] iteration 4474 : loss : 0.287230, supervised_loss: 0.286864
[21:22:14.237] iteration 4475 : loss : 0.277311, supervised_loss: 0.276999
[21:22:15.152] iteration 4476 : loss : 0.272197, supervised_loss: 0.271859
[21:22:16.759] iteration 4477 : loss : 0.276720, supervised_loss: 0.276415
[21:22:17.672] iteration 4478 : loss : 0.272065, supervised_loss: 0.271756
[21:22:18.586] iteration 4479 : loss : 0.272954, supervised_loss: 0.272423
[21:22:19.498] iteration 4480 : loss : 0.267328, supervised_loss: 0.266525
[21:22:20.410] iteration 4481 : loss : 0.287018, supervised_loss: 0.286603
[21:22:21.322] iteration 4482 : loss : 0.267889, supervised_loss: 0.267420
[21:22:22.235] iteration 4483 : loss : 0.278479, supervised_loss: 0.278110
[21:22:23.150] iteration 4484 : loss : 0.274109, supervised_loss: 0.273795
[21:22:24.063] iteration 4485 : loss : 0.277899, supervised_loss: 0.277364
[21:22:24.974] iteration 4486 : loss : 0.271382, supervised_loss: 0.271025
[21:22:25.888] iteration 4487 : loss : 0.282328, supervised_loss: 0.281831
[21:22:26.802] iteration 4488 : loss : 0.273483, supervised_loss: 0.273014
[21:22:28.377] iteration 4489 : loss : 0.273077, supervised_loss: 0.272566
[21:22:29.289] iteration 4490 : loss : 0.273194, supervised_loss: 0.272957
[21:22:30.201] iteration 4491 : loss : 0.273939, supervised_loss: 0.273689
[21:22:31.114] iteration 4492 : loss : 0.269743, supervised_loss: 0.269373
[21:22:32.026] iteration 4493 : loss : 0.273087, supervised_loss: 0.272787
[21:22:32.938] iteration 4494 : loss : 0.276779, supervised_loss: 0.276223
[21:22:33.852] iteration 4495 : loss : 0.278281, supervised_loss: 0.277999
[21:22:34.765] iteration 4496 : loss : 0.270881, supervised_loss: 0.270610
[21:22:35.678] iteration 4497 : loss : 0.274194, supervised_loss: 0.273752
[21:22:36.591] iteration 4498 : loss : 0.272160, supervised_loss: 0.271795
[21:22:37.504] iteration 4499 : loss : 0.282622, supervised_loss: 0.282372
[21:22:38.418] iteration 4500 : loss : 0.274189, supervised_loss: 0.273729
[21:22:40.043] iteration 4501 : loss : 0.272258, supervised_loss: 0.271869
[21:22:40.955] iteration 4502 : loss : 0.276153, supervised_loss: 0.275713
[21:22:41.866] iteration 4503 : loss : 0.281033, supervised_loss: 0.280512
[21:22:42.778] iteration 4504 : loss : 0.272302, supervised_loss: 0.272077
[21:22:43.692] iteration 4505 : loss : 0.276495, supervised_loss: 0.276165
[21:22:44.605] iteration 4506 : loss : 0.272002, supervised_loss: 0.271730
[21:22:45.517] iteration 4507 : loss : 0.278935, supervised_loss: 0.278566
[21:22:46.431] iteration 4508 : loss : 0.276868, supervised_loss: 0.276258
[21:22:47.344] iteration 4509 : loss : 0.275608, supervised_loss: 0.275162
[21:22:48.258] iteration 4510 : loss : 0.271304, supervised_loss: 0.270802
[21:22:49.172] iteration 4511 : loss : 0.273084, supervised_loss: 0.272567
[21:22:50.084] iteration 4512 : loss : 0.273654, supervised_loss: 0.273339
[21:22:51.649] iteration 4513 : loss : 0.275396, supervised_loss: 0.275135
[21:22:52.561] iteration 4514 : loss : 0.272803, supervised_loss: 0.272422
[21:22:53.475] iteration 4515 : loss : 0.272791, supervised_loss: 0.272232
[21:22:54.386] iteration 4516 : loss : 0.275289, supervised_loss: 0.275048
[21:22:55.298] iteration 4517 : loss : 0.272089, supervised_loss: 0.271453
[21:22:56.211] iteration 4518 : loss : 0.275935, supervised_loss: 0.275598
[21:22:57.124] iteration 4519 : loss : 0.275920, supervised_loss: 0.275544
[21:22:58.039] iteration 4520 : loss : 0.274392, supervised_loss: 0.274106
[21:22:58.952] iteration 4521 : loss : 0.279887, supervised_loss: 0.279561
[21:22:59.863] iteration 4522 : loss : 0.279650, supervised_loss: 0.279247
[21:23:00.775] iteration 4523 : loss : 0.269432, supervised_loss: 0.268814
[21:23:01.688] iteration 4524 : loss : 0.271719, supervised_loss: 0.271158
[21:23:03.261] iteration 4525 : loss : 0.272202, supervised_loss: 0.271819
[21:23:04.173] iteration 4526 : loss : 0.271177, supervised_loss: 0.270931
[21:23:05.084] iteration 4527 : loss : 0.275887, supervised_loss: 0.275440
[21:23:05.999] iteration 4528 : loss : 0.276371, supervised_loss: 0.275753
[21:23:06.913] iteration 4529 : loss : 0.281196, supervised_loss: 0.280811
[21:23:07.824] iteration 4530 : loss : 0.273755, supervised_loss: 0.273250
[21:23:08.736] iteration 4531 : loss : 0.279090, supervised_loss: 0.278725
[21:23:09.649] iteration 4532 : loss : 0.270832, supervised_loss: 0.270561
[21:23:10.562] iteration 4533 : loss : 0.280396, supervised_loss: 0.280005
[21:23:11.476] iteration 4534 : loss : 0.269553, supervised_loss: 0.269110
[21:23:12.388] iteration 4535 : loss : 0.270077, supervised_loss: 0.269743
[21:23:13.301] iteration 4536 : loss : 0.279542, supervised_loss: 0.278879
[21:23:14.883] iteration 4537 : loss : 0.281769, supervised_loss: 0.281264
[21:23:15.795] iteration 4538 : loss : 0.273142, supervised_loss: 0.272894
[21:23:16.708] iteration 4539 : loss : 0.278394, supervised_loss: 0.277827
[21:23:17.620] iteration 4540 : loss : 0.275111, supervised_loss: 0.273899
[21:23:18.533] iteration 4541 : loss : 0.275650, supervised_loss: 0.275299
[21:23:19.446] iteration 4542 : loss : 0.272604, supervised_loss: 0.272162
[21:23:20.358] iteration 4543 : loss : 0.271145, supervised_loss: 0.270750
[21:23:21.272] iteration 4544 : loss : 0.284024, supervised_loss: 0.283510
[21:23:22.185] iteration 4545 : loss : 0.272320, supervised_loss: 0.271828
[21:23:23.099] iteration 4546 : loss : 0.272596, supervised_loss: 0.271831
[21:23:24.012] iteration 4547 : loss : 0.283682, supervised_loss: 0.283384
[21:23:24.925] iteration 4548 : loss : 0.273945, supervised_loss: 0.273495
[21:23:26.444] iteration 4549 : loss : 0.270114, supervised_loss: 0.269540
[21:23:27.357] iteration 4550 : loss : 0.273432, supervised_loss: 0.272840
[21:23:28.269] iteration 4551 : loss : 0.276308, supervised_loss: 0.275918
[21:23:29.182] iteration 4552 : loss : 0.269051, supervised_loss: 0.268474
[21:23:30.093] iteration 4553 : loss : 0.270258, supervised_loss: 0.269870
[21:23:31.005] iteration 4554 : loss : 0.279256, supervised_loss: 0.278940
[21:23:31.919] iteration 4555 : loss : 0.273267, supervised_loss: 0.273005
[21:23:32.833] iteration 4556 : loss : 0.279407, supervised_loss: 0.279052
[21:23:33.748] iteration 4557 : loss : 0.271516, supervised_loss: 0.271169
[21:23:34.661] iteration 4558 : loss : 0.276036, supervised_loss: 0.275706
[21:23:35.574] iteration 4559 : loss : 0.273419, supervised_loss: 0.272995
[21:23:36.485] iteration 4560 : loss : 0.274740, supervised_loss: 0.274327
[21:23:38.071] iteration 4561 : loss : 0.279781, supervised_loss: 0.279027
[21:23:38.983] iteration 4562 : loss : 0.270535, supervised_loss: 0.270142
[21:23:39.896] iteration 4563 : loss : 0.273297, supervised_loss: 0.272879
[21:23:40.808] iteration 4564 : loss : 0.274382, supervised_loss: 0.273851
[21:23:41.722] iteration 4565 : loss : 0.274552, supervised_loss: 0.274176
[21:23:42.637] iteration 4566 : loss : 0.270741, supervised_loss: 0.270232
[21:23:43.550] iteration 4567 : loss : 0.277434, supervised_loss: 0.277103
[21:23:44.463] iteration 4568 : loss : 0.274546, supervised_loss: 0.274167
[21:23:45.376] iteration 4569 : loss : 0.273654, supervised_loss: 0.273181
[21:23:46.290] iteration 4570 : loss : 0.275687, supervised_loss: 0.275260
[21:23:47.204] iteration 4571 : loss : 0.283189, supervised_loss: 0.282784
[21:23:48.116] iteration 4572 : loss : 0.277796, supervised_loss: 0.277329
[21:23:49.682] iteration 4573 : loss : 0.272127, supervised_loss: 0.271706
[21:23:50.595] iteration 4574 : loss : 0.273024, supervised_loss: 0.272716
[21:23:51.507] iteration 4575 : loss : 0.275358, supervised_loss: 0.274866
[21:23:52.420] iteration 4576 : loss : 0.274341, supervised_loss: 0.273849
[21:23:53.334] iteration 4577 : loss : 0.274647, supervised_loss: 0.274208
[21:23:54.246] iteration 4578 : loss : 0.269686, supervised_loss: 0.269260
[21:23:55.158] iteration 4579 : loss : 0.278965, supervised_loss: 0.278563
[21:23:56.071] iteration 4580 : loss : 0.273091, supervised_loss: 0.272407
[21:23:56.986] iteration 4581 : loss : 0.273494, supervised_loss: 0.273102
[21:23:57.899] iteration 4582 : loss : 0.276711, supervised_loss: 0.276365
[21:23:58.811] iteration 4583 : loss : 0.272922, supervised_loss: 0.272695
[21:23:59.723] iteration 4584 : loss : 0.280054, supervised_loss: 0.279082
[21:24:01.360] iteration 4585 : loss : 0.271926, supervised_loss: 0.271562
[21:24:02.271] iteration 4586 : loss : 0.278109, supervised_loss: 0.277485
[21:24:03.182] iteration 4587 : loss : 0.278194, supervised_loss: 0.277523
[21:24:04.094] iteration 4588 : loss : 0.271052, supervised_loss: 0.270699
[21:24:05.008] iteration 4589 : loss : 0.281099, supervised_loss: 0.280748
[21:24:05.920] iteration 4590 : loss : 0.274680, supervised_loss: 0.274248
[21:24:06.832] iteration 4591 : loss : 0.276213, supervised_loss: 0.275642
[21:24:07.742] iteration 4592 : loss : 0.276996, supervised_loss: 0.276667
[21:24:08.655] iteration 4593 : loss : 0.274280, supervised_loss: 0.273847
[21:24:09.567] iteration 4594 : loss : 0.267447, supervised_loss: 0.267054
[21:24:10.479] iteration 4595 : loss : 0.271390, supervised_loss: 0.270835
[21:24:11.392] iteration 4596 : loss : 0.272762, supervised_loss: 0.272355
[21:24:12.885] iteration 4597 : loss : 0.274089, supervised_loss: 0.273716
[21:24:13.797] iteration 4598 : loss : 0.273292, supervised_loss: 0.272699
[21:24:14.709] iteration 4599 : loss : 0.269987, supervised_loss: 0.269437
[21:24:15.623] iteration 4600 : loss : 0.273121, supervised_loss: 0.272757
[21:24:18.504] iteration 4601 : loss : 0.277808, supervised_loss: 0.277486
[21:24:19.417] iteration 4602 : loss : 0.274490, supervised_loss: 0.274197
[21:24:20.330] iteration 4603 : loss : 0.274547, supervised_loss: 0.274005
[21:24:21.242] iteration 4604 : loss : 0.278105, supervised_loss: 0.277498
[21:24:22.157] iteration 4605 : loss : 0.279558, supervised_loss: 0.279198
[21:24:23.071] iteration 4606 : loss : 0.273790, supervised_loss: 0.273321
[21:24:23.984] iteration 4607 : loss : 0.275391, supervised_loss: 0.274982
[21:24:24.897] iteration 4608 : loss : 0.273199, supervised_loss: 0.272782
[21:24:26.476] iteration 4609 : loss : 0.280735, supervised_loss: 0.280264
[21:24:27.389] iteration 4610 : loss : 0.270653, supervised_loss: 0.269653
[21:24:28.302] iteration 4611 : loss : 0.266544, supervised_loss: 0.266251
[21:24:29.215] iteration 4612 : loss : 0.274458, supervised_loss: 0.274033
[21:24:30.126] iteration 4613 : loss : 0.280627, supervised_loss: 0.280236
[21:24:31.037] iteration 4614 : loss : 0.280731, supervised_loss: 0.280249
[21:24:31.949] iteration 4615 : loss : 0.275869, supervised_loss: 0.275492
[21:24:32.861] iteration 4616 : loss : 0.273613, supervised_loss: 0.273335
[21:24:33.772] iteration 4617 : loss : 0.272706, supervised_loss: 0.272405
[21:24:34.685] iteration 4618 : loss : 0.271933, supervised_loss: 0.271601
[21:24:35.597] iteration 4619 : loss : 0.277096, supervised_loss: 0.276503
[21:24:36.510] iteration 4620 : loss : 0.274611, supervised_loss: 0.274217
[21:24:38.098] iteration 4621 : loss : 0.272211, supervised_loss: 0.271700
[21:24:39.011] iteration 4622 : loss : 0.273405, supervised_loss: 0.273159
[21:24:39.923] iteration 4623 : loss : 0.274446, supervised_loss: 0.274013
[21:24:40.834] iteration 4624 : loss : 0.275369, supervised_loss: 0.275075
[21:24:41.747] iteration 4625 : loss : 0.268311, supervised_loss: 0.267980
[21:24:42.659] iteration 4626 : loss : 0.273663, supervised_loss: 0.273068
[21:24:43.573] iteration 4627 : loss : 0.273709, supervised_loss: 0.273403
[21:24:44.486] iteration 4628 : loss : 0.275557, supervised_loss: 0.275260
[21:24:45.398] iteration 4629 : loss : 0.280746, supervised_loss: 0.280424
[21:24:46.310] iteration 4630 : loss : 0.271176, supervised_loss: 0.270825
[21:24:47.223] iteration 4631 : loss : 0.268687, supervised_loss: 0.268296
[21:24:48.135] iteration 4632 : loss : 0.270850, supervised_loss: 0.270340
[21:24:49.700] iteration 4633 : loss : 0.273056, supervised_loss: 0.272579
[21:24:50.611] iteration 4634 : loss : 0.268628, supervised_loss: 0.268250
[21:24:51.524] iteration 4635 : loss : 0.267840, supervised_loss: 0.267520
[21:24:52.435] iteration 4636 : loss : 0.267229, supervised_loss: 0.266905
[21:24:53.346] iteration 4637 : loss : 0.273704, supervised_loss: 0.273384
[21:24:54.258] iteration 4638 : loss : 0.273531, supervised_loss: 0.272971
[21:24:55.170] iteration 4639 : loss : 0.282209, supervised_loss: 0.281538
[21:24:56.082] iteration 4640 : loss : 0.277031, supervised_loss: 0.276586
[21:24:57.023] iteration 4641 : loss : 0.272171, supervised_loss: 0.271830
[21:24:57.935] iteration 4642 : loss : 0.273201, supervised_loss: 0.272510
[21:24:58.849] iteration 4643 : loss : 0.274367, supervised_loss: 0.273939
[21:24:59.762] iteration 4644 : loss : 0.276665, supervised_loss: 0.276121
[21:25:01.343] iteration 4645 : loss : 0.275986, supervised_loss: 0.275548
[21:25:02.256] iteration 4646 : loss : 0.274434, supervised_loss: 0.273469
[21:25:03.169] iteration 4647 : loss : 0.272314, supervised_loss: 0.271883
[21:25:04.082] iteration 4648 : loss : 0.272415, supervised_loss: 0.271963
[21:25:04.996] iteration 4649 : loss : 0.278540, supervised_loss: 0.278166
[21:25:05.907] iteration 4650 : loss : 0.274067, supervised_loss: 0.273264
[21:25:06.819] iteration 4651 : loss : 0.270524, supervised_loss: 0.270211
[21:25:07.733] iteration 4652 : loss : 0.271594, supervised_loss: 0.271243
[21:25:08.645] iteration 4653 : loss : 0.271190, supervised_loss: 0.270946
[21:25:09.557] iteration 4654 : loss : 0.270193, supervised_loss: 0.269908
[21:25:10.468] iteration 4655 : loss : 0.273540, supervised_loss: 0.273281
[21:25:11.380] iteration 4656 : loss : 0.274635, supervised_loss: 0.274036
[21:25:12.961] iteration 4657 : loss : 0.274906, supervised_loss: 0.273019
[21:25:13.873] iteration 4658 : loss : 0.269547, supervised_loss: 0.268970
[21:25:14.786] iteration 4659 : loss : 0.271646, supervised_loss: 0.271050
[21:25:15.697] iteration 4660 : loss : 0.272053, supervised_loss: 0.271709
[21:25:16.609] iteration 4661 : loss : 0.272357, supervised_loss: 0.271410
[21:25:17.522] iteration 4662 : loss : 0.275762, supervised_loss: 0.275399
[21:25:18.434] iteration 4663 : loss : 0.270471, supervised_loss: 0.269833
[21:25:19.346] iteration 4664 : loss : 0.275214, supervised_loss: 0.274868
[21:25:20.259] iteration 4665 : loss : 0.275340, supervised_loss: 0.274959
[21:25:21.172] iteration 4666 : loss : 0.285040, supervised_loss: 0.284807
[21:25:22.086] iteration 4667 : loss : 0.275737, supervised_loss: 0.275481
[21:25:22.999] iteration 4668 : loss : 0.279860, supervised_loss: 0.279457
[21:25:24.620] iteration 4669 : loss : 0.274449, supervised_loss: 0.274067
[21:25:25.533] iteration 4670 : loss : 0.278634, supervised_loss: 0.278329
[21:25:26.445] iteration 4671 : loss : 0.274349, supervised_loss: 0.274051
[21:25:27.357] iteration 4672 : loss : 0.272091, supervised_loss: 0.271784
[21:25:28.269] iteration 4673 : loss : 0.272350, supervised_loss: 0.271999
[21:25:29.182] iteration 4674 : loss : 0.270457, supervised_loss: 0.270085
[21:25:30.094] iteration 4675 : loss : 0.276883, supervised_loss: 0.276523
[21:25:31.007] iteration 4676 : loss : 0.275585, supervised_loss: 0.275253
[21:25:31.920] iteration 4677 : loss : 0.277266, supervised_loss: 0.276934
[21:25:32.833] iteration 4678 : loss : 0.274845, supervised_loss: 0.274127
[21:25:33.747] iteration 4679 : loss : 0.272542, supervised_loss: 0.272163
[21:25:34.660] iteration 4680 : loss : 0.268012, supervised_loss: 0.267424
[21:25:36.256] iteration 4681 : loss : 0.275050, supervised_loss: 0.274473
[21:25:37.168] iteration 4682 : loss : 0.266357, supervised_loss: 0.265821
[21:25:38.082] iteration 4683 : loss : 0.271374, supervised_loss: 0.271147
[21:25:39.005] iteration 4684 : loss : 0.274463, supervised_loss: 0.273904
[21:25:39.917] iteration 4685 : loss : 0.276428, supervised_loss: 0.275968
[21:25:40.830] iteration 4686 : loss : 0.270683, supervised_loss: 0.270296
[21:25:41.743] iteration 4687 : loss : 0.270273, supervised_loss: 0.269904
[21:25:42.657] iteration 4688 : loss : 0.273283, supervised_loss: 0.272893
[21:25:43.570] iteration 4689 : loss : 0.272339, supervised_loss: 0.272001
[21:25:44.482] iteration 4690 : loss : 0.281271, supervised_loss: 0.280835
[21:25:45.395] iteration 4691 : loss : 0.273840, supervised_loss: 0.273160
[21:25:46.309] iteration 4692 : loss : 0.271075, supervised_loss: 0.270627
[21:25:47.897] iteration 4693 : loss : 0.267608, supervised_loss: 0.266995
[21:25:48.810] iteration 4694 : loss : 0.275421, supervised_loss: 0.275133
[21:25:49.721] iteration 4695 : loss : 0.274724, supervised_loss: 0.274289
[21:25:50.634] iteration 4696 : loss : 0.273951, supervised_loss: 0.273507
[21:25:51.548] iteration 4697 : loss : 0.280846, supervised_loss: 0.280375
[21:25:52.461] iteration 4698 : loss : 0.270724, supervised_loss: 0.270342
[21:25:53.375] iteration 4699 : loss : 0.270742, supervised_loss: 0.270432
[21:25:54.288] iteration 4700 : loss : 0.284489, supervised_loss: 0.283863
[21:25:55.199] iteration 4701 : loss : 0.273008, supervised_loss: 0.272425
[21:25:56.112] iteration 4702 : loss : 0.279076, supervised_loss: 0.278764
[21:25:57.025] iteration 4703 : loss : 0.279024, supervised_loss: 0.278695
[21:25:57.940] iteration 4704 : loss : 0.270012, supervised_loss: 0.269715
[21:25:59.490] iteration 4705 : loss : 0.271121, supervised_loss: 0.270699
[21:26:00.401] iteration 4706 : loss : 0.276680, supervised_loss: 0.276351
[21:26:01.315] iteration 4707 : loss : 0.275373, supervised_loss: 0.274997
[21:26:02.228] iteration 4708 : loss : 0.274748, supervised_loss: 0.274348
[21:26:03.141] iteration 4709 : loss : 0.277831, supervised_loss: 0.277439
[21:26:04.052] iteration 4710 : loss : 0.272801, supervised_loss: 0.272371
[21:26:04.964] iteration 4711 : loss : 0.273497, supervised_loss: 0.272981
[21:26:05.877] iteration 4712 : loss : 0.272710, supervised_loss: 0.272417
[21:26:06.789] iteration 4713 : loss : 0.269721, supervised_loss: 0.269369
[21:26:07.700] iteration 4714 : loss : 0.277548, supervised_loss: 0.277159
[21:26:08.613] iteration 4715 : loss : 0.271287, supervised_loss: 0.270823
[21:26:09.526] iteration 4716 : loss : 0.279771, supervised_loss: 0.279406
[21:26:11.047] iteration 4717 : loss : 0.272941, supervised_loss: 0.272419
[21:26:11.959] iteration 4718 : loss : 0.273822, supervised_loss: 0.273058
[21:26:12.871] iteration 4719 : loss : 0.275534, supervised_loss: 0.275258
[21:26:13.784] iteration 4720 : loss : 0.284417, supervised_loss: 0.284001
[21:26:14.696] iteration 4721 : loss : 0.272935, supervised_loss: 0.272585
[21:26:15.622] iteration 4722 : loss : 0.274276, supervised_loss: 0.273932
[21:26:16.535] iteration 4723 : loss : 0.276774, supervised_loss: 0.276090
[21:26:17.447] iteration 4724 : loss : 0.273033, supervised_loss: 0.272632
[21:26:18.361] iteration 4725 : loss : 0.275602, supervised_loss: 0.275142
[21:26:19.273] iteration 4726 : loss : 0.278675, supervised_loss: 0.278368
[21:26:20.185] iteration 4727 : loss : 0.277954, supervised_loss: 0.277532
[21:26:21.098] iteration 4728 : loss : 0.275002, supervised_loss: 0.274445
[21:26:22.614] iteration 4729 : loss : 0.271780, supervised_loss: 0.271322
[21:26:23.528] iteration 4730 : loss : 0.272869, supervised_loss: 0.272276
[21:26:24.440] iteration 4731 : loss : 0.276495, supervised_loss: 0.276094
[21:26:25.352] iteration 4732 : loss : 0.274077, supervised_loss: 0.273638
[21:26:26.265] iteration 4733 : loss : 0.271689, supervised_loss: 0.271092
[21:26:27.178] iteration 4734 : loss : 0.283770, supervised_loss: 0.283424
[21:26:28.090] iteration 4735 : loss : 0.274725, supervised_loss: 0.274401
[21:26:29.001] iteration 4736 : loss : 0.276986, supervised_loss: 0.276473
[21:26:29.915] iteration 4737 : loss : 0.269469, supervised_loss: 0.269063
[21:26:30.828] iteration 4738 : loss : 0.270556, supervised_loss: 0.270203
[21:26:31.742] iteration 4739 : loss : 0.268531, supervised_loss: 0.268008
[21:26:32.654] iteration 4740 : loss : 0.273004, supervised_loss: 0.272743
[21:26:34.277] iteration 4741 : loss : 0.283084, supervised_loss: 0.282452
[21:26:35.189] iteration 4742 : loss : 0.269513, supervised_loss: 0.269127
[21:26:36.102] iteration 4743 : loss : 0.275226, supervised_loss: 0.274943
[21:26:37.015] iteration 4744 : loss : 0.270494, supervised_loss: 0.270169
[21:26:37.929] iteration 4745 : loss : 0.275866, supervised_loss: 0.275167
[21:26:38.841] iteration 4746 : loss : 0.272886, supervised_loss: 0.272385
[21:26:39.753] iteration 4747 : loss : 0.274345, supervised_loss: 0.274064
[21:26:40.665] iteration 4748 : loss : 0.272108, supervised_loss: 0.269193
[21:26:41.579] iteration 4749 : loss : 0.280407, supervised_loss: 0.280020
[21:26:42.493] iteration 4750 : loss : 0.267964, supervised_loss: 0.267603
[21:26:43.406] iteration 4751 : loss : 0.270922, supervised_loss: 0.270621
[21:26:44.318] iteration 4752 : loss : 0.278889, supervised_loss: 0.278143
[21:26:45.913] iteration 4753 : loss : 0.277140, supervised_loss: 0.276741
[21:26:46.826] iteration 4754 : loss : 0.273671, supervised_loss: 0.273232
[21:26:47.737] iteration 4755 : loss : 0.274557, supervised_loss: 0.274175
[21:26:48.649] iteration 4756 : loss : 0.272694, supervised_loss: 0.272095
[21:26:49.563] iteration 4757 : loss : 0.274430, supervised_loss: 0.273966
[21:26:50.476] iteration 4758 : loss : 0.274574, supervised_loss: 0.274001
[21:26:51.389] iteration 4759 : loss : 0.272612, supervised_loss: 0.272098
[21:26:52.300] iteration 4760 : loss : 0.276374, supervised_loss: 0.275998
[21:26:53.212] iteration 4761 : loss : 0.272759, supervised_loss: 0.272527
[21:26:54.125] iteration 4762 : loss : 0.274433, supervised_loss: 0.274172
[21:26:55.037] iteration 4763 : loss : 0.272518, supervised_loss: 0.271958
[21:26:55.951] iteration 4764 : loss : 0.274668, supervised_loss: 0.273724
[21:26:57.471] iteration 4765 : loss : 0.281386, supervised_loss: 0.281018
[21:26:58.382] iteration 4766 : loss : 0.274407, supervised_loss: 0.273894
[21:26:59.293] iteration 4767 : loss : 0.280610, supervised_loss: 0.280142
[21:27:00.206] iteration 4768 : loss : 0.272415, supervised_loss: 0.272017
[21:27:01.119] iteration 4769 : loss : 0.273190, supervised_loss: 0.272852
[21:27:02.031] iteration 4770 : loss : 0.273253, supervised_loss: 0.272904
[21:27:02.943] iteration 4771 : loss : 0.270847, supervised_loss: 0.270469
[21:27:03.856] iteration 4772 : loss : 0.271362, supervised_loss: 0.270772
[21:27:04.768] iteration 4773 : loss : 0.277125, supervised_loss: 0.276745
[21:27:05.680] iteration 4774 : loss : 0.270955, supervised_loss: 0.270458
[21:27:06.592] iteration 4775 : loss : 0.273075, supervised_loss: 0.272665
[21:27:07.504] iteration 4776 : loss : 0.274845, supervised_loss: 0.274128
[21:27:09.048] iteration 4777 : loss : 0.272261, supervised_loss: 0.271961
[21:27:09.960] iteration 4778 : loss : 0.271238, supervised_loss: 0.270913
[21:27:10.872] iteration 4779 : loss : 0.273936, supervised_loss: 0.273675
[21:27:11.785] iteration 4780 : loss : 0.273093, supervised_loss: 0.272670
[21:27:12.697] iteration 4781 : loss : 0.273228, supervised_loss: 0.272921
[21:27:13.610] iteration 4782 : loss : 0.273121, supervised_loss: 0.272513
[21:27:14.521] iteration 4783 : loss : 0.284482, supervised_loss: 0.283634
[21:27:15.433] iteration 4784 : loss : 0.272428, supervised_loss: 0.272131
[21:27:16.346] iteration 4785 : loss : 0.271289, supervised_loss: 0.271002
[21:27:17.257] iteration 4786 : loss : 0.274268, supervised_loss: 0.273671
[21:27:18.170] iteration 4787 : loss : 0.271100, supervised_loss: 0.270130
[21:27:19.082] iteration 4788 : loss : 0.279325, supervised_loss: 0.278879
[21:27:20.779] iteration 4789 : loss : 0.277938, supervised_loss: 0.277297
[21:27:21.690] iteration 4790 : loss : 0.272043, supervised_loss: 0.271773
[21:27:22.602] iteration 4791 : loss : 0.268157, supervised_loss: 0.267519
[21:27:23.514] iteration 4792 : loss : 0.269889, supervised_loss: 0.269511
[21:27:24.426] iteration 4793 : loss : 0.275773, supervised_loss: 0.275127
[21:27:25.337] iteration 4794 : loss : 0.272644, supervised_loss: 0.272171
[21:27:26.248] iteration 4795 : loss : 0.268872, supervised_loss: 0.268322
[21:27:27.160] iteration 4796 : loss : 0.270262, supervised_loss: 0.270020
[21:27:28.072] iteration 4797 : loss : 0.275266, supervised_loss: 0.274801
[21:27:28.984] iteration 4798 : loss : 0.277257, supervised_loss: 0.276958
[21:27:29.896] iteration 4799 : loss : 0.271013, supervised_loss: 0.270516
[21:27:30.808] iteration 4800 : loss : 0.271846, supervised_loss: 0.271310
[21:27:34.350] iteration 4801 : loss : 0.273402, supervised_loss: 0.272812
[21:27:35.262] iteration 4802 : loss : 0.279871, supervised_loss: 0.279362
[21:27:36.175] iteration 4803 : loss : 0.277573, supervised_loss: 0.277216
[21:27:37.087] iteration 4804 : loss : 0.275649, supervised_loss: 0.275110
[21:27:37.999] iteration 4805 : loss : 0.274056, supervised_loss: 0.273549
[21:27:38.911] iteration 4806 : loss : 0.270662, supervised_loss: 0.270214
[21:27:39.822] iteration 4807 : loss : 0.285554, supervised_loss: 0.285185
[21:27:40.735] iteration 4808 : loss : 0.270052, supervised_loss: 0.269534
[21:27:41.648] iteration 4809 : loss : 0.271894, supervised_loss: 0.271529
[21:27:42.560] iteration 4810 : loss : 0.273508, supervised_loss: 0.272755
[21:27:43.471] iteration 4811 : loss : 0.270843, supervised_loss: 0.270393
[21:27:44.382] iteration 4812 : loss : 0.270836, supervised_loss: 0.270391
[21:27:46.001] iteration 4813 : loss : 0.273594, supervised_loss: 0.273016
[21:27:46.914] iteration 4814 : loss : 0.282769, supervised_loss: 0.281998
[21:27:47.826] iteration 4815 : loss : 0.272867, supervised_loss: 0.272193
[21:27:48.740] iteration 4816 : loss : 0.273352, supervised_loss: 0.272787
[21:27:49.652] iteration 4817 : loss : 0.268277, supervised_loss: 0.267814
[21:27:50.563] iteration 4818 : loss : 0.278661, supervised_loss: 0.278016
[21:27:51.475] iteration 4819 : loss : 0.280651, supervised_loss: 0.280378
[21:27:52.388] iteration 4820 : loss : 0.269601, supervised_loss: 0.269082
[21:27:53.301] iteration 4821 : loss : 0.272488, supervised_loss: 0.271945
[21:27:54.213] iteration 4822 : loss : 0.270169, supervised_loss: 0.269805
[21:27:55.125] iteration 4823 : loss : 0.274203, supervised_loss: 0.273952
[21:27:56.038] iteration 4824 : loss : 0.272014, supervised_loss: 0.271576
[21:27:57.608] iteration 4825 : loss : 0.270032, supervised_loss: 0.269560
[21:27:58.520] iteration 4826 : loss : 0.274919, supervised_loss: 0.274608
[21:27:59.433] iteration 4827 : loss : 0.272744, supervised_loss: 0.272300
[21:28:00.345] iteration 4828 : loss : 0.273228, supervised_loss: 0.272870
[21:28:01.257] iteration 4829 : loss : 0.276585, supervised_loss: 0.276205
[21:28:02.170] iteration 4830 : loss : 0.278978, supervised_loss: 0.278462
[21:28:03.082] iteration 4831 : loss : 0.284518, supervised_loss: 0.283578
[21:28:03.994] iteration 4832 : loss : 0.267592, supervised_loss: 0.267292
[21:28:04.907] iteration 4833 : loss : 0.285089, supervised_loss: 0.284694
[21:28:05.819] iteration 4834 : loss : 0.271516, supervised_loss: 0.271069
[21:28:06.731] iteration 4835 : loss : 0.265736, supervised_loss: 0.265304
[21:28:07.643] iteration 4836 : loss : 0.276003, supervised_loss: 0.275661
[21:28:09.126] iteration 4837 : loss : 0.271475, supervised_loss: 0.271003
[21:28:10.037] iteration 4838 : loss : 0.279661, supervised_loss: 0.279370
[21:28:10.949] iteration 4839 : loss : 0.274222, supervised_loss: 0.273886
[21:28:11.861] iteration 4840 : loss : 0.273867, supervised_loss: 0.273515
[21:28:12.775] iteration 4841 : loss : 0.280030, supervised_loss: 0.279726
[21:28:13.689] iteration 4842 : loss : 0.276215, supervised_loss: 0.275946
[21:28:14.602] iteration 4843 : loss : 0.275108, supervised_loss: 0.274030
[21:28:15.516] iteration 4844 : loss : 0.269425, supervised_loss: 0.268949
[21:28:16.431] iteration 4845 : loss : 0.276548, supervised_loss: 0.275922
[21:28:17.347] iteration 4846 : loss : 0.271407, supervised_loss: 0.270956
[21:28:18.261] iteration 4847 : loss : 0.278397, supervised_loss: 0.277939
[21:28:19.175] iteration 4848 : loss : 0.272347, supervised_loss: 0.271814
[21:28:20.786] iteration 4849 : loss : 0.269714, supervised_loss: 0.269192
[21:28:21.700] iteration 4850 : loss : 0.280168, supervised_loss: 0.279612
[21:28:22.614] iteration 4851 : loss : 0.269065, supervised_loss: 0.268231
[21:28:23.528] iteration 4852 : loss : 0.269735, supervised_loss: 0.269407
[21:28:24.443] iteration 4853 : loss : 0.270437, supervised_loss: 0.269497
[21:28:25.358] iteration 4854 : loss : 0.269938, supervised_loss: 0.269546
[21:28:26.273] iteration 4855 : loss : 0.272944, supervised_loss: 0.272522
[21:28:27.186] iteration 4856 : loss : 0.271554, supervised_loss: 0.270865
[21:28:28.099] iteration 4857 : loss : 0.282728, supervised_loss: 0.282372
[21:28:29.011] iteration 4858 : loss : 0.287017, supervised_loss: 0.286662
[21:28:29.924] iteration 4859 : loss : 0.272934, supervised_loss: 0.272428
[21:28:30.837] iteration 4860 : loss : 0.276872, supervised_loss: 0.276630
[21:28:32.449] iteration 4861 : loss : 0.274160, supervised_loss: 0.273813
[21:28:33.362] iteration 4862 : loss : 0.272459, supervised_loss: 0.271960
[21:28:34.273] iteration 4863 : loss : 0.275420, supervised_loss: 0.274960
[21:28:35.186] iteration 4864 : loss : 0.275827, supervised_loss: 0.275327
[21:28:36.098] iteration 4865 : loss : 0.279274, supervised_loss: 0.278888
[21:28:37.011] iteration 4866 : loss : 0.279735, supervised_loss: 0.279066
[21:28:37.923] iteration 4867 : loss : 0.270368, supervised_loss: 0.269904
[21:28:38.834] iteration 4868 : loss : 0.269768, supervised_loss: 0.269416
[21:28:39.746] iteration 4869 : loss : 0.277202, supervised_loss: 0.276764
[21:28:40.659] iteration 4870 : loss : 0.270532, supervised_loss: 0.270194
[21:28:41.573] iteration 4871 : loss : 0.277829, supervised_loss: 0.277216
[21:28:42.485] iteration 4872 : loss : 0.277998, supervised_loss: 0.277082
[21:28:44.041] iteration 4873 : loss : 0.273829, supervised_loss: 0.273375
[21:28:44.953] iteration 4874 : loss : 0.272625, supervised_loss: 0.272086
[21:28:45.866] iteration 4875 : loss : 0.277028, supervised_loss: 0.276668
[21:28:46.777] iteration 4876 : loss : 0.273775, supervised_loss: 0.273325
[21:28:47.689] iteration 4877 : loss : 0.274451, supervised_loss: 0.273865
[21:28:48.601] iteration 4878 : loss : 0.273872, supervised_loss: 0.273358
[21:28:49.513] iteration 4879 : loss : 0.277628, supervised_loss: 0.277045
[21:28:50.426] iteration 4880 : loss : 0.271913, supervised_loss: 0.271562
[21:28:51.339] iteration 4881 : loss : 0.266956, supervised_loss: 0.266392
[21:28:52.252] iteration 4882 : loss : 0.274338, supervised_loss: 0.273814
[21:28:53.164] iteration 4883 : loss : 0.275117, supervised_loss: 0.274656
[21:28:54.077] iteration 4884 : loss : 0.273904, supervised_loss: 0.273487
[21:28:55.675] iteration 4885 : loss : 0.277786, supervised_loss: 0.277232
[21:28:56.588] iteration 4886 : loss : 0.274148, supervised_loss: 0.273692
[21:28:57.565] iteration 4887 : loss : 0.275724, supervised_loss: 0.274663
[21:28:58.478] iteration 4888 : loss : 0.276346, supervised_loss: 0.275961
[21:28:59.389] iteration 4889 : loss : 0.275474, supervised_loss: 0.274958
[21:29:00.301] iteration 4890 : loss : 0.272456, supervised_loss: 0.272015
[21:29:01.213] iteration 4891 : loss : 0.280542, supervised_loss: 0.279864
[21:29:02.126] iteration 4892 : loss : 0.273723, supervised_loss: 0.272607
[21:29:03.057] iteration 4893 : loss : 0.274164, supervised_loss: 0.272935
[21:29:03.969] iteration 4894 : loss : 0.268697, supervised_loss: 0.268263
[21:29:04.881] iteration 4895 : loss : 0.273381, supervised_loss: 0.272765
[21:29:05.794] iteration 4896 : loss : 0.272098, supervised_loss: 0.271738
[21:29:07.310] iteration 4897 : loss : 0.275706, supervised_loss: 0.275246
[21:29:08.222] iteration 4898 : loss : 0.272078, supervised_loss: 0.271410
[21:29:09.133] iteration 4899 : loss : 0.276914, supervised_loss: 0.276564
[21:29:10.046] iteration 4900 : loss : 0.273365, supervised_loss: 0.272299
[21:29:10.960] iteration 4901 : loss : 0.274155, supervised_loss: 0.273836
[21:29:11.872] iteration 4902 : loss : 0.273006, supervised_loss: 0.272528
[21:29:12.784] iteration 4903 : loss : 0.269567, supervised_loss: 0.269178
[21:29:13.697] iteration 4904 : loss : 0.273169, supervised_loss: 0.272695
[21:29:14.610] iteration 4905 : loss : 0.271835, supervised_loss: 0.271175
[21:29:15.525] iteration 4906 : loss : 0.272899, supervised_loss: 0.272511
[21:29:16.438] iteration 4907 : loss : 0.272317, supervised_loss: 0.271604
[21:29:17.350] iteration 4908 : loss : 0.285881, supervised_loss: 0.285379
[21:29:18.933] iteration 4909 : loss : 0.281353, supervised_loss: 0.280997
[21:29:19.845] iteration 4910 : loss : 0.272008, supervised_loss: 0.271563
[21:29:20.759] iteration 4911 : loss : 0.271612, supervised_loss: 0.271141
[21:29:21.671] iteration 4912 : loss : 0.271302, supervised_loss: 0.270947
[21:29:22.584] iteration 4913 : loss : 0.282005, supervised_loss: 0.281512
[21:29:23.495] iteration 4914 : loss : 0.271638, supervised_loss: 0.271128
[21:29:24.408] iteration 4915 : loss : 0.272745, supervised_loss: 0.272190
[21:29:25.321] iteration 4916 : loss : 0.268574, supervised_loss: 0.268272
[21:29:26.235] iteration 4917 : loss : 0.272009, supervised_loss: 0.270906
[21:29:27.148] iteration 4918 : loss : 0.272767, supervised_loss: 0.272234
[21:29:28.060] iteration 4919 : loss : 0.267473, supervised_loss: 0.267143
[21:29:28.973] iteration 4920 : loss : 0.275934, supervised_loss: 0.275616
[21:29:30.543] iteration 4921 : loss : 0.275123, supervised_loss: 0.274755
[21:29:31.455] iteration 4922 : loss : 0.269278, supervised_loss: 0.269025
[21:29:32.367] iteration 4923 : loss : 0.273070, supervised_loss: 0.272586
[21:29:33.278] iteration 4924 : loss : 0.279177, supervised_loss: 0.278721
[21:29:34.190] iteration 4925 : loss : 0.276065, supervised_loss: 0.275666
[21:29:35.102] iteration 4926 : loss : 0.274247, supervised_loss: 0.273846
[21:29:36.015] iteration 4927 : loss : 0.276651, supervised_loss: 0.276198
[21:29:36.929] iteration 4928 : loss : 0.277818, supervised_loss: 0.277451
[21:29:37.842] iteration 4929 : loss : 0.269606, supervised_loss: 0.268797
[21:29:38.756] iteration 4930 : loss : 0.274999, supervised_loss: 0.274644
[21:29:39.668] iteration 4931 : loss : 0.274284, supervised_loss: 0.273854
[21:29:40.580] iteration 4932 : loss : 0.269055, supervised_loss: 0.268505
[21:29:42.254] iteration 4933 : loss : 0.277231, supervised_loss: 0.276562
[21:29:43.166] iteration 4934 : loss : 0.279768, supervised_loss: 0.279319
[21:29:44.078] iteration 4935 : loss : 0.267743, supervised_loss: 0.267421
[21:29:44.990] iteration 4936 : loss : 0.281638, supervised_loss: 0.281340
[21:29:45.903] iteration 4937 : loss : 0.279808, supervised_loss: 0.279272
[21:29:46.816] iteration 4938 : loss : 0.267574, supervised_loss: 0.267049
[21:29:47.728] iteration 4939 : loss : 0.270341, supervised_loss: 0.269779
[21:29:48.640] iteration 4940 : loss : 0.274839, supervised_loss: 0.274465
[21:29:49.553] iteration 4941 : loss : 0.273210, supervised_loss: 0.272884
[21:29:50.467] iteration 4942 : loss : 0.282587, supervised_loss: 0.281912
[21:29:51.381] iteration 4943 : loss : 0.273433, supervised_loss: 0.272823
[21:29:52.293] iteration 4944 : loss : 0.274813, supervised_loss: 0.274415
[21:29:53.920] iteration 4945 : loss : 0.272469, supervised_loss: 0.272059
[21:29:54.830] iteration 4946 : loss : 0.268839, supervised_loss: 0.267761
[21:29:55.741] iteration 4947 : loss : 0.272371, supervised_loss: 0.271958
[21:29:56.653] iteration 4948 : loss : 0.282925, supervised_loss: 0.282274
[21:29:57.566] iteration 4949 : loss : 0.275558, supervised_loss: 0.275087
[21:29:58.478] iteration 4950 : loss : 0.272407, supervised_loss: 0.271924
[21:29:59.390] iteration 4951 : loss : 0.279455, supervised_loss: 0.279156
[21:30:00.302] iteration 4952 : loss : 0.271561, supervised_loss: 0.271102
[21:30:01.216] iteration 4953 : loss : 0.271890, supervised_loss: 0.271409
[21:30:02.128] iteration 4954 : loss : 0.278962, supervised_loss: 0.277589
[21:30:03.040] iteration 4955 : loss : 0.277571, supervised_loss: 0.277046
[21:30:03.954] iteration 4956 : loss : 0.272128, supervised_loss: 0.271848
[21:30:05.572] iteration 4957 : loss : 0.278382, supervised_loss: 0.277908
[21:30:06.483] iteration 4958 : loss : 0.274315, supervised_loss: 0.273789
[21:30:07.395] iteration 4959 : loss : 0.274650, supervised_loss: 0.274348
[21:30:08.307] iteration 4960 : loss : 0.272225, supervised_loss: 0.271817
[21:30:09.220] iteration 4961 : loss : 0.283831, supervised_loss: 0.283377
[21:30:10.132] iteration 4962 : loss : 0.272665, supervised_loss: 0.272218
[21:30:11.046] iteration 4963 : loss : 0.271925, supervised_loss: 0.271512
[21:30:11.959] iteration 4964 : loss : 0.274104, supervised_loss: 0.273728
[21:30:12.872] iteration 4965 : loss : 0.278007, supervised_loss: 0.277520
[21:30:13.785] iteration 4966 : loss : 0.278237, supervised_loss: 0.277675
[21:30:14.697] iteration 4967 : loss : 0.272649, supervised_loss: 0.272243
[21:30:15.611] iteration 4968 : loss : 0.273685, supervised_loss: 0.273075
[21:30:17.159] iteration 4969 : loss : 0.278060, supervised_loss: 0.277628
[21:30:18.073] iteration 4970 : loss : 0.273819, supervised_loss: 0.273441
[21:30:18.986] iteration 4971 : loss : 0.270279, supervised_loss: 0.269647
[21:30:19.898] iteration 4972 : loss : 0.273772, supervised_loss: 0.273265
[21:30:20.810] iteration 4973 : loss : 0.277553, supervised_loss: 0.277069
[21:30:21.723] iteration 4974 : loss : 0.275298, supervised_loss: 0.274922
[21:30:22.636] iteration 4975 : loss : 0.270247, supervised_loss: 0.269729
[21:30:23.550] iteration 4976 : loss : 0.271271, supervised_loss: 0.270679
[21:30:24.465] iteration 4977 : loss : 0.282131, supervised_loss: 0.281701
[21:30:25.377] iteration 4978 : loss : 0.271200, supervised_loss: 0.270673
[21:30:26.291] iteration 4979 : loss : 0.278850, supervised_loss: 0.278450
[21:30:27.203] iteration 4980 : loss : 0.270995, supervised_loss: 0.270550
[21:30:28.769] iteration 4981 : loss : 0.272368, supervised_loss: 0.271947
[21:30:29.680] iteration 4982 : loss : 0.270223, supervised_loss: 0.269839
[21:30:30.594] iteration 4983 : loss : 0.276516, supervised_loss: 0.276005
[21:30:31.506] iteration 4984 : loss : 0.277725, supervised_loss: 0.277313
[21:30:32.417] iteration 4985 : loss : 0.274322, supervised_loss: 0.273847
[21:30:33.329] iteration 4986 : loss : 0.273090, supervised_loss: 0.272691
[21:30:34.241] iteration 4987 : loss : 0.281751, supervised_loss: 0.281220
[21:30:35.154] iteration 4988 : loss : 0.282817, supervised_loss: 0.282136
[21:30:36.066] iteration 4989 : loss : 0.269139, supervised_loss: 0.268559
[21:30:36.977] iteration 4990 : loss : 0.273577, supervised_loss: 0.273123
[21:30:37.891] iteration 4991 : loss : 0.276346, supervised_loss: 0.275725
[21:30:38.804] iteration 4992 : loss : 0.276592, supervised_loss: 0.276138
[21:30:40.421] iteration 4993 : loss : 0.279677, supervised_loss: 0.279334
[21:30:41.334] iteration 4994 : loss : 0.270255, supervised_loss: 0.269487
[21:30:42.248] iteration 4995 : loss : 0.281292, supervised_loss: 0.280779
[21:30:43.160] iteration 4996 : loss : 0.274068, supervised_loss: 0.273671
[21:30:44.073] iteration 4997 : loss : 0.273641, supervised_loss: 0.272969
[21:30:44.985] iteration 4998 : loss : 0.276796, supervised_loss: 0.276114
[21:30:45.898] iteration 4999 : loss : 0.279270, supervised_loss: 0.278860
[21:30:46.812] iteration 5000 : loss : 0.275156, supervised_loss: 0.274545
[21:30:49.679] iteration 5001 : loss : 0.268165, supervised_loss: 0.267869
[21:30:50.592] iteration 5002 : loss : 0.269709, supervised_loss: 0.269172
[21:30:51.504] iteration 5003 : loss : 0.273807, supervised_loss: 0.273392
[21:30:52.416] iteration 5004 : loss : 0.275234, supervised_loss: 0.274804
[21:30:53.938] iteration 5005 : loss : 0.285530, supervised_loss: 0.285119
[21:30:54.850] iteration 5006 : loss : 0.268211, supervised_loss: 0.267623
[21:30:55.762] iteration 5007 : loss : 0.272331, supervised_loss: 0.271734
[21:30:56.675] iteration 5008 : loss : 0.274610, supervised_loss: 0.274089
[21:30:57.587] iteration 5009 : loss : 0.267955, supervised_loss: 0.267474
[21:30:58.498] iteration 5010 : loss : 0.273126, supervised_loss: 0.272608
[21:30:59.411] iteration 5011 : loss : 0.267261, supervised_loss: 0.266896
[21:31:00.324] iteration 5012 : loss : 0.270266, supervised_loss: 0.269524
[21:31:01.239] iteration 5013 : loss : 0.270928, supervised_loss: 0.270395
[21:31:02.152] iteration 5014 : loss : 0.274367, supervised_loss: 0.273877
[21:31:03.065] iteration 5015 : loss : 0.278821, supervised_loss: 0.278288
[21:31:03.977] iteration 5016 : loss : 0.276996, supervised_loss: 0.276542
[21:31:05.553] iteration 5017 : loss : 0.281527, supervised_loss: 0.281155
[21:31:06.465] iteration 5018 : loss : 0.274243, supervised_loss: 0.273779
[21:31:07.379] iteration 5019 : loss : 0.276123, supervised_loss: 0.275569
[21:31:08.293] iteration 5020 : loss : 0.272296, supervised_loss: 0.271787
[21:31:09.205] iteration 5021 : loss : 0.279349, supervised_loss: 0.278792
[21:31:10.116] iteration 5022 : loss : 0.277253, supervised_loss: 0.276793
[21:31:11.029] iteration 5023 : loss : 0.270332, supervised_loss: 0.270002
[21:31:11.941] iteration 5024 : loss : 0.273961, supervised_loss: 0.273239
[21:31:12.853] iteration 5025 : loss : 0.278525, supervised_loss: 0.277711
[21:31:13.765] iteration 5026 : loss : 0.269915, supervised_loss: 0.269538
[21:31:14.678] iteration 5027 : loss : 0.267833, supervised_loss: 0.267424
[21:31:15.591] iteration 5028 : loss : 0.277479, supervised_loss: 0.277109
[21:31:17.090] iteration 5029 : loss : 0.276335, supervised_loss: 0.275699
[21:31:18.003] iteration 5030 : loss : 0.270294, supervised_loss: 0.269807
[21:31:18.915] iteration 5031 : loss : 0.275075, supervised_loss: 0.274604
[21:31:19.828] iteration 5032 : loss : 0.273739, supervised_loss: 0.273257
[21:31:20.740] iteration 5033 : loss : 0.274476, supervised_loss: 0.274088
[21:31:21.654] iteration 5034 : loss : 0.288596, supervised_loss: 0.288231
[21:31:22.568] iteration 5035 : loss : 0.273794, supervised_loss: 0.273359
[21:31:23.481] iteration 5036 : loss : 0.267804, supervised_loss: 0.267433
[21:31:24.394] iteration 5037 : loss : 0.274701, supervised_loss: 0.273699
[21:31:25.307] iteration 5038 : loss : 0.279422, supervised_loss: 0.279040
[21:31:26.219] iteration 5039 : loss : 0.270274, supervised_loss: 0.269959
[21:31:27.132] iteration 5040 : loss : 0.274403, supervised_loss: 0.274035
[21:31:28.677] iteration 5041 : loss : 0.275479, supervised_loss: 0.275049
[21:31:29.589] iteration 5042 : loss : 0.276078, supervised_loss: 0.275301
[21:31:30.501] iteration 5043 : loss : 0.274839, supervised_loss: 0.274159
[21:31:31.415] iteration 5044 : loss : 0.289822, supervised_loss: 0.289485
[21:31:32.330] iteration 5045 : loss : 0.269716, supervised_loss: 0.269375
[21:31:33.243] iteration 5046 : loss : 0.280377, supervised_loss: 0.279813
[21:31:34.155] iteration 5047 : loss : 0.273986, supervised_loss: 0.273560
[21:31:35.068] iteration 5048 : loss : 0.280007, supervised_loss: 0.279574
[21:31:35.981] iteration 5049 : loss : 0.272460, supervised_loss: 0.272067
[21:31:36.895] iteration 5050 : loss : 0.271367, supervised_loss: 0.270996
[21:31:37.809] iteration 5051 : loss : 0.270192, supervised_loss: 0.269377
[21:31:38.722] iteration 5052 : loss : 0.275506, supervised_loss: 0.275102
[21:31:40.346] iteration 5053 : loss : 0.273017, supervised_loss: 0.272492
[21:31:41.257] iteration 5054 : loss : 0.271201, supervised_loss: 0.270555
[21:31:42.171] iteration 5055 : loss : 0.279009, supervised_loss: 0.278525
[21:31:43.085] iteration 5056 : loss : 0.270455, supervised_loss: 0.269692
[21:31:43.998] iteration 5057 : loss : 0.274223, supervised_loss: 0.273260
[21:31:44.911] iteration 5058 : loss : 0.274400, supervised_loss: 0.274053
[21:31:45.824] iteration 5059 : loss : 0.271219, supervised_loss: 0.270912
[21:31:46.737] iteration 5060 : loss : 0.274224, supervised_loss: 0.273849
[21:31:47.649] iteration 5061 : loss : 0.272548, supervised_loss: 0.271719
[21:31:48.563] iteration 5062 : loss : 0.273910, supervised_loss: 0.273341
[21:31:49.476] iteration 5063 : loss : 0.274692, supervised_loss: 0.274323
[21:31:50.391] iteration 5064 : loss : 0.276877, supervised_loss: 0.274495
[21:31:51.903] iteration 5065 : loss : 0.272835, supervised_loss: 0.272359
[21:31:52.816] iteration 5066 : loss : 0.275319, supervised_loss: 0.274938
[21:31:53.728] iteration 5067 : loss : 0.279035, supervised_loss: 0.278713
[21:31:54.641] iteration 5068 : loss : 0.271123, supervised_loss: 0.270673
[21:31:55.555] iteration 5069 : loss : 0.276726, supervised_loss: 0.276439
[21:31:56.469] iteration 5070 : loss : 0.271463, supervised_loss: 0.271011
[21:31:57.382] iteration 5071 : loss : 0.274670, supervised_loss: 0.274170
[21:31:58.294] iteration 5072 : loss : 0.276924, supervised_loss: 0.276131
[21:31:59.208] iteration 5073 : loss : 0.276163, supervised_loss: 0.275529
[21:32:00.121] iteration 5074 : loss : 0.275760, supervised_loss: 0.275373
[21:32:01.034] iteration 5075 : loss : 0.273315, supervised_loss: 0.272970
[21:32:01.947] iteration 5076 : loss : 0.282043, supervised_loss: 0.281674
[21:32:03.463] iteration 5077 : loss : 0.270392, supervised_loss: 0.269984
[21:32:04.376] iteration 5078 : loss : 0.269130, supervised_loss: 0.268875
[21:32:05.288] iteration 5079 : loss : 0.268663, supervised_loss: 0.268306
[21:32:06.200] iteration 5080 : loss : 0.270337, supervised_loss: 0.270116
[21:32:07.114] iteration 5081 : loss : 0.272846, supervised_loss: 0.272478
[21:32:08.027] iteration 5082 : loss : 0.273372, supervised_loss: 0.273021
[21:32:08.939] iteration 5083 : loss : 0.273074, supervised_loss: 0.272568
[21:32:09.851] iteration 5084 : loss : 0.274921, supervised_loss: 0.274467
[21:32:10.765] iteration 5085 : loss : 0.269931, supervised_loss: 0.269497
[21:32:11.677] iteration 5086 : loss : 0.272236, supervised_loss: 0.271561
[21:32:12.592] iteration 5087 : loss : 0.284306, supervised_loss: 0.283841
[21:32:13.507] iteration 5088 : loss : 0.283297, supervised_loss: 0.282961
[21:32:15.116] iteration 5089 : loss : 0.276628, supervised_loss: 0.276015
[21:32:16.029] iteration 5090 : loss : 0.272949, supervised_loss: 0.272479
[21:32:16.941] iteration 5091 : loss : 0.276980, supervised_loss: 0.276491
[21:32:17.853] iteration 5092 : loss : 0.275975, supervised_loss: 0.275444
[21:32:18.765] iteration 5093 : loss : 0.275719, supervised_loss: 0.275167
[21:32:19.679] iteration 5094 : loss : 0.270332, supervised_loss: 0.269928
[21:32:20.593] iteration 5095 : loss : 0.275304, supervised_loss: 0.274869
[21:32:21.506] iteration 5096 : loss : 0.277548, supervised_loss: 0.277185
[21:32:22.419] iteration 5097 : loss : 0.275622, supervised_loss: 0.275298
[21:32:23.333] iteration 5098 : loss : 0.272173, supervised_loss: 0.271720
[21:32:24.246] iteration 5099 : loss : 0.272071, supervised_loss: 0.271551
[21:32:25.159] iteration 5100 : loss : 0.272592, supervised_loss: 0.272154
[21:32:26.673] iteration 5101 : loss : 0.270384, supervised_loss: 0.270086
[21:32:27.585] iteration 5102 : loss : 0.277476, supervised_loss: 0.277076
[21:32:28.497] iteration 5103 : loss : 0.272773, supervised_loss: 0.272406
[21:32:29.410] iteration 5104 : loss : 0.270100, supervised_loss: 0.269757
[21:32:30.324] iteration 5105 : loss : 0.270330, supervised_loss: 0.270049
[21:32:31.237] iteration 5106 : loss : 0.275231, supervised_loss: 0.274748
[21:32:32.150] iteration 5107 : loss : 0.272925, supervised_loss: 0.272323
[21:32:33.061] iteration 5108 : loss : 0.272739, supervised_loss: 0.272352
[21:32:33.975] iteration 5109 : loss : 0.268512, supervised_loss: 0.268090
[21:32:34.887] iteration 5110 : loss : 0.272694, supervised_loss: 0.272004
[21:32:35.801] iteration 5111 : loss : 0.274120, supervised_loss: 0.273781
[21:32:36.714] iteration 5112 : loss : 0.272464, supervised_loss: 0.271540
[21:32:38.240] iteration 5113 : loss : 0.271830, supervised_loss: 0.271442
[21:32:39.151] iteration 5114 : loss : 0.273314, supervised_loss: 0.272981
[21:32:40.063] iteration 5115 : loss : 0.271204, supervised_loss: 0.270594
[21:32:40.975] iteration 5116 : loss : 0.272124, supervised_loss: 0.271485
[21:32:41.890] iteration 5117 : loss : 0.269598, supervised_loss: 0.268715
[21:32:42.803] iteration 5118 : loss : 0.274181, supervised_loss: 0.273742
[21:32:43.716] iteration 5119 : loss : 0.279760, supervised_loss: 0.279184
[21:32:44.629] iteration 5120 : loss : 0.271999, supervised_loss: 0.270856
[21:32:45.541] iteration 5121 : loss : 0.273594, supervised_loss: 0.272917
[21:32:46.454] iteration 5122 : loss : 0.277945, supervised_loss: 0.277547
[21:32:47.367] iteration 5123 : loss : 0.274051, supervised_loss: 0.273668
[21:32:48.280] iteration 5124 : loss : 0.274001, supervised_loss: 0.273626
[21:32:49.909] iteration 5125 : loss : 0.272061, supervised_loss: 0.271448
[21:32:50.821] iteration 5126 : loss : 0.272132, supervised_loss: 0.271583
[21:32:51.734] iteration 5127 : loss : 0.265503, supervised_loss: 0.264987
[21:32:52.646] iteration 5128 : loss : 0.273770, supervised_loss: 0.273445
[21:32:53.558] iteration 5129 : loss : 0.276097, supervised_loss: 0.275770
[21:32:54.471] iteration 5130 : loss : 0.275383, supervised_loss: 0.274903
[21:32:55.384] iteration 5131 : loss : 0.283100, supervised_loss: 0.282694
[21:32:56.296] iteration 5132 : loss : 0.271476, supervised_loss: 0.270934
[21:32:57.208] iteration 5133 : loss : 0.273675, supervised_loss: 0.273093
[21:32:58.122] iteration 5134 : loss : 0.271751, supervised_loss: 0.271220
[21:32:59.034] iteration 5135 : loss : 0.273658, supervised_loss: 0.273161
[21:32:59.946] iteration 5136 : loss : 0.277370, supervised_loss: 0.276900
[21:33:01.546] iteration 5137 : loss : 0.280274, supervised_loss: 0.279642
[21:33:02.459] iteration 5138 : loss : 0.274917, supervised_loss: 0.274432
[21:33:03.372] iteration 5139 : loss : 0.271239, supervised_loss: 0.270780
[21:33:04.284] iteration 5140 : loss : 0.272341, supervised_loss: 0.271853
[21:33:05.205] iteration 5141 : loss : 0.271779, supervised_loss: 0.271303
[21:33:06.118] iteration 5142 : loss : 0.273524, supervised_loss: 0.273124
[21:33:07.031] iteration 5143 : loss : 0.273005, supervised_loss: 0.272627
[21:33:07.944] iteration 5144 : loss : 0.270800, supervised_loss: 0.270248
[21:33:08.855] iteration 5145 : loss : 0.268472, supervised_loss: 0.268153
[21:33:09.769] iteration 5146 : loss : 0.277639, supervised_loss: 0.277186
[21:33:10.682] iteration 5147 : loss : 0.273834, supervised_loss: 0.273559
[21:33:11.594] iteration 5148 : loss : 0.269783, supervised_loss: 0.269235
[21:33:13.225] iteration 5149 : loss : 0.265829, supervised_loss: 0.265491
[21:33:14.139] iteration 5150 : loss : 0.270915, supervised_loss: 0.270505
[21:33:15.052] iteration 5151 : loss : 0.270580, supervised_loss: 0.270173
[21:33:15.965] iteration 5152 : loss : 0.270307, supervised_loss: 0.269695
[21:33:16.876] iteration 5153 : loss : 0.270166, supervised_loss: 0.269877
[21:33:17.789] iteration 5154 : loss : 0.271638, supervised_loss: 0.270777
[21:33:18.702] iteration 5155 : loss : 0.274610, supervised_loss: 0.274218
[21:33:19.614] iteration 5156 : loss : 0.271888, supervised_loss: 0.270983
[21:33:20.527] iteration 5157 : loss : 0.281588, supervised_loss: 0.280812
[21:33:21.440] iteration 5158 : loss : 0.278499, supervised_loss: 0.277832
[21:33:22.353] iteration 5159 : loss : 0.275601, supervised_loss: 0.275217
[21:33:23.266] iteration 5160 : loss : 0.273973, supervised_loss: 0.273404
[21:33:24.935] iteration 5161 : loss : 0.268944, supervised_loss: 0.268468
[21:33:25.848] iteration 5162 : loss : 0.271991, supervised_loss: 0.270981
[21:33:26.762] iteration 5163 : loss : 0.269706, supervised_loss: 0.269467
[21:33:27.674] iteration 5164 : loss : 0.270877, supervised_loss: 0.270410
[21:33:28.586] iteration 5165 : loss : 0.278534, supervised_loss: 0.277633
[21:33:29.499] iteration 5166 : loss : 0.265611, supervised_loss: 0.265214
[21:33:30.411] iteration 5167 : loss : 0.284741, supervised_loss: 0.284350
[21:33:31.324] iteration 5168 : loss : 0.274448, supervised_loss: 0.274065
[21:33:32.238] iteration 5169 : loss : 0.274599, supervised_loss: 0.274250
[21:33:33.151] iteration 5170 : loss : 0.273746, supervised_loss: 0.273320
[21:33:34.064] iteration 5171 : loss : 0.276443, supervised_loss: 0.276030
[21:33:34.976] iteration 5172 : loss : 0.274976, supervised_loss: 0.274490
[21:33:36.593] iteration 5173 : loss : 0.267555, supervised_loss: 0.267280
[21:33:37.505] iteration 5174 : loss : 0.273743, supervised_loss: 0.273477
[21:33:38.417] iteration 5175 : loss : 0.269094, supervised_loss: 0.268625
[21:33:39.330] iteration 5176 : loss : 0.269215, supervised_loss: 0.268891
[21:33:40.240] iteration 5177 : loss : 0.280631, supervised_loss: 0.280209
[21:33:41.152] iteration 5178 : loss : 0.274962, supervised_loss: 0.274591
[21:33:42.063] iteration 5179 : loss : 0.270278, supervised_loss: 0.269453
[21:33:42.977] iteration 5180 : loss : 0.275672, supervised_loss: 0.275144
[21:33:43.889] iteration 5181 : loss : 0.269211, supervised_loss: 0.268864
[21:33:44.801] iteration 5182 : loss : 0.281882, supervised_loss: 0.281545
[21:33:45.713] iteration 5183 : loss : 0.270788, supervised_loss: 0.270521
[21:33:46.626] iteration 5184 : loss : 0.268729, supervised_loss: 0.268405
[21:33:48.171] iteration 5185 : loss : 0.267545, supervised_loss: 0.267063
[21:33:49.083] iteration 5186 : loss : 0.272774, supervised_loss: 0.272318
[21:33:49.996] iteration 5187 : loss : 0.277627, supervised_loss: 0.277240
[21:33:50.908] iteration 5188 : loss : 0.273547, supervised_loss: 0.273224
[21:33:51.820] iteration 5189 : loss : 0.275094, supervised_loss: 0.274743
[21:33:52.734] iteration 5190 : loss : 0.265954, supervised_loss: 0.265411
[21:33:53.646] iteration 5191 : loss : 0.281348, supervised_loss: 0.280762
[21:33:54.559] iteration 5192 : loss : 0.274119, supervised_loss: 0.273545
[21:33:55.470] iteration 5193 : loss : 0.273382, supervised_loss: 0.272723
[21:33:56.384] iteration 5194 : loss : 0.274427, supervised_loss: 0.273857
[21:33:57.297] iteration 5195 : loss : 0.269878, supervised_loss: 0.269311
[21:33:58.210] iteration 5196 : loss : 0.272971, supervised_loss: 0.272650
[21:33:59.706] iteration 5197 : loss : 0.276057, supervised_loss: 0.275711
[21:34:00.620] iteration 5198 : loss : 0.276090, supervised_loss: 0.275649
[21:34:01.532] iteration 5199 : loss : 0.271201, supervised_loss: 0.270819
[21:34:02.444] iteration 5200 : loss : 0.272039, supervised_loss: 0.271548
[21:34:05.328] iteration 5201 : loss : 0.274085, supervised_loss: 0.273452
[21:34:06.241] iteration 5202 : loss : 0.274278, supervised_loss: 0.273800
[21:34:07.153] iteration 5203 : loss : 0.275043, supervised_loss: 0.274688
[21:34:08.065] iteration 5204 : loss : 0.273857, supervised_loss: 0.273581
[21:34:08.979] iteration 5205 : loss : 0.275849, supervised_loss: 0.275514
[21:34:09.891] iteration 5206 : loss : 0.270610, supervised_loss: 0.270027
[21:34:10.805] iteration 5207 : loss : 0.279043, supervised_loss: 0.278627
[21:34:11.718] iteration 5208 : loss : 0.275469, supervised_loss: 0.275095
[21:34:13.416] iteration 5209 : loss : 0.274341, supervised_loss: 0.273986
[21:34:14.326] iteration 5210 : loss : 0.270750, supervised_loss: 0.270211
[21:34:15.240] iteration 5211 : loss : 0.272227, supervised_loss: 0.271826
[21:34:16.154] iteration 5212 : loss : 0.273930, supervised_loss: 0.273468
[21:34:17.067] iteration 5213 : loss : 0.275925, supervised_loss: 0.275518
[21:34:17.978] iteration 5214 : loss : 0.268064, supervised_loss: 0.267670
[21:34:18.890] iteration 5215 : loss : 0.271309, supervised_loss: 0.270694
[21:34:19.803] iteration 5216 : loss : 0.270843, supervised_loss: 0.270220
[21:34:20.716] iteration 5217 : loss : 0.268352, supervised_loss: 0.267827
[21:34:21.629] iteration 5218 : loss : 0.273599, supervised_loss: 0.273223
[21:34:22.543] iteration 5219 : loss : 0.268170, supervised_loss: 0.267765
[21:34:23.455] iteration 5220 : loss : 0.273465, supervised_loss: 0.272484
[21:34:25.027] iteration 5221 : loss : 0.273811, supervised_loss: 0.273276
[21:34:25.941] iteration 5222 : loss : 0.274352, supervised_loss: 0.274001
[21:34:26.855] iteration 5223 : loss : 0.277055, supervised_loss: 0.276157
[21:34:27.767] iteration 5224 : loss : 0.274397, supervised_loss: 0.273813
[21:34:28.679] iteration 5225 : loss : 0.274372, supervised_loss: 0.273968
[21:34:29.592] iteration 5226 : loss : 0.282115, supervised_loss: 0.281839
[21:34:30.504] iteration 5227 : loss : 0.271581, supervised_loss: 0.271314
[21:34:31.418] iteration 5228 : loss : 0.270844, supervised_loss: 0.270418
[21:34:32.332] iteration 5229 : loss : 0.275534, supervised_loss: 0.275066
[21:34:33.243] iteration 5230 : loss : 0.269815, supervised_loss: 0.269441
[21:34:34.156] iteration 5231 : loss : 0.271214, supervised_loss: 0.270686
[21:34:35.069] iteration 5232 : loss : 0.274086, supervised_loss: 0.273449
[21:34:36.627] iteration 5233 : loss : 0.274034, supervised_loss: 0.273579
[21:34:37.539] iteration 5234 : loss : 0.276102, supervised_loss: 0.275727
[21:34:38.452] iteration 5235 : loss : 0.278743, supervised_loss: 0.278185
[21:34:39.365] iteration 5236 : loss : 0.275452, supervised_loss: 0.274758
[21:34:40.278] iteration 5237 : loss : 0.274291, supervised_loss: 0.273677
[21:34:41.192] iteration 5238 : loss : 0.274640, supervised_loss: 0.274199
[21:34:42.104] iteration 5239 : loss : 0.271286, supervised_loss: 0.270828
[21:34:43.026] iteration 5240 : loss : 0.276223, supervised_loss: 0.275811
[21:34:43.941] iteration 5241 : loss : 0.275380, supervised_loss: 0.274711
[21:34:44.853] iteration 5242 : loss : 0.271664, supervised_loss: 0.271285
[21:34:45.764] iteration 5243 : loss : 0.276008, supervised_loss: 0.275449
[21:34:46.676] iteration 5244 : loss : 0.274465, supervised_loss: 0.274092
[21:34:48.229] iteration 5245 : loss : 0.274826, supervised_loss: 0.274467
[21:34:49.151] iteration 5246 : loss : 0.272493, supervised_loss: 0.271985
[21:34:50.063] iteration 5247 : loss : 0.268189, supervised_loss: 0.267778
[21:34:50.977] iteration 5248 : loss : 0.272845, supervised_loss: 0.272397
[21:34:51.889] iteration 5249 : loss : 0.270031, supervised_loss: 0.269640
[21:34:52.802] iteration 5250 : loss : 0.269377, supervised_loss: 0.268850
[21:34:53.714] iteration 5251 : loss : 0.275098, supervised_loss: 0.274702
[21:34:54.626] iteration 5252 : loss : 0.277294, supervised_loss: 0.276778
[21:34:55.539] iteration 5253 : loss : 0.271324, supervised_loss: 0.270896
[21:34:56.452] iteration 5254 : loss : 0.275608, supervised_loss: 0.274945
[21:34:57.365] iteration 5255 : loss : 0.269532, supervised_loss: 0.269172
[21:34:58.278] iteration 5256 : loss : 0.277044, supervised_loss: 0.276623
[21:34:59.830] iteration 5257 : loss : 0.270053, supervised_loss: 0.269444
[21:35:00.743] iteration 5258 : loss : 0.271175, supervised_loss: 0.270832
[21:35:01.656] iteration 5259 : loss : 0.271273, supervised_loss: 0.270938
[21:35:02.568] iteration 5260 : loss : 0.277876, supervised_loss: 0.277371
[21:35:03.481] iteration 5261 : loss : 0.274761, supervised_loss: 0.274167
[21:35:04.394] iteration 5262 : loss : 0.270918, supervised_loss: 0.270323
[21:35:05.306] iteration 5263 : loss : 0.266981, supervised_loss: 0.266611
[21:35:06.219] iteration 5264 : loss : 0.276472, supervised_loss: 0.275811
[21:35:07.132] iteration 5265 : loss : 0.273758, supervised_loss: 0.273356
[21:35:08.046] iteration 5266 : loss : 0.275270, supervised_loss: 0.274871
[21:35:08.959] iteration 5267 : loss : 0.280966, supervised_loss: 0.280444
[21:35:09.873] iteration 5268 : loss : 0.274923, supervised_loss: 0.274388
[21:35:11.486] iteration 5269 : loss : 0.275152, supervised_loss: 0.274750
[21:35:12.397] iteration 5270 : loss : 0.269197, supervised_loss: 0.268774
[21:35:13.310] iteration 5271 : loss : 0.269040, supervised_loss: 0.268624
[21:35:14.224] iteration 5272 : loss : 0.272886, supervised_loss: 0.272357
[21:35:15.137] iteration 5273 : loss : 0.274807, supervised_loss: 0.274324
[21:35:16.049] iteration 5274 : loss : 0.272839, supervised_loss: 0.272299
[21:35:16.962] iteration 5275 : loss : 0.271941, supervised_loss: 0.271501
[21:35:17.874] iteration 5276 : loss : 0.276283, supervised_loss: 0.275400
[21:35:18.787] iteration 5277 : loss : 0.274018, supervised_loss: 0.273430
[21:35:19.700] iteration 5278 : loss : 0.267250, supervised_loss: 0.266825
[21:35:20.611] iteration 5279 : loss : 0.269543, supervised_loss: 0.269023
[21:35:21.523] iteration 5280 : loss : 0.273558, supervised_loss: 0.273075
[21:35:23.120] iteration 5281 : loss : 0.273908, supervised_loss: 0.273406
[21:35:24.031] iteration 5282 : loss : 0.275388, supervised_loss: 0.274791
[21:35:24.943] iteration 5283 : loss : 0.277743, supervised_loss: 0.277319
[21:35:25.855] iteration 5284 : loss : 0.275398, supervised_loss: 0.275028
[21:35:26.767] iteration 5285 : loss : 0.274881, supervised_loss: 0.274371
[21:35:27.679] iteration 5286 : loss : 0.270657, supervised_loss: 0.270183
[21:35:28.592] iteration 5287 : loss : 0.273220, supervised_loss: 0.272801
[21:35:29.503] iteration 5288 : loss : 0.265467, supervised_loss: 0.264867
[21:35:30.416] iteration 5289 : loss : 0.271369, supervised_loss: 0.270912
[21:35:31.329] iteration 5290 : loss : 0.275508, supervised_loss: 0.275184
[21:35:32.241] iteration 5291 : loss : 0.272067, supervised_loss: 0.271576
[21:35:33.153] iteration 5292 : loss : 0.274005, supervised_loss: 0.273675
[21:35:34.722] iteration 5293 : loss : 0.272657, supervised_loss: 0.272092
[21:35:35.634] iteration 5294 : loss : 0.270897, supervised_loss: 0.270486
[21:35:36.547] iteration 5295 : loss : 0.268064, supervised_loss: 0.267669
[21:35:37.460] iteration 5296 : loss : 0.276282, supervised_loss: 0.275399
[21:35:38.373] iteration 5297 : loss : 0.273431, supervised_loss: 0.272875
[21:35:39.287] iteration 5298 : loss : 0.274913, supervised_loss: 0.271497
[21:35:40.201] iteration 5299 : loss : 0.275766, supervised_loss: 0.275181
[21:35:41.113] iteration 5300 : loss : 0.271059, supervised_loss: 0.270768
[21:35:42.025] iteration 5301 : loss : 0.267808, supervised_loss: 0.267445
[21:35:42.938] iteration 5302 : loss : 0.274858, supervised_loss: 0.274441
[21:35:43.850] iteration 5303 : loss : 0.278043, supervised_loss: 0.277504
[21:35:44.763] iteration 5304 : loss : 0.274363, supervised_loss: 0.273811
[21:35:46.267] iteration 5305 : loss : 0.270940, supervised_loss: 0.270461
[21:35:47.180] iteration 5306 : loss : 0.278583, supervised_loss: 0.278125
[21:35:48.093] iteration 5307 : loss : 0.272836, supervised_loss: 0.272344
[21:35:49.006] iteration 5308 : loss : 0.280516, supervised_loss: 0.280053
[21:35:49.921] iteration 5309 : loss : 0.277378, supervised_loss: 0.276967
[21:35:50.834] iteration 5310 : loss : 0.270755, supervised_loss: 0.270164
[21:35:51.746] iteration 5311 : loss : 0.273755, supervised_loss: 0.273219
[21:35:52.658] iteration 5312 : loss : 0.275197, supervised_loss: 0.274787
[21:35:53.571] iteration 5313 : loss : 0.274625, supervised_loss: 0.274145
[21:35:54.484] iteration 5314 : loss : 0.272762, supervised_loss: 0.272318
[21:35:55.397] iteration 5315 : loss : 0.279064, supervised_loss: 0.278563
[21:35:56.311] iteration 5316 : loss : 0.277353, supervised_loss: 0.277043
[21:35:57.852] iteration 5317 : loss : 0.269891, supervised_loss: 0.269370
[21:35:58.764] iteration 5318 : loss : 0.277784, supervised_loss: 0.276941
[21:35:59.676] iteration 5319 : loss : 0.269983, supervised_loss: 0.269479
[21:36:00.589] iteration 5320 : loss : 0.274575, supervised_loss: 0.274159
[21:36:01.501] iteration 5321 : loss : 0.269940, supervised_loss: 0.269120
[21:36:02.415] iteration 5322 : loss : 0.276372, supervised_loss: 0.275915
[21:36:03.329] iteration 5323 : loss : 0.273239, supervised_loss: 0.271942
[21:36:04.241] iteration 5324 : loss : 0.272213, supervised_loss: 0.271622
[21:36:05.154] iteration 5325 : loss : 0.274857, supervised_loss: 0.274411
[21:36:06.068] iteration 5326 : loss : 0.278887, supervised_loss: 0.278461
[21:36:06.982] iteration 5327 : loss : 0.280700, supervised_loss: 0.280097
[21:36:07.894] iteration 5328 : loss : 0.273079, supervised_loss: 0.272631
[21:36:09.413] iteration 5329 : loss : 0.271845, supervised_loss: 0.271337
[21:36:10.324] iteration 5330 : loss : 0.274208, supervised_loss: 0.273615
[21:36:11.237] iteration 5331 : loss : 0.264879, supervised_loss: 0.264399
[21:36:12.150] iteration 5332 : loss : 0.272946, supervised_loss: 0.272525
[21:36:13.062] iteration 5333 : loss : 0.281406, supervised_loss: 0.280723
[21:36:13.976] iteration 5334 : loss : 0.268260, supervised_loss: 0.267711
[21:36:14.889] iteration 5335 : loss : 0.272455, supervised_loss: 0.272049
[21:36:15.802] iteration 5336 : loss : 0.275045, supervised_loss: 0.274557
[21:36:16.715] iteration 5337 : loss : 0.274723, supervised_loss: 0.274279
[21:36:17.629] iteration 5338 : loss : 0.274310, supervised_loss: 0.273823
[21:36:18.543] iteration 5339 : loss : 0.277688, supervised_loss: 0.277322
[21:36:19.456] iteration 5340 : loss : 0.270692, supervised_loss: 0.269420
[21:36:21.131] iteration 5341 : loss : 0.279293, supervised_loss: 0.278921
[21:36:22.043] iteration 5342 : loss : 0.275278, supervised_loss: 0.274937
[21:36:22.955] iteration 5343 : loss : 0.276537, supervised_loss: 0.276094
[21:36:23.867] iteration 5344 : loss : 0.280337, supervised_loss: 0.279940
[21:36:24.780] iteration 5345 : loss : 0.268625, supervised_loss: 0.268340
[21:36:25.692] iteration 5346 : loss : 0.277203, supervised_loss: 0.276800
[21:36:26.605] iteration 5347 : loss : 0.281692, supervised_loss: 0.281104
[21:36:27.519] iteration 5348 : loss : 0.273831, supervised_loss: 0.273415
[21:36:28.431] iteration 5349 : loss : 0.273978, supervised_loss: 0.273067
[21:36:29.345] iteration 5350 : loss : 0.274047, supervised_loss: 0.273641
[21:36:30.257] iteration 5351 : loss : 0.272140, supervised_loss: 0.270980
[21:36:31.170] iteration 5352 : loss : 0.274917, supervised_loss: 0.274234
[21:36:32.666] iteration 5353 : loss : 0.276642, supervised_loss: 0.275933
[21:36:33.578] iteration 5354 : loss : 0.271634, supervised_loss: 0.271075
[21:36:34.489] iteration 5355 : loss : 0.283871, supervised_loss: 0.283536
[21:36:35.402] iteration 5356 : loss : 0.270535, supervised_loss: 0.270064
[21:36:36.314] iteration 5357 : loss : 0.271939, supervised_loss: 0.271646
[21:36:37.226] iteration 5358 : loss : 0.268958, supervised_loss: 0.268279
[21:36:38.139] iteration 5359 : loss : 0.270868, supervised_loss: 0.270459
[21:36:39.053] iteration 5360 : loss : 0.271325, supervised_loss: 0.270284
[21:36:39.966] iteration 5361 : loss : 0.278349, supervised_loss: 0.278020
[21:36:40.879] iteration 5362 : loss : 0.271720, supervised_loss: 0.270987
[21:36:41.792] iteration 5363 : loss : 0.276056, supervised_loss: 0.275550
[21:36:42.705] iteration 5364 : loss : 0.271421, supervised_loss: 0.270905
[21:36:44.352] iteration 5365 : loss : 0.269575, supervised_loss: 0.268927
[21:36:45.263] iteration 5366 : loss : 0.276182, supervised_loss: 0.275756
[21:36:46.177] iteration 5367 : loss : 0.271524, supervised_loss: 0.271073
[21:36:47.089] iteration 5368 : loss : 0.280672, supervised_loss: 0.279755
[21:36:48.001] iteration 5369 : loss : 0.274494, supervised_loss: 0.274108
[21:36:48.913] iteration 5370 : loss : 0.271169, supervised_loss: 0.270776
[21:36:49.825] iteration 5371 : loss : 0.282207, supervised_loss: 0.281793
[21:36:50.737] iteration 5372 : loss : 0.267969, supervised_loss: 0.267337
[21:36:51.650] iteration 5373 : loss : 0.271794, supervised_loss: 0.271448
[21:36:52.563] iteration 5374 : loss : 0.277437, supervised_loss: 0.276905
[21:36:53.476] iteration 5375 : loss : 0.276992, supervised_loss: 0.276286
[21:36:54.389] iteration 5376 : loss : 0.273902, supervised_loss: 0.273656
[21:36:55.980] iteration 5377 : loss : 0.275706, supervised_loss: 0.275145
[21:36:56.893] iteration 5378 : loss : 0.274038, supervised_loss: 0.273288
[21:36:57.804] iteration 5379 : loss : 0.273044, supervised_loss: 0.272514
[21:36:58.716] iteration 5380 : loss : 0.277458, supervised_loss: 0.276649
[21:36:59.629] iteration 5381 : loss : 0.274448, supervised_loss: 0.273740
[21:37:00.543] iteration 5382 : loss : 0.269794, supervised_loss: 0.269506
[21:37:01.455] iteration 5383 : loss : 0.272967, supervised_loss: 0.272112
[21:37:02.368] iteration 5384 : loss : 0.271114, supervised_loss: 0.270656
[21:37:03.281] iteration 5385 : loss : 0.275492, supervised_loss: 0.274932
[21:37:04.194] iteration 5386 : loss : 0.278237, supervised_loss: 0.277815
[21:37:05.107] iteration 5387 : loss : 0.273711, supervised_loss: 0.273257
[21:37:06.020] iteration 5388 : loss : 0.277741, supervised_loss: 0.277456
[21:37:07.607] iteration 5389 : loss : 0.275242, supervised_loss: 0.274723
[21:37:08.519] iteration 5390 : loss : 0.269825, supervised_loss: 0.269456
[21:37:09.432] iteration 5391 : loss : 0.274556, supervised_loss: 0.273849
[21:37:10.343] iteration 5392 : loss : 0.272027, supervised_loss: 0.271724
[21:37:11.255] iteration 5393 : loss : 0.278132, supervised_loss: 0.277754
[21:37:12.169] iteration 5394 : loss : 0.273515, supervised_loss: 0.273059
[21:37:13.083] iteration 5395 : loss : 0.276919, supervised_loss: 0.276420
[21:37:13.996] iteration 5396 : loss : 0.273674, supervised_loss: 0.273182
[21:37:14.910] iteration 5397 : loss : 0.279518, supervised_loss: 0.279135
[21:37:15.821] iteration 5398 : loss : 0.272929, supervised_loss: 0.272203
[21:37:16.733] iteration 5399 : loss : 0.271200, supervised_loss: 0.270677
[21:37:17.648] iteration 5400 : loss : 0.269802, supervised_loss: 0.269388
[21:37:19.718] save best model to model/LA_vnet_25_labeled/URPC/iter_5400_dice_0.9241932034492493.pth
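
Note: the "save best model" line above embeds the validation Dice score (~0.9242) at which the best checkpoint was refreshed. As a minimal sketch, assuming standard PyTorch checkpointing and that the .pth file holds a plain state_dict (neither of which this log confirms), the score can be read back out of the filename and the weights reloaded like so; `net` is a hypothetical handle to the same V-Net/URPC architecture used for training:

    import re

    ckpt_path = "model/LA_vnet_25_labeled/URPC/iter_5400_dice_0.9241932034492493.pth"

    # The validation Dice at which the best checkpoint was refreshed is
    # encoded directly in the filename.
    dice = float(re.search(r"dice_([0-9.]+)\.pth$", ckpt_path).group(1))
    print(f"best validation Dice so far: {dice:.4f}")   # -> 0.9242

    # Reloading the weights (assumption: the file stores a plain state_dict
    # and `net` is the matching V-Net/URPC model instance):
    #   import torch
    #   net.load_state_dict(torch.load(ckpt_path, map_location="cpu"))
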
[21:37:21.278] iteration 5401 : loss : 0.285321, supervised_loss: 0.284791
[21:37:22.189] iteration 5402 : loss : 0.277680, supervised_loss: 0.277047
[21:37:23.102] iteration 5403 : loss : 0.280653, supervised_loss: 0.279976
[21:37:24.039] iteration 5404 : loss : 0.277696, supervised_loss: 0.277205
[21:37:24.951] iteration 5405 : loss : 0.270204, supervised_loss: 0.269324
[21:37:25.864] iteration 5406 : loss : 0.277781, supervised_loss: 0.277416
[21:37:26.775] iteration 5407 : loss : 0.270884, supervised_loss: 0.270247
[21:37:27.688] iteration 5408 : loss : 0.270306, supervised_loss: 0.269988
[21:37:28.601] iteration 5409 : loss : 0.274232, supervised_loss: 0.273848
[21:37:29.514] iteration 5410 : loss : 0.270388, supervised_loss: 0.269876
[21:37:30.426] iteration 5411 : loss : 0.277541, supervised_loss: 0.276949
[21:37:31.340] iteration 5412 : loss : 0.273659, supervised_loss: 0.273262
[21:37:32.951] iteration 5413 : loss : 0.270390, supervised_loss: 0.269593
[21:37:33.863] iteration 5414 : loss : 0.276967, supervised_loss: 0.276530
[21:37:34.774] iteration 5415 : loss : 0.283988, supervised_loss: 0.283263
[21:37:35.686] iteration 5416 : loss : 0.277293, supervised_loss: 0.276623
[21:37:36.599] iteration 5417 : loss : 0.268301, supervised_loss: 0.267655
[21:37:37.511] iteration 5418 : loss : 0.267937, supervised_loss: 0.267562
[21:37:38.423] iteration 5419 : loss : 0.269796, supervised_loss: 0.269202
[21:37:39.336] iteration 5420 : loss : 0.268853, supervised_loss: 0.268486
[21:37:40.249] iteration 5421 : loss : 0.271992, supervised_loss: 0.271250
[21:37:41.236] iteration 5422 : loss : 0.274308, supervised_loss: 0.273700
[21:37:42.149] iteration 5423 : loss : 0.275170, supervised_loss: 0.274642
[21:37:43.063] iteration 5424 : loss : 0.277826, supervised_loss: 0.277360
[21:37:44.571] iteration 5425 : loss : 0.276958, supervised_loss: 0.276498
[21:37:45.484] iteration 5426 : loss : 0.271738, supervised_loss: 0.271402
[21:37:46.398] iteration 5427 : loss : 0.272627, supervised_loss: 0.271967
[21:37:47.311] iteration 5428 : loss : 0.272032, supervised_loss: 0.271472
[21:37:48.224] iteration 5429 : loss : 0.274007, supervised_loss: 0.273552
[21:37:49.137] iteration 5430 : loss : 0.272749, supervised_loss: 0.272218
[21:37:50.050] iteration 5431 : loss : 0.274555, supervised_loss: 0.274248
[21:37:50.964] iteration 5432 : loss : 0.270015, supervised_loss: 0.269645
[21:37:51.877] iteration 5433 : loss : 0.272858, supervised_loss: 0.272326
[21:37:52.791] iteration 5434 : loss : 0.272535, supervised_loss: 0.272102
[21:37:53.704] iteration 5435 : loss : 0.276120, supervised_loss: 0.275634
[21:37:54.617] iteration 5436 : loss : 0.278573, supervised_loss: 0.278144
[21:37:56.129] iteration 5437 : loss : 0.273476, supervised_loss: 0.272850
[21:37:57.042] iteration 5438 : loss : 0.273165, supervised_loss: 0.272869
[21:37:57.953] iteration 5439 : loss : 0.272339, supervised_loss: 0.271573
[21:37:58.865] iteration 5440 : loss : 0.271021, supervised_loss: 0.270533
[21:37:59.778] iteration 5441 : loss : 0.279008, supervised_loss: 0.278030
[21:38:00.691] iteration 5442 : loss : 0.271996, supervised_loss: 0.271197
[21:38:01.603] iteration 5443 : loss : 0.269196, supervised_loss: 0.268759
[21:38:02.515] iteration 5444 : loss : 0.271204, supervised_loss: 0.270886
[21:38:03.427] iteration 5445 : loss : 0.278703, supervised_loss: 0.278214
[21:38:04.340] iteration 5446 : loss : 0.275380, supervised_loss: 0.274755
[21:38:05.254] iteration 5447 : loss : 0.275438, supervised_loss: 0.274978
[21:38:06.168] iteration 5448 : loss : 0.275482, supervised_loss: 0.274980
[21:38:07.657] iteration 5449 : loss : 0.273776, supervised_loss: 0.273019
[21:38:08.569] iteration 5450 : loss : 0.280329, supervised_loss: 0.279805
[21:38:09.481] iteration 5451 : loss : 0.272293, supervised_loss: 0.271915
[21:38:10.394] iteration 5452 : loss : 0.272339, supervised_loss: 0.271987
[21:38:11.305] iteration 5453 : loss : 0.273519, supervised_loss: 0.273073
[21:38:12.217] iteration 5454 : loss : 0.279446, supervised_loss: 0.278849
[21:38:13.130] iteration 5455 : loss : 0.273491, supervised_loss: 0.273122
[21:38:14.043] iteration 5456 : loss : 0.271160, supervised_loss: 0.270762
[21:38:14.956] iteration 5457 : loss : 0.271984, supervised_loss: 0.271402
[21:38:15.869] iteration 5458 : loss : 0.279376, supervised_loss: 0.278453
[21:38:16.782] iteration 5459 : loss : 0.276150, supervised_loss: 0.275730
[21:38:17.694] iteration 5460 : loss : 0.271028, supervised_loss: 0.270575
[21:38:19.350] iteration 5461 : loss : 0.269995, supervised_loss: 0.269512
[21:38:20.262] iteration 5462 : loss : 0.268664, supervised_loss: 0.268195
[21:38:21.174] iteration 5463 : loss : 0.270305, supervised_loss: 0.269467
[21:38:22.087] iteration 5464 : loss : 0.273168, supervised_loss: 0.272535
[21:38:23.000] iteration 5465 : loss : 0.270309, supervised_loss: 0.269736
[21:38:23.912] iteration 5466 : loss : 0.269874, supervised_loss: 0.269400
[21:38:24.825] iteration 5467 : loss : 0.269992, supervised_loss: 0.269522
[21:38:25.740] iteration 5468 : loss : 0.269320, supervised_loss: 0.268841
[21:38:26.654] iteration 5469 : loss : 0.277174, supervised_loss: 0.276736
[21:38:27.566] iteration 5470 : loss : 0.272095, supervised_loss: 0.271800
[21:38:28.478] iteration 5471 : loss : 0.269110, supervised_loss: 0.268590
[21:38:29.392] iteration 5472 : loss : 0.285779, supervised_loss: 0.285330
[21:38:30.896] iteration 5473 : loss : 0.271160, supervised_loss: 0.270473
[21:38:31.811] iteration 5474 : loss : 0.268863, supervised_loss: 0.268438
[21:38:32.724] iteration 5475 : loss : 0.271861, supervised_loss: 0.271471
[21:38:33.635] iteration 5476 : loss : 0.273596, supervised_loss: 0.273191
[21:38:34.548] iteration 5477 : loss : 0.268420, supervised_loss: 0.268013
[21:38:35.460] iteration 5478 : loss : 0.280585, supervised_loss: 0.280272
[21:38:36.372] iteration 5479 : loss : 0.274152, supervised_loss: 0.273825
[21:38:37.286] iteration 5480 : loss : 0.273054, supervised_loss: 0.272576
[21:38:38.199] iteration 5481 : loss : 0.271640, supervised_loss: 0.270531
[21:38:39.111] iteration 5482 : loss : 0.269612, supervised_loss: 0.269111
[21:38:40.024] iteration 5483 : loss : 0.269638, supervised_loss: 0.269048
[21:38:40.938] iteration 5484 : loss : 0.276061, supervised_loss: 0.275088
[21:38:42.560] iteration 5485 : loss : 0.269968, supervised_loss: 0.269060
[21:38:43.472] iteration 5486 : loss : 0.273476, supervised_loss: 0.273074
[21:38:44.384] iteration 5487 : loss : 0.272818, supervised_loss: 0.272105
[21:38:45.297] iteration 5488 : loss : 0.269581, supervised_loss: 0.269102
[21:38:46.209] iteration 5489 : loss : 0.266894, supervised_loss: 0.266447
[21:38:47.122] iteration 5490 : loss : 0.275855, supervised_loss: 0.275037
[21:38:48.035] iteration 5491 : loss : 0.272913, supervised_loss: 0.271698
[21:38:48.947] iteration 5492 : loss : 0.273451, supervised_loss: 0.272956
[21:38:49.859] iteration 5493 : loss : 0.267671, supervised_loss: 0.267199
[21:38:50.771] iteration 5494 : loss : 0.271463, supervised_loss: 0.271008
[21:38:51.684] iteration 5495 : loss : 0.276708, supervised_loss: 0.276043
[21:38:52.597] iteration 5496 : loss : 0.272119, supervised_loss: 0.271693
[21:38:54.129] iteration 5497 : loss : 0.272700, supervised_loss: 0.272422
[21:38:55.043] iteration 5498 : loss : 0.274066, supervised_loss: 0.273719
[21:38:55.956] iteration 5499 : loss : 0.280866, supervised_loss: 0.280419
[21:38:56.869] iteration 5500 : loss : 0.275169, supervised_loss: 0.274703
[21:38:57.782] iteration 5501 : loss : 0.275376, supervised_loss: 0.274985
[21:38:58.695] iteration 5502 : loss : 0.273783, supervised_loss: 0.273327
[21:38:59.608] iteration 5503 : loss : 0.269666, supervised_loss: 0.269137
[21:39:00.520] iteration 5504 : loss : 0.273794, supervised_loss: 0.273436
[21:39:01.433] iteration 5505 : loss : 0.271794, supervised_loss: 0.271259
[21:39:02.347] iteration 5506 : loss : 0.272918, supervised_loss: 0.272430
[21:39:03.260] iteration 5507 : loss : 0.274814, supervised_loss: 0.274363
[21:39:04.175] iteration 5508 : loss : 0.270925, supervised_loss: 0.270361
[21:39:05.696] iteration 5509 : loss : 0.269761, supervised_loss: 0.269154
[21:39:06.608] iteration 5510 : loss : 0.270400, supervised_loss: 0.269943
[21:39:07.521] iteration 5511 : loss : 0.272310, supervised_loss: 0.271731
[21:39:08.434] iteration 5512 : loss : 0.268985, supervised_loss: 0.268623
[21:39:09.345] iteration 5513 : loss : 0.269479, supervised_loss: 0.268957
[21:39:10.257] iteration 5514 : loss : 0.279622, supervised_loss: 0.279254
[21:39:11.169] iteration 5515 : loss : 0.275358, supervised_loss: 0.274895
[21:39:12.081] iteration 5516 : loss : 0.275258, supervised_loss: 0.274763
[21:39:12.994] iteration 5517 : loss : 0.273599, supervised_loss: 0.273130
[21:39:13.908] iteration 5518 : loss : 0.270649, supervised_loss: 0.270366
[21:39:14.821] iteration 5519 : loss : 0.271344, supervised_loss: 0.270847
[21:39:15.733] iteration 5520 : loss : 0.270824, supervised_loss: 0.270311
[21:39:17.289] iteration 5521 : loss : 0.279434, supervised_loss: 0.278812
[21:39:18.204] iteration 5522 : loss : 0.269155, supervised_loss: 0.268670
[21:39:19.115] iteration 5523 : loss : 0.269212, supervised_loss: 0.268689
[21:39:20.027] iteration 5524 : loss : 0.270602, supervised_loss: 0.270150
[21:39:20.940] iteration 5525 : loss : 0.269932, supervised_loss: 0.269568
[21:39:21.852] iteration 5526 : loss : 0.270342, supervised_loss: 0.269966
[21:39:22.764] iteration 5527 : loss : 0.271983, supervised_loss: 0.271586
[21:39:23.678] iteration 5528 : loss : 0.272174, supervised_loss: 0.271848
[21:39:24.590] iteration 5529 : loss : 0.276117, supervised_loss: 0.275818
[21:39:25.503] iteration 5530 : loss : 0.265662, supervised_loss: 0.265095
[21:39:26.415] iteration 5531 : loss : 0.282211, supervised_loss: 0.281787
[21:39:27.329] iteration 5532 : loss : 0.270117, supervised_loss: 0.269460
[21:39:28.958] iteration 5533 : loss : 0.276999, supervised_loss: 0.276388
[21:39:29.871] iteration 5534 : loss : 0.270240, supervised_loss: 0.269895
[21:39:30.783] iteration 5535 : loss : 0.273934, supervised_loss: 0.273404
[21:39:31.695] iteration 5536 : loss : 0.272480, supervised_loss: 0.272135
[21:39:32.607] iteration 5537 : loss : 0.274763, supervised_loss: 0.274296
[21:39:33.520] iteration 5538 : loss : 0.274159, supervised_loss: 0.273871
[21:39:34.433] iteration 5539 : loss : 0.273543, supervised_loss: 0.273072
[21:39:35.347] iteration 5540 : loss : 0.271332, supervised_loss: 0.271096
[21:39:36.261] iteration 5541 : loss : 0.268528, supervised_loss: 0.267831
[21:39:37.175] iteration 5542 : loss : 0.272333, supervised_loss: 0.271607
[21:39:38.088] iteration 5543 : loss : 0.273887, supervised_loss: 0.273342
[21:39:39.000] iteration 5544 : loss : 0.271321, supervised_loss: 0.270676
[21:39:40.556] iteration 5545 : loss : 0.270241, supervised_loss: 0.269856
[21:39:41.470] iteration 5546 : loss : 0.270034, supervised_loss: 0.269654
[21:39:42.381] iteration 5547 : loss : 0.276847, supervised_loss: 0.276385
[21:39:43.294] iteration 5548 : loss : 0.272492, supervised_loss: 0.272064
[21:39:44.207] iteration 5549 : loss : 0.269859, supervised_loss: 0.269340
[21:39:45.120] iteration 5550 : loss : 0.269744, supervised_loss: 0.269385
[21:39:46.032] iteration 5551 : loss : 0.273667, supervised_loss: 0.273280
[21:39:46.945] iteration 5552 : loss : 0.272203, supervised_loss: 0.271756
[21:39:47.859] iteration 5553 : loss : 0.270165, supervised_loss: 0.269816
[21:39:48.772] iteration 5554 : loss : 0.276396, supervised_loss: 0.275934
[21:39:49.684] iteration 5555 : loss : 0.272058, supervised_loss: 0.271507
[21:39:50.597] iteration 5556 : loss : 0.270652, supervised_loss: 0.270025
[21:39:52.248] iteration 5557 : loss : 0.276999, supervised_loss: 0.276586
[21:39:53.159] iteration 5558 : loss : 0.270485, supervised_loss: 0.270128
[21:39:54.071] iteration 5559 : loss : 0.275702, supervised_loss: 0.274996
[21:39:54.985] iteration 5560 : loss : 0.271191, supervised_loss: 0.270772
[21:39:55.897] iteration 5561 : loss : 0.271772, supervised_loss: 0.271141
[21:39:56.809] iteration 5562 : loss : 0.270797, supervised_loss: 0.270266
[21:39:57.721] iteration 5563 : loss : 0.273058, supervised_loss: 0.272709
[21:39:58.634] iteration 5564 : loss : 0.282836, supervised_loss: 0.282170
[21:39:59.548] iteration 5565 : loss : 0.269557, supervised_loss: 0.269131
[21:40:00.462] iteration 5566 : loss : 0.273039, supervised_loss: 0.272075
[21:40:01.375] iteration 5567 : loss : 0.286805, supervised_loss: 0.286453
[21:40:02.288] iteration 5568 : loss : 0.270577, supervised_loss: 0.270162
[21:40:03.900] iteration 5569 : loss : 0.271545, supervised_loss: 0.271026
[21:40:04.813] iteration 5570 : loss : 0.277650, supervised_loss: 0.277220
[21:40:05.724] iteration 5571 : loss : 0.275880, supervised_loss: 0.275545
[21:40:06.636] iteration 5572 : loss : 0.277536, supervised_loss: 0.276107
[21:40:07.548] iteration 5573 : loss : 0.272029, supervised_loss: 0.271467
[21:40:08.462] iteration 5574 : loss : 0.276235, supervised_loss: 0.275663
[21:40:09.375] iteration 5575 : loss : 0.270622, supervised_loss: 0.270158
[21:40:10.287] iteration 5576 : loss : 0.271411, supervised_loss: 0.270985
[21:40:11.200] iteration 5577 : loss : 0.268821, supervised_loss: 0.268309
[21:40:12.112] iteration 5578 : loss : 0.271514, supervised_loss: 0.270994
[21:40:13.025] iteration 5579 : loss : 0.266875, supervised_loss: 0.266544
[21:40:13.937] iteration 5580 : loss : 0.272287, supervised_loss: 0.271979
[21:40:15.512] iteration 5581 : loss : 0.270039, supervised_loss: 0.269552
[21:40:16.424] iteration 5582 : loss : 0.269175, supervised_loss: 0.268703
[21:40:17.337] iteration 5583 : loss : 0.274990, supervised_loss: 0.274562
[21:40:18.250] iteration 5584 : loss : 0.275264, supervised_loss: 0.274755
[21:40:19.161] iteration 5585 : loss : 0.272539, supervised_loss: 0.272103
[21:40:20.074] iteration 5586 : loss : 0.274600, supervised_loss: 0.273985
[21:40:20.986] iteration 5587 : loss : 0.277019, supervised_loss: 0.276516
[21:40:21.899] iteration 5588 : loss : 0.271880, supervised_loss: 0.271316
[21:40:22.812] iteration 5589 : loss : 0.273372, supervised_loss: 0.272830
[21:40:23.725] iteration 5590 : loss : 0.272837, supervised_loss: 0.272239
[21:40:24.639] iteration 5591 : loss : 0.275098, supervised_loss: 0.274652
[21:40:25.550] iteration 5592 : loss : 0.276873, supervised_loss: 0.276277
[21:40:27.110] iteration 5593 : loss : 0.272019, supervised_loss: 0.271585
[21:40:28.024] iteration 5594 : loss : 0.271744, supervised_loss: 0.271192
[21:40:28.937] iteration 5595 : loss : 0.275328, supervised_loss: 0.274836
[21:40:29.849] iteration 5596 : loss : 0.272617, supervised_loss: 0.272296
[21:40:30.762] iteration 5597 : loss : 0.271968, supervised_loss: 0.271432
[21:40:31.675] iteration 5598 : loss : 0.275186, supervised_loss: 0.274692
[21:40:32.588] iteration 5599 : loss : 0.276311, supervised_loss: 0.275820
[21:40:33.503] iteration 5600 : loss : 0.275883, supervised_loss: 0.275225
[21:40:36.346] iteration 5601 : loss : 0.270991, supervised_loss: 0.269997
[21:40:37.258] iteration 5602 : loss : 0.272610, supervised_loss: 0.272286
[21:40:38.171] iteration 5603 : loss : 0.277877, supervised_loss: 0.277379
[21:40:39.084] iteration 5604 : loss : 0.277752, supervised_loss: 0.277368
[21:40:40.706] iteration 5605 : loss : 0.280938, supervised_loss: 0.280249
[21:40:41.619] iteration 5606 : loss : 0.268654, supervised_loss: 0.268063
[21:40:42.531] iteration 5607 : loss : 0.275923, supervised_loss: 0.275431
[21:40:43.443] iteration 5608 : loss : 0.270344, supervised_loss: 0.269779
[21:40:44.356] iteration 5609 : loss : 0.280395, supervised_loss: 0.279905
[21:40:45.268] iteration 5610 : loss : 0.270217, supervised_loss: 0.269698
[21:40:46.180] iteration 5611 : loss : 0.273169, supervised_loss: 0.272560
[21:40:47.092] iteration 5612 : loss : 0.271230, supervised_loss: 0.270686
[21:40:48.005] iteration 5613 : loss : 0.268874, supervised_loss: 0.268227
[21:40:48.918] iteration 5614 : loss : 0.274343, supervised_loss: 0.273818
[21:40:49.832] iteration 5615 : loss : 0.274512, supervised_loss: 0.273956
[21:40:50.744] iteration 5616 : loss : 0.272039, supervised_loss: 0.271709
[21:40:52.294] iteration 5617 : loss : 0.270305, supervised_loss: 0.269366
[21:40:53.206] iteration 5618 : loss : 0.269966, supervised_loss: 0.269167
[21:40:54.118] iteration 5619 : loss : 0.270120, supervised_loss: 0.269252
[21:40:55.031] iteration 5620 : loss : 0.274636, supervised_loss: 0.274150
[21:40:55.944] iteration 5621 : loss : 0.269134, supervised_loss: 0.268673
[21:40:56.856] iteration 5622 : loss : 0.273050, supervised_loss: 0.272627
[21:40:57.769] iteration 5623 : loss : 0.276624, supervised_loss: 0.275815
[21:40:58.682] iteration 5624 : loss : 0.272556, supervised_loss: 0.272089
[21:40:59.594] iteration 5625 : loss : 0.275759, supervised_loss: 0.275079
[21:41:00.509] iteration 5626 : loss : 0.268696, supervised_loss: 0.268098
[21:41:01.421] iteration 5627 : loss : 0.272426, supervised_loss: 0.271628
[21:41:02.334] iteration 5628 : loss : 0.271210, supervised_loss: 0.270675
[21:41:03.822] iteration 5629 : loss : 0.270320, supervised_loss: 0.269873
[21:41:04.736] iteration 5630 : loss : 0.277041, supervised_loss: 0.276660
[21:41:05.647] iteration 5631 : loss : 0.278151, supervised_loss: 0.277691
[21:41:06.559] iteration 5632 : loss : 0.278725, supervised_loss: 0.278103
[21:41:07.471] iteration 5633 : loss : 0.276040, supervised_loss: 0.275674
[21:41:08.384] iteration 5634 : loss : 0.270892, supervised_loss: 0.270108
[21:41:09.298] iteration 5635 : loss : 0.274627, supervised_loss: 0.274041
[21:41:10.209] iteration 5636 : loss : 0.279266, supervised_loss: 0.278809
[21:41:11.125] iteration 5637 : loss : 0.276836, supervised_loss: 0.276458
[21:41:12.037] iteration 5638 : loss : 0.270228, supervised_loss: 0.269656
[21:41:12.950] iteration 5639 : loss : 0.275424, supervised_loss: 0.274823
[21:41:13.864] iteration 5640 : loss : 0.270452, supervised_loss: 0.269685
[21:41:15.450] iteration 5641 : loss : 0.272533, supervised_loss: 0.272082
[21:41:16.364] iteration 5642 : loss : 0.277115, supervised_loss: 0.276421
[21:41:17.276] iteration 5643 : loss : 0.276950, supervised_loss: 0.276407
[21:41:18.190] iteration 5644 : loss : 0.290703, supervised_loss: 0.290312
[21:41:19.102] iteration 5645 : loss : 0.273150, supervised_loss: 0.272685
[21:41:20.015] iteration 5646 : loss : 0.277085, supervised_loss: 0.276718
[21:41:20.929] iteration 5647 : loss : 0.274902, supervised_loss: 0.274563
[21:41:21.843] iteration 5648 : loss : 0.267809, supervised_loss: 0.267471
[21:41:22.756] iteration 5649 : loss : 0.282785, supervised_loss: 0.281499
[21:41:23.670] iteration 5650 : loss : 0.273220, supervised_loss: 0.272695
[21:41:24.583] iteration 5651 : loss : 0.273400, supervised_loss: 0.272669
[21:41:25.496] iteration 5652 : loss : 0.277578, supervised_loss: 0.277101
[21:41:27.121] iteration 5653 : loss : 0.271682, supervised_loss: 0.271081
[21:41:28.034] iteration 5654 : loss : 0.272009, supervised_loss: 0.271334
[21:41:28.948] iteration 5655 : loss : 0.277275, supervised_loss: 0.276492
[21:41:29.862] iteration 5656 : loss : 0.279248, supervised_loss: 0.278900
[21:41:30.775] iteration 5657 : loss : 0.276933, supervised_loss: 0.276479
[21:41:31.688] iteration 5658 : loss : 0.272171, supervised_loss: 0.271629
[21:41:32.601] iteration 5659 : loss : 0.275320, supervised_loss: 0.274750
[21:41:33.514] iteration 5660 : loss : 0.279090, supervised_loss: 0.278553
[21:41:34.426] iteration 5661 : loss : 0.271805, supervised_loss: 0.271168
[21:41:35.339] iteration 5662 : loss : 0.272763, supervised_loss: 0.272074
[21:41:36.254] iteration 5663 : loss : 0.274615, supervised_loss: 0.273901
[21:41:37.168] iteration 5664 : loss : 0.275163, supervised_loss: 0.274733
[21:41:38.871] iteration 5665 : loss : 0.272366, supervised_loss: 0.271149
[21:41:39.782] iteration 5666 : loss : 0.272472, supervised_loss: 0.271971
[21:41:40.695] iteration 5667 : loss : 0.270510, supervised_loss: 0.270246
[21:41:41.609] iteration 5668 : loss : 0.274415, supervised_loss: 0.273858
[21:41:42.520] iteration 5669 : loss : 0.273153, supervised_loss: 0.272487
[21:41:43.433] iteration 5670 : loss : 0.268677, supervised_loss: 0.268335
[21:41:44.346] iteration 5671 : loss : 0.270709, supervised_loss: 0.270259
[21:41:45.258] iteration 5672 : loss : 0.272472, supervised_loss: 0.272058
[21:41:46.170] iteration 5673 : loss : 0.273171, supervised_loss: 0.272787
[21:41:47.084] iteration 5674 : loss : 0.282344, supervised_loss: 0.281488
[21:41:47.998] iteration 5675 : loss : 0.276895, supervised_loss: 0.276539
[21:41:48.912] iteration 5676 : loss : 0.272500, supervised_loss: 0.272115
[21:41:50.409] iteration 5677 : loss : 0.269524, supervised_loss: 0.268994
[21:41:51.323] iteration 5678 : loss : 0.276889, supervised_loss: 0.276540
[21:41:52.236] iteration 5679 : loss : 0.270265, supervised_loss: 0.269507
[21:41:53.148] iteration 5680 : loss : 0.271673, supervised_loss: 0.271267
[21:41:54.060] iteration 5681 : loss : 0.275698, supervised_loss: 0.275347
[21:41:54.973] iteration 5682 : loss : 0.271863, supervised_loss: 0.271328
[21:41:55.886] iteration 5683 : loss : 0.273798, supervised_loss: 0.273480
[21:41:56.799] iteration 5684 : loss : 0.271917, supervised_loss: 0.271413
[21:41:57.713] iteration 5685 : loss : 0.278056, supervised_loss: 0.277519
[21:41:58.626] iteration 5686 : loss : 0.272245, supervised_loss: 0.271633
[21:41:59.539] iteration 5687 : loss : 0.276689, supervised_loss: 0.275922
[21:42:00.453] iteration 5688 : loss : 0.277229, supervised_loss: 0.276797
[21:42:02.148] iteration 5689 : loss : 0.274111, supervised_loss: 0.273308
[21:42:03.061] iteration 5690 : loss : 0.270945, supervised_loss: 0.270567
[21:42:03.974] iteration 5691 : loss : 0.279684, supervised_loss: 0.279102
[21:42:04.887] iteration 5692 : loss : 0.277545, supervised_loss: 0.276415
[21:42:05.799] iteration 5693 : loss : 0.272495, supervised_loss: 0.272119
[21:42:06.711] iteration 5694 : loss : 0.270253, supervised_loss: 0.269820
[21:42:07.624] iteration 5695 : loss : 0.270091, supervised_loss: 0.269376
[21:42:08.538] iteration 5696 : loss : 0.266791, supervised_loss: 0.266280
[21:42:09.452] iteration 5697 : loss : 0.274654, supervised_loss: 0.274046
[21:42:10.365] iteration 5698 : loss : 0.274193, supervised_loss: 0.273615
[21:42:11.277] iteration 5699 : loss : 0.277099, supervised_loss: 0.276371
[21:42:12.189] iteration 5700 : loss : 0.283047, supervised_loss: 0.282571
[21:42:13.804] iteration 5701 : loss : 0.269222, supervised_loss: 0.268799
[21:42:14.716] iteration 5702 : loss : 0.276941, supervised_loss: 0.276492
[21:42:15.629] iteration 5703 : loss : 0.267020, supervised_loss: 0.266458
[21:42:16.542] iteration 5704 : loss : 0.273065, supervised_loss: 0.272530
[21:42:17.455] iteration 5705 : loss : 0.273406, supervised_loss: 0.273080
[21:42:18.370] iteration 5706 : loss : 0.275842, supervised_loss: 0.275316
[21:42:19.282] iteration 5707 : loss : 0.277828, supervised_loss: 0.277194
[21:42:20.195] iteration 5708 : loss : 0.268202, supervised_loss: 0.267735
[21:42:21.107] iteration 5709 : loss : 0.276382, supervised_loss: 0.275988
[21:42:22.020] iteration 5710 : loss : 0.269783, supervised_loss: 0.269375
[21:42:22.933] iteration 5711 : loss : 0.272120, supervised_loss: 0.271371
[21:42:23.847] iteration 5712 : loss : 0.281199, supervised_loss: 0.280860
[21:42:25.419] iteration 5713 : loss : 0.274334, supervised_loss: 0.273761
[21:42:26.332] iteration 5714 : loss : 0.277838, supervised_loss: 0.276944
[21:42:27.245] iteration 5715 : loss : 0.267814, supervised_loss: 0.267368
[21:42:28.157] iteration 5716 : loss : 0.276368, supervised_loss: 0.275701
[21:42:29.069] iteration 5717 : loss : 0.269193, supervised_loss: 0.268791
[21:42:29.982] iteration 5718 : loss : 0.276811, supervised_loss: 0.276313
[21:42:30.894] iteration 5719 : loss : 0.275547, supervised_loss: 0.274966
[21:42:31.806] iteration 5720 : loss : 0.272903, supervised_loss: 0.272492
[21:42:32.719] iteration 5721 : loss : 0.277422, supervised_loss: 0.277026
[21:42:33.632] iteration 5722 : loss : 0.269771, supervised_loss: 0.269221
[21:42:34.547] iteration 5723 : loss : 0.276248, supervised_loss: 0.275875
[21:42:35.460] iteration 5724 : loss : 0.273000, supervised_loss: 0.272598
[21:42:36.961] iteration 5725 : loss : 0.269881, supervised_loss: 0.269520
[21:42:37.873] iteration 5726 : loss : 0.271804, supervised_loss: 0.271390
[21:42:38.786] iteration 5727 : loss : 0.280618, supervised_loss: 0.280176
[21:42:39.700] iteration 5728 : loss : 0.280727, supervised_loss: 0.279769
[21:42:40.614] iteration 5729 : loss : 0.268946, supervised_loss: 0.268412
[21:42:41.528] iteration 5730 : loss : 0.270718, supervised_loss: 0.270163
[21:42:42.440] iteration 5731 : loss : 0.265525, supervised_loss: 0.265192
[21:42:43.354] iteration 5732 : loss : 0.272278, supervised_loss: 0.271980
[21:42:44.267] iteration 5733 : loss : 0.267210, supervised_loss: 0.266805
[21:42:45.179] iteration 5734 : loss : 0.269673, supervised_loss: 0.269237
[21:42:46.093] iteration 5735 : loss : 0.267329, supervised_loss: 0.266570
[21:42:47.007] iteration 5736 : loss : 0.269799, supervised_loss: 0.269201
[21:42:48.533] iteration 5737 : loss : 0.269414, supervised_loss: 0.268915
[21:42:49.445] iteration 5738 : loss : 0.275503, supervised_loss: 0.274782
[21:42:50.357] iteration 5739 : loss : 0.274225, supervised_loss: 0.273622
[21:42:51.270] iteration 5740 : loss : 0.275450, supervised_loss: 0.275010
[21:42:52.182] iteration 5741 : loss : 0.274474, supervised_loss: 0.274030
[21:42:53.096] iteration 5742 : loss : 0.270479, supervised_loss: 0.270189
[21:42:54.008] iteration 5743 : loss : 0.271619, supervised_loss: 0.270769
[21:42:54.921] iteration 5744 : loss : 0.271256, supervised_loss: 0.270703
[21:42:55.833] iteration 5745 : loss : 0.272192, supervised_loss: 0.271634
[21:42:56.746] iteration 5746 : loss : 0.273430, supervised_loss: 0.273074
[21:42:57.659] iteration 5747 : loss : 0.270460, supervised_loss: 0.270145
[21:42:58.572] iteration 5748 : loss : 0.285140, supervised_loss: 0.284563
[21:43:00.153] iteration 5749 : loss : 0.276499, supervised_loss: 0.275987
[21:43:01.067] iteration 5750 : loss : 0.271573, supervised_loss: 0.271143
[21:43:01.979] iteration 5751 : loss : 0.273582, supervised_loss: 0.272330
[21:43:02.892] iteration 5752 : loss : 0.271594, supervised_loss: 0.271100
[21:43:03.806] iteration 5753 : loss : 0.267004, supervised_loss: 0.266603
[21:43:04.718] iteration 5754 : loss : 0.270790, supervised_loss: 0.269142
[21:43:05.630] iteration 5755 : loss : 0.277724, supervised_loss: 0.277309
[21:43:06.544] iteration 5756 : loss : 0.269367, supervised_loss: 0.268942
[21:43:07.458] iteration 5757 : loss : 0.268164, supervised_loss: 0.267770
[21:43:08.371] iteration 5758 : loss : 0.276424, supervised_loss: 0.275672
[21:43:09.283] iteration 5759 : loss : 0.268639, supervised_loss: 0.267637
[21:43:10.197] iteration 5760 : loss : 0.278020, supervised_loss: 0.277095
[21:43:11.701] iteration 5761 : loss : 0.270159, supervised_loss: 0.269491
[21:43:12.615] iteration 5762 : loss : 0.273364, supervised_loss: 0.272980
[21:43:13.528] iteration 5763 : loss : 0.271007, supervised_loss: 0.270495
[21:43:14.442] iteration 5764 : loss : 0.269999, supervised_loss: 0.269333
[21:43:15.355] iteration 5765 : loss : 0.282426, supervised_loss: 0.281674
[21:43:16.267] iteration 5766 : loss : 0.275840, supervised_loss: 0.275424
[21:43:17.180] iteration 5767 : loss : 0.275260, supervised_loss: 0.274684
[21:43:18.093] iteration 5768 : loss : 0.275454, supervised_loss: 0.274932
[21:43:19.005] iteration 5769 : loss : 0.272562, supervised_loss: 0.272224
[21:43:19.918] iteration 5770 : loss : 0.276824, supervised_loss: 0.276084
[21:43:20.831] iteration 5771 : loss : 0.273659, supervised_loss: 0.273289
[21:43:21.745] iteration 5772 : loss : 0.286013, supervised_loss: 0.285569
[21:43:23.343] iteration 5773 : loss : 0.269866, supervised_loss: 0.269251
[21:43:24.255] iteration 5774 : loss : 0.272271, supervised_loss: 0.271915
[21:43:25.169] iteration 5775 : loss : 0.277086, supervised_loss: 0.276386
[21:43:26.081] iteration 5776 : loss : 0.271277, supervised_loss: 0.270697
[21:43:26.993] iteration 5777 : loss : 0.271306, supervised_loss: 0.270951
[21:43:27.905] iteration 5778 : loss : 0.274742, supervised_loss: 0.274141
[21:43:28.817] iteration 5779 : loss : 0.271254, supervised_loss: 0.270855
[21:43:29.730] iteration 5780 : loss : 0.273527, supervised_loss: 0.273012
[21:43:30.644] iteration 5781 : loss : 0.273695, supervised_loss: 0.273316
[21:43:31.559] iteration 5782 : loss : 0.273885, supervised_loss: 0.273164
[21:43:32.474] iteration 5783 : loss : 0.280228, supervised_loss: 0.279812
[21:43:33.386] iteration 5784 : loss : 0.271191, supervised_loss: 0.270688
[21:43:34.951] iteration 5785 : loss : 0.269379, supervised_loss: 0.268989
[21:43:35.862] iteration 5786 : loss : 0.269129, supervised_loss: 0.268493
[21:43:36.775] iteration 5787 : loss : 0.276322, supervised_loss: 0.275768
[21:43:37.688] iteration 5788 : loss : 0.266410, supervised_loss: 0.265990
[21:43:38.602] iteration 5789 : loss : 0.275021, supervised_loss: 0.274344
[21:43:39.516] iteration 5790 : loss : 0.269506, supervised_loss: 0.269054
[21:43:40.429] iteration 5791 : loss : 0.275581, supervised_loss: 0.275217
[21:43:41.343] iteration 5792 : loss : 0.270413, supervised_loss: 0.269804
[21:43:42.256] iteration 5793 : loss : 0.267217, supervised_loss: 0.266715
[21:43:43.171] iteration 5794 : loss : 0.280549, supervised_loss: 0.279970
[21:43:44.084] iteration 5795 : loss : 0.269590, supervised_loss: 0.268945
[21:43:44.997] iteration 5796 : loss : 0.269074, supervised_loss: 0.268322
[21:43:46.580] iteration 5797 : loss : 0.274436, supervised_loss: 0.273767
[21:43:47.495] iteration 5798 : loss : 0.275667, supervised_loss: 0.275048
[21:43:48.407] iteration 5799 : loss : 0.273030, supervised_loss: 0.272610
[21:43:49.319] iteration 5800 : loss : 0.279348, supervised_loss: 0.278536
[21:43:52.188] iteration 5801 : loss : 0.274141, supervised_loss: 0.273686
[21:43:53.100] iteration 5802 : loss : 0.269610, supervised_loss: 0.269298
[21:43:54.012] iteration 5803 : loss : 0.266571, supervised_loss: 0.266235
[21:43:54.925] iteration 5804 : loss : 0.274226, supervised_loss: 0.273931
[21:43:55.839] iteration 5805 : loss : 0.269328, supervised_loss: 0.268851
[21:43:56.752] iteration 5806 : loss : 0.273719, supervised_loss: 0.273406
[21:43:57.664] iteration 5807 : loss : 0.271528, supervised_loss: 0.270942
[21:43:58.575] iteration 5808 : loss : 0.265463, supervised_loss: 0.265084
[21:44:00.077] iteration 5809 : loss : 0.275167, supervised_loss: 0.274416
[21:44:00.989] iteration 5810 : loss : 0.266191, supervised_loss: 0.265757
[21:44:01.902] iteration 5811 : loss : 0.270234, supervised_loss: 0.269610
[21:44:02.815] iteration 5812 : loss : 0.266137, supervised_loss: 0.265485
[21:44:03.727] iteration 5813 : loss : 0.272867, supervised_loss: 0.272317
[21:44:04.639] iteration 5814 : loss : 0.275379, supervised_loss: 0.274992
[21:44:05.551] iteration 5815 : loss : 0.277021, supervised_loss: 0.276582
[21:44:06.464] iteration 5816 : loss : 0.277831, supervised_loss: 0.277170
[21:44:07.377] iteration 5817 : loss : 0.268856, supervised_loss: 0.268216
[21:44:08.289] iteration 5818 : loss : 0.272177, supervised_loss: 0.271730
[21:44:09.202] iteration 5819 : loss : 0.270118, supervised_loss: 0.269538
[21:44:10.116] iteration 5820 : loss : 0.267372, supervised_loss: 0.266765
[21:44:11.624] iteration 5821 : loss : 0.268512, supervised_loss: 0.267951
[21:44:12.538] iteration 5822 : loss : 0.268004, supervised_loss: 0.267472
[21:44:13.450] iteration 5823 : loss : 0.274765, supervised_loss: 0.274283
[21:44:14.364] iteration 5824 : loss : 0.269707, supervised_loss: 0.269107
[21:44:15.276] iteration 5825 : loss : 0.276302, supervised_loss: 0.275484
[21:44:16.188] iteration 5826 : loss : 0.270921, supervised_loss: 0.270547
[21:44:17.101] iteration 5827 : loss : 0.271300, supervised_loss: 0.270970
[21:44:18.013] iteration 5828 : loss : 0.273609, supervised_loss: 0.273238
[21:44:18.925] iteration 5829 : loss : 0.271399, supervised_loss: 0.270884
[21:44:19.838] iteration 5830 : loss : 0.273964, supervised_loss: 0.273494
[21:44:20.750] iteration 5831 : loss : 0.272219, supervised_loss: 0.271811
[21:44:21.662] iteration 5832 : loss : 0.271769, supervised_loss: 0.271396
[21:44:23.292] iteration 5833 : loss : 0.268059, supervised_loss: 0.267635
[21:44:24.205] iteration 5834 : loss : 0.271270, supervised_loss: 0.270797
[21:44:25.117] iteration 5835 : loss : 0.274620, supervised_loss: 0.274211
[21:44:26.030] iteration 5836 : loss : 0.274550, supervised_loss: 0.274093
[21:44:26.942] iteration 5837 : loss : 0.275474, supervised_loss: 0.274787
[21:44:27.853] iteration 5838 : loss : 0.278630, supervised_loss: 0.278351
[21:44:28.765] iteration 5839 : loss : 0.267624, supervised_loss: 0.266874
[21:44:29.677] iteration 5840 : loss : 0.274510, supervised_loss: 0.274119
[21:44:30.589] iteration 5841 : loss : 0.274445, supervised_loss: 0.273770
[21:44:31.501] iteration 5842 : loss : 0.273925, supervised_loss: 0.273556
[21:44:32.413] iteration 5843 : loss : 0.267927, supervised_loss: 0.267585
[21:44:33.323] iteration 5844 : loss : 0.272334, supervised_loss: 0.271566
[21:44:34.822] iteration 5845 : loss : 0.271971, supervised_loss: 0.271282
[21:44:35.734] iteration 5846 : loss : 0.269414, supervised_loss: 0.268933
[21:44:36.647] iteration 5847 : loss : 0.271603, supervised_loss: 0.271130
[21:44:37.559] iteration 5848 : loss : 0.275394, supervised_loss: 0.274819
[21:44:38.472] iteration 5849 : loss : 0.273454, supervised_loss: 0.273073
[21:44:39.384] iteration 5850 : loss : 0.269291, supervised_loss: 0.268893
[21:44:40.296] iteration 5851 : loss : 0.277351, supervised_loss: 0.276957
[21:44:41.209] iteration 5852 : loss : 0.271804, supervised_loss: 0.271445
[21:44:42.123] iteration 5853 : loss : 0.283993, supervised_loss: 0.283717
[21:44:43.034] iteration 5854 : loss : 0.276507, supervised_loss: 0.276081
[21:44:43.946] iteration 5855 : loss : 0.271971, supervised_loss: 0.271368
[21:44:44.860] iteration 5856 : loss : 0.274340, supervised_loss: 0.273814
[21:44:46.537] iteration 5857 : loss : 0.274347, supervised_loss: 0.273946
[21:44:47.447] iteration 5858 : loss : 0.271494, supervised_loss: 0.271058
[21:44:48.359] iteration 5859 : loss : 0.272563, supervised_loss: 0.272126
[21:44:49.271] iteration 5860 : loss : 0.281232, supervised_loss: 0.280689
[21:44:50.183] iteration 5861 : loss : 0.273550, supervised_loss: 0.273060
[21:44:51.096] iteration 5862 : loss : 0.268933, supervised_loss: 0.268593
[21:44:52.008] iteration 5863 : loss : 0.274377, supervised_loss: 0.273773
[21:44:52.921] iteration 5864 : loss : 0.272794, supervised_loss: 0.272415
[21:44:53.834] iteration 5865 : loss : 0.270640, supervised_loss: 0.270041
[21:44:54.746] iteration 5866 : loss : 0.274546, supervised_loss: 0.274126
[21:44:55.660] iteration 5867 : loss : 0.274780, supervised_loss: 0.274015
[21:44:56.573] iteration 5868 : loss : 0.275915, supervised_loss: 0.275428
[21:44:58.063] iteration 5869 : loss : 0.272006, supervised_loss: 0.271632
[21:44:58.977] iteration 5870 : loss : 0.270478, supervised_loss: 0.270128
[21:44:59.888] iteration 5871 : loss : 0.269324, supervised_loss: 0.268838
[21:45:00.799] iteration 5872 : loss : 0.271703, supervised_loss: 0.271381
[21:45:01.711] iteration 5873 : loss : 0.268995, supervised_loss: 0.268575
[21:45:02.623] iteration 5874 : loss : 0.274533, supervised_loss: 0.274167
[21:45:03.535] iteration 5875 : loss : 0.279358, supervised_loss: 0.278852
[21:45:04.447] iteration 5876 : loss : 0.269513, supervised_loss: 0.269032
[21:45:05.360] iteration 5877 : loss : 0.269996, supervised_loss: 0.269563
[21:45:06.273] iteration 5878 : loss : 0.272760, supervised_loss: 0.272257
[21:45:07.185] iteration 5879 : loss : 0.274584, supervised_loss: 0.272931
[21:45:08.097] iteration 5880 : loss : 0.275418, supervised_loss: 0.274909
[21:45:09.605] iteration 5881 : loss : 0.269038, supervised_loss: 0.268665
[21:45:10.517] iteration 5882 : loss : 0.274182, supervised_loss: 0.273512
[21:45:11.430] iteration 5883 : loss : 0.274130, supervised_loss: 0.273575
[21:45:12.342] iteration 5884 : loss : 0.273307, supervised_loss: 0.272454
[21:45:13.255] iteration 5885 : loss : 0.276169, supervised_loss: 0.275556
[21:45:14.170] iteration 5886 : loss : 0.276570, supervised_loss: 0.275889
[21:45:15.083] iteration 5887 : loss : 0.269409, supervised_loss: 0.268809
[21:45:15.994] iteration 5888 : loss : 0.273999, supervised_loss: 0.273334
[21:45:16.910] iteration 5889 : loss : 0.269789, supervised_loss: 0.269447
[21:45:17.821] iteration 5890 : loss : 0.271862, supervised_loss: 0.271175
[21:45:18.733] iteration 5891 : loss : 0.270976, supervised_loss: 0.270500
[21:45:19.646] iteration 5892 : loss : 0.268804, supervised_loss: 0.268180
[21:45:21.348] iteration 5893 : loss : 0.278100, supervised_loss: 0.277626
[21:45:22.260] iteration 5894 : loss : 0.269006, supervised_loss: 0.268314
[21:45:23.172] iteration 5895 : loss : 0.271204, supervised_loss: 0.270099
[21:45:24.086] iteration 5896 : loss : 0.278258, supervised_loss: 0.277420
[21:45:24.997] iteration 5897 : loss : 0.274572, supervised_loss: 0.274058
[21:45:25.910] iteration 5898 : loss : 0.273478, supervised_loss: 0.273132
[21:45:26.822] iteration 5899 : loss : 0.275173, supervised_loss: 0.274554
[21:45:27.735] iteration 5900 : loss : 0.275299, supervised_loss: 0.274682
[21:45:28.647] iteration 5901 : loss : 0.269062, supervised_loss: 0.268662
[21:45:29.560] iteration 5902 : loss : 0.272943, supervised_loss: 0.272483
[21:45:30.473] iteration 5903 : loss : 0.273574, supervised_loss: 0.273226
[21:45:31.386] iteration 5904 : loss : 0.276084, supervised_loss: 0.275493
[21:45:33.034] iteration 5905 : loss : 0.280838, supervised_loss: 0.280220
[21:45:33.945] iteration 5906 : loss : 0.274914, supervised_loss: 0.274209
[21:45:34.857] iteration 5907 : loss : 0.268844, supervised_loss: 0.268410
[21:45:35.769] iteration 5908 : loss : 0.272308, supervised_loss: 0.271949
[21:45:36.680] iteration 5909 : loss : 0.273614, supervised_loss: 0.273247
[21:45:37.592] iteration 5910 : loss : 0.273960, supervised_loss: 0.273480
[21:45:38.505] iteration 5911 : loss : 0.274722, supervised_loss: 0.274326
[21:45:39.417] iteration 5912 : loss : 0.271286, supervised_loss: 0.270668
[21:45:40.330] iteration 5913 : loss : 0.275874, supervised_loss: 0.275218
[21:45:41.242] iteration 5914 : loss : 0.274316, supervised_loss: 0.273873
[21:45:42.154] iteration 5915 : loss : 0.268966, supervised_loss: 0.268527
[21:45:43.067] iteration 5916 : loss : 0.266561, supervised_loss: 0.266104
[21:45:44.583] iteration 5917 : loss : 0.269187, supervised_loss: 0.268721
[21:45:45.496] iteration 5918 : loss : 0.271333, supervised_loss: 0.270928
[21:45:46.408] iteration 5919 : loss : 0.270854, supervised_loss: 0.270314
[21:45:47.321] iteration 5920 : loss : 0.268410, supervised_loss: 0.268052
[21:45:48.233] iteration 5921 : loss : 0.270109, supervised_loss: 0.269630
[21:45:49.145] iteration 5922 : loss : 0.274536, supervised_loss: 0.273927
[21:45:50.058] iteration 5923 : loss : 0.271226, supervised_loss: 0.270844
[21:45:50.971] iteration 5924 : loss : 0.278623, supervised_loss: 0.277771
[21:45:51.883] iteration 5925 : loss : 0.274153, supervised_loss: 0.273802
[21:45:52.796] iteration 5926 : loss : 0.274177, supervised_loss: 0.273645
[21:45:53.708] iteration 5927 : loss : 0.271466, supervised_loss: 0.271150
[21:45:54.620] iteration 5928 : loss : 0.276925, supervised_loss: 0.276090
[21:45:56.107] iteration 5929 : loss : 0.266451, supervised_loss: 0.266029
[21:45:57.019] iteration 5930 : loss : 0.270343, supervised_loss: 0.270009
[21:45:57.932] iteration 5931 : loss : 0.269774, supervised_loss: 0.269454
[21:45:58.844] iteration 5932 : loss : 0.283366, supervised_loss: 0.282841
[21:45:59.757] iteration 5933 : loss : 0.276349, supervised_loss: 0.275645
[21:46:00.670] iteration 5934 : loss : 0.274138, supervised_loss: 0.273416
[21:46:01.582] iteration 5935 : loss : 0.267152, supervised_loss: 0.266477
[21:46:02.494] iteration 5936 : loss : 0.268000, supervised_loss: 0.267629
[21:46:03.407] iteration 5937 : loss : 0.269573, supervised_loss: 0.268195
[21:46:04.321] iteration 5938 : loss : 0.272508, supervised_loss: 0.272154
[21:46:05.235] iteration 5939 : loss : 0.272858, supervised_loss: 0.271901
[21:46:06.149] iteration 5940 : loss : 0.274144, supervised_loss: 0.273787
[21:46:07.682] iteration 5941 : loss : 0.272274, supervised_loss: 0.271835
[21:46:08.595] iteration 5942 : loss : 0.271020, supervised_loss: 0.270356
[21:46:09.507] iteration 5943 : loss : 0.277154, supervised_loss: 0.276729
[21:46:10.419] iteration 5944 : loss : 0.276510, supervised_loss: 0.275988
[21:46:11.332] iteration 5945 : loss : 0.270249, supervised_loss: 0.269811
[21:46:12.245] iteration 5946 : loss : 0.268440, supervised_loss: 0.267709
[21:46:13.158] iteration 5947 : loss : 0.280488, supervised_loss: 0.280121
[21:46:14.072] iteration 5948 : loss : 0.272309, supervised_loss: 0.271955
[21:46:14.987] iteration 5949 : loss : 0.274155, supervised_loss: 0.273754
[21:46:15.901] iteration 5950 : loss : 0.273531, supervised_loss: 0.272969
[21:46:16.814] iteration 5951 : loss : 0.272755, supervised_loss: 0.272317
[21:46:17.728] iteration 5952 : loss : 0.274052, supervised_loss: 0.273560
[21:46:19.242] iteration 5953 : loss : 0.278677, supervised_loss: 0.278243
[21:46:20.155] iteration 5954 : loss : 0.271919, supervised_loss: 0.271511
[21:46:21.068] iteration 5955 : loss : 0.269661, supervised_loss: 0.269089
[21:46:21.979] iteration 5956 : loss : 0.272084, supervised_loss: 0.271658
[21:46:22.892] iteration 5957 : loss : 0.271447, supervised_loss: 0.270877
[21:46:23.803] iteration 5958 : loss : 0.274736, supervised_loss: 0.274220
[21:46:24.715] iteration 5959 : loss : 0.275663, supervised_loss: 0.275192
[21:46:25.628] iteration 5960 : loss : 0.269378, supervised_loss: 0.268957
[21:46:26.540] iteration 5961 : loss : 0.268983, supervised_loss: 0.268418
[21:46:27.453] iteration 5962 : loss : 0.268283, supervised_loss: 0.267654
[21:46:28.366] iteration 5963 : loss : 0.267465, supervised_loss: 0.266832
[21:46:29.279] iteration 5964 : loss : 0.273866, supervised_loss: 0.273453
[21:46:30.772] iteration 5965 : loss : 0.269657, supervised_loss: 0.269068
[21:46:31.683] iteration 5966 : loss : 0.273908, supervised_loss: 0.273413
[21:46:32.595] iteration 5967 : loss : 0.270020, supervised_loss: 0.269506
[21:46:33.507] iteration 5968 : loss : 0.272780, supervised_loss: 0.272359
[21:46:34.420] iteration 5969 : loss : 0.269578, supervised_loss: 0.268999
[21:46:35.332] iteration 5970 : loss : 0.276363, supervised_loss: 0.275911
[21:46:36.244] iteration 5971 : loss : 0.278029, supervised_loss: 0.277356
[21:46:37.156] iteration 5972 : loss : 0.274598, supervised_loss: 0.273975
[21:46:38.069] iteration 5973 : loss : 0.271105, supervised_loss: 0.270799
[21:46:38.983] iteration 5974 : loss : 0.273874, supervised_loss: 0.273580
[21:46:39.894] iteration 5975 : loss : 0.271028, supervised_loss: 0.270516
[21:46:40.808] iteration 5976 : loss : 0.271529, supervised_loss: 0.271167
[21:46:42.364] iteration 5977 : loss : 0.266723, supervised_loss: 0.266412
[21:46:43.276] iteration 5978 : loss : 0.268500, supervised_loss: 0.268052
[21:46:44.190] iteration 5979 : loss : 0.275537, supervised_loss: 0.274957
[21:46:45.102] iteration 5980 : loss : 0.265518, supervised_loss: 0.264251
[21:46:46.014] iteration 5981 : loss : 0.274574, supervised_loss: 0.273998
[21:46:46.927] iteration 5982 : loss : 0.278728, supervised_loss: 0.278274
[21:46:47.840] iteration 5983 : loss : 0.275606, supervised_loss: 0.275128
[21:46:48.754] iteration 5984 : loss : 0.269015, supervised_loss: 0.268638
[21:46:49.666] iteration 5985 : loss : 0.277291, supervised_loss: 0.276736
[21:46:50.578] iteration 5986 : loss : 0.272182, supervised_loss: 0.271789
[21:46:51.490] iteration 5987 : loss : 0.273743, supervised_loss: 0.273316
[21:46:52.403] iteration 5988 : loss : 0.270945, supervised_loss: 0.270460
[21:46:53.941] iteration 5989 : loss : 0.272763, supervised_loss: 0.272247
[21:46:54.851] iteration 5990 : loss : 0.275659, supervised_loss: 0.275321
[21:46:55.763] iteration 5991 : loss : 0.272419, supervised_loss: 0.272088
[21:46:56.675] iteration 5992 : loss : 0.273887, supervised_loss: 0.273401
[21:46:57.588] iteration 5993 : loss : 0.273343, supervised_loss: 0.272812
[21:46:58.500] iteration 5994 : loss : 0.272532, supervised_loss: 0.271715
[21:46:59.413] iteration 5995 : loss : 0.268800, supervised_loss: 0.268253
[21:47:00.325] iteration 5996 : loss : 0.274494, supervised_loss: 0.273901
[21:47:01.237] iteration 5997 : loss : 0.275435, supervised_loss: 0.274892
[21:47:02.149] iteration 5998 : loss : 0.266362, supervised_loss: 0.266028
[21:47:03.062] iteration 5999 : loss : 0.276823, supervised_loss: 0.276388
[21:47:03.975] iteration 6000 : loss : 0.272554, supervised_loss: 0.271997
[21:47:06.061] save model to model/LA_vnet_25_labeled/URPC/iter_6000.pth
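
Note: each log line reports a total `loss` and a `supervised_loss`; the small gap between the two is consistent with a ramped-up unsupervised consistency term, as is typical for URPC-style semi-supervised training. The sketch below is an illustration only, using the standard sigmoid ramp-up of Laine & Aila (2017) with hypothetical values for the base weight and ramp-up length; it is not taken from this training script:

    import numpy as np

    def sigmoid_rampup(current_epoch: float, rampup_epochs: float) -> float:
        """Sigmoid ramp-up: rises smoothly from 0 to 1 over `rampup_epochs`."""
        if rampup_epochs == 0:
            return 1.0
        phase = 1.0 - np.clip(current_epoch, 0.0, rampup_epochs) / rampup_epochs
        return float(np.exp(-5.0 * phase * phase))

    # With a hypothetical base consistency weight of 0.1, the weighted
    # unsupervised term stays well below the supervised term, which matches
    # the roughly 1e-3 gap between `loss` and `supervised_loss` in the log.
    for epoch in (0, 10, 20, 40):
        print(epoch, 0.1 * sigmoid_rampup(epoch, 40.0))
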