log_29_09_2019.log 40 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650
  1. --------[29_09_2019 16:52:06]--------
  2. second stage Hyperparameter Tuning with 1 net
  3. Configuration learning_rate=0.3, decay_step=20
  4. [16:53:20] INIT Loss(val): 0.180713 Accuracy: 0.083435
  5. [16:55:33] Epoch 1: Loss(train): 0.106532 Loss(val): 0.109270
  6. [16:56:20] Epoch 2: Loss(train): 0.095428 Loss(val): 0.097530
  7. [16:57:06] Epoch 3: Loss(train): 0.090024 Loss(val): 0.091654
  8. [16:57:52] Epoch 4: Loss(train): 0.086840 Loss(val): 0.088290
  9. [16:58:38] Epoch 5: Loss(train): 0.085175 Loss(val): 0.086443
  10. [16:59:24] Epoch 6: Loss(train): 0.084256 Loss(val): 0.085231
  11. [17:00:09] Epoch 7: Loss(train): 0.083388 Loss(val): 0.084379
  12. [17:00:54] Epoch 8: Loss(train): 0.082766 Loss(val): 0.083685
  13. [17:01:40] Epoch 9: Loss(train): 0.082271 Loss(val): 0.083217
  14. [17:02:25] Epoch 10: Loss(train): 0.081836 Loss(val): 0.082803
  15. [17:03:10] Epoch 11: Loss(train): 0.081547 Loss(val): 0.082447
  16. [17:03:56] Epoch 12: Loss(train): 0.081178 Loss(val): 0.082092
  17. [17:04:42] Epoch 13: Loss(train): 0.080869 Loss(val): 0.081751
  18. [17:05:27] Epoch 14: Loss(train): 0.080644 Loss(val): 0.081556
  19. [17:06:12] Epoch 15: Loss(train): 0.080391 Loss(val): 0.081279
  20. [17:06:57] Epoch 16: Loss(train): 0.080222 Loss(val): 0.081098
  21. [17:07:43] Epoch 17: Loss(train): 0.080038 Loss(val): 0.080880
  22. [17:08:28] Epoch 18: Loss(train): 0.079911 Loss(val): 0.080777
  23. [17:09:14] Epoch 19: Loss(train): 0.079714 Loss(val): 0.080606
  24. [17:09:59] Epoch 20: Loss(train): 0.079589 Loss(val): 0.080480
  25. [17:10:45] Epoch 21: Loss(train): 0.079406 Loss(val): 0.080328
  26. [17:11:31] Epoch 22: Loss(train): 0.079339 Loss(val): 0.080205
  27. [17:12:16] Epoch 23: Loss(train): 0.079207 Loss(val): 0.080088
  28. [17:13:02] Epoch 24: Loss(train): 0.079084 Loss(val): 0.079978
  29. [17:13:48] Epoch 25: Loss(train): 0.079040 Loss(val): 0.079932
  30. [17:14:35] Epoch 26: Loss(train): 0.078889 Loss(val): 0.079799
  31. [17:15:26] Epoch 27: Loss(train): 0.078822 Loss(val): 0.079725
  32. [17:16:19] Epoch 28: Loss(train): 0.078742 Loss(val): 0.079628
  33. [17:17:12] Epoch 29: Loss(train): 0.078611 Loss(val): 0.079555
  34. [17:18:01] Epoch 30: Loss(train): 0.078586 Loss(val): 0.079513
  35. [17:18:51] Epoch 31: Loss(train): 0.078523 Loss(val): 0.079450
  36. [17:19:43] Epoch 32: Loss(train): 0.078456 Loss(val): 0.079391
  37. [17:20:29] Epoch 33: Loss(train): 0.078401 Loss(val): 0.079339
  38. [17:21:16] Epoch 34: Loss(train): 0.078364 Loss(val): 0.079309
  39. [17:22:03] Epoch 35: Loss(train): 0.078296 Loss(val): 0.079267
  40. [17:22:49] Epoch 36: Loss(train): 0.078242 Loss(val): 0.079220
  41. [17:23:36] Epoch 37: Loss(train): 0.078217 Loss(val): 0.079206
  42. [17:24:27] Epoch 38: Loss(train): 0.078172 Loss(val): 0.079159
  43. [17:25:13] Epoch 39: Loss(train): 0.078125 Loss(val): 0.079112
  44. [17:25:59] Epoch 40: Loss(train): 0.078090 Loss(val): 0.079078
  45. [17:26:47] Epoch 41: Loss(train): 0.078064 Loss(val): 0.079061
  46. [17:27:34] Epoch 42: Loss(train): 0.078024 Loss(val): 0.079030
  47. [17:28:21] Epoch 43: Loss(train): 0.077999 Loss(val): 0.079014
  48. [17:29:07] Epoch 44: Loss(train): 0.077969 Loss(val): 0.079004
  49. [17:29:54] Epoch 45: Loss(train): 0.077942 Loss(val): 0.078974
  50. [17:30:40] Epoch 46: Loss(train): 0.077913 Loss(val): 0.078955
  51. [17:31:26] Epoch 47: Loss(train): 0.077897 Loss(val): 0.078943
  52. [17:32:14] Epoch 48: Loss(train): 0.077872 Loss(val): 0.078943
  53. [17:33:00] Epoch 49: Loss(train): 0.077860 Loss(val): 0.078925
  54. [17:33:46] Epoch 50: Loss(train): 0.077845 Loss(val): 0.078909
  55. Converged at Loss(train): 0.082486, Loss(val): 0.083474 in epoch 50 with accuracy(val): 0.445493
  56. Configuration learning_rate=0.3, decay_step=40
  57. [17:34:11] INIT Loss(val): 0.154576 Accuracy: 0.089405
  58. [17:34:58] Epoch 1: Loss(train): 0.106738 Loss(val): 0.110443
  59. [17:35:46] Epoch 2: Loss(train): 0.094961 Loss(val): 0.097940
  60. [17:36:33] Epoch 3: Loss(train): 0.089637 Loss(val): 0.092018
  61. [17:37:20] Epoch 4: Loss(train): 0.086759 Loss(val): 0.088715
  62. [17:38:07] Epoch 5: Loss(train): 0.085159 Loss(val): 0.086774
  63. [17:38:54] Epoch 6: Loss(train): 0.084000 Loss(val): 0.085421
  64. [17:39:40] Epoch 7: Loss(train): 0.083180 Loss(val): 0.084470
  65. [17:40:26] Epoch 8: Loss(train): 0.082572 Loss(val): 0.083797
  66. [17:41:13] Epoch 9: Loss(train): 0.082123 Loss(val): 0.083295
  67. [17:41:59] Epoch 10: Loss(train): 0.081630 Loss(val): 0.082839
  68. [17:42:45] Epoch 11: Loss(train): 0.081291 Loss(val): 0.082452
  69. [17:43:32] Epoch 12: Loss(train): 0.080972 Loss(val): 0.082084
  70. [17:44:18] Epoch 13: Loss(train): 0.080653 Loss(val): 0.081747
  71. [17:45:04] Epoch 14: Loss(train): 0.080442 Loss(val): 0.081523
  72. [17:45:50] Epoch 15: Loss(train): 0.080123 Loss(val): 0.081291
  73. [17:46:37] Epoch 16: Loss(train): 0.079901 Loss(val): 0.081047
  74. [17:47:24] Epoch 17: Loss(train): 0.079744 Loss(val): 0.080816
  75. [17:48:12] Epoch 18: Loss(train): 0.079591 Loss(val): 0.080666
  76. [17:48:58] Epoch 19: Loss(train): 0.079401 Loss(val): 0.080512
  77. [17:49:45] Epoch 20: Loss(train): 0.079236 Loss(val): 0.080347
  78. [17:50:33] Epoch 21: Loss(train): 0.079109 Loss(val): 0.080221
  79. [17:51:24] Epoch 22: Loss(train): 0.078977 Loss(val): 0.080101
  80. [17:52:12] Epoch 23: Loss(train): 0.078854 Loss(val): 0.080001
  81. [17:52:59] Epoch 24: Loss(train): 0.078805 Loss(val): 0.079867
  82. [17:53:46] Epoch 25: Loss(train): 0.078686 Loss(val): 0.079800
  83. [17:54:36] Epoch 26: Loss(train): 0.078619 Loss(val): 0.079673
  84. [17:55:28] Epoch 27: Loss(train): 0.078528 Loss(val): 0.079612
  85. [17:56:15] Epoch 28: Loss(train): 0.078430 Loss(val): 0.079535
  86. [17:57:02] Epoch 29: Loss(train): 0.078359 Loss(val): 0.079456
  87. [17:57:50] Epoch 30: Loss(train): 0.078281 Loss(val): 0.079384
  88. [17:58:44] Epoch 31: Loss(train): 0.078242 Loss(val): 0.079335
  89. [17:59:33] Epoch 32: Loss(train): 0.078183 Loss(val): 0.079284
  90. [18:00:24] Epoch 33: Loss(train): 0.078123 Loss(val): 0.079225
  91. [18:01:13] Epoch 34: Loss(train): 0.078082 Loss(val): 0.079195
  92. [18:02:00] Epoch 35: Loss(train): 0.078023 Loss(val): 0.079155
  93. [18:02:49] Epoch 36: Loss(train): 0.077983 Loss(val): 0.079103
  94. [18:03:38] Epoch 37: Loss(train): 0.077936 Loss(val): 0.079066
  95. [18:04:26] Epoch 38: Loss(train): 0.077902 Loss(val): 0.079045
  96. [18:05:14] Epoch 39: Loss(train): 0.077875 Loss(val): 0.079019
  97. [18:06:03] Epoch 40: Loss(train): 0.077839 Loss(val): 0.078968
  98. [18:06:51] Epoch 41: Loss(train): 0.077803 Loss(val): 0.078962
  99. [18:07:39] Epoch 42: Loss(train): 0.077777 Loss(val): 0.078970
  100. [18:08:27] Epoch 43: Loss(train): 0.077755 Loss(val): 0.078905
  101. [18:09:15] Epoch 44: Loss(train): 0.077733 Loss(val): 0.078915
  102. [18:10:03] Epoch 45: Loss(train): 0.077713 Loss(val): 0.078907
  103. [18:10:52] Epoch 46: Loss(train): 0.077690 Loss(val): 0.078885
  104. [18:11:39] Epoch 47: Loss(train): 0.077666 Loss(val): 0.078863
  105. [18:12:27] Epoch 48: Loss(train): 0.077652 Loss(val): 0.078851
  106. [18:13:15] Epoch 49: Loss(train): 0.077632 Loss(val): 0.078827
  107. [18:14:03] Epoch 50: Loss(train): 0.077617 Loss(val): 0.078828
  108. [18:14:50] Epoch 51: Loss(train): 0.077603 Loss(val): 0.078800
  109. [18:15:38] Epoch 52: Loss(train): 0.077587 Loss(val): 0.078780
  110. [18:16:25] Epoch 53: Loss(train): 0.077573 Loss(val): 0.078771
  111. [18:17:13] Epoch 54: Loss(train): 0.077561 Loss(val): 0.078770
  112. [18:18:00] Epoch 55: Loss(train): 0.077551 Loss(val): 0.078766
  113. [18:18:48] Epoch 56: Loss(train): 0.077541 Loss(val): 0.078755
  114. [18:19:35] Epoch 57: Loss(train): 0.077533 Loss(val): 0.078769
  115. [18:20:22] Epoch 58: Loss(train): 0.077520 Loss(val): 0.078739
  116. [18:21:09] Epoch 59: Loss(train): 0.077514 Loss(val): 0.078746
  117. [18:21:57] Epoch 60: Loss(train): 0.077504 Loss(val): 0.078725
  118. [18:22:44] Epoch 61: Loss(train): 0.077499 Loss(val): 0.078735
  119. [18:23:32] Epoch 62: Loss(train): 0.077492 Loss(val): 0.078728
  120. [18:24:20] Epoch 63: Loss(train): 0.077485 Loss(val): 0.078724
  121. [18:25:08] Epoch 64: Loss(train): 0.077478 Loss(val): 0.078721
  122. [18:25:56] Epoch 65: Loss(train): 0.077468 Loss(val): 0.078695
  123. [18:26:45] Epoch 66: Loss(train): 0.077468 Loss(val): 0.078714
  124. [18:27:40] Epoch 67: Loss(train): 0.077462 Loss(val): 0.078705
  125. [18:28:28] Epoch 68: Loss(train): 0.077458 Loss(val): 0.078700
  126. [18:29:17] Epoch 69: Loss(train): 0.077453 Loss(val): 0.078694
  127. [18:30:11] Epoch 70: Loss(train): 0.077454 Loss(val): 0.078712
  128. [18:31:04] Epoch 71: Loss(train): 0.077449 Loss(val): 0.078707
  129. [18:32:01] Epoch 72: Loss(train): 0.077444 Loss(val): 0.078695
  130. [18:32:56] Epoch 73: Loss(train): 0.077440 Loss(val): 0.078689
  131. [18:33:45] Epoch 74: Loss(train): 0.077438 Loss(val): 0.078692
  132. [18:34:34] Epoch 75: Loss(train): 0.077435 Loss(val): 0.078691
  133. [18:35:26] Epoch 76: Loss(train): 0.077430 Loss(val): 0.078680
  134. [18:36:16] Epoch 77: Loss(train): 0.077427 Loss(val): 0.078679
  135. [18:37:05] Epoch 78: Loss(train): 0.077426 Loss(val): 0.078680
  136. [18:37:54] Epoch 79: Loss(train): 0.077424 Loss(val): 0.078680
  137. [18:38:48] Epoch 80: Loss(train): 0.077420 Loss(val): 0.078671
  138. [18:39:36] Epoch 81: Loss(train): 0.077419 Loss(val): 0.078675
  139. [18:40:28] Epoch 82: Loss(train): 0.077415 Loss(val): 0.078664
  140. [18:41:17] Epoch 83: Loss(train): 0.077414 Loss(val): 0.078668
  141. [18:42:06] Epoch 84: Loss(train): 0.077410 Loss(val): 0.078661
  142. [18:42:55] Epoch 85: Loss(train): 0.077410 Loss(val): 0.078663
  143. [18:43:45] Epoch 86: Loss(train): 0.077409 Loss(val): 0.078667
  144. [18:44:33] Epoch 87: Loss(train): 0.077407 Loss(val): 0.078664
  145. [18:45:23] Epoch 88: Loss(train): 0.077406 Loss(val): 0.078664
  146. [18:46:12] Epoch 89: Loss(train): 0.077404 Loss(val): 0.078660
  147. [18:47:02] Epoch 90: Loss(train): 0.077403 Loss(val): 0.078663
  148. [18:47:51] Epoch 91: Loss(train): 0.077401 Loss(val): 0.078657
  149. [18:48:41] Epoch 92: Loss(train): 0.077399 Loss(val): 0.078655
  150. [18:49:30] Epoch 93: Loss(train): 0.077399 Loss(val): 0.078657
  151. [18:50:19] Epoch 94: Loss(train): 0.077399 Loss(val): 0.078657
  152. [18:51:08] Epoch 95: Loss(train): 0.077398 Loss(val): 0.078657
  153. [18:51:57] Epoch 96: Loss(train): 0.077397 Loss(val): 0.078658
  154. [18:52:46] Epoch 97: Loss(train): 0.077397 Loss(val): 0.078657
  155. [18:53:36] Epoch 98: Loss(train): 0.077394 Loss(val): 0.078651
  156. [18:54:25] Epoch 99: Loss(train): 0.077394 Loss(val): 0.078652
  157. [18:55:14] Epoch 100: Loss(train): 0.077393 Loss(val): 0.078650
  158. [18:56:04] Epoch 101: Loss(train): 0.077392 Loss(val): 0.078649
  159. [18:56:53] Epoch 102: Loss(train): 0.077391 Loss(val): 0.078646
  160. [18:57:42] Epoch 103: Loss(train): 0.077390 Loss(val): 0.078645
  161. [18:58:32] Epoch 104: Loss(train): 0.077389 Loss(val): 0.078644
  162. [18:59:22] Epoch 105: Loss(train): 0.077389 Loss(val): 0.078644
  163. [19:00:12] Epoch 106: Loss(train): 0.077388 Loss(val): 0.078642
  164. [19:01:09] Epoch 107: Loss(train): 0.077388 Loss(val): 0.078642
  165. [19:01:59] Epoch 108: Loss(train): 0.077388 Loss(val): 0.078644
  166. [19:02:49] Epoch 109: Loss(train): 0.077387 Loss(val): 0.078643
  167. [19:03:39] Epoch 110: Loss(train): 0.077387 Loss(val): 0.078643
  168. [19:04:32] Epoch 111: Loss(train): 0.077386 Loss(val): 0.078641
  169. [19:05:30] Epoch 112: Loss(train): 0.077386 Loss(val): 0.078641
  170. [19:06:25] Epoch 113: Loss(train): 0.077386 Loss(val): 0.078642
  171. [19:07:16] Epoch 114: Loss(train): 0.077386 Loss(val): 0.078641
  172. [19:08:06] Epoch 115: Loss(train): 0.077385 Loss(val): 0.078641
  173. [19:08:55] Epoch 116: Loss(train): 0.077385 Loss(val): 0.078641
  174. [19:09:46] Epoch 117: Loss(train): 0.077385 Loss(val): 0.078641
  175. [19:10:37] Epoch 118: Loss(train): 0.077385 Loss(val): 0.078641
  176. [19:11:27] Epoch 119: Loss(train): 0.077385 Loss(val): 0.078642
  177. [19:12:21] Epoch 120: Loss(train): 0.077384 Loss(val): 0.078641
  178. [19:13:14] Epoch 121: Loss(train): 0.077384 Loss(val): 0.078641
  179. [19:14:03] Epoch 122: Loss(train): 0.077384 Loss(val): 0.078641
  180. [19:14:54] Epoch 123: Loss(train): 0.077384 Loss(val): 0.078641
  181. [19:15:46] Epoch 124: Loss(train): 0.077384 Loss(val): 0.078641
  182. [19:16:37] Epoch 125: Loss(train): 0.077384 Loss(val): 0.078640
  183. [19:17:28] Epoch 126: Loss(train): 0.077384 Loss(val): 0.078640
  184. [19:18:20] Epoch 127: Loss(train): 0.077384 Loss(val): 0.078641
  185. [19:19:11] Epoch 128: Loss(train): 0.077384 Loss(val): 0.078640
  186. [19:20:01] Epoch 129: Loss(train): 0.077383 Loss(val): 0.078640
  187. [19:20:52] Epoch 130: Loss(train): 0.077383 Loss(val): 0.078640
  188. [19:21:44] Epoch 131: Loss(train): 0.077383 Loss(val): 0.078639
  189. [19:22:36] Epoch 132: Loss(train): 0.077383 Loss(val): 0.078640
  190. [19:23:26] Epoch 133: Loss(train): 0.077383 Loss(val): 0.078640
  191. [19:24:17] Epoch 134: Loss(train): 0.077383 Loss(val): 0.078639
  192. [19:25:07] Epoch 135: Loss(train): 0.077383 Loss(val): 0.078639
  193. [19:25:58] Epoch 136: Loss(train): 0.077383 Loss(val): 0.078639
  194. [19:26:48] Epoch 137: Loss(train): 0.077383 Loss(val): 0.078639
  195. [19:27:40] Epoch 138: Loss(train): 0.077383 Loss(val): 0.078639
  196. [19:28:31] Epoch 139: Loss(train): 0.077383 Loss(val): 0.078639
  197. [19:29:22] Epoch 140: Loss(train): 0.077383 Loss(val): 0.078639
  198. [19:30:13] Epoch 141: Loss(train): 0.077383 Loss(val): 0.078639
  199. [19:31:04] Epoch 142: Loss(train): 0.077383 Loss(val): 0.078639
  200. [19:31:55] Epoch 143: Loss(train): 0.077383 Loss(val): 0.078639
  201. [19:32:46] Epoch 144: Loss(train): 0.077383 Loss(val): 0.078639
  202. [19:33:36] Epoch 145: Loss(train): 0.077383 Loss(val): 0.078639
  203. [19:34:26] Epoch 146: Loss(train): 0.077383 Loss(val): 0.078640
  204. [19:35:22] Epoch 147: Loss(train): 0.077383 Loss(val): 0.078639
  205. [19:36:17] Epoch 148: Loss(train): 0.077383 Loss(val): 0.078639
  206. [19:37:15] Epoch 149: Loss(train): 0.077383 Loss(val): 0.078639
  207. [19:38:16] Epoch 150: Loss(train): 0.077383 Loss(val): 0.078639
  208. [19:39:07] Epoch 151: Loss(train): 0.077383 Loss(val): 0.078639
  209. [19:39:59] Epoch 152: Loss(train): 0.077383 Loss(val): 0.078639
  210. [19:40:50] Epoch 153: Loss(train): 0.077383 Loss(val): 0.078639
  211. [19:41:45] Epoch 154: Loss(train): 0.077382 Loss(val): 0.078639
  212. [19:42:36] Epoch 155: Loss(train): 0.077382 Loss(val): 0.078639
  213. [19:43:33] Epoch 156: Loss(train): 0.077382 Loss(val): 0.078639
  214. [19:44:26] Epoch 157: Loss(train): 0.077382 Loss(val): 0.078639
  215. [19:45:19] Epoch 158: Loss(train): 0.077382 Loss(val): 0.078639
  216. [19:46:10] Epoch 159: Loss(train): 0.077382 Loss(val): 0.078639
  217. [19:47:03] Epoch 160: Loss(train): 0.077382 Loss(val): 0.078639
  218. [19:47:57] Epoch 161: Loss(train): 0.077382 Loss(val): 0.078639
  219. [19:48:48] Epoch 162: Loss(train): 0.077382 Loss(val): 0.078639
  220. [19:49:40] Epoch 163: Loss(train): 0.077382 Loss(val): 0.078639
  221. [19:50:32] Epoch 164: Loss(train): 0.077382 Loss(val): 0.078639
  222. [19:51:23] Epoch 165: Loss(train): 0.077382 Loss(val): 0.078639
  223. [19:52:18] Epoch 166: Loss(train): 0.077382 Loss(val): 0.078639
  224. [19:53:09] Epoch 167: Loss(train): 0.077382 Loss(val): 0.078639
  225. [19:54:00] Epoch 168: Loss(train): 0.077382 Loss(val): 0.078639
  226. [19:54:52] Epoch 169: Loss(train): 0.077382 Loss(val): 0.078639
  227. [19:55:44] Epoch 170: Loss(train): 0.077382 Loss(val): 0.078639
  228. [19:56:35] Epoch 171: Loss(train): 0.077382 Loss(val): 0.078639
  229. [19:57:27] Epoch 172: Loss(train): 0.077382 Loss(val): 0.078639
  230. [19:58:18] Epoch 173: Loss(train): 0.077382 Loss(val): 0.078639
  231. [19:59:09] Epoch 174: Loss(train): 0.077382 Loss(val): 0.078639
  232. [20:00:01] Epoch 175: Loss(train): 0.077382 Loss(val): 0.078639
  233. [20:00:54] Epoch 176: Loss(train): 0.077382 Loss(val): 0.078639
  234. [20:01:47] Epoch 177: Loss(train): 0.077382 Loss(val): 0.078639
  235. [20:02:39] Epoch 178: Loss(train): 0.077382 Loss(val): 0.078639
  236. [20:03:32] Epoch 179: Loss(train): 0.077382 Loss(val): 0.078639
  237. [20:04:24] Epoch 180: Loss(train): 0.077382 Loss(val): 0.078639
  238. [20:05:17] Epoch 181: Loss(train): 0.077382 Loss(val): 0.078639
  239. [20:06:10] Epoch 182: Loss(train): 0.077382 Loss(val): 0.078639
  240. [20:07:11] Epoch 183: Loss(train): 0.077382 Loss(val): 0.078639
  241. [20:08:09] Epoch 184: Loss(train): 0.077382 Loss(val): 0.078639
  242. [20:09:13] Epoch 185: Loss(train): 0.077382 Loss(val): 0.078639
  243. [20:10:11] Epoch 186: Loss(train): 0.077382 Loss(val): 0.078639
  244. [20:11:09] Epoch 187: Loss(train): 0.077382 Loss(val): 0.078639
  245. [20:12:05] Epoch 188: Loss(train): 0.077382 Loss(val): 0.078639
  246. [20:13:04] Epoch 189: Loss(train): 0.077382 Loss(val): 0.078639
  247. [20:14:00] Epoch 190: Loss(train): 0.077382 Loss(val): 0.078639
  248. [20:14:52] Epoch 191: Loss(train): 0.077382 Loss(val): 0.078639
  249. [20:15:49] Epoch 192: Loss(train): 0.077382 Loss(val): 0.078639
  250. [20:16:44] Epoch 193: Loss(train): 0.077382 Loss(val): 0.078639
  251. [20:17:37] Epoch 194: Loss(train): 0.077382 Loss(val): 0.078639
  252. [20:18:30] Epoch 195: Loss(train): 0.077382 Loss(val): 0.078639
  253. [20:19:27] Epoch 196: Loss(train): 0.077382 Loss(val): 0.078639
  254. [20:20:21] Epoch 197: Loss(train): 0.077382 Loss(val): 0.078639
  255. [20:21:13] Epoch 198: Loss(train): 0.077382 Loss(val): 0.078639
  256. [20:22:06] Epoch 199: Loss(train): 0.077382 Loss(val): 0.078639
  257. [20:23:01] Epoch 200: Loss(train): 0.077382 Loss(val): 0.078639
  258. [20:23:56] Epoch 201: Loss(train): 0.077382 Loss(val): 0.078639
  259. [20:24:49] Epoch 202: Loss(train): 0.077382 Loss(val): 0.078639
  260. [20:25:42] Epoch 203: Loss(train): 0.077382 Loss(val): 0.078639
  261. [20:26:36] Epoch 204: Loss(train): 0.077382 Loss(val): 0.078639
  262. [20:27:28] Epoch 205: Loss(train): 0.077382 Loss(val): 0.078639
  263. [20:28:22] Epoch 206: Loss(train): 0.077382 Loss(val): 0.078639
  264. [20:29:16] Epoch 207: Loss(train): 0.077382 Loss(val): 0.078639
  265. [20:30:10] Epoch 208: Loss(train): 0.077382 Loss(val): 0.078639
  266. [20:31:03] Epoch 209: Loss(train): 0.077382 Loss(val): 0.078639
  267. [20:31:55] Epoch 210: Loss(train): 0.077382 Loss(val): 0.078639
  268. [20:32:49] Epoch 211: Loss(train): 0.077382 Loss(val): 0.078639
  269. [20:33:42] Epoch 212: Loss(train): 0.077382 Loss(val): 0.078639
  270. [20:34:35] Epoch 213: Loss(train): 0.077382 Loss(val): 0.078639
  271. [20:35:28] Epoch 214: Loss(train): 0.077382 Loss(val): 0.078639
  272. [20:37:07] Epoch 215: Loss(train): 0.077382 Loss(val): 0.078639
  273. [20:39:39] Epoch 216: Loss(train): 0.077382 Loss(val): 0.078639
  274. [20:42:07] Epoch 217: Loss(train): 0.077382 Loss(val): 0.078639
  275. [20:44:41] Epoch 218: Loss(train): 0.077382 Loss(val): 0.078639
  276. [20:47:12] Epoch 219: Loss(train): 0.077382 Loss(val): 0.078639
  277. [20:49:23] Epoch 220: Loss(train): 0.077382 Loss(val): 0.078639
  278. [20:51:26] Epoch 221: Loss(train): 0.077382 Loss(val): 0.078639
  279. [20:53:30] Epoch 222: Loss(train): 0.077382 Loss(val): 0.078639
  280. [20:55:28] Epoch 223: Loss(train): 0.077382 Loss(val): 0.078639
  281. [20:57:30] Epoch 224: Loss(train): 0.077382 Loss(val): 0.078639
  282. [20:59:28] Epoch 225: Loss(train): 0.077382 Loss(val): 0.078639
  283. [21:01:23] Epoch 226: Loss(train): 0.077382 Loss(val): 0.078639
  284. [21:03:18] Epoch 227: Loss(train): 0.077382 Loss(val): 0.078639
  285. [21:05:13] Epoch 228: Loss(train): 0.077382 Loss(val): 0.078639
  286. [21:07:08] Epoch 229: Loss(train): 0.077382 Loss(val): 0.078639
  287. [21:09:06] Epoch 230: Loss(train): 0.077382 Loss(val): 0.078639
  288. [21:11:45] Epoch 231: Loss(train): 0.077382 Loss(val): 0.078639
  289. [21:14:17] Epoch 232: Loss(train): 0.077382 Loss(val): 0.078639
  290. [21:16:50] Epoch 233: Loss(train): 0.077382 Loss(val): 0.078639
  291. [21:18:55] Epoch 234: Loss(train): 0.077382 Loss(val): 0.078639
  292. [21:21:14] Epoch 235: Loss(train): 0.077382 Loss(val): 0.078639
  293. [21:23:20] Epoch 236: Loss(train): 0.077382 Loss(val): 0.078639
  294. [21:25:19] Epoch 237: Loss(train): 0.077382 Loss(val): 0.078639
  295. [21:27:31] Epoch 238: Loss(train): 0.077382 Loss(val): 0.078639
  296. [21:29:41] Epoch 239: Loss(train): 0.077382 Loss(val): 0.078639
  297. [21:31:41] Epoch 240: Loss(train): 0.077382 Loss(val): 0.078639
  298. [21:33:47] Epoch 241: Loss(train): 0.077382 Loss(val): 0.078639
  299. [21:35:49] Epoch 242: Loss(train): 0.077382 Loss(val): 0.078639
  300. [21:37:51] Epoch 243: Loss(train): 0.077382 Loss(val): 0.078639
  301. [21:39:51] Epoch 244: Loss(train): 0.077382 Loss(val): 0.078639
  302. [21:42:19] Epoch 245: Loss(train): 0.077382 Loss(val): 0.078639
  303. [21:44:40] Epoch 246: Loss(train): 0.077382 Loss(val): 0.078639
  304. [21:46:46] Epoch 247: Loss(train): 0.077382 Loss(val): 0.078639
  305. [21:49:01] Epoch 248: Loss(train): 0.077382 Loss(val): 0.078639
  306. [21:51:19] Epoch 249: Loss(train): 0.077382 Loss(val): 0.078639
  307. [21:53:26] Epoch 250: Loss(train): 0.077382 Loss(val): 0.078639
  308. [21:55:35] Epoch 251: Loss(train): 0.077382 Loss(val): 0.078639
  309. [21:57:42] Epoch 252: Loss(train): 0.077382 Loss(val): 0.078639
  310. [21:59:52] Epoch 253: Loss(train): 0.077382 Loss(val): 0.078639
  311. [22:01:59] Epoch 254: Loss(train): 0.077382 Loss(val): 0.078639
  312. [22:04:11] Epoch 255: Loss(train): 0.077382 Loss(val): 0.078639
  313. [22:06:13] Epoch 256: Loss(train): 0.077382 Loss(val): 0.078639
  314. [22:08:20] Epoch 257: Loss(train): 0.077382 Loss(val): 0.078639
  315. [22:10:22] Epoch 258: Loss(train): 0.077382 Loss(val): 0.078639
  316. [22:12:25] Epoch 259: Loss(train): 0.077382 Loss(val): 0.078639
  317. [22:14:28] Epoch 260: Loss(train): 0.077382 Loss(val): 0.078639
  318. [22:16:51] Epoch 261: Loss(train): 0.077382 Loss(val): 0.078639
  319. [22:19:19] Epoch 262: Loss(train): 0.077382 Loss(val): 0.078639
  320. [22:21:37] Epoch 263: Loss(train): 0.077382 Loss(val): 0.078639
  321. [22:24:16] Epoch 264: Loss(train): 0.077382 Loss(val): 0.078639
  322. [22:26:31] Epoch 265: Loss(train): 0.077382 Loss(val): 0.078639
  323. [22:28:42] Epoch 266: Loss(train): 0.077382 Loss(val): 0.078639
  324. [22:30:51] Epoch 267: Loss(train): 0.077382 Loss(val): 0.078639
  325. [22:33:05] Epoch 268: Loss(train): 0.077382 Loss(val): 0.078639
  326. [22:35:15] Epoch 269: Loss(train): 0.077382 Loss(val): 0.078639
  327. [22:37:29] Epoch 270: Loss(train): 0.077382 Loss(val): 0.078639
  328. [22:39:37] Epoch 271: Loss(train): 0.077382 Loss(val): 0.078639
  329. [22:41:48] Epoch 272: Loss(train): 0.077382 Loss(val): 0.078639
  330. [22:43:53] Epoch 273: Loss(train): 0.077382 Loss(val): 0.078639
  331. [22:45:58] Epoch 274: Loss(train): 0.077382 Loss(val): 0.078639
  332. [22:48:04] Epoch 275: Loss(train): 0.077382 Loss(val): 0.078639
  333. [22:50:17] Epoch 276: Loss(train): 0.077382 Loss(val): 0.078639
  334. [22:52:25] Epoch 277: Loss(train): 0.077382 Loss(val): 0.078639
  335. [22:54:34] Epoch 278: Loss(train): 0.077382 Loss(val): 0.078639
  336. [22:56:40] Epoch 279: Loss(train): 0.077382 Loss(val): 0.078639
  337. [22:58:46] Epoch 280: Loss(train): 0.077382 Loss(val): 0.078639
  338. [23:01:05] Epoch 281: Loss(train): 0.077382 Loss(val): 0.078639
  339. [23:03:23] Epoch 282: Loss(train): 0.077382 Loss(val): 0.078639
  340. Converged at Loss(train): 0.082034, Loss(val): 0.083194 in epoch 282 with accuracy(val): 0.458486
  341. Configuration learning_rate=0.3, decay_step=60
  342. [23:04:17] INIT Loss(val): 0.148113 Accuracy: 0.088044
  343. [23:06:34] Epoch 1: Loss(train): 0.107054 Loss(val): 0.109363
  344. [23:08:56] Epoch 2: Loss(train): 0.095789 Loss(val): 0.097876
  345. [23:11:04] Epoch 3: Loss(train): 0.090508 Loss(val): 0.092148
  346. [23:13:35] Epoch 4: Loss(train): 0.087590 Loss(val): 0.088927
  347. [23:17:01] Epoch 5: Loss(train): 0.085521 Loss(val): 0.086741
  348. [23:20:12] Epoch 6: Loss(train): 0.084280 Loss(val): 0.085431
  349. [23:22:24] Epoch 7: Loss(train): 0.083406 Loss(val): 0.084504
  350. [23:24:58] Epoch 8: Loss(train): 0.082772 Loss(val): 0.083805
  351. [23:27:22] Epoch 9: Loss(train): 0.082204 Loss(val): 0.083247
  352. [23:29:41] Epoch 10: Loss(train): 0.081678 Loss(val): 0.082771
  353. [23:32:03] Epoch 11: Loss(train): 0.081268 Loss(val): 0.082301
  354. [23:34:12] Epoch 12: Loss(train): 0.080904 Loss(val): 0.081968
  355. [23:36:27] Epoch 13: Loss(train): 0.080724 Loss(val): 0.081766
  356. [23:38:41] Epoch 14: Loss(train): 0.080442 Loss(val): 0.081502
  357. [23:40:53] Epoch 15: Loss(train): 0.080174 Loss(val): 0.081228
  358. [23:43:06] Epoch 16: Loss(train): 0.079994 Loss(val): 0.081023
  359. [23:46:08] Epoch 17: Loss(train): 0.079753 Loss(val): 0.080801
  360. [23:48:40] Epoch 18: Loss(train): 0.079578 Loss(val): 0.080634
  361. [23:51:34] Epoch 19: Loss(train): 0.079432 Loss(val): 0.080512
  362. [23:54:06] Epoch 20: Loss(train): 0.079271 Loss(val): 0.080309
  363. [23:56:30] Epoch 21: Loss(train): 0.079122 Loss(val): 0.080194
  364. [23:58:59] Epoch 22: Loss(train): 0.079044 Loss(val): 0.080105
  365. [00:01:16] Epoch 23: Loss(train): 0.078923 Loss(val): 0.079973
  366. [00:03:34] Epoch 24: Loss(train): 0.078793 Loss(val): 0.079894
  367. [00:05:51] Epoch 25: Loss(train): 0.078695 Loss(val): 0.079780
  368. [00:08:07] Epoch 26: Loss(train): 0.078608 Loss(val): 0.079694
  369. [00:10:24] Epoch 27: Loss(train): 0.078494 Loss(val): 0.079601
  370. [00:12:39] Epoch 28: Loss(train): 0.078417 Loss(val): 0.079530
  371. [00:15:31] Epoch 29: Loss(train): 0.078361 Loss(val): 0.079450
  372. [00:18:25] Epoch 30: Loss(train): 0.078333 Loss(val): 0.079394
  373. [00:21:07] Epoch 31: Loss(train): 0.078235 Loss(val): 0.079327
  374. [00:24:01] Epoch 32: Loss(train): 0.078153 Loss(val): 0.079252
  375. [00:26:34] Epoch 33: Loss(train): 0.078107 Loss(val): 0.079232
  376. [00:29:00] Epoch 34: Loss(train): 0.078064 Loss(val): 0.079163
  377. [00:31:28] Epoch 35: Loss(train): 0.078004 Loss(val): 0.079139
  378. [00:33:49] Epoch 36: Loss(train): 0.077960 Loss(val): 0.079076
  379. [00:36:08] Epoch 37: Loss(train): 0.077932 Loss(val): 0.079059
  380. [00:38:29] Epoch 38: Loss(train): 0.077874 Loss(val): 0.079005
  381. [00:40:51] Epoch 39: Loss(train): 0.077841 Loss(val): 0.078978
  382. [00:43:11] Epoch 40: Loss(train): 0.077795 Loss(val): 0.078931
  383. [00:45:57] Epoch 41: Loss(train): 0.077764 Loss(val): 0.078925
  384. [00:49:17] Epoch 42: Loss(train): 0.077733 Loss(val): 0.078916
  385. [00:52:09] Epoch 43: Loss(train): 0.077707 Loss(val): 0.078873
  386. [00:54:39] Epoch 44: Loss(train): 0.077685 Loss(val): 0.078852
  387. [00:57:11] Epoch 45: Loss(train): 0.077660 Loss(val): 0.078848
  388. [00:59:43] Epoch 46: Loss(train): 0.077632 Loss(val): 0.078837
  389. [01:02:15] Epoch 47: Loss(train): 0.077619 Loss(val): 0.078815
  390. [01:04:43] Epoch 48: Loss(train): 0.077599 Loss(val): 0.078788
  391. [01:07:05] Epoch 49: Loss(train): 0.077581 Loss(val): 0.078782
  392. [01:09:26] Epoch 50: Loss(train): 0.077557 Loss(val): 0.078758
  393. [01:11:49] Epoch 51: Loss(train): 0.077544 Loss(val): 0.078751
  394. [01:14:11] Epoch 52: Loss(train): 0.077522 Loss(val): 0.078734
  395. [01:17:36] Epoch 53: Loss(train): 0.077510 Loss(val): 0.078726
  396. [01:20:53] Epoch 54: Loss(train): 0.077499 Loss(val): 0.078728
  397. [01:23:46] Epoch 55: Loss(train): 0.077487 Loss(val): 0.078702
  398. [01:26:28] Epoch 56: Loss(train): 0.077474 Loss(val): 0.078712
  399. [01:29:08] Epoch 57: Loss(train): 0.077465 Loss(val): 0.078715
  400. [01:31:37] Epoch 58: Loss(train): 0.077457 Loss(val): 0.078715
  401. [01:34:07] Epoch 59: Loss(train): 0.077445 Loss(val): 0.078696
  402. [01:36:30] Epoch 60: Loss(train): 0.077437 Loss(val): 0.078695
  403. [01:38:58] Epoch 61: Loss(train): 0.077429 Loss(val): 0.078679
  404. [01:41:20] Epoch 62: Loss(train): 0.077426 Loss(val): 0.078690
  405. [01:43:42] Epoch 63: Loss(train): 0.077418 Loss(val): 0.078677
  406. [01:46:26] Epoch 64: Loss(train): 0.077412 Loss(val): 0.078690
  407. [01:49:45] Epoch 65: Loss(train): 0.077403 Loss(val): 0.078668
  408. [01:52:30] Epoch 66: Loss(train): 0.077400 Loss(val): 0.078670
  409. [01:55:42] Epoch 67: Loss(train): 0.077396 Loss(val): 0.078664
  410. [01:58:18] Epoch 68: Loss(train): 0.077391 Loss(val): 0.078665
  411. [02:00:59] Epoch 69: Loss(train): 0.077385 Loss(val): 0.078659
  412. [02:03:31] Epoch 70: Loss(train): 0.077380 Loss(val): 0.078658
  413. [02:06:07] Epoch 71: Loss(train): 0.077376 Loss(val): 0.078658
  414. [02:08:35] Epoch 72: Loss(train): 0.077374 Loss(val): 0.078661
  415. [02:11:04] Epoch 73: Loss(train): 0.077371 Loss(val): 0.078655
  416. [02:13:32] Epoch 74: Loss(train): 0.077368 Loss(val): 0.078656
  417. [02:15:59] Epoch 75: Loss(train): 0.077364 Loss(val): 0.078651
  418. [02:18:26] Epoch 76: Loss(train): 0.077361 Loss(val): 0.078648
  419. [02:21:38] Epoch 77: Loss(train): 0.077359 Loss(val): 0.078647
  420. [02:24:25] Epoch 78: Loss(train): 0.077356 Loss(val): 0.078647
  421. [02:27:14] Epoch 79: Loss(train): 0.077351 Loss(val): 0.078638
  422. [02:30:05] Epoch 80: Loss(train): 0.077349 Loss(val): 0.078634
  423. [02:32:44] Epoch 81: Loss(train): 0.077346 Loss(val): 0.078634
  424. [02:35:28] Epoch 82: Loss(train): 0.077344 Loss(val): 0.078633
  425. [02:38:05] Epoch 83: Loss(train): 0.077345 Loss(val): 0.078639
  426. [02:40:38] Epoch 84: Loss(train): 0.077343 Loss(val): 0.078638
  427. [02:43:12] Epoch 85: Loss(train): 0.077342 Loss(val): 0.078642
  428. [02:45:45] Epoch 86: Loss(train): 0.077339 Loss(val): 0.078636
  429. [02:48:13] Epoch 87: Loss(train): 0.077338 Loss(val): 0.078637
  430. [02:50:41] Epoch 88: Loss(train): 0.077337 Loss(val): 0.078637
  431. [02:53:51] Epoch 89: Loss(train): 0.077335 Loss(val): 0.078632
  432. [02:56:53] Epoch 90: Loss(train): 0.077334 Loss(val): 0.078630
  433. [02:59:47] Epoch 91: Loss(train): 0.077332 Loss(val): 0.078628
  434. [03:02:24] Epoch 92: Loss(train): 0.077331 Loss(val): 0.078627
  435. [03:05:12] Epoch 93: Loss(train): 0.077330 Loss(val): 0.078627
  436. [03:07:58] Epoch 94: Loss(train): 0.077328 Loss(val): 0.078623
  437. [03:10:35] Epoch 95: Loss(train): 0.077327 Loss(val): 0.078619
  438. [03:13:15] Epoch 96: Loss(train): 0.077327 Loss(val): 0.078623
  439. [03:15:50] Epoch 97: Loss(train): 0.077327 Loss(val): 0.078625
  440. [03:18:21] Epoch 98: Loss(train): 0.077326 Loss(val): 0.078626
  441. [03:20:56] Epoch 99: Loss(train): 0.077326 Loss(val): 0.078625
  442. [03:23:41] Epoch 100: Loss(train): 0.077325 Loss(val): 0.078622
  443. [03:26:50] Epoch 101: Loss(train): 0.077324 Loss(val): 0.078623
  444. [03:30:06] Epoch 102: Loss(train): 0.077324 Loss(val): 0.078622
  445. [03:33:21] Epoch 103: Loss(train): 0.077323 Loss(val): 0.078622
  446. [03:36:06] Epoch 104: Loss(train): 0.077323 Loss(val): 0.078622
  447. [03:38:49] Epoch 105: Loss(train): 0.077322 Loss(val): 0.078622
  448. [03:41:29] Epoch 106: Loss(train): 0.077321 Loss(val): 0.078619
  449. [03:44:10] Epoch 107: Loss(train): 0.077320 Loss(val): 0.078616
  450. [03:46:47] Epoch 108: Loss(train): 0.077319 Loss(val): 0.078615
  451. [03:49:22] Epoch 109: Loss(train): 0.077319 Loss(val): 0.078616
  452. [03:51:55] Epoch 110: Loss(train): 0.077319 Loss(val): 0.078615
  453. [03:54:28] Epoch 111: Loss(train): 0.077318 Loss(val): 0.078614
  454. [03:57:08] Epoch 112: Loss(train): 0.077319 Loss(val): 0.078616
  455. [04:00:37] Epoch 113: Loss(train): 0.077318 Loss(val): 0.078615
  456. [04:03:24] Epoch 114: Loss(train): 0.077318 Loss(val): 0.078617
  457. [04:06:31] Epoch 115: Loss(train): 0.077319 Loss(val): 0.078618
  458. [04:09:20] Epoch 116: Loss(train): 0.077319 Loss(val): 0.078618
  459. [04:12:07] Epoch 117: Loss(train): 0.077318 Loss(val): 0.078617
  460. [04:14:48] Epoch 118: Loss(train): 0.077318 Loss(val): 0.078617
  461. [04:17:30] Epoch 119: Loss(train): 0.077318 Loss(val): 0.078617
  462. [04:20:10] Epoch 120: Loss(train): 0.077317 Loss(val): 0.078616
  463. [04:22:50] Epoch 121: Loss(train): 0.077317 Loss(val): 0.078617
  464. [04:25:30] Epoch 122: Loss(train): 0.077317 Loss(val): 0.078618
  465. [04:28:06] Epoch 123: Loss(train): 0.077318 Loss(val): 0.078618
  466. [04:30:57] Epoch 124: Loss(train): 0.077318 Loss(val): 0.078619
  467. [04:33:55] Epoch 125: Loss(train): 0.077317 Loss(val): 0.078619
  468. [04:36:46] Epoch 126: Loss(train): 0.077317 Loss(val): 0.078619
  469. [04:39:57] Epoch 127: Loss(train): 0.077317 Loss(val): 0.078618
  470. [04:42:50] Epoch 128: Loss(train): 0.077317 Loss(val): 0.078618
  471. [04:45:43] Epoch 129: Loss(train): 0.077317 Loss(val): 0.078618
  472. [04:48:25] Epoch 130: Loss(train): 0.077317 Loss(val): 0.078618
  473. [04:51:16] Epoch 131: Loss(train): 0.077317 Loss(val): 0.078618
  474. [04:53:57] Epoch 132: Loss(train): 0.077317 Loss(val): 0.078617
  475. [04:56:34] Epoch 133: Loss(train): 0.077317 Loss(val): 0.078617
  476. [04:59:17] Epoch 134: Loss(train): 0.077316 Loss(val): 0.078617
  477. [05:01:58] Epoch 135: Loss(train): 0.077316 Loss(val): 0.078617
  478. [05:05:03] Epoch 136: Loss(train): 0.077316 Loss(val): 0.078617
  479. [05:08:22] Epoch 137: Loss(train): 0.077316 Loss(val): 0.078616
  480. [05:11:24] Epoch 138: Loss(train): 0.077316 Loss(val): 0.078616
  481. [05:14:25] Epoch 139: Loss(train): 0.077316 Loss(val): 0.078616
  482. [05:17:22] Epoch 140: Loss(train): 0.077316 Loss(val): 0.078616
  483. [05:20:16] Epoch 141: Loss(train): 0.077316 Loss(val): 0.078616
  484. [05:23:04] Epoch 142: Loss(train): 0.077316 Loss(val): 0.078615
  485. [05:25:53] Epoch 143: Loss(train): 0.077316 Loss(val): 0.078615
  486. [05:28:38] Epoch 144: Loss(train): 0.077316 Loss(val): 0.078615
  487. [05:31:22] Epoch 145: Loss(train): 0.077316 Loss(val): 0.078615
  488. [05:34:02] Epoch 146: Loss(train): 0.077316 Loss(val): 0.078615
  489. [05:36:46] Epoch 147: Loss(train): 0.077316 Loss(val): 0.078615
  490. [05:40:11] Epoch 148: Loss(train): 0.077316 Loss(val): 0.078615
  491. [05:43:43] Epoch 149: Loss(train): 0.077316 Loss(val): 0.078615
  492. [05:46:56] Epoch 150: Loss(train): 0.077316 Loss(val): 0.078615
  493. [05:49:55] Epoch 151: Loss(train): 0.077315 Loss(val): 0.078615
  494. [05:52:53] Epoch 152: Loss(train): 0.077315 Loss(val): 0.078615
  495. [05:55:52] Epoch 153: Loss(train): 0.077315 Loss(val): 0.078615
  496. [05:58:45] Epoch 154: Loss(train): 0.077315 Loss(val): 0.078615
  497. [06:01:39] Epoch 155: Loss(train): 0.077315 Loss(val): 0.078615
  498. [06:04:25] Epoch 156: Loss(train): 0.077315 Loss(val): 0.078615
  499. [06:07:07] Epoch 157: Loss(train): 0.077315 Loss(val): 0.078615
  500. [06:09:52] Epoch 158: Loss(train): 0.077315 Loss(val): 0.078615
  501. [06:12:41] Epoch 159: Loss(train): 0.077315 Loss(val): 0.078615
  502. Converged at Loss(train): 0.081795, Loss(val): 0.082810 in epoch 159 with accuracy(val): 0.455119
  503. Configuration learning_rate=0.1, decay_step=20
  504. [06:14:12] INIT Loss(val): 0.151758 Accuracy: 0.109337
  505. [06:17:00] Epoch 1: Loss(train): 0.108645 Loss(val): 0.110902
  506. [06:19:52] Epoch 2: Loss(train): 0.097215 Loss(val): 0.099030
  507. [06:22:40] Epoch 3: Loss(train): 0.091263 Loss(val): 0.092792
  508. [06:25:30] Epoch 4: Loss(train): 0.088060 Loss(val): 0.089313
  509. [06:28:30] Epoch 5: Loss(train): 0.086003 Loss(val): 0.087071
  510. [06:31:27] Epoch 6: Loss(train): 0.084676 Loss(val): 0.085638
  511. [06:34:27] Epoch 7: Loss(train): 0.083821 Loss(val): 0.084719
  512. [06:37:15] Epoch 8: Loss(train): 0.083102 Loss(val): 0.083924
  513. [06:40:03] Epoch 9: Loss(train): 0.082514 Loss(val): 0.083343
  514. [06:42:51] Epoch 10: Loss(train): 0.082124 Loss(val): 0.082861
  515. [06:45:39] Epoch 11: Loss(train): 0.081586 Loss(val): 0.082425
  516. [06:48:23] Epoch 12: Loss(train): 0.081260 Loss(val): 0.082115
  517. [06:51:13] Epoch 13: Loss(train): 0.080916 Loss(val): 0.081760
  518. [06:53:59] Epoch 14: Loss(train): 0.080611 Loss(val): 0.081422
  519. [06:56:45] Epoch 15: Loss(train): 0.080433 Loss(val): 0.081247
  520. [06:59:36] Epoch 16: Loss(train): 0.080208 Loss(val): 0.081032
  521. [07:02:22] Epoch 17: Loss(train): 0.080056 Loss(val): 0.080846
  522. [07:05:14] Epoch 18: Loss(train): 0.079824 Loss(val): 0.080675
  523. [07:08:09] Epoch 19: Loss(train): 0.079619 Loss(val): 0.080466
  524. [07:11:07] Epoch 20: Loss(train): 0.079563 Loss(val): 0.080365
  525. [07:14:22] Epoch 21: Loss(train): 0.079408 Loss(val): 0.080246
  526. [07:17:29] Epoch 22: Loss(train): 0.079267 Loss(val): 0.080076
  527. [07:22:07] Epoch 23: Loss(train): 0.079120 Loss(val): 0.079969
  528. [07:25:39] Epoch 24: Loss(train): 0.079023 Loss(val): 0.079840
  529. [07:28:35] Epoch 25: Loss(train): 0.078940 Loss(val): 0.079775
  530. [07:31:55] Epoch 26: Loss(train): 0.078815 Loss(val): 0.079648
  531. [07:35:01] Epoch 27: Loss(train): 0.078760 Loss(val): 0.079581
  532. [07:37:52] Epoch 28: Loss(train): 0.078676 Loss(val): 0.079495
  533. [07:40:47] Epoch 29: Loss(train): 0.078605 Loss(val): 0.079448
  534. [07:43:38] Epoch 30: Loss(train): 0.078492 Loss(val): 0.079332
  535. [07:46:29] Epoch 31: Loss(train): 0.078412 Loss(val): 0.079296
  536. [07:50:38] Epoch 32: Loss(train): 0.078362 Loss(val): 0.079227
  537. [07:53:58] Epoch 33: Loss(train): 0.078317 Loss(val): 0.079200
  538. [07:57:46] Epoch 34: Loss(train): 0.078244 Loss(val): 0.079147
  539. [08:00:52] Epoch 35: Loss(train): 0.078228 Loss(val): 0.079104
  540. [08:04:06] Epoch 36: Loss(train): 0.078162 Loss(val): 0.079045
  541. [08:07:02] Epoch 37: Loss(train): 0.078107 Loss(val): 0.079005
  542. [08:10:00] Epoch 38: Loss(train): 0.078047 Loss(val): 0.078963
  543. [08:12:51] Epoch 39: Loss(train): 0.078025 Loss(val): 0.078911
  544. [08:15:43] Epoch 40: Loss(train): 0.077986 Loss(val): 0.078901
  545. [08:18:48] Epoch 41: Loss(train): 0.077959 Loss(val): 0.078886
  546. [08:23:08] Epoch 42: Loss(train): 0.077921 Loss(val): 0.078858
  547. [08:26:57] Epoch 43: Loss(train): 0.077886 Loss(val): 0.078826
  548. [08:30:10] Epoch 44: Loss(train): 0.077849 Loss(val): 0.078798
  549. [08:33:24] Epoch 45: Loss(train): 0.077820 Loss(val): 0.078793
  550. [08:36:39] Epoch 46: Loss(train): 0.077805 Loss(val): 0.078768
  551. [08:39:38] Epoch 47: Loss(train): 0.077782 Loss(val): 0.078746
  552. [08:42:37] Epoch 48: Loss(train): 0.077759 Loss(val): 0.078739
  553. [08:45:34] Epoch 49: Loss(train): 0.077742 Loss(val): 0.078724
  554. [08:48:30] Epoch 50: Loss(train): 0.077717 Loss(val): 0.078697
  555. [08:52:57] Epoch 51: Loss(train): 0.077710 Loss(val): 0.078685
  556. [08:56:15] Epoch 52: Loss(train): 0.077692 Loss(val): 0.078671
  557. [09:00:01] Epoch 53: Loss(train): 0.077666 Loss(val): 0.078661
  558. [09:03:12] Epoch 54: Loss(train): 0.077662 Loss(val): 0.078661
  559. [09:06:28] Epoch 55: Loss(train): 0.077645 Loss(val): 0.078655
  560. [09:09:45] Epoch 56: Loss(train): 0.077640 Loss(val): 0.078639
  561. [09:12:53] Epoch 57: Loss(train): 0.077625 Loss(val): 0.078633
  562. [09:16:00] Epoch 58: Loss(train): 0.077610 Loss(val): 0.078614
  563. [09:19:11] Epoch 59: Loss(train): 0.077602 Loss(val): 0.078613
  564. [09:22:16] Epoch 60: Loss(train): 0.077597 Loss(val): 0.078604
  565. [09:26:11] Epoch 61: Loss(train): 0.077585 Loss(val): 0.078593
  566. [09:29:35] Epoch 62: Loss(train): 0.077575 Loss(val): 0.078597
  567. [09:33:11] Epoch 63: Loss(train): 0.077566 Loss(val): 0.078592
  568. [09:36:24] Epoch 64: Loss(train): 0.077563 Loss(val): 0.078593
  569. [09:39:33] Epoch 65: Loss(train): 0.077556 Loss(val): 0.078583
  570. [09:42:46] Epoch 66: Loss(train): 0.077552 Loss(val): 0.078584
  571. [09:45:59] Epoch 67: Loss(train): 0.077546 Loss(val): 0.078584
  572. [09:49:01] Epoch 68: Loss(train): 0.077539 Loss(val): 0.078566
  573. [09:52:04] Epoch 69: Loss(train): 0.077531 Loss(val): 0.078566
  574. [09:55:07] Epoch 70: Loss(train): 0.077528 Loss(val): 0.078561
  575. [09:58:32] Epoch 71: Loss(train): 0.077520 Loss(val): 0.078549
  576. [10:02:30] Epoch 72: Loss(train): 0.077516 Loss(val): 0.078549
  577. [10:06:15] Epoch 73: Loss(train): 0.077514 Loss(val): 0.078551
  578. [10:09:43] Epoch 74: Loss(train): 0.077510 Loss(val): 0.078550
  579. [10:13:03] Epoch 75: Loss(train): 0.077506 Loss(val): 0.078551
  580. [10:16:21] Epoch 76: Loss(train): 0.077503 Loss(val): 0.078544
  581. [10:19:34] Epoch 77: Loss(train): 0.077501 Loss(val): 0.078549
  582. [10:22:42] Epoch 78: Loss(train): 0.077498 Loss(val): 0.078546
  583. [10:25:45] Epoch 79: Loss(train): 0.077495 Loss(val): 0.078537
  584. [10:29:12] Epoch 80: Loss(train): 0.077493 Loss(val): 0.078541
  585. [10:33:03] Epoch 81: Loss(train): 0.077491 Loss(val): 0.078534
  586. [10:36:51] Epoch 82: Loss(train): 0.077488 Loss(val): 0.078536
  587. [10:40:24] Epoch 83: Loss(train): 0.077485 Loss(val): 0.078529
  588. [10:43:48] Epoch 84: Loss(train): 0.077483 Loss(val): 0.078526
  589. [10:47:08] Epoch 85: Loss(train): 0.077481 Loss(val): 0.078524
  590. [10:50:22] Epoch 86: Loss(train): 0.077480 Loss(val): 0.078528
  591. [10:53:34] Epoch 87: Loss(train): 0.077477 Loss(val): 0.078526
  592. [10:56:41] Epoch 88: Loss(train): 0.077476 Loss(val): 0.078521
  593. [10:59:47] Epoch 89: Loss(train): 0.077475 Loss(val): 0.078524
  594. [11:03:39] Epoch 90: Loss(train): 0.077473 Loss(val): 0.078523
  595. [11:07:25] Epoch 91: Loss(train): 0.077472 Loss(val): 0.078522
  596. [11:10:54] Epoch 92: Loss(train): 0.077471 Loss(val): 0.078519
  597. [11:14:24] Epoch 93: Loss(train): 0.077469 Loss(val): 0.078519
  598. [11:17:48] Epoch 94: Loss(train): 0.077468 Loss(val): 0.078517
  599. [11:21:07] Epoch 95: Loss(train): 0.077467 Loss(val): 0.078516
  600. [11:24:20] Epoch 96: Loss(train): 0.077466 Loss(val): 0.078516
  601. [11:27:26] Epoch 97: Loss(train): 0.077465 Loss(val): 0.078516
  602. [11:30:32] Epoch 98: Loss(train): 0.077464 Loss(val): 0.078514
  603. [11:34:32] Epoch 99: Loss(train): 0.077464 Loss(val): 0.078514
  604. [11:38:37] Epoch 100: Loss(train): 0.077463 Loss(val): 0.078512
  605. [11:42:36] Epoch 101: Loss(train): 0.077462 Loss(val): 0.078514
  606. [11:46:02] Epoch 102: Loss(train): 0.077461 Loss(val): 0.078513
  607. [11:49:25] Epoch 103: Loss(train): 0.077460 Loss(val): 0.078513
  608. [11:52:52] Epoch 104: Loss(train): 0.077460 Loss(val): 0.078512
  609. [11:56:09] Epoch 105: Loss(train): 0.077460 Loss(val): 0.078513
  610. [11:59:18] Epoch 106: Loss(train): 0.077459 Loss(val): 0.078512
  611. [12:02:33] Epoch 107: Loss(train): 0.077459 Loss(val): 0.078512
  612. [12:06:57] Epoch 108: Loss(train): 0.077458 Loss(val): 0.078510
  613. [12:11:14] Epoch 109: Loss(train): 0.077458 Loss(val): 0.078511
  614. [12:14:53] Epoch 110: Loss(train): 0.077457 Loss(val): 0.078510
  615. [12:18:21] Epoch 111: Loss(train): 0.077457 Loss(val): 0.078508
  616. [12:21:48] Epoch 112: Loss(train): 0.077456 Loss(val): 0.078508
  617. [12:24:58] Epoch 113: Loss(train): 0.077456 Loss(val): 0.078507
  618. [12:28:09] Epoch 114: Loss(train): 0.077456 Loss(val): 0.078506
  619. [12:31:19] Epoch 115: Loss(train): 0.077455 Loss(val): 0.078507
  620. [12:34:43] Epoch 116: Loss(train): 0.077455 Loss(val): 0.078507
  621. [12:38:17] Epoch 117: Loss(train): 0.077455 Loss(val): 0.078507
  622. [12:41:56] Epoch 118: Loss(train): 0.077455 Loss(val): 0.078506
  623. [12:45:28] Epoch 119: Loss(train): 0.077455 Loss(val): 0.078507
  624. [12:48:53] Epoch 120: Loss(train): 0.077454 Loss(val): 0.078506
  625. [12:52:25] Epoch 121: Loss(train): 0.077454 Loss(val): 0.078506
  626. [12:55:46] Epoch 122: Loss(train): 0.077454 Loss(val): 0.078507
  627. [12:59:20] Epoch 123: Loss(train): 0.077454 Loss(val): 0.078506
  628. [13:02:35] Epoch 124: Loss(train): 0.077454 Loss(val): 0.078506
  629. [13:05:54] Epoch 125: Loss(train): 0.077454 Loss(val): 0.078506
  630. [13:09:08] Epoch 126: Loss(train): 0.077454 Loss(val): 0.078506
  631. [13:12:19] Epoch 127: Loss(train): 0.077454 Loss(val): 0.078506
  632. [13:16:39] Epoch 128: Loss(train): 0.077454 Loss(val): 0.078505
  633. [13:20:46] Epoch 129: Loss(train): 0.077453 Loss(val): 0.078505
  634. [13:24:20] Epoch 130: Loss(train): 0.077453 Loss(val): 0.078505
  635. [13:27:57] Epoch 131: Loss(train): 0.077453 Loss(val): 0.078505
  636. [13:31:29] Epoch 132: Loss(train): 0.077453 Loss(val): 0.078504
  637. [13:34:57] Epoch 133: Loss(train): 0.077453 Loss(val): 0.078504
  638. [13:38:20] Epoch 134: Loss(train): 0.077453 Loss(val): 0.078504
  639. [13:41:36] Epoch 135: Loss(train): 0.077453 Loss(val): 0.078504
  640. [13:44:56] Epoch 136: Loss(train): 0.077453 Loss(val): 0.078504
  641. [13:48:43] Epoch 137: Loss(train): 0.077453 Loss(val): 0.078504
  642. [13:52:31] Epoch 138: Loss(train): 0.077453 Loss(val): 0.078504
  643. [13:56:19] Epoch 139: Loss(train): 0.077453 Loss(val): 0.078504
  644. [13:59:54] Epoch 140: Loss(train): 0.077453 Loss(val): 0.078504
  645. [14:03:34] Epoch 141: Loss(train): 0.077453 Loss(val): 0.078504