log_27_09_2019.log 35 KB

  1. --------[27_09_2019 16:51:25]--------
  2. second stage Hyperparameter Tuning with 1 net
  3. Configuration learning_rate=0.03, decay_step=20
  4. [16:52:47] INIT Loss(val): 0.148451 Accuracy: 0.112245
  5. [16:54:51] Epoch 1: Loss(train): 0.089663 Loss(val): 0.088737
  6. [16:55:10] Epoch 2: Loss(train): 0.070667 Loss(val): 0.068928
  7. [16:55:27] Epoch 3: Loss(train): 0.063542 Loss(val): 0.062128
  8. [16:55:43] Epoch 4: Loss(train): 0.060417 Loss(val): 0.059723
  9. [16:56:01] Epoch 5: Loss(train): 0.059493 Loss(val): 0.059026
  10. [16:56:19] Epoch 6: Loss(train): 0.058942 Loss(val): 0.058448
  11. [16:56:36] Epoch 7: Loss(train): 0.058415 Loss(val): 0.057891
  12. [16:56:56] Epoch 8: Loss(train): 0.057964 Loss(val): 0.057296
  13. [16:57:13] Epoch 9: Loss(train): 0.057504 Loss(val): 0.056888
  14. [16:57:31] Epoch 10: Loss(train): 0.056998 Loss(val): 0.056621
  15. [16:57:48] Epoch 11: Loss(train): 0.056863 Loss(val): 0.056618
  16. [16:58:07] Epoch 12: Loss(train): 0.056772 Loss(val): 0.056483
  17. [16:58:23] Epoch 13: Loss(train): 0.056706 Loss(val): 0.056282
  18. [16:58:39] Epoch 14: Loss(train): 0.056430 Loss(val): 0.055929
  19. [16:58:55] Epoch 15: Loss(train): 0.056260 Loss(val): 0.055614
  20. [16:59:12] Epoch 16: Loss(train): 0.055839 Loss(val): 0.055432
  21. [16:59:30] Epoch 17: Loss(train): 0.055648 Loss(val): 0.055394
  22. [16:59:54] Epoch 18: Loss(train): 0.055889 Loss(val): 0.055650
  23. [17:00:12] Epoch 19: Loss(train): 0.056102 Loss(val): 0.055804
  24. [17:00:28] Epoch 20: Loss(train): 0.055990 Loss(val): 0.055733
  25. [17:00:45] Epoch 21: Loss(train): 0.056002 Loss(val): 0.055650
  26. [17:01:01] Epoch 22: Loss(train): 0.055604 Loss(val): 0.055251
  27. [17:01:18] Epoch 23: Loss(train): 0.054814 Loss(val): 0.054507
  28. [17:01:35] Epoch 24: Loss(train): 0.054289 Loss(val): 0.053961
  29. [17:01:59] Epoch 25: Loss(train): 0.054040 Loss(val): 0.053693
  30. [17:02:16] Epoch 26: Loss(train): 0.054076 Loss(val): 0.053621
  31. [17:02:32] Epoch 27: Loss(train): 0.054120 Loss(val): 0.053601
  32. [17:02:49] Epoch 28: Loss(train): 0.054555 Loss(val): 0.053849
  33. [17:03:06] Epoch 29: Loss(train): 0.055195 Loss(val): 0.054283
  34. [17:03:24] Epoch 30: Loss(train): 0.055647 Loss(val): 0.054590
  35. [17:03:41] Epoch 31: Loss(train): 0.055832 Loss(val): 0.054655
  36. [17:03:58] Epoch 32: Loss(train): 0.055375 Loss(val): 0.054319
  37. [17:04:16] Epoch 33: Loss(train): 0.054487 Loss(val): 0.053729
  38. [17:04:33] Epoch 34: Loss(train): 0.053432 Loss(val): 0.053076
  39. [17:04:50] Epoch 35: Loss(train): 0.052729 Loss(val): 0.052693
  40. [17:05:08] Epoch 36: Loss(train): 0.052390 Loss(val): 0.052524
  41. [17:05:25] Epoch 37: Loss(train): 0.052178 Loss(val): 0.052437
  42. [17:05:43] Epoch 38: Loss(train): 0.052099 Loss(val): 0.052385
  43. [17:06:00] Epoch 39: Loss(train): 0.052070 Loss(val): 0.052320
  44. [17:06:17] Epoch 40: Loss(train): 0.052017 Loss(val): 0.052243
  45. [17:06:35] Epoch 41: Loss(train): 0.051925 Loss(val): 0.052159
  46. [17:06:52] Epoch 42: Loss(train): 0.051824 Loss(val): 0.052056
  47. [17:07:10] Epoch 43: Loss(train): 0.051765 Loss(val): 0.051970
  48. [17:07:27] Epoch 44: Loss(train): 0.051683 Loss(val): 0.051933
  49. [17:07:45] Epoch 45: Loss(train): 0.051606 Loss(val): 0.051889
  50. [17:08:01] Epoch 46: Loss(train): 0.051533 Loss(val): 0.051864
  51. [17:08:18] Epoch 47: Loss(train): 0.051464 Loss(val): 0.051846
  52. [17:08:35] Epoch 48: Loss(train): 0.051401 Loss(val): 0.051838
  53. [17:08:52] Epoch 49: Loss(train): 0.051325 Loss(val): 0.051843
  54. [17:09:09] Epoch 50: Loss(train): 0.051266 Loss(val): 0.051860
  55. [17:09:26] Epoch 51: Loss(train): 0.051222 Loss(val): 0.051899
  56. [17:09:43] Epoch 52: Loss(train): 0.051193 Loss(val): 0.051918
  57. [17:10:01] Epoch 53: Loss(train): 0.051176 Loss(val): 0.051968
  58. [17:10:18] Epoch 54: Loss(train): 0.051162 Loss(val): 0.052006
  59. [17:10:35] Epoch 55: Loss(train): 0.051134 Loss(val): 0.051998
  60. [17:10:52] Epoch 56: Loss(train): 0.051111 Loss(val): 0.051992
  61. [17:11:10] Epoch 57: Loss(train): 0.051075 Loss(val): 0.051958
  62. [17:11:28] Epoch 58: Loss(train): 0.051050 Loss(val): 0.051941
  63. [17:11:45] Epoch 59: Loss(train): 0.051010 Loss(val): 0.051883
  64. [17:12:03] Epoch 60: Loss(train): 0.050980 Loss(val): 0.051837
  65. [17:12:20] Epoch 61: Loss(train): 0.050952 Loss(val): 0.051785
  66. [17:12:37] Epoch 62: Loss(train): 0.050934 Loss(val): 0.051770
  67. [17:12:55] Epoch 63: Loss(train): 0.050916 Loss(val): 0.051740
  68. [17:13:12] Epoch 64: Loss(train): 0.050899 Loss(val): 0.051707
  69. [17:13:29] Epoch 65: Loss(train): 0.050887 Loss(val): 0.051699
  70. [17:13:48] Epoch 66: Loss(train): 0.050875 Loss(val): 0.051674
  71. [17:14:05] Epoch 67: Loss(train): 0.050866 Loss(val): 0.051669
  72. [17:14:22] Epoch 68: Loss(train): 0.050856 Loss(val): 0.051652
  73. [17:14:40] Epoch 69: Loss(train): 0.050847 Loss(val): 0.051641
  74. [17:14:57] Epoch 70: Loss(train): 0.050839 Loss(val): 0.051635
  75. Converged at Loss(train): 0.051801, Loss(val): 0.052561 in epoch 70 with accuracy(val): 0.654473
  76. Configuration learning_rate=0.03, decay_step=40
  77. [17:15:12] INIT Loss(val): 0.149915 Accuracy: 0.098707
  78. [17:15:33] Epoch 1: Loss(train): 0.088789 Loss(val): 0.087691
  79. [17:15:50] Epoch 2: Loss(train): 0.068032 Loss(val): 0.067004
  80. [17:16:08] Epoch 3: Loss(train): 0.063522 Loss(val): 0.062396
  81. [17:16:26] Epoch 4: Loss(train): 0.061677 Loss(val): 0.060596
  82. [17:16:43] Epoch 5: Loss(train): 0.060235 Loss(val): 0.059540
  83. [17:17:01] Epoch 6: Loss(train): 0.060055 Loss(val): 0.059479
  84. [17:17:18] Epoch 7: Loss(train): 0.059696 Loss(val): 0.058983
  85. [17:17:36] Epoch 8: Loss(train): 0.058420 Loss(val): 0.057931
  86. [17:17:53] Epoch 9: Loss(train): 0.057890 Loss(val): 0.057784
  87. [17:18:11] Epoch 10: Loss(train): 0.057887 Loss(val): 0.057939
  88. [17:18:28] Epoch 11: Loss(train): 0.058021 Loss(val): 0.058248
  89. [17:18:45] Epoch 12: Loss(train): 0.057937 Loss(val): 0.058021
  90. [17:19:03] Epoch 13: Loss(train): 0.057402 Loss(val): 0.057456
  91. [17:19:20] Epoch 14: Loss(train): 0.056914 Loss(val): 0.057007
  92. [17:19:37] Epoch 15: Loss(train): 0.056709 Loss(val): 0.056965
  93. [17:19:55] Epoch 16: Loss(train): 0.056852 Loss(val): 0.057157
  94. [17:20:13] Epoch 17: Loss(train): 0.057367 Loss(val): 0.057542
  95. [17:20:31] Epoch 18: Loss(train): 0.057601 Loss(val): 0.057665
  96. [17:20:49] Epoch 19: Loss(train): 0.057731 Loss(val): 0.057718
  97. [17:21:07] Epoch 20: Loss(train): 0.057218 Loss(val): 0.057077
  98. [17:21:24] Epoch 21: Loss(train): 0.056135 Loss(val): 0.056025
  99. [17:21:43] Epoch 22: Loss(train): 0.055024 Loss(val): 0.054987
  100. [17:22:00] Epoch 23: Loss(train): 0.054295 Loss(val): 0.054281
  101. [17:22:18] Epoch 24: Loss(train): 0.053959 Loss(val): 0.053951
  102. [17:22:36] Epoch 25: Loss(train): 0.053691 Loss(val): 0.053717
  103. [17:22:53] Epoch 26: Loss(train): 0.053627 Loss(val): 0.053622
  104. [17:23:11] Epoch 27: Loss(train): 0.053773 Loss(val): 0.053724
  105. [17:23:29] Epoch 28: Loss(train): 0.054236 Loss(val): 0.053973
  106. [17:23:47] Epoch 29: Loss(train): 0.054522 Loss(val): 0.054070
  107. [17:24:04] Epoch 30: Loss(train): 0.054943 Loss(val): 0.054246
  108. [17:24:22] Epoch 31: Loss(train): 0.055004 Loss(val): 0.054189
  109. [17:24:40] Epoch 32: Loss(train): 0.054876 Loss(val): 0.053994
  110. [17:24:57] Epoch 33: Loss(train): 0.054486 Loss(val): 0.053667
  111. [17:25:15] Epoch 34: Loss(train): 0.053800 Loss(val): 0.053187
  112. [17:25:33] Epoch 35: Loss(train): 0.053172 Loss(val): 0.052797
  113. [17:25:51] Epoch 36: Loss(train): 0.052741 Loss(val): 0.052563
  114. [17:26:09] Epoch 37: Loss(train): 0.052461 Loss(val): 0.052420
  115. [17:26:27] Epoch 38: Loss(train): 0.052257 Loss(val): 0.052302
  116. [17:26:46] Epoch 39: Loss(train): 0.052129 Loss(val): 0.052214
  117. [17:27:14] Epoch 40: Loss(train): 0.052035 Loss(val): 0.052163
  118. [17:27:44] Epoch 41: Loss(train): 0.051944 Loss(val): 0.052115
  119. [17:28:14] Epoch 42: Loss(train): 0.051880 Loss(val): 0.052084
  120. [17:28:31] Epoch 43: Loss(train): 0.051804 Loss(val): 0.052055
  121. [17:28:51] Epoch 44: Loss(train): 0.051738 Loss(val): 0.052032
  122. [17:29:18] Epoch 45: Loss(train): 0.051650 Loss(val): 0.052006
  123. [17:29:47] Epoch 46: Loss(train): 0.051562 Loss(val): 0.051986
  124. [17:30:09] Epoch 47: Loss(train): 0.051479 Loss(val): 0.051978
  125. [17:30:29] Epoch 48: Loss(train): 0.051412 Loss(val): 0.051979
  126. [17:30:54] Epoch 49: Loss(train): 0.051361 Loss(val): 0.051999
  127. [17:31:19] Epoch 50: Loss(train): 0.051323 Loss(val): 0.052038
  128. [17:31:38] Epoch 51: Loss(train): 0.051302 Loss(val): 0.052081
  129. [17:31:56] Epoch 52: Loss(train): 0.051278 Loss(val): 0.052096
  130. [17:32:15] Epoch 53: Loss(train): 0.051261 Loss(val): 0.052118
  131. [17:32:46] Epoch 54: Loss(train): 0.051239 Loss(val): 0.052120
  132. [17:33:13] Epoch 55: Loss(train): 0.051207 Loss(val): 0.052095
  133. Converged at Loss(train): 0.052161, Loss(val): 0.053005 in epoch 55 with accuracy(val): 0.645272
  134. Configuration learning_rate=0.03, decay_step=60
  135. [17:33:32] INIT Loss(val): 0.165520 Accuracy: 0.073197
  136. [17:33:53] Epoch 1: Loss(train): 0.090615 Loss(val): 0.089994
  137. [17:34:11] Epoch 2: Loss(train): 0.069812 Loss(val): 0.068600
  138. [17:34:30] Epoch 3: Loss(train): 0.064214 Loss(val): 0.063470
  139. [17:34:49] Epoch 4: Loss(train): 0.061411 Loss(val): 0.061059
  140. [17:35:10] Epoch 5: Loss(train): 0.060100 Loss(val): 0.059814
  141. [17:35:31] Epoch 6: Loss(train): 0.059087 Loss(val): 0.059043
  142. [17:35:52] Epoch 7: Loss(train): 0.058686 Loss(val): 0.058661
  143. [17:36:10] Epoch 8: Loss(train): 0.058388 Loss(val): 0.058214
  144. [17:36:30] Epoch 9: Loss(train): 0.058066 Loss(val): 0.057698
  145. [17:36:51] Epoch 10: Loss(train): 0.057511 Loss(val): 0.057332
  146. [17:37:16] Epoch 11: Loss(train): 0.057068 Loss(val): 0.057097
  147. [17:37:35] Epoch 12: Loss(train): 0.057114 Loss(val): 0.057160
  148. [17:37:55] Epoch 13: Loss(train): 0.056885 Loss(val): 0.056944
  149. [17:38:15] Epoch 14: Loss(train): 0.056694 Loss(val): 0.056566
  150. [17:38:35] Epoch 15: Loss(train): 0.056233 Loss(val): 0.056134
  151. [17:38:55] Epoch 16: Loss(train): 0.055359 Loss(val): 0.055511
  152. [17:39:15] Epoch 17: Loss(train): 0.055256 Loss(val): 0.055431
  153. [17:39:35] Epoch 18: Loss(train): 0.055643 Loss(val): 0.055726
  154. [17:39:54] Epoch 19: Loss(train): 0.056205 Loss(val): 0.056171
  155. [17:40:14] Epoch 20: Loss(train): 0.056926 Loss(val): 0.056773
  156. [17:40:34] Epoch 21: Loss(train): 0.057006 Loss(val): 0.056768
  157. [17:40:53] Epoch 22: Loss(train): 0.056528 Loss(val): 0.056344
  158. [17:41:12] Epoch 23: Loss(train): 0.055433 Loss(val): 0.055290
  159. [17:41:31] Epoch 24: Loss(train): 0.054533 Loss(val): 0.054371
  160. [17:41:50] Epoch 25: Loss(train): 0.054282 Loss(val): 0.054007
  161. [17:42:10] Epoch 26: Loss(train): 0.054135 Loss(val): 0.053829
  162. [17:42:29] Epoch 27: Loss(train): 0.054173 Loss(val): 0.053806
  163. [17:42:48] Epoch 28: Loss(train): 0.054269 Loss(val): 0.053833
  164. [17:43:07] Epoch 29: Loss(train): 0.054556 Loss(val): 0.054030
  165. [17:43:27] Epoch 30: Loss(train): 0.054662 Loss(val): 0.054104
  166. [17:43:47] Epoch 31: Loss(train): 0.054619 Loss(val): 0.054089
  167. [17:44:06] Epoch 32: Loss(train): 0.054174 Loss(val): 0.053796
  168. [17:44:25] Epoch 33: Loss(train): 0.053649 Loss(val): 0.053463
  169. [17:44:44] Epoch 34: Loss(train): 0.052997 Loss(val): 0.053043
  170. [17:45:03] Epoch 35: Loss(train): 0.052595 Loss(val): 0.052792
  171. [17:45:22] Epoch 36: Loss(train): 0.052346 Loss(val): 0.052621
  172. [17:45:41] Epoch 37: Loss(train): 0.052151 Loss(val): 0.052500
  173. [17:46:00] Epoch 38: Loss(train): 0.052059 Loss(val): 0.052406
  174. [17:46:19] Epoch 39: Loss(train): 0.052001 Loss(val): 0.052335
  175. [17:46:38] Epoch 40: Loss(train): 0.051941 Loss(val): 0.052277
  176. [17:46:57] Epoch 41: Loss(train): 0.051894 Loss(val): 0.052226
  177. [17:47:17] Epoch 42: Loss(train): 0.051868 Loss(val): 0.052181
  178. [17:47:36] Epoch 43: Loss(train): 0.051789 Loss(val): 0.052130
  179. [17:47:55] Epoch 44: Loss(train): 0.051722 Loss(val): 0.052078
  180. [17:48:13] Epoch 45: Loss(train): 0.051639 Loss(val): 0.052052
  181. [17:48:33] Epoch 46: Loss(train): 0.051541 Loss(val): 0.052007
  182. [17:48:52] Epoch 47: Loss(train): 0.051465 Loss(val): 0.052000
  183. [17:49:12] Epoch 48: Loss(train): 0.051404 Loss(val): 0.051979
  184. [17:49:31] Epoch 49: Loss(train): 0.051339 Loss(val): 0.051980
  185. [17:49:51] Epoch 50: Loss(train): 0.051292 Loss(val): 0.051991
  186. [17:50:11] Epoch 51: Loss(train): 0.051249 Loss(val): 0.051991
  187. [17:50:30] Epoch 52: Loss(train): 0.051222 Loss(val): 0.052008
  188. [17:50:50] Epoch 53: Loss(train): 0.051198 Loss(val): 0.052018
  189. [17:51:09] Epoch 54: Loss(train): 0.051173 Loss(val): 0.052017
  190. [17:51:29] Epoch 55: Loss(train): 0.051146 Loss(val): 0.052003
  191. [17:51:48] Epoch 56: Loss(train): 0.051118 Loss(val): 0.051979
  192. [17:52:08] Epoch 57: Loss(train): 0.051087 Loss(val): 0.051948
  193. [17:52:27] Epoch 58: Loss(train): 0.051059 Loss(val): 0.051916
  194. [17:52:48] Epoch 59: Loss(train): 0.051032 Loss(val): 0.051880
  195. [17:53:08] Epoch 60: Loss(train): 0.051009 Loss(val): 0.051852
  196. Converged at Loss(train): 0.051977, Loss(val): 0.052759 in epoch 60 with accuracy(val): 0.642109
  197. Configuration learning_rate=0.01, decay_step=20
  198. [17:53:25] INIT Loss(val): 0.187323 Accuracy: 0.077789
  199. [17:53:47] Epoch 1: Loss(train): 0.087576 Loss(val): 0.086033
  200. [17:54:07] Epoch 2: Loss(train): 0.068133 Loss(val): 0.066643
  201. [17:54:26] Epoch 3: Loss(train): 0.062122 Loss(val): 0.060931
  202. [17:54:46] Epoch 4: Loss(train): 0.058892 Loss(val): 0.058234
  203. [17:55:05] Epoch 5: Loss(train): 0.057786 Loss(val): 0.057269
  204. [17:55:24] Epoch 6: Loss(train): 0.057601 Loss(val): 0.057197
  205. [17:55:44] Epoch 7: Loss(train): 0.056949 Loss(val): 0.056568
  206. [17:56:03] Epoch 8: Loss(train): 0.056481 Loss(val): 0.056139
  207. [17:56:23] Epoch 9: Loss(train): 0.056194 Loss(val): 0.055886
  208. [17:56:43] Epoch 10: Loss(train): 0.055974 Loss(val): 0.055750
  209. [17:57:03] Epoch 11: Loss(train): 0.055800 Loss(val): 0.055777
  210. [17:57:22] Epoch 12: Loss(train): 0.055692 Loss(val): 0.055731
  211. [17:57:42] Epoch 13: Loss(train): 0.055691 Loss(val): 0.055779
  212. [17:58:02] Epoch 14: Loss(train): 0.055478 Loss(val): 0.055565
  213. [17:58:22] Epoch 15: Loss(train): 0.055101 Loss(val): 0.055146
  214. [17:58:42] Epoch 16: Loss(train): 0.054910 Loss(val): 0.054922
  215. [17:59:01] Epoch 17: Loss(train): 0.054790 Loss(val): 0.054875
  216. [17:59:21] Epoch 18: Loss(train): 0.054960 Loss(val): 0.055119
  217. [17:59:41] Epoch 19: Loss(train): 0.055063 Loss(val): 0.055270
  218. [18:00:02] Epoch 20: Loss(train): 0.054944 Loss(val): 0.055221
  219. [18:00:23] Epoch 21: Loss(train): 0.054798 Loss(val): 0.055048
  220. [18:00:44] Epoch 22: Loss(train): 0.054379 Loss(val): 0.054582
  221. [18:01:05] Epoch 23: Loss(train): 0.053794 Loss(val): 0.053956
  222. [18:01:31] Epoch 24: Loss(train): 0.053113 Loss(val): 0.053302
  223. [18:01:56] Epoch 25: Loss(train): 0.052780 Loss(val): 0.052874
  224. [18:02:22] Epoch 26: Loss(train): 0.052543 Loss(val): 0.052660
  225. [18:02:56] Epoch 27: Loss(train): 0.052278 Loss(val): 0.052454
  226. [18:03:31] Epoch 28: Loss(train): 0.052146 Loss(val): 0.052352
  227. [18:03:55] Epoch 29: Loss(train): 0.052045 Loss(val): 0.052292
  228. [18:04:17] Epoch 30: Loss(train): 0.051909 Loss(val): 0.052243
  229. [18:04:38] Epoch 31: Loss(train): 0.051838 Loss(val): 0.052235
  230. [18:05:01] Epoch 32: Loss(train): 0.051793 Loss(val): 0.052183
  231. [18:05:34] Epoch 33: Loss(train): 0.051737 Loss(val): 0.052164
  232. [18:06:03] Epoch 34: Loss(train): 0.051672 Loss(val): 0.052101
  233. [18:06:27] Epoch 35: Loss(train): 0.051629 Loss(val): 0.052036
  234. [18:07:02] Epoch 36: Loss(train): 0.051564 Loss(val): 0.051960
  235. [18:07:36] Epoch 37: Loss(train): 0.051538 Loss(val): 0.051894
  236. [18:08:09] Epoch 38: Loss(train): 0.051462 Loss(val): 0.051794
  237. [18:08:38] Epoch 39: Loss(train): 0.051404 Loss(val): 0.051718
  238. [18:09:02] Epoch 40: Loss(train): 0.051335 Loss(val): 0.051661
  239. [18:09:22] Epoch 41: Loss(train): 0.051274 Loss(val): 0.051597
  240. [18:09:44] Epoch 42: Loss(train): 0.051173 Loss(val): 0.051555
  241. [18:10:09] Epoch 43: Loss(train): 0.051112 Loss(val): 0.051520
  242. [18:10:30] Epoch 44: Loss(train): 0.051043 Loss(val): 0.051497
  243. [18:10:52] Epoch 45: Loss(train): 0.050990 Loss(val): 0.051464
  244. [18:11:13] Epoch 46: Loss(train): 0.050939 Loss(val): 0.051442
  245. [18:11:34] Epoch 47: Loss(train): 0.050889 Loss(val): 0.051419
  246. [18:11:55] Epoch 48: Loss(train): 0.050855 Loss(val): 0.051395
  247. [18:12:22] Epoch 49: Loss(train): 0.050825 Loss(val): 0.051377
  248. [18:12:44] Epoch 50: Loss(train): 0.050809 Loss(val): 0.051350
  249. [18:13:13] Epoch 51: Loss(train): 0.050787 Loss(val): 0.051347
  250. [18:13:35] Epoch 52: Loss(train): 0.050774 Loss(val): 0.051326
  251. [18:13:57] Epoch 53: Loss(train): 0.050761 Loss(val): 0.051317
  252. [18:14:18] Epoch 54: Loss(train): 0.050748 Loss(val): 0.051308
  253. [18:14:40] Epoch 55: Loss(train): 0.050737 Loss(val): 0.051294
  254. Converged at Loss(train): 0.051697, Loss(val): 0.052241 in epoch 55 with accuracy(val): 0.656922
  255. Configuration learning_rate=0.01, decay_step=40
  256. [18:15:00] INIT Loss(val): 0.177921 Accuracy: 0.078452
  257. [18:15:25] Epoch 1: Loss(train): 0.085913 Loss(val): 0.084972
  258. [18:15:47] Epoch 2: Loss(train): 0.067994 Loss(val): 0.067212
  259. [18:16:08] Epoch 3: Loss(train): 0.064593 Loss(val): 0.063531
  260. [18:16:29] Epoch 4: Loss(train): 0.062117 Loss(val): 0.061084
  261. [18:16:49] Epoch 5: Loss(train): 0.060363 Loss(val): 0.060003
  262. [18:17:10] Epoch 6: Loss(train): 0.059131 Loss(val): 0.058921
  263. [18:17:31] Epoch 7: Loss(train): 0.058338 Loss(val): 0.058271
  264. [18:17:52] Epoch 8: Loss(train): 0.057717 Loss(val): 0.057788
  265. [18:18:15] Epoch 9: Loss(train): 0.057529 Loss(val): 0.057686
  266. [18:18:35] Epoch 10: Loss(train): 0.057496 Loss(val): 0.057720
  267. [18:18:56] Epoch 11: Loss(train): 0.057429 Loss(val): 0.057801
  268. [18:19:17] Epoch 12: Loss(train): 0.057253 Loss(val): 0.057779
  269. [18:19:40] Epoch 13: Loss(train): 0.056893 Loss(val): 0.057320
  270. [18:20:02] Epoch 14: Loss(train): 0.056243 Loss(val): 0.056562
  271. [18:20:22] Epoch 15: Loss(train): 0.056184 Loss(val): 0.056340
  272. [18:20:45] Epoch 16: Loss(train): 0.056221 Loss(val): 0.056419
  273. [18:21:06] Epoch 17: Loss(train): 0.056585 Loss(val): 0.056762
  274. [18:21:28] Epoch 18: Loss(train): 0.056598 Loss(val): 0.056918
  275. [18:21:49] Epoch 19: Loss(train): 0.056542 Loss(val): 0.056837
  276. [18:22:10] Epoch 20: Loss(train): 0.056023 Loss(val): 0.056362
  277. [18:22:34] Epoch 21: Loss(train): 0.055122 Loss(val): 0.055430
  278. [18:22:55] Epoch 22: Loss(train): 0.054322 Loss(val): 0.054565
  279. [18:23:15] Epoch 23: Loss(train): 0.053714 Loss(val): 0.053932
  280. [18:24:01] Epoch 24: Loss(train): 0.053428 Loss(val): 0.053591
  281. [18:24:44] Epoch 25: Loss(train): 0.053074 Loss(val): 0.053378
  282. [18:25:28] Epoch 26: Loss(train): 0.052950 Loss(val): 0.053381
  283. [18:26:12] Epoch 27: Loss(train): 0.052878 Loss(val): 0.053369
  284. [18:26:55] Epoch 28: Loss(train): 0.052749 Loss(val): 0.053334
  285. [18:27:39] Epoch 29: Loss(train): 0.052654 Loss(val): 0.053279
  286. [18:28:23] Epoch 30: Loss(train): 0.052551 Loss(val): 0.053160
  287. [18:29:07] Epoch 31: Loss(train): 0.052434 Loss(val): 0.052998
  288. [18:29:51] Epoch 32: Loss(train): 0.052321 Loss(val): 0.052879
  289. [18:30:34] Epoch 33: Loss(train): 0.052148 Loss(val): 0.052693
  290. [18:31:20] Epoch 34: Loss(train): 0.052083 Loss(val): 0.052561
  291. [18:32:09] Epoch 35: Loss(train): 0.051987 Loss(val): 0.052415
  292. [18:33:10] Epoch 36: Loss(train): 0.051942 Loss(val): 0.052319
  293. [18:34:01] Epoch 37: Loss(train): 0.051942 Loss(val): 0.052249
  294. [18:35:10] Epoch 38: Loss(train): 0.051861 Loss(val): 0.052164
  295. [18:36:23] Epoch 39: Loss(train): 0.051847 Loss(val): 0.052113
  296. [18:37:32] Epoch 40: Loss(train): 0.051747 Loss(val): 0.052048
  297. [18:38:32] Epoch 41: Loss(train): 0.051614 Loss(val): 0.051969
  298. [18:39:30] Epoch 42: Loss(train): 0.051501 Loss(val): 0.051891
  299. [18:40:41] Epoch 43: Loss(train): 0.051425 Loss(val): 0.051849
  300. [18:41:25] Epoch 44: Loss(train): 0.051336 Loss(val): 0.051807
  301. [18:42:13] Epoch 45: Loss(train): 0.051292 Loss(val): 0.051769
  302. [18:43:09] Epoch 46: Loss(train): 0.051229 Loss(val): 0.051732
  303. [18:43:55] Epoch 47: Loss(train): 0.051206 Loss(val): 0.051705
  304. [18:44:47] Epoch 48: Loss(train): 0.051182 Loss(val): 0.051671
  305. [18:45:34] Epoch 49: Loss(train): 0.051149 Loss(val): 0.051648
  306. [18:46:25] Epoch 50: Loss(train): 0.051119 Loss(val): 0.051630
  307. [18:47:15] Epoch 51: Loss(train): 0.051105 Loss(val): 0.051612
  308. [18:48:02] Epoch 52: Loss(train): 0.051082 Loss(val): 0.051598
  309. [18:48:48] Epoch 53: Loss(train): 0.051052 Loss(val): 0.051591
  310. [18:49:37] Epoch 54: Loss(train): 0.051018 Loss(val): 0.051580
  311. Converged at Loss(train): 0.051932, Loss(val): 0.052555 in epoch 54 with accuracy(val): 0.645799
  312. Configuration learning_rate=0.01, decay_step=60
  313. [18:50:03] INIT Loss(val): 0.131412 Accuracy: 0.101854
  314. [18:50:48] Epoch 1: Loss(train): 0.080744 Loss(val): 0.079622
  315. [18:51:38] Epoch 2: Loss(train): 0.066512 Loss(val): 0.064695
  316. [18:52:26] Epoch 3: Loss(train): 0.062622 Loss(val): 0.061448
  317. [18:53:13] Epoch 4: Loss(train): 0.060374 Loss(val): 0.059260
  318. [18:54:00] Epoch 5: Loss(train): 0.058809 Loss(val): 0.058090
  319. [18:54:47] Epoch 6: Loss(train): 0.057931 Loss(val): 0.057646
  320. [18:55:35] Epoch 7: Loss(train): 0.057441 Loss(val): 0.057309
  321. [18:56:22] Epoch 8: Loss(train): 0.057106 Loss(val): 0.056946
  322. [18:57:08] Epoch 9: Loss(train): 0.056751 Loss(val): 0.056449
  323. [18:57:55] Epoch 10: Loss(train): 0.056720 Loss(val): 0.056406
  324. [18:58:42] Epoch 11: Loss(train): 0.056405 Loss(val): 0.056351
  325. [18:59:30] Epoch 12: Loss(train): 0.056383 Loss(val): 0.056457
  326. [19:00:17] Epoch 13: Loss(train): 0.056200 Loss(val): 0.056471
  327. [19:01:04] Epoch 14: Loss(train): 0.055845 Loss(val): 0.056025
  328. [19:01:52] Epoch 15: Loss(train): 0.055829 Loss(val): 0.055832
  329. [19:02:39] Epoch 16: Loss(train): 0.055686 Loss(val): 0.055626
  330. [19:03:25] Epoch 17: Loss(train): 0.055622 Loss(val): 0.055737
  331. [19:04:14] Epoch 18: Loss(train): 0.055771 Loss(val): 0.055958
  332. [19:05:05] Epoch 19: Loss(train): 0.055711 Loss(val): 0.056073
  333. [19:06:15] Epoch 20: Loss(train): 0.055973 Loss(val): 0.056337
  334. [19:07:04] Epoch 21: Loss(train): 0.055825 Loss(val): 0.056190
  335. [19:07:52] Epoch 22: Loss(train): 0.055313 Loss(val): 0.055599
  336. [19:08:41] Epoch 23: Loss(train): 0.054893 Loss(val): 0.055099
  337. [19:09:42] Epoch 24: Loss(train): 0.054391 Loss(val): 0.054471
  338. [19:10:44] Epoch 25: Loss(train): 0.053839 Loss(val): 0.053866
  339. [19:11:36] Epoch 26: Loss(train): 0.053508 Loss(val): 0.053500
  340. [19:12:40] Epoch 27: Loss(train): 0.053171 Loss(val): 0.053214
  341. [19:13:37] Epoch 28: Loss(train): 0.052970 Loss(val): 0.053028
  342. [19:14:26] Epoch 29: Loss(train): 0.052803 Loss(val): 0.052932
  343. [19:15:16] Epoch 30: Loss(train): 0.052774 Loss(val): 0.052891
  344. [19:16:07] Epoch 31: Loss(train): 0.052981 Loss(val): 0.052950
  345. [19:17:00] Epoch 32: Loss(train): 0.053209 Loss(val): 0.053051
  346. [19:18:01] Epoch 33: Loss(train): 0.053366 Loss(val): 0.053110
  347. [19:19:03] Epoch 34: Loss(train): 0.053671 Loss(val): 0.053226
  348. [19:19:51] Epoch 35: Loss(train): 0.053941 Loss(val): 0.053339
  349. [19:20:42] Epoch 36: Loss(train): 0.053890 Loss(val): 0.053279
  350. [19:21:33] Epoch 37: Loss(train): 0.053691 Loss(val): 0.053105
  351. [19:22:22] Epoch 38: Loss(train): 0.053180 Loss(val): 0.052768
  352. [19:23:23] Epoch 39: Loss(train): 0.052641 Loss(val): 0.052439
  353. [19:24:15] Epoch 40: Loss(train): 0.052294 Loss(val): 0.052223
  354. [19:25:05] Epoch 41: Loss(train): 0.052021 Loss(val): 0.052078
  355. [19:25:57] Epoch 42: Loss(train): 0.051852 Loss(val): 0.051986
  356. [19:26:46] Epoch 43: Loss(train): 0.051764 Loss(val): 0.051918
  357. [19:27:36] Epoch 44: Loss(train): 0.051702 Loss(val): 0.051857
  358. [19:28:29] Epoch 45: Loss(train): 0.051609 Loss(val): 0.051782
  359. [19:29:18] Epoch 46: Loss(train): 0.051520 Loss(val): 0.051719
  360. [19:30:08] Epoch 47: Loss(train): 0.051464 Loss(val): 0.051675
  361. [19:30:57] Epoch 48: Loss(train): 0.051390 Loss(val): 0.051640
  362. [19:31:47] Epoch 49: Loss(train): 0.051337 Loss(val): 0.051621
  363. [19:32:37] Epoch 50: Loss(train): 0.051246 Loss(val): 0.051601
  364. [19:33:26] Epoch 51: Loss(train): 0.051194 Loss(val): 0.051589
  365. [19:34:16] Epoch 52: Loss(train): 0.051123 Loss(val): 0.051589
  366. [19:35:06] Epoch 53: Loss(train): 0.051061 Loss(val): 0.051593
  367. [19:35:56] Epoch 54: Loss(train): 0.051009 Loss(val): 0.051610
  368. [19:37:03] Epoch 55: Loss(train): 0.050970 Loss(val): 0.051631
  369. [19:38:13] Epoch 56: Loss(train): 0.050941 Loss(val): 0.051682
  370. [19:39:31] Epoch 57: Loss(train): 0.050926 Loss(val): 0.051712
  371. [19:40:44] Epoch 58: Loss(train): 0.050924 Loss(val): 0.051774
  372. [19:41:48] Epoch 59: Loss(train): 0.050913 Loss(val): 0.051794
  373. [19:42:38] Epoch 60: Loss(train): 0.050901 Loss(val): 0.051803
  374. [19:43:59] Epoch 61: Loss(train): 0.050880 Loss(val): 0.051795
  375. [19:45:01] Epoch 62: Loss(train): 0.050853 Loss(val): 0.051767
  376. [19:46:02] Epoch 63: Loss(train): 0.050823 Loss(val): 0.051729
  377. Converged at Loss(train): 0.051745, Loss(val): 0.052544 in epoch 63 with accuracy(val): 0.647007
  378. Configuration learning_rate=0.003, decay_step=20
  379. [19:46:27] INIT Loss(val): 0.197653 Accuracy: 0.091241
  380. [19:47:20] Epoch 1: Loss(train): 0.079464 Loss(val): 0.077783
  381. [19:48:20] Epoch 2: Loss(train): 0.067149 Loss(val): 0.066139
  382. [19:49:25] Epoch 3: Loss(train): 0.062640 Loss(val): 0.061764
  383. [19:50:18] Epoch 4: Loss(train): 0.060908 Loss(val): 0.059928
  384. [19:51:14] Epoch 5: Loss(train): 0.058845 Loss(val): 0.058380
  385. [19:52:13] Epoch 6: Loss(train): 0.058177 Loss(val): 0.057776
  386. [19:53:06] Epoch 7: Loss(train): 0.057540 Loss(val): 0.057122
  387. [19:53:59] Epoch 8: Loss(train): 0.056814 Loss(val): 0.056477
  388. [19:54:56] Epoch 9: Loss(train): 0.056360 Loss(val): 0.056050
  389. [19:55:52] Epoch 10: Loss(train): 0.056028 Loss(val): 0.055813
  390. [19:56:48] Epoch 11: Loss(train): 0.055597 Loss(val): 0.055646
  391. [19:57:44] Epoch 12: Loss(train): 0.055225 Loss(val): 0.055342
  392. [19:58:37] Epoch 13: Loss(train): 0.055302 Loss(val): 0.055394
  393. [19:59:31] Epoch 14: Loss(train): 0.055238 Loss(val): 0.055283
  394. [20:00:26] Epoch 15: Loss(train): 0.055095 Loss(val): 0.055047
  395. [20:01:19] Epoch 16: Loss(train): 0.054809 Loss(val): 0.054673
  396. [20:02:12] Epoch 17: Loss(train): 0.054337 Loss(val): 0.054301
  397. [20:03:06] Epoch 18: Loss(train): 0.053958 Loss(val): 0.054079
  398. [20:03:59] Epoch 19: Loss(train): 0.053873 Loss(val): 0.054106
  399. [20:04:51] Epoch 20: Loss(train): 0.054026 Loss(val): 0.054221
  400. [20:05:45] Epoch 21: Loss(train): 0.054201 Loss(val): 0.054455
  401. [20:06:38] Epoch 22: Loss(train): 0.054305 Loss(val): 0.054560
  402. [20:07:32] Epoch 23: Loss(train): 0.054394 Loss(val): 0.054562
  403. [20:08:25] Epoch 24: Loss(train): 0.054172 Loss(val): 0.054326
  404. [20:09:19] Epoch 25: Loss(train): 0.053743 Loss(val): 0.053848
  405. [20:10:12] Epoch 26: Loss(train): 0.053142 Loss(val): 0.053302
  406. [20:11:06] Epoch 27: Loss(train): 0.052741 Loss(val): 0.052896
  407. [20:11:59] Epoch 28: Loss(train): 0.052561 Loss(val): 0.052737
  408. [20:13:15] Epoch 29: Loss(train): 0.052489 Loss(val): 0.052637
  409. [20:14:12] Epoch 30: Loss(train): 0.052407 Loss(val): 0.052581
  410. [20:15:14] Epoch 31: Loss(train): 0.052487 Loss(val): 0.052619
  411. [20:16:17] Epoch 32: Loss(train): 0.052642 Loss(val): 0.052717
  412. [20:17:31] Epoch 33: Loss(train): 0.052745 Loss(val): 0.052786
  413. [20:18:41] Epoch 34: Loss(train): 0.052957 Loss(val): 0.052931
  414. [20:19:53] Epoch 35: Loss(train): 0.053042 Loss(val): 0.052976
  415. [20:20:52] Epoch 36: Loss(train): 0.053068 Loss(val): 0.052970
  416. [20:21:49] Epoch 37: Loss(train): 0.052881 Loss(val): 0.052853
  417. [20:22:59] Epoch 38: Loss(train): 0.052606 Loss(val): 0.052675
  418. [20:24:06] Epoch 39: Loss(train): 0.052210 Loss(val): 0.052439
  419. [20:25:05] Epoch 40: Loss(train): 0.051897 Loss(val): 0.052271
  420. [20:26:08] Epoch 41: Loss(train): 0.051670 Loss(val): 0.052125
  421. [20:27:04] Epoch 42: Loss(train): 0.051492 Loss(val): 0.052033
  422. [20:28:03] Epoch 43: Loss(train): 0.051401 Loss(val): 0.051973
  423. [20:28:59] Epoch 44: Loss(train): 0.051340 Loss(val): 0.051914
  424. [20:30:00] Epoch 45: Loss(train): 0.051286 Loss(val): 0.051879
  425. [20:31:02] Epoch 46: Loss(train): 0.051234 Loss(val): 0.051821
  426. [20:31:59] Epoch 47: Loss(train): 0.051182 Loss(val): 0.051773
  427. [20:32:55] Epoch 48: Loss(train): 0.051136 Loss(val): 0.051725
  428. [20:33:51] Epoch 49: Loss(train): 0.051084 Loss(val): 0.051684
  429. [20:34:47] Epoch 50: Loss(train): 0.051037 Loss(val): 0.051649
  430. [20:35:43] Epoch 51: Loss(train): 0.050997 Loss(val): 0.051624
  431. [20:36:43] Epoch 52: Loss(train): 0.050962 Loss(val): 0.051611
  432. [20:37:39] Epoch 53: Loss(train): 0.050926 Loss(val): 0.051608
  433. [20:38:34] Epoch 54: Loss(train): 0.050892 Loss(val): 0.051609
  434. [20:39:31] Epoch 55: Loss(train): 0.050861 Loss(val): 0.051612
  435. [20:40:26] Epoch 56: Loss(train): 0.050835 Loss(val): 0.051619
  436. [20:41:23] Epoch 57: Loss(train): 0.050810 Loss(val): 0.051647
  437. [20:42:18] Epoch 58: Loss(train): 0.050793 Loss(val): 0.051664
  438. [20:43:13] Epoch 59: Loss(train): 0.050780 Loss(val): 0.051697
  439. [20:44:09] Epoch 60: Loss(train): 0.050769 Loss(val): 0.051714
  440. [20:45:06] Epoch 61: Loss(train): 0.050759 Loss(val): 0.051723
  441. [20:46:02] Epoch 62: Loss(train): 0.050747 Loss(val): 0.051723
  442. [20:47:04] Epoch 63: Loss(train): 0.050739 Loss(val): 0.051730
  443. [20:48:21] Epoch 64: Loss(train): 0.050730 Loss(val): 0.051738
  444. [20:49:22] Epoch 65: Loss(train): 0.050708 Loss(val): 0.051697
  445. Converged at Loss(train): 0.051663, Loss(val): 0.052587 in epoch 65 with accuracy(val): 0.643129
  446. Configuration learning_rate=0.003, decay_step=40
  447. [20:49:52] INIT Loss(val): 0.131267 Accuracy: 0.083214
  448. [20:50:53] Epoch 1: Loss(train): 0.085294 Loss(val): 0.084576
  449. [20:52:03] Epoch 2: Loss(train): 0.067979 Loss(val): 0.066454
  450. [20:53:25] Epoch 3: Loss(train): 0.064692 Loss(val): 0.063468
  451. [20:54:50] Epoch 4: Loss(train): 0.060204 Loss(val): 0.059563
  452. [20:55:54] Epoch 5: Loss(train): 0.059113 Loss(val): 0.058688
  453. [20:56:51] Epoch 6: Loss(train): 0.058466 Loss(val): 0.058163
  454. [20:57:49] Epoch 7: Loss(train): 0.058073 Loss(val): 0.057866
  455. [20:58:55] Epoch 8: Loss(train): 0.057293 Loss(val): 0.057096
  456. [20:59:57] Epoch 9: Loss(train): 0.056591 Loss(val): 0.056324
  457. [21:01:01] Epoch 10: Loss(train): 0.056213 Loss(val): 0.056077
  458. [21:02:04] Epoch 11: Loss(train): 0.055718 Loss(val): 0.055704
  459. [21:03:08] Epoch 12: Loss(train): 0.055250 Loss(val): 0.055418
  460. [21:04:14] Epoch 13: Loss(train): 0.055137 Loss(val): 0.055309
  461. [21:05:16] Epoch 14: Loss(train): 0.055121 Loss(val): 0.055232
  462. [21:06:19] Epoch 15: Loss(train): 0.055151 Loss(val): 0.055145
  463. [21:07:20] Epoch 16: Loss(train): 0.055220 Loss(val): 0.055140
  464. [21:08:20] Epoch 17: Loss(train): 0.054638 Loss(val): 0.054753
  465. [21:09:19] Epoch 18: Loss(train): 0.054304 Loss(val): 0.054612
  466. [21:10:20] Epoch 19: Loss(train): 0.054102 Loss(val): 0.054496
  467. [21:11:21] Epoch 20: Loss(train): 0.054121 Loss(val): 0.054624
  468. [21:12:20] Epoch 21: Loss(train): 0.054457 Loss(val): 0.054914
  469. [21:13:20] Epoch 22: Loss(train): 0.054731 Loss(val): 0.055191
  470. [21:14:20] Epoch 23: Loss(train): 0.054807 Loss(val): 0.055170
  471. [21:15:19] Epoch 24: Loss(train): 0.054750 Loss(val): 0.055053
  472. [21:16:20] Epoch 25: Loss(train): 0.054323 Loss(val): 0.054540
  473. [21:17:19] Epoch 26: Loss(train): 0.053600 Loss(val): 0.053798
  474. [21:18:17] Epoch 27: Loss(train): 0.053148 Loss(val): 0.053314
  475. [21:19:16] Epoch 28: Loss(train): 0.052852 Loss(val): 0.053044
  476. [21:20:15] Epoch 29: Loss(train): 0.052607 Loss(val): 0.052848
  477. [21:21:39] Epoch 30: Loss(train): 0.052449 Loss(val): 0.052746
  478. [21:23:03] Epoch 31: Loss(train): 0.052500 Loss(val): 0.052788
  479. [21:24:22] Epoch 32: Loss(train): 0.052512 Loss(val): 0.052785
  480. [21:25:49] Epoch 33: Loss(train): 0.052571 Loss(val): 0.052789
  481. [21:26:54] Epoch 34: Loss(train): 0.052684 Loss(val): 0.052806
  482. [21:28:20] Epoch 35: Loss(train): 0.052755 Loss(val): 0.052841
  483. [21:29:30] Epoch 36: Loss(train): 0.052775 Loss(val): 0.052819
  484. [21:30:48] Epoch 37: Loss(train): 0.052677 Loss(val): 0.052722
  485. [21:32:02] Epoch 38: Loss(train): 0.052491 Loss(val): 0.052566
  486. [21:33:11] Epoch 39: Loss(train): 0.052202 Loss(val): 0.052362
  487. [21:34:15] Epoch 40: Loss(train): 0.051935 Loss(val): 0.052191
  488. [21:35:18] Epoch 41: Loss(train): 0.051739 Loss(val): 0.052059
  489. [21:36:22] Epoch 42: Loss(train): 0.051572 Loss(val): 0.051957
  490. [21:37:25] Epoch 43: Loss(train): 0.051486 Loss(val): 0.051892
  491. [21:38:29] Epoch 44: Loss(train): 0.051410 Loss(val): 0.051844
  492. [21:39:35] Epoch 45: Loss(train): 0.051352 Loss(val): 0.051801
  493. [21:40:38] Epoch 46: Loss(train): 0.051312 Loss(val): 0.051755
  494. [21:41:45] Epoch 47: Loss(train): 0.051259 Loss(val): 0.051708
  495. [21:42:48] Epoch 48: Loss(train): 0.051191 Loss(val): 0.051682
  496. [21:43:52] Epoch 49: Loss(train): 0.051153 Loss(val): 0.051656
  497. [21:44:55] Epoch 50: Loss(train): 0.051126 Loss(val): 0.051639
  498. [21:45:56] Epoch 51: Loss(train): 0.051106 Loss(val): 0.051628
  499. [21:46:59] Epoch 52: Loss(train): 0.051067 Loss(val): 0.051622
  500. [21:48:02] Epoch 53: Loss(train): 0.051033 Loss(val): 0.051616
  501. [21:49:05] Epoch 54: Loss(train): 0.051009 Loss(val): 0.051614
  502. [21:50:07] Epoch 55: Loss(train): 0.050979 Loss(val): 0.051613
  503. [21:51:08] Epoch 56: Loss(train): 0.050938 Loss(val): 0.051623
  504. [21:52:11] Epoch 57: Loss(train): 0.050908 Loss(val): 0.051627
  505. [21:53:14] Epoch 58: Loss(train): 0.050878 Loss(val): 0.051646
  506. [21:54:23] Epoch 59: Loss(train): 0.050853 Loss(val): 0.051660
  507. [21:55:29] Epoch 60: Loss(train): 0.050829 Loss(val): 0.051672
  508. Converged at Loss(train): 0.051769, Loss(val): 0.052593 in epoch 60 with accuracy(val): 0.647092
  509. Configuration learning_rate=0.003, decay_step=60
  510. [21:56:01] INIT Loss(val): 0.128400 Accuracy: 0.091173
  511. [21:57:04] Epoch 1: Loss(train): 0.087630 Loss(val): 0.086648
  512. [21:58:09] Epoch 2: Loss(train): 0.068650 Loss(val): 0.067234
  513. [21:59:13] Epoch 3: Loss(train): 0.064484 Loss(val): 0.062984
  514. [22:00:15] Epoch 4: Loss(train): 0.062109 Loss(val): 0.060795
  515. [22:01:20] Epoch 5: Loss(train): 0.059852 Loss(val): 0.059078
  516. [22:02:26] Epoch 6: Loss(train): 0.059096 Loss(val): 0.058652
  517. [22:03:28] Epoch 7: Loss(train): 0.058634 Loss(val): 0.058505
  518. [22:04:30] Epoch 8: Loss(train): 0.058904 Loss(val): 0.058879
  519. [22:05:33] Epoch 9: Loss(train): 0.058865 Loss(val): 0.058817
  520. [22:06:40] Epoch 10: Loss(train): 0.058145 Loss(val): 0.058074
  521. [22:07:45] Epoch 11: Loss(train): 0.057094 Loss(val): 0.057320
  522. [22:08:57] Epoch 12: Loss(train): 0.056686 Loss(val): 0.057000
  523. [22:10:03] Epoch 13: Loss(train): 0.056440 Loss(val): 0.056806
  524. [22:11:07] Epoch 14: Loss(train): 0.056780 Loss(val): 0.056969
  525. [22:12:15] Epoch 15: Loss(train): 0.057121 Loss(val): 0.057249
  526. [22:13:19] Epoch 16: Loss(train): 0.057089 Loss(val): 0.057170
  527. [22:14:23] Epoch 17: Loss(train): 0.056695 Loss(val): 0.056844
  528. [22:15:28] Epoch 18: Loss(train): 0.056033 Loss(val): 0.056204
  529. [22:16:41] Epoch 19: Loss(train): 0.055199 Loss(val): 0.055399
  530. [22:17:51] Epoch 20: Loss(train): 0.054661 Loss(val): 0.054834
  531. [22:18:59] Epoch 21: Loss(train): 0.054271 Loss(val): 0.054396
  532. [22:20:05] Epoch 22: Loss(train): 0.054036 Loss(val): 0.054124
  533. [22:21:12] Epoch 23: Loss(train): 0.054021 Loss(val): 0.054075
  534. [22:22:18] Epoch 24: Loss(train): 0.053884 Loss(val): 0.054009
  535. [22:23:23] Epoch 25: Loss(train): 0.053809 Loss(val): 0.053988
  536. [22:24:28] Epoch 26: Loss(train): 0.053688 Loss(val): 0.053978
  537. [22:25:32] Epoch 27: Loss(train): 0.053466 Loss(val): 0.053863
  538. [22:26:35] Epoch 28: Loss(train): 0.053356 Loss(val): 0.053755
  539. [22:27:42] Epoch 29: Loss(train): 0.053260 Loss(val): 0.053652
  540. [22:28:47] Epoch 30: Loss(train): 0.053242 Loss(val): 0.053502
  541. [22:29:52] Epoch 31: Loss(train): 0.053209 Loss(val): 0.053376
  542. [22:30:55] Epoch 32: Loss(train): 0.053310 Loss(val): 0.053301
  543. [22:32:02] Epoch 33: Loss(train): 0.053360 Loss(val): 0.053194
  544. [22:33:11] Epoch 34: Loss(train): 0.053411 Loss(val): 0.053129
  545. [22:34:18] Epoch 35: Loss(train): 0.053490 Loss(val): 0.053088
  546. [22:35:25] Epoch 36: Loss(train): 0.053329 Loss(val): 0.052931
  547. [22:36:35] Epoch 37: Loss(train): 0.053057 Loss(val): 0.052750
  548. [22:37:46] Epoch 38: Loss(train): 0.052699 Loss(val): 0.052533
  549. [22:38:57] Epoch 39: Loss(train): 0.052314 Loss(val): 0.052344
  550. [22:40:06] Epoch 40: Loss(train): 0.052103 Loss(val): 0.052243
  551. [22:41:26] Epoch 41: Loss(train): 0.051960 Loss(val): 0.052185
  552. [22:42:35] Epoch 42: Loss(train): 0.051858 Loss(val): 0.052141
  553. [22:43:43] Epoch 43: Loss(train): 0.051790 Loss(val): 0.052123
  554. [22:44:48] Epoch 44: Loss(train): 0.051754 Loss(val): 0.052102
  555. [22:45:55] Epoch 45: Loss(train): 0.051746 Loss(val): 0.052096
  556. [22:47:15] Epoch 46: Loss(train): 0.051733 Loss(val): 0.052079
  557. [22:48:27] Epoch 47: Loss(train): 0.051714 Loss(val): 0.052058
  558. [22:49:37] Epoch 48: Loss(train): 0.051704 Loss(val): 0.052038
  559. [22:50:50] Epoch 49: Loss(train): 0.051654 Loss(val): 0.052020
  560. [22:51:58] Epoch 50: Loss(train): 0.051591 Loss(val): 0.051994
  561. [22:53:08] Epoch 51: Loss(train): 0.051546 Loss(val): 0.051980
  562. Converged at Loss(train): 0.052474, Loss(val): 0.052877 in epoch 51 with accuracy(val): 0.638793