log_30_09_2019.log 74 KB

  1. --------[30_09_2019 14:27:20]--------
  2. second stage Hyperparameter Tuning with 1 net, reevaluation with fixed epochs now, using testset
  3. Configuration learning_rate=0.03, decay_step=20
  4. [14:29:16] INIT Loss(test): 0.191895 Accuracy: 0.061824
  5. [14:31:42] Epoch 1: Loss(train): 0.093080 Loss(val): 0.092508 acc(val): 0.274541
  6. [14:32:12] Epoch 2: Loss(train): 0.069031 Loss(val): 0.067629 acc(val): 0.436054
  7. [14:32:32] Epoch 3: Loss(train): 0.064291 Loss(val): 0.062943 acc(val): 0.508061
  8. [14:32:52] Epoch 4: Loss(train): 0.060827 Loss(val): 0.059568 acc(val): 0.543418
  9. [14:33:29] Epoch 5: Loss(train): 0.059301 Loss(val): 0.058460 acc(val): 0.555391
  10. [14:33:58] Epoch 6: Loss(train): 0.058672 Loss(val): 0.057920 acc(val): 0.556820
  11. [14:34:30] Epoch 7: Loss(train): 0.057834 Loss(val): 0.057255 acc(val): 0.570561
  12. [14:34:56] Epoch 8: Loss(train): 0.057435 Loss(val): 0.057099 acc(val): 0.570017
  13. [14:35:21] Epoch 9: Loss(train): 0.057113 Loss(val): 0.056812 acc(val): 0.573146
  14. [14:35:39] Epoch 10: Loss(train): 0.057030 Loss(val): 0.056849 acc(val): 0.571531
  15. [14:35:57] Epoch 11: Loss(train): 0.056916 Loss(val): 0.056893 acc(val): 0.572823
  16. [14:36:15] Epoch 12: Loss(train): 0.056653 Loss(val): 0.056813 acc(val): 0.571803
  17. [14:36:34] Epoch 13: Loss(train): 0.056223 Loss(val): 0.056521 acc(val): 0.577925
  18. [14:36:52] Epoch 14: Loss(train): 0.055907 Loss(val): 0.056257 acc(val): 0.581054
  19. [14:37:10] Epoch 15: Loss(train): 0.055811 Loss(val): 0.056153 acc(val): 0.581939
  20. [14:37:29] Epoch 16: Loss(train): 0.055996 Loss(val): 0.056253 acc(val): 0.577585
  21. [14:37:48] Epoch 17: Loss(train): 0.056509 Loss(val): 0.056749 acc(val): 0.566701
  22. [14:38:09] Epoch 18: Loss(train): 0.056736 Loss(val): 0.057002 acc(val): 0.561259
  23. [14:38:28] Epoch 19: Loss(train): 0.056825 Loss(val): 0.057148 acc(val): 0.557993
  24. [14:38:47] Epoch 20: Loss(train): 0.056349 Loss(val): 0.056709 acc(val): 0.564524
  25. [14:39:06] Epoch 21: Loss(train): 0.055339 Loss(val): 0.055721 acc(val): 0.583095
  26. [14:39:25] Epoch 22: Loss(train): 0.054080 Loss(val): 0.054463 acc(val): 0.616633
  27. [14:39:43] Epoch 23: Loss(train): 0.053393 Loss(val): 0.053672 acc(val): 0.634405
  28. [14:40:01] Epoch 24: Loss(train): 0.053121 Loss(val): 0.053326 acc(val): 0.641888
  29. [14:40:21] Epoch 25: Loss(train): 0.053043 Loss(val): 0.053181 acc(val): 0.640799
  30. [14:40:45] Epoch 26: Loss(train): 0.052858 Loss(val): 0.053029 acc(val): 0.641956
  31. [14:41:04] Epoch 27: Loss(train): 0.052826 Loss(val): 0.052986 acc(val): 0.641003
  32. [14:41:23] Epoch 28: Loss(train): 0.052841 Loss(val): 0.053040 acc(val): 0.639337
  33. [14:41:42] Epoch 29: Loss(train): 0.052777 Loss(val): 0.053036 acc(val): 0.638929
  34. [14:42:01] Epoch 30: Loss(train): 0.052711 Loss(val): 0.052963 acc(val): 0.639813
  35. [14:42:20] Epoch 31: Loss(train): 0.052688 Loss(val): 0.052891 acc(val): 0.640765
  36. [14:42:39] Epoch 32: Loss(train): 0.052576 Loss(val): 0.052782 acc(val): 0.643214
  37. [14:42:59] Epoch 33: Loss(train): 0.052667 Loss(val): 0.052730 acc(val): 0.642058
  38. [14:43:17] Epoch 34: Loss(train): 0.052779 Loss(val): 0.052696 acc(val): 0.639490
  39. [14:43:37] Epoch 35: Loss(train): 0.052854 Loss(val): 0.052653 acc(val): 0.637721
  40. [14:43:55] Epoch 36: Loss(train): 0.053212 Loss(val): 0.052754 acc(val): 0.630391
  41. [14:44:14] Epoch 37: Loss(train): 0.053448 Loss(val): 0.052824 acc(val): 0.625561
  42. [14:44:33] Epoch 38: Loss(train): 0.053491 Loss(val): 0.052787 acc(val): 0.625153
  43. [14:44:52] Epoch 39: Loss(train): 0.053333 Loss(val): 0.052663 acc(val): 0.629847
  44. [14:45:11] Epoch 40: Loss(train): 0.052837 Loss(val): 0.052362 acc(val): 0.641480
  45. [14:45:32] Epoch 41: Loss(train): 0.052389 Loss(val): 0.052117 acc(val): 0.649711
  46. [14:45:51] Epoch 42: Loss(train): 0.052066 Loss(val): 0.051959 acc(val): 0.657534
  47. [14:46:10] Epoch 43: Loss(train): 0.051788 Loss(val): 0.051844 acc(val): 0.663503
  48. [14:46:29] Epoch 44: Loss(train): 0.051640 Loss(val): 0.051785 acc(val): 0.666020
  49. [14:46:48] Epoch 45: Loss(train): 0.051530 Loss(val): 0.051748 acc(val): 0.668265
  50. [14:47:07] Epoch 46: Loss(train): 0.051493 Loss(val): 0.051737 acc(val): 0.667857
  51. [14:47:26] Epoch 47: Loss(train): 0.051452 Loss(val): 0.051726 acc(val): 0.668078
  52. [14:47:46] Epoch 48: Loss(train): 0.051433 Loss(val): 0.051714 acc(val): 0.668486
  53. [14:48:05] Epoch 49: Loss(train): 0.051401 Loss(val): 0.051703 acc(val): 0.668690
  54. [14:48:25] Epoch 50: Loss(train): 0.051384 Loss(val): 0.051694 acc(val): 0.669235
  55. [14:48:46] Epoch 51: Loss(train): 0.051362 Loss(val): 0.051680 acc(val): 0.669303
  56. [14:49:05] Epoch 52: Loss(train): 0.051325 Loss(val): 0.051665 acc(val): 0.670527
  57. [14:49:24] Epoch 53: Loss(train): 0.051259 Loss(val): 0.051651 acc(val): 0.670595
  58. [14:49:43] Epoch 54: Loss(train): 0.051197 Loss(val): 0.051644 acc(val): 0.670867
  59. [14:50:02] Epoch 55: Loss(train): 0.051136 Loss(val): 0.051634 acc(val): 0.671276
  60. [14:50:21] Epoch 56: Loss(train): 0.051068 Loss(val): 0.051633 acc(val): 0.671871
  61. [14:50:40] Epoch 57: Loss(train): 0.051019 Loss(val): 0.051637 acc(val): 0.674116
  62. [14:50:59] Epoch 58: Loss(train): 0.050977 Loss(val): 0.051658 acc(val): 0.674456
  63. [14:51:18] Epoch 59: Loss(train): 0.050942 Loss(val): 0.051691 acc(val): 0.673231
  64. [14:51:37] Epoch 60: Loss(train): 0.050919 Loss(val): 0.051711 acc(val): 0.673248
  65. [14:51:56] Epoch 61: Loss(train): 0.050904 Loss(val): 0.051738 acc(val): 0.672704
  66. [14:52:15] Epoch 62: Loss(train): 0.050891 Loss(val): 0.051758 acc(val): 0.672432
  67. [14:52:34] Epoch 63: Loss(train): 0.050875 Loss(val): 0.051761 acc(val): 0.672908
  68. [14:52:53] Epoch 64: Loss(train): 0.050858 Loss(val): 0.051750 acc(val): 0.673248
  69. [14:53:12] Epoch 65: Loss(train): 0.050837 Loss(val): 0.051725 acc(val): 0.674065
  70. [14:53:30] Epoch 66: Loss(train): 0.050814 Loss(val): 0.051685 acc(val): 0.674201
  71. [14:53:49] Epoch 67: Loss(train): 0.050799 Loss(val): 0.051681 acc(val): 0.674133
  72. [14:54:08] Epoch 68: Loss(train): 0.050780 Loss(val): 0.051648 acc(val): 0.674269
  73. [14:54:27] Epoch 69: Loss(train): 0.050765 Loss(val): 0.051627 acc(val): 0.674133
  74. [14:54:46] Epoch 70: Loss(train): 0.050754 Loss(val): 0.051615 acc(val): 0.674133
  75. [14:55:05] Epoch 71: Loss(train): 0.050742 Loss(val): 0.051595 acc(val): 0.674541
  76. [14:55:24] Epoch 72: Loss(train): 0.050732 Loss(val): 0.051581 acc(val): 0.674745
  77. [14:55:42] Epoch 73: Loss(train): 0.050722 Loss(val): 0.051563 acc(val): 0.675561
  78. [14:56:01] Epoch 74: Loss(train): 0.050714 Loss(val): 0.051547 acc(val): 0.675833
  79. [14:56:20] Epoch 75: Loss(train): 0.050708 Loss(val): 0.051543 acc(val): 0.675493
  80. [14:56:38] Epoch 76: Loss(train): 0.050701 Loss(val): 0.051528 acc(val): 0.676446
  81. [14:56:59] Epoch 77: Loss(train): 0.050696 Loss(val): 0.051524 acc(val): 0.676514
  82. [14:57:19] Epoch 78: Loss(train): 0.050690 Loss(val): 0.051518 acc(val): 0.676718
  83. [14:57:39] Epoch 79: Loss(train): 0.050685 Loss(val): 0.051510 acc(val): 0.676990
  84. [14:57:59] Epoch 80: Loss(train): 0.050680 Loss(val): 0.051509 acc(val): 0.677126
  85. [14:58:19] Epoch 81: Loss(train): 0.050676 Loss(val): 0.051503 acc(val): 0.677126
  86. [14:58:39] Epoch 82: Loss(train): 0.050673 Loss(val): 0.051498 acc(val): 0.677398
  87. [14:58:59] Epoch 83: Loss(train): 0.050669 Loss(val): 0.051496 acc(val): 0.677534
  88. [14:59:19] Epoch 84: Loss(train): 0.050666 Loss(val): 0.051490 acc(val): 0.677602
  89. [14:59:40] Epoch 85: Loss(train): 0.050663 Loss(val): 0.051489 acc(val): 0.677330
  90. [15:00:01] Epoch 86: Loss(train): 0.050660 Loss(val): 0.051486 acc(val): 0.677534
  91. [15:00:21] Epoch 87: Loss(train): 0.050657 Loss(val): 0.051482 acc(val): 0.677330
  92. [15:00:41] Epoch 88: Loss(train): 0.050655 Loss(val): 0.051479 acc(val): 0.677602
  93. [15:01:01] Epoch 89: Loss(train): 0.050653 Loss(val): 0.051477 acc(val): 0.677466
  94. [15:01:21] Epoch 90: Loss(train): 0.050651 Loss(val): 0.051476 acc(val): 0.677466
  95. [15:01:41] Epoch 91: Loss(train): 0.050649 Loss(val): 0.051473 acc(val): 0.677534
  96. [15:02:01] Epoch 92: Loss(train): 0.050647 Loss(val): 0.051470 acc(val): 0.677534
  97. [15:02:21] Epoch 93: Loss(train): 0.050646 Loss(val): 0.051464 acc(val): 0.677466
  98. [15:02:41] Epoch 94: Loss(train): 0.050644 Loss(val): 0.051465 acc(val): 0.677534
  99. [15:03:01] Epoch 95: Loss(train): 0.050643 Loss(val): 0.051466 acc(val): 0.677466
  100. [15:03:21] Epoch 96: Loss(train): 0.050641 Loss(val): 0.051465 acc(val): 0.677398
  101. [15:03:42] Epoch 97: Loss(train): 0.050640 Loss(val): 0.051461 acc(val): 0.677330
  102. [15:04:02] Epoch 98: Loss(train): 0.050639 Loss(val): 0.051463 acc(val): 0.677466
  103. [15:04:22] Epoch 99: Loss(train): 0.050638 Loss(val): 0.051463 acc(val): 0.677602
  104. [15:04:43] Epoch 100: Loss(train): 0.050637 Loss(val): 0.051460
  105. [15:04:46] FINAL(100) Loss(test): 0.051915 Accuracy: 0.600135
  106. Configuration learning_rate=0.03, decay_step=40
  107. [15:04:52] INIT Loss(test): 0.133920 Accuracy: 0.117973
  108. [15:05:10] Epoch 1: Loss(train): 0.085145 Loss(val): 0.084277 acc(val): 0.303997
  109. [15:05:31] Epoch 2: Loss(train): 0.068091 Loss(val): 0.066436 acc(val): 0.462330
  110. [15:05:52] Epoch 3: Loss(train): 0.062043 Loss(val): 0.061092 acc(val): 0.528452
  111. [15:06:12] Epoch 4: Loss(train): 0.059993 Loss(val): 0.059272 acc(val): 0.556088
  112. [15:06:32] Epoch 5: Loss(train): 0.058545 Loss(val): 0.058204 acc(val): 0.572500
  113. [15:06:53] Epoch 6: Loss(train): 0.057621 Loss(val): 0.057397 acc(val): 0.584167
  114. [15:07:14] Epoch 7: Loss(train): 0.057388 Loss(val): 0.057294 acc(val): 0.581582
  115. [15:07:35] Epoch 8: Loss(train): 0.057485 Loss(val): 0.057341 acc(val): 0.577262
  116. [15:07:55] Epoch 9: Loss(train): 0.057354 Loss(val): 0.057369 acc(val): 0.581293
  117. [15:08:16] Epoch 10: Loss(train): 0.056581 Loss(val): 0.056680 acc(val): 0.596803
  118. [15:08:36] Epoch 11: Loss(train): 0.056511 Loss(val): 0.056548 acc(val): 0.596735
  119. [15:08:56] Epoch 12: Loss(train): 0.056471 Loss(val): 0.056751 acc(val): 0.591293
  120. [15:09:16] Epoch 13: Loss(train): 0.056514 Loss(val): 0.056947 acc(val): 0.584286
  121. [15:09:35] Epoch 14: Loss(train): 0.056266 Loss(val): 0.056668 acc(val): 0.586871
  122. [15:09:55] Epoch 15: Loss(train): 0.056044 Loss(val): 0.056443 acc(val): 0.589592
  123. [15:10:16] Epoch 16: Loss(train): 0.056162 Loss(val): 0.056465 acc(val): 0.584524
  124. [15:10:36] Epoch 17: Loss(train): 0.056294 Loss(val): 0.056613 acc(val): 0.581003
  125. [15:10:56] Epoch 18: Loss(train): 0.056623 Loss(val): 0.056966 acc(val): 0.573537
  126. [15:11:16] Epoch 19: Loss(train): 0.056799 Loss(val): 0.057218 acc(val): 0.571514
  127. [15:11:37] Epoch 20: Loss(train): 0.056388 Loss(val): 0.056934 acc(val): 0.577432
  128. [15:11:58] Epoch 21: Loss(train): 0.055646 Loss(val): 0.056139 acc(val): 0.588741
  129. [15:12:19] Epoch 22: Loss(train): 0.054702 Loss(val): 0.055054 acc(val): 0.609184
  130. [15:12:39] Epoch 23: Loss(train): 0.054107 Loss(val): 0.054288 acc(val): 0.625357
  131. [15:12:59] Epoch 24: Loss(train): 0.053836 Loss(val): 0.053849 acc(val): 0.634252
  132. [15:13:19] Epoch 25: Loss(train): 0.053429 Loss(val): 0.053493 acc(val): 0.643112
  133. [15:13:40] Epoch 26: Loss(train): 0.053345 Loss(val): 0.053398 acc(val): 0.643912
  134. [15:14:01] Epoch 27: Loss(train): 0.053229 Loss(val): 0.053307 acc(val): 0.645136
  135. [15:14:21] Epoch 28: Loss(train): 0.053138 Loss(val): 0.053215 acc(val): 0.644388
  136. [15:14:42] Epoch 29: Loss(train): 0.052939 Loss(val): 0.053058 acc(val): 0.649422
  137. [15:15:03] Epoch 30: Loss(train): 0.052790 Loss(val): 0.052942 acc(val): 0.651071
  138. [15:15:23] Epoch 31: Loss(train): 0.052683 Loss(val): 0.052816 acc(val): 0.653112
  139. [15:15:44] Epoch 32: Loss(train): 0.052602 Loss(val): 0.052753 acc(val): 0.654065
  140. [15:16:05] Epoch 33: Loss(train): 0.052579 Loss(val): 0.052674 acc(val): 0.654133
  141. [15:16:26] Epoch 34: Loss(train): 0.052610 Loss(val): 0.052597 acc(val): 0.655085
  142. [15:16:47] Epoch 35: Loss(train): 0.052611 Loss(val): 0.052530 acc(val): 0.655357
  143. [15:17:11] Epoch 36: Loss(train): 0.052680 Loss(val): 0.052476 acc(val): 0.653639
  144. [15:17:34] Epoch 37: Loss(train): 0.052791 Loss(val): 0.052449 acc(val): 0.653095
  145. [15:17:58] Epoch 38: Loss(train): 0.052758 Loss(val): 0.052363 acc(val): 0.655340
  146. [15:18:21] Epoch 39: Loss(train): 0.052682 Loss(val): 0.052269 acc(val): 0.656905
  147. [15:18:43] Epoch 40: Loss(train): 0.052525 Loss(val): 0.052156 acc(val): 0.661003
  148. [15:19:03] Epoch 41: Loss(train): 0.052267 Loss(val): 0.051996 acc(val): 0.665833
  149. [15:19:24] Epoch 42: Loss(train): 0.051986 Loss(val): 0.051854 acc(val): 0.671139
  150. [15:19:44] Epoch 43: Loss(train): 0.051762 Loss(val): 0.051751 acc(val): 0.675017
  151. [15:20:04] Epoch 44: Loss(train): 0.051592 Loss(val): 0.051681 acc(val): 0.679235
  152. [15:20:25] Epoch 45: Loss(train): 0.051479 Loss(val): 0.051643 acc(val): 0.680119
  153. [15:20:45] Epoch 46: Loss(train): 0.051394 Loss(val): 0.051616 acc(val): 0.681071
  154. [15:21:05] Epoch 47: Loss(train): 0.051367 Loss(val): 0.051600 acc(val): 0.680051
  155. [15:21:25] Epoch 48: Loss(train): 0.051341 Loss(val): 0.051579 acc(val): 0.680867
  156. [15:21:46] Epoch 49: Loss(train): 0.051304 Loss(val): 0.051564 acc(val): 0.681344
  157. [15:22:06] Epoch 50: Loss(train): 0.051285 Loss(val): 0.051563 acc(val): 0.681003
  158. [15:22:26] Epoch 51: Loss(train): 0.051286 Loss(val): 0.051553 acc(val): 0.681888
  159. [15:22:50] Epoch 52: Loss(train): 0.051247 Loss(val): 0.051551 acc(val): 0.681548
  160. [15:23:29] Epoch 53: Loss(train): 0.051221 Loss(val): 0.051543 acc(val): 0.681412
  161. [15:24:06] Epoch 54: Loss(train): 0.051172 Loss(val): 0.051541 acc(val): 0.680663
  162. [15:24:47] Epoch 55: Loss(train): 0.051131 Loss(val): 0.051537 acc(val): 0.679711
  163. [15:25:26] Epoch 56: Loss(train): 0.051072 Loss(val): 0.051525 acc(val): 0.680459
  164. [15:26:06] Epoch 57: Loss(train): 0.051019 Loss(val): 0.051529 acc(val): 0.679575
  165. [15:26:46] Epoch 58: Loss(train): 0.050963 Loss(val): 0.051544 acc(val): 0.680816
  166. [15:27:24] Epoch 59: Loss(train): 0.050920 Loss(val): 0.051560 acc(val): 0.680884
  167. [15:28:03] Epoch 60: Loss(train): 0.050894 Loss(val): 0.051591 acc(val): 0.681429
  168. [15:28:39] Epoch 61: Loss(train): 0.050878 Loss(val): 0.051636 acc(val): 0.680408
  169. [15:29:12] Epoch 62: Loss(train): 0.050867 Loss(val): 0.051668 acc(val): 0.678844
  170. [15:29:41] Epoch 63: Loss(train): 0.050860 Loss(val): 0.051702 acc(val): 0.676667
  171. [15:30:06] Epoch 64: Loss(train): 0.050847 Loss(val): 0.051702 acc(val): 0.677619
  172. [15:30:36] Epoch 65: Loss(train): 0.050832 Loss(val): 0.051699 acc(val): 0.677211
  173. [15:30:56] Epoch 66: Loss(train): 0.050821 Loss(val): 0.051700 acc(val): 0.677687
  174. [15:31:17] Epoch 67: Loss(train): 0.050801 Loss(val): 0.051675 acc(val): 0.678435
  175. [15:31:38] Epoch 68: Loss(train): 0.050779 Loss(val): 0.051646 acc(val): 0.678980
  176. [15:31:59] Epoch 69: Loss(train): 0.050759 Loss(val): 0.051619 acc(val): 0.679796
  177. [15:32:20] Epoch 70: Loss(train): 0.050744 Loss(val): 0.051600 acc(val): 0.679796
  178. [15:32:41] Epoch 71: Loss(train): 0.050733 Loss(val): 0.051590 acc(val): 0.680272
  179. [15:33:02] Epoch 72: Loss(train): 0.050716 Loss(val): 0.051555 acc(val): 0.680748
  180. [15:33:23] Epoch 73: Loss(train): 0.050704 Loss(val): 0.051538 acc(val): 0.681156
  181. [15:33:44] Epoch 74: Loss(train): 0.050693 Loss(val): 0.051523 acc(val): 0.681156
  182. [15:34:06] Epoch 75: Loss(train): 0.050684 Loss(val): 0.051510 acc(val): 0.681361
  183. [15:34:29] Epoch 76: Loss(train): 0.050677 Loss(val): 0.051499 acc(val): 0.681905
  184. [15:35:14] Epoch 77: Loss(train): 0.050668 Loss(val): 0.051481 acc(val): 0.682041
  185. [15:35:46] Epoch 78: Loss(train): 0.050662 Loss(val): 0.051477 acc(val): 0.681769
  186. [15:36:15] Epoch 79: Loss(train): 0.050656 Loss(val): 0.051464 acc(val): 0.682041
  187. [15:36:43] Epoch 80: Loss(train): 0.050651 Loss(val): 0.051460 acc(val): 0.682177
  188. [15:37:03] Epoch 81: Loss(train): 0.050647 Loss(val): 0.051460 acc(val): 0.681633
  189. [15:37:24] Epoch 82: Loss(train): 0.050642 Loss(val): 0.051450 acc(val): 0.681633
  190. [15:37:45] Epoch 83: Loss(train): 0.050638 Loss(val): 0.051445 acc(val): 0.681429
  191. [15:38:07] Epoch 84: Loss(train): 0.050634 Loss(val): 0.051439 acc(val): 0.681293
  192. [15:38:29] Epoch 85: Loss(train): 0.050631 Loss(val): 0.051434 acc(val): 0.681224
  193. [15:38:50] Epoch 86: Loss(train): 0.050628 Loss(val): 0.051434 acc(val): 0.681497
  194. [15:39:11] Epoch 87: Loss(train): 0.050625 Loss(val): 0.051429 acc(val): 0.681565
  195. [15:39:32] Epoch 88: Loss(train): 0.050622 Loss(val): 0.051431 acc(val): 0.681633
  196. [15:39:53] Epoch 89: Loss(train): 0.050619 Loss(val): 0.051418 acc(val): 0.681293
  197. [15:40:18] Epoch 90: Loss(train): 0.050617 Loss(val): 0.051419 acc(val): 0.681361
  198. [15:40:47] Epoch 91: Loss(train): 0.050615 Loss(val): 0.051416 acc(val): 0.681429
  199. [15:41:10] Epoch 92: Loss(train): 0.050613 Loss(val): 0.051413 acc(val): 0.681429
  200. [15:41:34] Epoch 93: Loss(train): 0.050611 Loss(val): 0.051410 acc(val): 0.681429
  201. [15:41:55] Epoch 94: Loss(train): 0.050610 Loss(val): 0.051410 acc(val): 0.681497
  202. [15:42:17] Epoch 95: Loss(train): 0.050608 Loss(val): 0.051405 acc(val): 0.681769
  203. [15:42:38] Epoch 96: Loss(train): 0.050606 Loss(val): 0.051401 acc(val): 0.681905
  204. [15:42:59] Epoch 97: Loss(train): 0.050605 Loss(val): 0.051403 acc(val): 0.681429
  205. [15:43:20] Epoch 98: Loss(train): 0.050604 Loss(val): 0.051404 acc(val): 0.681361
  206. [15:43:41] Epoch 99: Loss(train): 0.050603 Loss(val): 0.051399 acc(val): 0.681565
  207. [15:44:02] Epoch 100: Loss(train): 0.050602 Loss(val): 0.051401
  208. [15:44:05] FINAL(100) Loss(test): 0.051667 Accuracy: 0.604730
  209. Configuration learning_rate=0.03, decay_step=60
  210. [15:44:11] INIT Loss(test): 0.142111 Accuracy: 0.116081
  211. [15:44:29] Epoch 1: Loss(train): 0.085466 Loss(val): 0.083448 acc(val): 0.324014
  212. [15:44:52] Epoch 2: Loss(train): 0.067009 Loss(val): 0.065906 acc(val): 0.454150
  213. [15:45:17] Epoch 3: Loss(train): 0.063191 Loss(val): 0.062418 acc(val): 0.506139
  214. [15:45:38] Epoch 4: Loss(train): 0.059964 Loss(val): 0.059266 acc(val): 0.545680
  215. [15:46:00] Epoch 5: Loss(train): 0.059229 Loss(val): 0.058537 acc(val): 0.556088
  216. [15:46:21] Epoch 6: Loss(train): 0.059260 Loss(val): 0.058677 acc(val): 0.551599
  217. [15:46:42] Epoch 7: Loss(train): 0.059613 Loss(val): 0.058838 acc(val): 0.540663
  218. [15:47:04] Epoch 8: Loss(train): 0.058372 Loss(val): 0.057957 acc(val): 0.553929
  219. [15:47:25] Epoch 9: Loss(train): 0.058023 Loss(val): 0.057690 acc(val): 0.559796
  220. [15:47:47] Epoch 10: Loss(train): 0.058341 Loss(val): 0.057856 acc(val): 0.557891
  221. [15:48:08] Epoch 11: Loss(train): 0.058291 Loss(val): 0.057867 acc(val): 0.555442
  222. [15:48:29] Epoch 12: Loss(train): 0.057441 Loss(val): 0.057287 acc(val): 0.566054
  223. [15:48:51] Epoch 13: Loss(train): 0.056272 Loss(val): 0.056194 acc(val): 0.584490
  224. [15:49:13] Epoch 14: Loss(train): 0.055851 Loss(val): 0.055663 acc(val): 0.594898
  225. [15:49:36] Epoch 15: Loss(train): 0.055405 Loss(val): 0.055219 acc(val): 0.603129
  226. [15:49:58] Epoch 16: Loss(train): 0.055548 Loss(val): 0.055248 acc(val): 0.600952
  227. [15:50:20] Epoch 17: Loss(train): 0.055654 Loss(val): 0.055321 acc(val): 0.599456
  228. [15:50:41] Epoch 18: Loss(train): 0.055723 Loss(val): 0.055456 acc(val): 0.598384
  229. [15:51:03] Epoch 19: Loss(train): 0.055744 Loss(val): 0.055502 acc(val): 0.597228
  230. [15:51:25] Epoch 20: Loss(train): 0.055564 Loss(val): 0.055416 acc(val): 0.596956
  231. [15:51:48] Epoch 21: Loss(train): 0.055276 Loss(val): 0.055244 acc(val): 0.600612
  232. [15:52:11] Epoch 22: Loss(train): 0.054920 Loss(val): 0.054917 acc(val): 0.607024
  233. [15:52:34] Epoch 23: Loss(train): 0.054450 Loss(val): 0.054397 acc(val): 0.612534
  234. [15:52:57] Epoch 24: Loss(train): 0.054105 Loss(val): 0.053996 acc(val): 0.619677
  235. [15:53:19] Epoch 25: Loss(train): 0.053674 Loss(val): 0.053549 acc(val): 0.630493
  236. [15:53:42] Epoch 26: Loss(train): 0.053361 Loss(val): 0.053270 acc(val): 0.637772
  237. [15:54:11] Epoch 27: Loss(train): 0.053233 Loss(val): 0.053125 acc(val): 0.640765
  238. [15:54:50] Epoch 28: Loss(train): 0.053071 Loss(val): 0.053007 acc(val): 0.644643
  239. [15:55:27] Epoch 29: Loss(train): 0.052948 Loss(val): 0.052935 acc(val): 0.644099
  240. [15:56:04] Epoch 30: Loss(train): 0.052997 Loss(val): 0.052941 acc(val): 0.643214
  241. [15:56:40] Epoch 31: Loss(train): 0.053064 Loss(val): 0.052970 acc(val): 0.639133
  242. [15:57:17] Epoch 32: Loss(train): 0.053172 Loss(val): 0.052973 acc(val): 0.636888
  243. [15:57:43] Epoch 33: Loss(train): 0.053340 Loss(val): 0.053012 acc(val): 0.632194
  244. [15:58:05] Epoch 34: Loss(train): 0.053407 Loss(val): 0.053008 acc(val): 0.631173
  245. [15:58:26] Epoch 35: Loss(train): 0.053488 Loss(val): 0.053011 acc(val): 0.630561
  246. [15:58:48] Epoch 36: Loss(train): 0.053523 Loss(val): 0.052991 acc(val): 0.626956
  247. [15:59:11] Epoch 37: Loss(train): 0.053434 Loss(val): 0.052903 acc(val): 0.629473
  248. [15:59:39] Epoch 38: Loss(train): 0.053004 Loss(val): 0.052646 acc(val): 0.638656
  249. [16:00:06] Epoch 39: Loss(train): 0.052548 Loss(val): 0.052387 acc(val): 0.650289
  250. [16:00:30] Epoch 40: Loss(train): 0.052184 Loss(val): 0.052197 acc(val): 0.658656
  251. [16:00:53] Epoch 41: Loss(train): 0.051921 Loss(val): 0.052086 acc(val): 0.663895
  252. [16:01:25] Epoch 42: Loss(train): 0.051787 Loss(val): 0.052001 acc(val): 0.667024
  253. [16:01:57] Epoch 43: Loss(train): 0.051664 Loss(val): 0.051921 acc(val): 0.669269
  254. [16:02:21] Epoch 44: Loss(train): 0.051557 Loss(val): 0.051856 acc(val): 0.670272
  255. [16:02:57] Epoch 45: Loss(train): 0.051499 Loss(val): 0.051807 acc(val): 0.671582
  256. [16:03:22] Epoch 46: Loss(train): 0.051440 Loss(val): 0.051767 acc(val): 0.672058
  257. [16:03:51] Epoch 47: Loss(train): 0.051405 Loss(val): 0.051744 acc(val): 0.672874
  258. [16:04:31] Epoch 48: Loss(train): 0.051368 Loss(val): 0.051721 acc(val): 0.673146
  259. [16:05:07] Epoch 49: Loss(train): 0.051326 Loss(val): 0.051705 acc(val): 0.673690
  260. [16:05:30] Epoch 50: Loss(train): 0.051288 Loss(val): 0.051689 acc(val): 0.673759
  261. [16:05:53] Epoch 51: Loss(train): 0.051239 Loss(val): 0.051667 acc(val): 0.674915
  262. [16:06:16] Epoch 52: Loss(train): 0.051180 Loss(val): 0.051656 acc(val): 0.675595
  263. [16:06:38] Epoch 53: Loss(train): 0.051117 Loss(val): 0.051658 acc(val): 0.677500
  264. [16:07:00] Epoch 54: Loss(train): 0.051078 Loss(val): 0.051665 acc(val): 0.678044
  265. [16:07:22] Epoch 55: Loss(train): 0.051036 Loss(val): 0.051678 acc(val): 0.677432
  266. [16:07:45] Epoch 56: Loss(train): 0.051011 Loss(val): 0.051702 acc(val): 0.677636
  267. [16:08:08] Epoch 57: Loss(train): 0.050990 Loss(val): 0.051737 acc(val): 0.676752
  268. [16:08:31] Epoch 58: Loss(train): 0.050977 Loss(val): 0.051756 acc(val): 0.675323
  269. [16:08:56] Epoch 59: Loss(train): 0.050971 Loss(val): 0.051791 acc(val): 0.675867
  270. [16:09:23] Epoch 60: Loss(train): 0.050955 Loss(val): 0.051786 acc(val): 0.675272
  271. [16:09:49] Epoch 61: Loss(train): 0.050941 Loss(val): 0.051792 acc(val): 0.675544
  272. [16:10:14] Epoch 62: Loss(train): 0.050929 Loss(val): 0.051794 acc(val): 0.674864
  273. [16:10:37] Epoch 63: Loss(train): 0.050904 Loss(val): 0.051764 acc(val): 0.674932
  274. [16:10:59] Epoch 64: Loss(train): 0.050879 Loss(val): 0.051731 acc(val): 0.675476
  275. [16:11:22] Epoch 65: Loss(train): 0.050859 Loss(val): 0.051707 acc(val): 0.676361
  276. [16:11:44] Epoch 66: Loss(train): 0.050838 Loss(val): 0.051671 acc(val): 0.677857
  277. [16:12:06] Epoch 67: Loss(train): 0.050822 Loss(val): 0.051655 acc(val): 0.678741
  278. [16:12:28] Epoch 68: Loss(train): 0.050804 Loss(val): 0.051619 acc(val): 0.680238
  279. [16:12:50] Epoch 69: Loss(train): 0.050792 Loss(val): 0.051603 acc(val): 0.680170
  280. [16:13:14] Epoch 70: Loss(train): 0.050781 Loss(val): 0.051588 acc(val): 0.680510
  281. [16:13:37] Epoch 71: Loss(train): 0.050770 Loss(val): 0.051567 acc(val): 0.681463
  282. [16:14:00] Epoch 72: Loss(train): 0.050762 Loss(val): 0.051553 acc(val): 0.681463
  283. [16:14:23] Epoch 73: Loss(train): 0.050755 Loss(val): 0.051545 acc(val): 0.681531
  284. [16:14:47] Epoch 74: Loss(train): 0.050748 Loss(val): 0.051538 acc(val): 0.680986
  285. [16:15:11] Epoch 75: Loss(train): 0.050742 Loss(val): 0.051527 acc(val): 0.681259
  286. [16:15:33] Epoch 76: Loss(train): 0.050736 Loss(val): 0.051523 acc(val): 0.681054
  287. [16:15:56] Epoch 77: Loss(train): 0.050731 Loss(val): 0.051517 acc(val): 0.681122
  288. [16:16:19] Epoch 78: Loss(train): 0.050726 Loss(val): 0.051506 acc(val): 0.681463
  289. [16:16:41] Epoch 79: Loss(train): 0.050722 Loss(val): 0.051507 acc(val): 0.681463
  290. [16:17:04] Epoch 80: Loss(train): 0.050717 Loss(val): 0.051499 acc(val): 0.681259
  291. [16:17:26] Epoch 81: Loss(train): 0.050713 Loss(val): 0.051492 acc(val): 0.681463
  292. [16:17:49] Epoch 82: Loss(train): 0.050709 Loss(val): 0.051488 acc(val): 0.681122
  293. [16:18:12] Epoch 83: Loss(train): 0.050706 Loss(val): 0.051491 acc(val): 0.681463
  294. [16:18:35] Epoch 84: Loss(train): 0.050703 Loss(val): 0.051484 acc(val): 0.681463
  295. [16:18:57] Epoch 85: Loss(train): 0.050700 Loss(val): 0.051476 acc(val): 0.681939
  296. [16:19:20] Epoch 86: Loss(train): 0.050698 Loss(val): 0.051476 acc(val): 0.681803
  297. [16:19:43] Epoch 87: Loss(train): 0.050695 Loss(val): 0.051471 acc(val): 0.682007
  298. [16:20:05] Epoch 88: Loss(train): 0.050693 Loss(val): 0.051474 acc(val): 0.681735
  299. [16:20:28] Epoch 89: Loss(train): 0.050691 Loss(val): 0.051469 acc(val): 0.681939
  300. [16:20:51] Epoch 90: Loss(train): 0.050689 Loss(val): 0.051463 acc(val): 0.681803
  301. [16:21:14] Epoch 91: Loss(train): 0.050687 Loss(val): 0.051464 acc(val): 0.681803
  302. [16:21:37] Epoch 92: Loss(train): 0.050685 Loss(val): 0.051462 acc(val): 0.681667
  303. [16:21:59] Epoch 93: Loss(train): 0.050683 Loss(val): 0.051460 acc(val): 0.681939
  304. [16:22:22] Epoch 94: Loss(train): 0.050682 Loss(val): 0.051455 acc(val): 0.682007
  305. [16:22:45] Epoch 95: Loss(train): 0.050681 Loss(val): 0.051457 acc(val): 0.681939
  306. [16:23:08] Epoch 96: Loss(train): 0.050679 Loss(val): 0.051455 acc(val): 0.682007
  307. [16:23:31] Epoch 97: Loss(train): 0.050678 Loss(val): 0.051454 acc(val): 0.682075
  308. [16:23:54] Epoch 98: Loss(train): 0.050677 Loss(val): 0.051455 acc(val): 0.682211
  309. [16:24:17] Epoch 99: Loss(train): 0.050676 Loss(val): 0.051454 acc(val): 0.682211
  310. [16:24:40] Epoch 100: Loss(train): 0.050675 Loss(val): 0.051452
  311. [16:24:43] FINAL(100) Loss(test): 0.051842 Accuracy: 0.601622
  312. Configuration learning_rate=0.01, decay_step=20
  313. [16:24:49] INIT Loss(test): 0.123722 Accuracy: 0.127973
  314. [16:25:10] Epoch 1: Loss(train): 0.086237 Loss(val): 0.086168 acc(val): 0.291888
  315. [16:25:47] Epoch 2: Loss(train): 0.066817 Loss(val): 0.065911 acc(val): 0.445901
  316. [16:26:21] Epoch 3: Loss(train): 0.063177 Loss(val): 0.062407 acc(val): 0.499915
  317. [16:26:54] Epoch 4: Loss(train): 0.061026 Loss(val): 0.060297 acc(val): 0.540425
  318. [16:27:28] Epoch 5: Loss(train): 0.059943 Loss(val): 0.059473 acc(val): 0.551259
  319. [16:28:03] Epoch 6: Loss(train): 0.058618 Loss(val): 0.058114 acc(val): 0.565204
  320. [16:28:45] Epoch 7: Loss(train): 0.057991 Loss(val): 0.057816 acc(val): 0.570017
  321. [16:29:12] Epoch 8: Loss(train): 0.057960 Loss(val): 0.057804 acc(val): 0.568316
  322. [16:29:53] Epoch 9: Loss(train): 0.058586 Loss(val): 0.058320 acc(val): 0.558980
  323. [16:31:02] Epoch 10: Loss(train): 0.058809 Loss(val): 0.058750 acc(val): 0.551156
  324. [16:32:04] Epoch 11: Loss(train): 0.057933 Loss(val): 0.058105 acc(val): 0.555714
  325. [16:33:09] Epoch 12: Loss(train): 0.057471 Loss(val): 0.057670 acc(val): 0.557619
  326. [16:34:03] Epoch 13: Loss(train): 0.057318 Loss(val): 0.057520 acc(val): 0.555918
  327. [16:34:45] Epoch 14: Loss(train): 0.057432 Loss(val): 0.057578 acc(val): 0.551905
  328. [16:35:26] Epoch 15: Loss(train): 0.057712 Loss(val): 0.057838 acc(val): 0.549864
  329. [16:36:24] Epoch 16: Loss(train): 0.058157 Loss(val): 0.058160 acc(val): 0.547075
  330. [16:37:16] Epoch 17: Loss(train): 0.058045 Loss(val): 0.058048 acc(val): 0.550969
  331. [16:38:01] Epoch 18: Loss(train): 0.057258 Loss(val): 0.057315 acc(val): 0.562279
  332. [16:38:48] Epoch 19: Loss(train): 0.056103 Loss(val): 0.056101 acc(val): 0.582143
  333. [16:39:35] Epoch 20: Loss(train): 0.055167 Loss(val): 0.055090 acc(val): 0.605000
  334. [16:40:16] Epoch 21: Loss(train): 0.054440 Loss(val): 0.054323 acc(val): 0.621939
  335. [16:40:59] Epoch 22: Loss(train): 0.053996 Loss(val): 0.053912 acc(val): 0.627585
  336. [16:41:44] Epoch 23: Loss(train): 0.053813 Loss(val): 0.053733 acc(val): 0.627653
  337. [16:42:31] Epoch 24: Loss(train): 0.053584 Loss(val): 0.053587 acc(val): 0.629626
  338. [16:43:12] Epoch 25: Loss(train): 0.053363 Loss(val): 0.053465 acc(val): 0.630238
  339. [16:43:56] Epoch 26: Loss(train): 0.053242 Loss(val): 0.053413 acc(val): 0.630306
  340. [16:44:40] Epoch 27: Loss(train): 0.053028 Loss(val): 0.053261 acc(val): 0.634932
  341. [16:45:23] Epoch 28: Loss(train): 0.052812 Loss(val): 0.053128 acc(val): 0.638197
  342. [16:46:04] Epoch 29: Loss(train): 0.052652 Loss(val): 0.052984 acc(val): 0.642143
  343. [16:46:49] Epoch 30: Loss(train): 0.052535 Loss(val): 0.052863 acc(val): 0.645680
  344. [16:47:31] Epoch 31: Loss(train): 0.052439 Loss(val): 0.052725 acc(val): 0.649694
  345. [16:48:14] Epoch 32: Loss(train): 0.052366 Loss(val): 0.052586 acc(val): 0.653844
  346. [16:48:57] Epoch 33: Loss(train): 0.052277 Loss(val): 0.052462 acc(val): 0.655204
  347. [16:49:39] Epoch 34: Loss(train): 0.052236 Loss(val): 0.052378 acc(val): 0.655340
  348. [16:50:20] Epoch 35: Loss(train): 0.052206 Loss(val): 0.052301 acc(val): 0.656905
  349. [16:51:02] Epoch 36: Loss(train): 0.052131 Loss(val): 0.052229 acc(val): 0.657262
  350. [16:51:43] Epoch 37: Loss(train): 0.052080 Loss(val): 0.052175 acc(val): 0.659235
  351. [16:52:26] Epoch 38: Loss(train): 0.051932 Loss(val): 0.052086 acc(val): 0.663112
  352. [16:53:08] Epoch 39: Loss(train): 0.051834 Loss(val): 0.052032 acc(val): 0.663929
  353. [16:53:52] Epoch 40: Loss(train): 0.051742 Loss(val): 0.051984 acc(val): 0.664745
  354. [16:54:35] Epoch 41: Loss(train): 0.051577 Loss(val): 0.051924 acc(val): 0.668078
  355. [16:55:19] Epoch 42: Loss(train): 0.051501 Loss(val): 0.051901 acc(val): 0.667466
  356. [16:56:06] Epoch 43: Loss(train): 0.051449 Loss(val): 0.051876 acc(val): 0.668078
  357. [16:57:15] Epoch 44: Loss(train): 0.051393 Loss(val): 0.051869 acc(val): 0.667721
  358. [16:58:19] Epoch 45: Loss(train): 0.051368 Loss(val): 0.051851 acc(val): 0.666497
  359. [16:59:26] Epoch 46: Loss(train): 0.051376 Loss(val): 0.051837 acc(val): 0.666241
  360. [17:00:17] Epoch 47: Loss(train): 0.051365 Loss(val): 0.051807 acc(val): 0.667398
  361. [17:01:31] Epoch 48: Loss(train): 0.051349 Loss(val): 0.051793 acc(val): 0.667262
  362. [17:02:32] Epoch 49: Loss(train): 0.051350 Loss(val): 0.051774 acc(val): 0.667262
  363. [17:03:24] Epoch 50: Loss(train): 0.051308 Loss(val): 0.051759 acc(val): 0.668622
  364. [17:04:19] Epoch 51: Loss(train): 0.051285 Loss(val): 0.051734 acc(val): 0.669439
  365. [17:05:10] Epoch 52: Loss(train): 0.051267 Loss(val): 0.051716 acc(val): 0.669847
  366. [17:05:54] Epoch 53: Loss(train): 0.051197 Loss(val): 0.051707 acc(val): 0.670663
  367. [17:06:43] Epoch 54: Loss(train): 0.051160 Loss(val): 0.051691 acc(val): 0.672432
  368. [17:07:32] Epoch 55: Loss(train): 0.051111 Loss(val): 0.051690 acc(val): 0.672772
  369. [17:08:29] Epoch 56: Loss(train): 0.051065 Loss(val): 0.051692 acc(val): 0.674269
  370. [17:09:18] Epoch 57: Loss(train): 0.051019 Loss(val): 0.051708 acc(val): 0.672483
  371. [17:10:05] Epoch 58: Loss(train): 0.050985 Loss(val): 0.051736 acc(val): 0.673231
  372. [17:10:50] Epoch 59: Loss(train): 0.050964 Loss(val): 0.051761 acc(val): 0.673163
  373. [17:11:38] Epoch 60: Loss(train): 0.050948 Loss(val): 0.051782 acc(val): 0.671667
  374. [17:12:21] Epoch 61: Loss(train): 0.050943 Loss(val): 0.051836 acc(val): 0.671684
  375. [17:13:13] Epoch 62: Loss(train): 0.050930 Loss(val): 0.051840 acc(val): 0.671412
  376. [17:14:01] Epoch 63: Loss(train): 0.050921 Loss(val): 0.051851 acc(val): 0.671276
  377. [17:14:46] Epoch 64: Loss(train): 0.050902 Loss(val): 0.051832 acc(val): 0.671956
  378. [17:15:30] Epoch 65: Loss(train): 0.050885 Loss(val): 0.051809 acc(val): 0.672092
  379. [17:16:19] Epoch 66: Loss(train): 0.050869 Loss(val): 0.051790 acc(val): 0.672024
  380. [17:17:03] Epoch 67: Loss(train): 0.050854 Loss(val): 0.051774 acc(val): 0.673044
  381. [17:17:51] Epoch 68: Loss(train): 0.050836 Loss(val): 0.051741 acc(val): 0.672891
  382. [17:18:37] Epoch 69: Loss(train): 0.050824 Loss(val): 0.051727 acc(val): 0.673435
  383. [17:19:23] Epoch 70: Loss(train): 0.050812 Loss(val): 0.051704 acc(val): 0.674116
  384. [17:20:07] Epoch 71: Loss(train): 0.050800 Loss(val): 0.051679 acc(val): 0.673912
  385. [17:20:53] Epoch 72: Loss(train): 0.050792 Loss(val): 0.051671 acc(val): 0.674388
  386. [17:21:38] Epoch 73: Loss(train): 0.050785 Loss(val): 0.051665 acc(val): 0.673776
  387. [17:22:24] Epoch 74: Loss(train): 0.050776 Loss(val): 0.051649 acc(val): 0.674116
  388. [17:23:09] Epoch 75: Loss(train): 0.050770 Loss(val): 0.051641 acc(val): 0.674660
  389. [17:23:55] Epoch 76: Loss(train): 0.050764 Loss(val): 0.051627 acc(val): 0.675544
  390. [17:24:40] Epoch 77: Loss(train): 0.050759 Loss(val): 0.051624 acc(val): 0.675408
  391. [17:25:26] Epoch 78: Loss(train): 0.050753 Loss(val): 0.051614 acc(val): 0.675952
  392. [17:26:11] Epoch 79: Loss(train): 0.050748 Loss(val): 0.051609 acc(val): 0.675612
  393. [17:27:19] Epoch 80: Loss(train): 0.050744 Loss(val): 0.051604 acc(val): 0.675884
  394. [17:28:38] Epoch 81: Loss(train): 0.050740 Loss(val): 0.051599 acc(val): 0.675816
  395. [17:29:45] Epoch 82: Loss(train): 0.050736 Loss(val): 0.051592 acc(val): 0.676020
  396. [17:30:30] Epoch 83: Loss(train): 0.050733 Loss(val): 0.051590 acc(val): 0.676088
  397. [17:31:31] Epoch 84: Loss(train): 0.050730 Loss(val): 0.051590 acc(val): 0.676020
  398. [17:32:34] Epoch 85: Loss(train): 0.050727 Loss(val): 0.051587 acc(val): 0.676293
  399. [17:33:24] Epoch 86: Loss(train): 0.050724 Loss(val): 0.051580 acc(val): 0.676429
  400. [17:34:37] Epoch 87: Loss(train): 0.050721 Loss(val): 0.051575 acc(val): 0.676837
  401. [17:35:41] Epoch 88: Loss(train): 0.050719 Loss(val): 0.051574 acc(val): 0.676769
  402. [17:36:29] Epoch 89: Loss(train): 0.050717 Loss(val): 0.051573 acc(val): 0.676701
  403. [17:37:33] Epoch 90: Loss(train): 0.050715 Loss(val): 0.051568 acc(val): 0.676701
  404. [17:38:23] Epoch 91: Loss(train): 0.050713 Loss(val): 0.051565 acc(val): 0.676905
  405. [17:39:10] Epoch 92: Loss(train): 0.050712 Loss(val): 0.051564 acc(val): 0.676837
  406. [17:39:57] Epoch 93: Loss(train): 0.050710 Loss(val): 0.051562 acc(val): 0.677177
  407. [17:40:43] Epoch 94: Loss(train): 0.050709 Loss(val): 0.051560 acc(val): 0.676973
  408. [17:41:33] Epoch 95: Loss(train): 0.050707 Loss(val): 0.051559 acc(val): 0.676905
  409. [17:42:22] Epoch 96: Loss(train): 0.050706 Loss(val): 0.051555 acc(val): 0.677177
  410. [17:43:18] Epoch 97: Loss(train): 0.050705 Loss(val): 0.051552 acc(val): 0.677177
  411. [17:44:05] Epoch 98: Loss(train): 0.050704 Loss(val): 0.051554 acc(val): 0.677041
  412. [17:44:52] Epoch 99: Loss(train): 0.050703 Loss(val): 0.051554 acc(val): 0.677109
  413. [17:45:40] Epoch 100: Loss(train): 0.050702 Loss(val): 0.051551
  414. [17:45:47] FINAL(100) Loss(test): 0.052029 Accuracy: 0.599189
  415. Configuration learning_rate=0.01, decay_step=40
  416. [17:46:00] INIT Loss(test): 0.176727 Accuracy: 0.092973
  417. [17:46:50] Epoch 1: Loss(train): 0.089826 Loss(val): 0.087610 acc(val): 0.284201
  418. [17:47:38] Epoch 2: Loss(train): 0.067577 Loss(val): 0.066210 acc(val): 0.447466
  419. [17:48:25] Epoch 3: Loss(train): 0.064038 Loss(val): 0.063006 acc(val): 0.497551
  420. [17:49:13] Epoch 4: Loss(train): 0.060244 Loss(val): 0.059602 acc(val): 0.554031
  421. [17:49:58] Epoch 5: Loss(train): 0.059227 Loss(val): 0.058822 acc(val): 0.565595
  422. [17:50:47] Epoch 6: Loss(train): 0.058737 Loss(val): 0.058792 acc(val): 0.559677
  423. [17:51:34] Epoch 7: Loss(train): 0.058215 Loss(val): 0.058426 acc(val): 0.559405
  424. [17:52:21] Epoch 8: Loss(train): 0.057274 Loss(val): 0.057502 acc(val): 0.570986
  425. [17:53:07] Epoch 9: Loss(train): 0.057132 Loss(val): 0.057426 acc(val): 0.570986
  426. [17:53:55] Epoch 10: Loss(train): 0.056904 Loss(val): 0.057230 acc(val): 0.571310
  427. [17:54:40] Epoch 11: Loss(train): 0.057148 Loss(val): 0.057484 acc(val): 0.560357
  428. [17:55:27] Epoch 12: Loss(train): 0.057290 Loss(val): 0.057630 acc(val): 0.556003
  429. [17:56:12] Epoch 13: Loss(train): 0.057313 Loss(val): 0.057586 acc(val): 0.550935
  430. [17:56:59] Epoch 14: Loss(train): 0.056691 Loss(val): 0.056890 acc(val): 0.563316
  431. [17:57:44] Epoch 15: Loss(train): 0.055783 Loss(val): 0.055999 acc(val): 0.579507
  432. [17:58:36] Epoch 16: Loss(train): 0.055334 Loss(val): 0.055583 acc(val): 0.590187
  433. [17:59:33] Epoch 17: Loss(train): 0.055496 Loss(val): 0.055625 acc(val): 0.587330
  434. [18:00:46] Epoch 18: Loss(train): 0.055848 Loss(val): 0.055966 acc(val): 0.582908
  435. [18:01:46] Epoch 19: Loss(train): 0.056469 Loss(val): 0.056572 acc(val): 0.573180
  436. [18:02:41] Epoch 20: Loss(train): 0.057220 Loss(val): 0.057206 acc(val): 0.559711
  437. [18:03:34] Epoch 21: Loss(train): 0.057383 Loss(val): 0.057452 acc(val): 0.557330
  438. [18:04:45] Epoch 22: Loss(train): 0.056780 Loss(val): 0.056885 acc(val): 0.568622
  439. [18:05:45] Epoch 23: Loss(train): 0.055630 Loss(val): 0.055793 acc(val): 0.584541
  440. [18:06:40] Epoch 24: Loss(train): 0.054334 Loss(val): 0.054489 acc(val): 0.612364
  441. [18:07:32] Epoch 25: Loss(train): 0.053592 Loss(val): 0.053724 acc(val): 0.624881
  442. [18:08:31] Epoch 26: Loss(train): 0.053303 Loss(val): 0.053389 acc(val): 0.628095
  443. [18:09:22] Epoch 27: Loss(train): 0.053062 Loss(val): 0.053169 acc(val): 0.633333
  444. [18:10:13] Epoch 28: Loss(train): 0.053093 Loss(val): 0.053166 acc(val): 0.630663
  445. [18:11:03] Epoch 29: Loss(train): 0.053281 Loss(val): 0.053265 acc(val): 0.624609
  446. [18:12:01] Epoch 30: Loss(train): 0.053359 Loss(val): 0.053331 acc(val): 0.620595
  447. [18:12:51] Epoch 31: Loss(train): 0.053417 Loss(val): 0.053345 acc(val): 0.619507
  448. [18:13:42] Epoch 32: Loss(train): 0.053442 Loss(val): 0.053321 acc(val): 0.619847
  449. [18:14:34] Epoch 33: Loss(train): 0.053280 Loss(val): 0.053169 acc(val): 0.622364
  450. [18:15:26] Epoch 34: Loss(train): 0.053042 Loss(val): 0.052970 acc(val): 0.628282
  451. [18:16:15] Epoch 35: Loss(train): 0.052677 Loss(val): 0.052694 acc(val): 0.637058
  452. [18:17:06] Epoch 36: Loss(train): 0.052356 Loss(val): 0.052496 acc(val): 0.643793
  453. [18:17:55] Epoch 37: Loss(train): 0.052130 Loss(val): 0.052343 acc(val): 0.649915
  454. [18:18:48] Epoch 38: Loss(train): 0.051967 Loss(val): 0.052270 acc(val): 0.653044
  455. [18:19:37] Epoch 39: Loss(train): 0.051858 Loss(val): 0.052225 acc(val): 0.655510
  456. [18:20:28] Epoch 40: Loss(train): 0.051766 Loss(val): 0.052186 acc(val): 0.659609
  457. [18:21:17] Epoch 41: Loss(train): 0.051688 Loss(val): 0.052134 acc(val): 0.661922
  458. [18:22:09] Epoch 42: Loss(train): 0.051594 Loss(val): 0.052048 acc(val): 0.664235
  459. [18:22:57] Epoch 43: Loss(train): 0.051490 Loss(val): 0.051962 acc(val): 0.665799
  460. [18:23:47] Epoch 44: Loss(train): 0.051394 Loss(val): 0.051891 acc(val): 0.666956
  461. [18:24:36] Epoch 45: Loss(train): 0.051314 Loss(val): 0.051834 acc(val): 0.670221
  462. [18:25:27] Epoch 46: Loss(train): 0.051240 Loss(val): 0.051802 acc(val): 0.670544
  463. [18:26:16] Epoch 47: Loss(train): 0.051188 Loss(val): 0.051788 acc(val): 0.671088
  464. [18:27:07] Epoch 48: Loss(train): 0.051140 Loss(val): 0.051788 acc(val): 0.670680
  465. [18:27:55] Epoch 49: Loss(train): 0.051096 Loss(val): 0.051787 acc(val): 0.670816
  466. [18:28:45] Epoch 50: Loss(train): 0.051063 Loss(val): 0.051781 acc(val): 0.671497
  467. [18:29:34] Epoch 51: Loss(train): 0.051026 Loss(val): 0.051795 acc(val): 0.670612
  468. [18:30:36] Epoch 52: Loss(train): 0.050999 Loss(val): 0.051804 acc(val): 0.671769
  469. [18:31:57] Epoch 53: Loss(train): 0.050980 Loss(val): 0.051816 acc(val): 0.671684
  470. [18:32:49] Epoch 54: Loss(train): 0.050963 Loss(val): 0.051823 acc(val): 0.672500
  471. [18:33:40] Epoch 55: Loss(train): 0.050948 Loss(val): 0.051837 acc(val): 0.672296
  472. [18:34:51] Epoch 56: Loss(train): 0.050931 Loss(val): 0.051833 acc(val): 0.673112
  473. [18:35:48] Epoch 57: Loss(train): 0.050918 Loss(val): 0.051838 acc(val): 0.672840
  474. [18:36:51] Epoch 58: Loss(train): 0.050899 Loss(val): 0.051821 acc(val): 0.673520
  475. [18:37:45] Epoch 59: Loss(train): 0.050875 Loss(val): 0.051788 acc(val): 0.674133
  476. [18:39:01] Epoch 60: Loss(train): 0.050850 Loss(val): 0.051753 acc(val): 0.673861
  477. [18:40:02] Epoch 61: Loss(train): 0.050831 Loss(val): 0.051731 acc(val): 0.673997
  478. [18:40:53] Epoch 62: Loss(train): 0.050811 Loss(val): 0.051693 acc(val): 0.673997
  479. [18:41:46] Epoch 63: Loss(train): 0.050796 Loss(val): 0.051672 acc(val): 0.674609
  480. [18:42:42] Epoch 64: Loss(train): 0.050784 Loss(val): 0.051655 acc(val): 0.674813
  481. [18:43:46] Epoch 65: Loss(train): 0.050771 Loss(val): 0.051625 acc(val): 0.675289
  482. [18:44:43] Epoch 66: Loss(train): 0.050760 Loss(val): 0.051608 acc(val): 0.674745
  483. [18:45:35] Epoch 67: Loss(train): 0.050751 Loss(val): 0.051598 acc(val): 0.675221
  484. [18:46:26] Epoch 68: Loss(train): 0.050744 Loss(val): 0.051597 acc(val): 0.675289
  485. [18:47:18] Epoch 69: Loss(train): 0.050736 Loss(val): 0.051580 acc(val): 0.675493
  486. [18:48:13] Epoch 70: Loss(train): 0.050730 Loss(val): 0.051581 acc(val): 0.675425
  487. [18:49:04] Epoch 71: Loss(train): 0.050723 Loss(val): 0.051565 acc(val): 0.675085
  488. [18:50:03] Epoch 72: Loss(train): 0.050717 Loss(val): 0.051560 acc(val): 0.674881
  489. [18:50:57] Epoch 73: Loss(train): 0.050711 Loss(val): 0.051549 acc(val): 0.675646
  490. [18:51:50] Epoch 74: Loss(train): 0.050706 Loss(val): 0.051550 acc(val): 0.675153
  491. [18:52:42] Epoch 75: Loss(train): 0.050702 Loss(val): 0.051542 acc(val): 0.675289
  492. [18:53:33] Epoch 76: Loss(train): 0.050696 Loss(val): 0.051532 acc(val): 0.675714
  493. [18:54:24] Epoch 77: Loss(train): 0.050693 Loss(val): 0.051534 acc(val): 0.675493
  494. [18:55:16] Epoch 78: Loss(train): 0.050688 Loss(val): 0.051525 acc(val): 0.675493
  495. [18:56:07] Epoch 79: Loss(train): 0.050684 Loss(val): 0.051525 acc(val): 0.675561
  496. [18:57:00] Epoch 80: Loss(train): 0.050681 Loss(val): 0.051526 acc(val): 0.675561
  497. [18:57:51] Epoch 81: Loss(train): 0.050677 Loss(val): 0.051521 acc(val): 0.675901
  498. [18:58:44] Epoch 82: Loss(train): 0.050675 Loss(val): 0.051520 acc(val): 0.675629
  499. [18:59:33] Epoch 83: Loss(train): 0.050671 Loss(val): 0.051510 acc(val): 0.675510
  500. [19:00:25] Epoch 84: Loss(train): 0.050668 Loss(val): 0.051509 acc(val): 0.675085
  501. [19:01:14] Epoch 85: Loss(train): 0.050666 Loss(val): 0.051507 acc(val): 0.675289
  502. [19:02:05] Epoch 86: Loss(train): 0.050664 Loss(val): 0.051506 acc(val): 0.675357
  503. [19:02:56] Epoch 87: Loss(train): 0.050661 Loss(val): 0.051503 acc(val): 0.675425
  504. [19:03:52] Epoch 88: Loss(train): 0.050659 Loss(val): 0.051504 acc(val): 0.675289
  505. [19:05:08] Epoch 89: Loss(train): 0.050657 Loss(val): 0.051499 acc(val): 0.675289
  506. [19:06:39] Epoch 90: Loss(train): 0.050655 Loss(val): 0.051496 acc(val): 0.675425
  507. [19:07:35] Epoch 91: Loss(train): 0.050654 Loss(val): 0.051497 acc(val): 0.675289
  508. [19:08:29] Epoch 92: Loss(train): 0.050652 Loss(val): 0.051498 acc(val): 0.675425
  509. [19:09:40] Epoch 93: Loss(train): 0.050650 Loss(val): 0.051495 acc(val): 0.675561
  510. [19:10:33] Epoch 94: Loss(train): 0.050649 Loss(val): 0.051495 acc(val): 0.675425
  511. [19:11:24] Epoch 95: Loss(train): 0.050648 Loss(val): 0.051491 acc(val): 0.675493
  512. [19:12:25] Epoch 96: Loss(train): 0.050647 Loss(val): 0.051491 acc(val): 0.675629
  513. [19:13:50] Epoch 97: Loss(train): 0.050646 Loss(val): 0.051491 acc(val): 0.675561
  514. [19:14:53] Epoch 98: Loss(train): 0.050645 Loss(val): 0.051490 acc(val): 0.675425
  515. [19:15:44] Epoch 99: Loss(train): 0.050644 Loss(val): 0.051491 acc(val): 0.675629
  516. [19:16:42] Epoch 100: Loss(train): 0.050643 Loss(val): 0.051489
  517. [19:16:51] FINAL(100) Loss(test): 0.051861 Accuracy: 0.601014
  518. Configuration learning_rate=0.01, decay_step=60
  519. [19:17:05] INIT Loss(test): 0.146561 Accuracy: 0.112095
  520. [19:18:05] Epoch 1: Loss(train): 0.085300 Loss(val): 0.083685 acc(val): 0.301548
  521. [19:19:02] Epoch 2: Loss(train): 0.066953 Loss(val): 0.065985 acc(val): 0.446310
  522. [19:19:57] Epoch 3: Loss(train): 0.064071 Loss(val): 0.063177 acc(val): 0.500459
  523. [19:20:56] Epoch 4: Loss(train): 0.061052 Loss(val): 0.060448 acc(val): 0.525357
  524. [19:21:48] Epoch 5: Loss(train): 0.059383 Loss(val): 0.059186 acc(val): 0.546735
  525. [19:22:43] Epoch 6: Loss(train): 0.058582 Loss(val): 0.058385 acc(val): 0.555374
  526. [19:23:36] Epoch 7: Loss(train): 0.057840 Loss(val): 0.057679 acc(val): 0.568571
  527. [19:24:32] Epoch 8: Loss(train): 0.057490 Loss(val): 0.057312 acc(val): 0.574575
  528. [19:25:24] Epoch 9: Loss(train): 0.057308 Loss(val): 0.057126 acc(val): 0.578878
  529. [19:26:21] Epoch 10: Loss(train): 0.056718 Loss(val): 0.056649 acc(val): 0.588265
  530. [19:27:16] Epoch 11: Loss(train): 0.056284 Loss(val): 0.056397 acc(val): 0.592483
  531. [19:28:11] Epoch 12: Loss(train): 0.055857 Loss(val): 0.056005 acc(val): 0.598810
  532. [19:29:04] Epoch 13: Loss(train): 0.055451 Loss(val): 0.055661 acc(val): 0.603044
  533. [19:29:59] Epoch 14: Loss(train): 0.055363 Loss(val): 0.055531 acc(val): 0.600935
  534. [19:30:50] Epoch 15: Loss(train): 0.055445 Loss(val): 0.055545 acc(val): 0.597874
  535. [19:31:43] Epoch 16: Loss(train): 0.055360 Loss(val): 0.055467 acc(val): 0.598963
  536. [19:32:36] Epoch 17: Loss(train): 0.055204 Loss(val): 0.055398 acc(val): 0.599099
  537. [19:33:31] Epoch 18: Loss(train): 0.054983 Loss(val): 0.055307 acc(val): 0.599456
  538. [19:34:24] Epoch 19: Loss(train): 0.054601 Loss(val): 0.055000 acc(val): 0.606395
  539. [19:35:21] Epoch 20: Loss(train): 0.054591 Loss(val): 0.054917 acc(val): 0.607279
  540. [19:36:13] Epoch 21: Loss(train): 0.054396 Loss(val): 0.054738 acc(val): 0.610272
  541. [19:37:07] Epoch 22: Loss(train): 0.054501 Loss(val): 0.054852 acc(val): 0.605850
  542. [19:37:59] Epoch 23: Loss(train): 0.054861 Loss(val): 0.055244 acc(val): 0.594490
  543. [19:38:53] Epoch 24: Loss(train): 0.055324 Loss(val): 0.055727 acc(val): 0.583316
  544. [19:39:44] Epoch 25: Loss(train): 0.055759 Loss(val): 0.056135 acc(val): 0.574099
  545. [19:40:46] Epoch 26: Loss(train): 0.056071 Loss(val): 0.056407 acc(val): 0.565527
  546. [19:41:55] Epoch 27: Loss(train): 0.055936 Loss(val): 0.056284 acc(val): 0.567636
  547. [19:43:12] Epoch 28: Loss(train): 0.055192 Loss(val): 0.055479 acc(val): 0.581173
  548. [19:44:18] Epoch 29: Loss(train): 0.054194 Loss(val): 0.054437 acc(val): 0.602874
  549. [19:45:41] Epoch 30: Loss(train): 0.053646 Loss(val): 0.053798 acc(val): 0.615340
  550. [19:46:36] Epoch 31: Loss(train): 0.053466 Loss(val): 0.053572 acc(val): 0.619558
  551. [19:47:32] Epoch 32: Loss(train): 0.053537 Loss(val): 0.053568 acc(val): 0.618214
  552. [19:48:26] Epoch 33: Loss(train): 0.053592 Loss(val): 0.053585 acc(val): 0.616446
  553. [19:49:32] Epoch 34: Loss(train): 0.053791 Loss(val): 0.053701 acc(val): 0.610850
  554. [19:50:27] Epoch 35: Loss(train): 0.053868 Loss(val): 0.053710 acc(val): 0.609422
  555. [19:51:33] Epoch 36: Loss(train): 0.053803 Loss(val): 0.053639 acc(val): 0.611122
  556. [19:52:48] Epoch 37: Loss(train): 0.053587 Loss(val): 0.053460 acc(val): 0.617041
  557. [19:53:47] Epoch 38: Loss(train): 0.053151 Loss(val): 0.053148 acc(val): 0.626616
  558. [19:54:45] Epoch 39: Loss(train): 0.052732 Loss(val): 0.052863 acc(val): 0.636973
  559. [19:55:43] Epoch 40: Loss(train): 0.052385 Loss(val): 0.052624 acc(val): 0.646854
  560. [19:56:37] Epoch 41: Loss(train): 0.052123 Loss(val): 0.052437 acc(val): 0.653316
  561. [19:57:42] Epoch 42: Loss(train): 0.051956 Loss(val): 0.052307 acc(val): 0.656922
  562. [19:58:41] Epoch 43: Loss(train): 0.051858 Loss(val): 0.052221 acc(val): 0.658776
  563. [19:59:43] Epoch 44: Loss(train): 0.051767 Loss(val): 0.052147 acc(val): 0.661769
  564. [20:00:38] Epoch 45: Loss(train): 0.051678 Loss(val): 0.052093 acc(val): 0.663197
  565. [20:01:37] Epoch 46: Loss(train): 0.051649 Loss(val): 0.052045 acc(val): 0.664422
  566. [20:02:33] Epoch 47: Loss(train): 0.051587 Loss(val): 0.051998 acc(val): 0.664983
  567. [20:03:31] Epoch 48: Loss(train): 0.051510 Loss(val): 0.051950 acc(val): 0.665595
  568. [20:04:27] Epoch 49: Loss(train): 0.051464 Loss(val): 0.051920 acc(val): 0.665595
  569. [20:05:24] Epoch 50: Loss(train): 0.051407 Loss(val): 0.051894 acc(val): 0.665867
  570. [20:06:19] Epoch 51: Loss(train): 0.051346 Loss(val): 0.051874 acc(val): 0.665255
  571. [20:07:15] Epoch 52: Loss(train): 0.051286 Loss(val): 0.051866 acc(val): 0.665391
  572. [20:08:09] Epoch 53: Loss(train): 0.051240 Loss(val): 0.051856 acc(val): 0.665731
  573. [20:09:05] Epoch 54: Loss(train): 0.051194 Loss(val): 0.051873 acc(val): 0.666939
  574. [20:09:59] Epoch 55: Loss(train): 0.051154 Loss(val): 0.051884 acc(val): 0.668316
  575. [20:10:57] Epoch 56: Loss(train): 0.051128 Loss(val): 0.051895 acc(val): 0.669065
  576. [20:12:18] Epoch 57: Loss(train): 0.051107 Loss(val): 0.051928 acc(val): 0.669609
  577. [20:13:30] Epoch 58: Loss(train): 0.051096 Loss(val): 0.051963 acc(val): 0.668571
  578. [20:15:09] Epoch 59: Loss(train): 0.051089 Loss(val): 0.051990 acc(val): 0.666327
  579. [20:16:36] Epoch 60: Loss(train): 0.051084 Loss(val): 0.052012 acc(val): 0.667007
  580. [20:17:35] Epoch 61: Loss(train): 0.051071 Loss(val): 0.052018 acc(val): 0.667075
  581. [20:18:56] Epoch 62: Loss(train): 0.051056 Loss(val): 0.052011 acc(val): 0.667279
  582. [20:20:15] Epoch 63: Loss(train): 0.051039 Loss(val): 0.052001 acc(val): 0.667075
  583. [20:21:25] Epoch 64: Loss(train): 0.051017 Loss(val): 0.051979 acc(val): 0.667347
  584. [20:22:32] Epoch 65: Loss(train): 0.050996 Loss(val): 0.051954 acc(val): 0.668571
  585. [20:23:39] Epoch 66: Loss(train): 0.050975 Loss(val): 0.051927 acc(val): 0.668639
  586. [20:24:43] Epoch 67: Loss(train): 0.050955 Loss(val): 0.051898 acc(val): 0.669048
  587. [20:25:45] Epoch 68: Loss(train): 0.050940 Loss(val): 0.051876 acc(val): 0.669864
  588. [20:26:40] Epoch 69: Loss(train): 0.050923 Loss(val): 0.051856 acc(val): 0.670408
  589. [20:27:41] Epoch 70: Loss(train): 0.050910 Loss(val): 0.051839 acc(val): 0.671361
  590. [20:28:42] Epoch 71: Loss(train): 0.050899 Loss(val): 0.051830 acc(val): 0.670748
  591. [20:29:47] Epoch 72: Loss(train): 0.050885 Loss(val): 0.051802 acc(val): 0.671769
  592. [20:30:46] Epoch 73: Loss(train): 0.050876 Loss(val): 0.051789 acc(val): 0.671905
  593. [20:31:45] Epoch 74: Loss(train): 0.050868 Loss(val): 0.051780 acc(val): 0.671769
  594. [20:32:43] Epoch 75: Loss(train): 0.050859 Loss(val): 0.051767 acc(val): 0.672517
  595. [20:33:43] Epoch 76: Loss(train): 0.050852 Loss(val): 0.051755 acc(val): 0.673061
  596. [20:34:42] Epoch 77: Loss(train): 0.050847 Loss(val): 0.051750 acc(val): 0.672789
  597. [20:35:42] Epoch 78: Loss(train): 0.050841 Loss(val): 0.051743 acc(val): 0.672653
  598. [20:36:39] Epoch 79: Loss(train): 0.050836 Loss(val): 0.051736 acc(val): 0.672585
  599. [20:37:39] Epoch 80: Loss(train): 0.050831 Loss(val): 0.051728 acc(val): 0.672245
  600. [20:38:37] Epoch 81: Loss(train): 0.050826 Loss(val): 0.051721 acc(val): 0.672449
  601. [20:39:38] Epoch 82: Loss(train): 0.050822 Loss(val): 0.051717 acc(val): 0.672517
  602. [20:40:37] Epoch 83: Loss(train): 0.050818 Loss(val): 0.051712 acc(val): 0.672381
  603. [20:41:36] Epoch 84: Loss(train): 0.050814 Loss(val): 0.051704 acc(val): 0.672925
  604. [20:42:32] Epoch 85: Loss(train): 0.050811 Loss(val): 0.051707 acc(val): 0.672449
  605. [20:44:12] Epoch 86: Loss(train): 0.050808 Loss(val): 0.051702 acc(val): 0.672449
  606. [20:45:56] Epoch 87: Loss(train): 0.050805 Loss(val): 0.051700 acc(val): 0.672517
  607. [20:47:19] Epoch 88: Loss(train): 0.050802 Loss(val): 0.051695 acc(val): 0.672653
  608. [20:48:38] Epoch 89: Loss(train): 0.050800 Loss(val): 0.051694 acc(val): 0.672721
  609. [20:49:47] Epoch 90: Loss(train): 0.050797 Loss(val): 0.051688 acc(val): 0.672993
  610. [20:51:04] Epoch 91: Loss(train): 0.050796 Loss(val): 0.051688 acc(val): 0.672993
  611. [20:52:29] Epoch 92: Loss(train): 0.050794 Loss(val): 0.051688 acc(val): 0.673129
  612. [20:53:54] Epoch 93: Loss(train): 0.050792 Loss(val): 0.051685 acc(val): 0.673197
  613. [20:54:59] Epoch 94: Loss(train): 0.050790 Loss(val): 0.051679 acc(val): 0.673129
  614. [20:56:01] Epoch 95: Loss(train): 0.050788 Loss(val): 0.051678 acc(val): 0.673129
  615. [20:57:04] Epoch 96: Loss(train): 0.050787 Loss(val): 0.051676 acc(val): 0.673197
  616. [20:58:13] Epoch 97: Loss(train): 0.050786 Loss(val): 0.051680 acc(val): 0.673537
  617. [20:59:20] Epoch 98: Loss(train): 0.050784 Loss(val): 0.051672 acc(val): 0.673265
  618. [21:00:19] Epoch 99: Loss(train): 0.050783 Loss(val): 0.051673 acc(val): 0.673469
  619. [21:01:21] Epoch 100: Loss(train): 0.050782 Loss(val): 0.051673
  620. [21:01:28] FINAL(100) Loss(test): 0.051884 Accuracy: 0.596351
  621. Configuration learning_rate=0.003, decay_step=20
  622. [21:01:44] INIT Loss(test): 0.158548 Accuracy: 0.098919
  623. [21:02:53] Epoch 1: Loss(train): 0.083241 Loss(val): 0.081338 acc(val): 0.315714
  624. [21:03:52] Epoch 2: Loss(train): 0.068011 Loss(val): 0.066275 acc(val): 0.454677
  625. [21:04:49] Epoch 3: Loss(train): 0.064500 Loss(val): 0.062939 acc(val): 0.506276
  626. [21:05:51] Epoch 4: Loss(train): 0.062763 Loss(val): 0.061977 acc(val): 0.521054
  627. [21:06:49] Epoch 5: Loss(train): 0.060735 Loss(val): 0.060340 acc(val): 0.539966
  628. [21:07:50] Epoch 6: Loss(train): 0.059243 Loss(val): 0.058967 acc(val): 0.554847
  629. [21:08:47] Epoch 7: Loss(train): 0.058659 Loss(val): 0.058494 acc(val): 0.556156
  630. [21:09:46] Epoch 8: Loss(train): 0.058254 Loss(val): 0.058243 acc(val): 0.558197
  631. [21:10:44] Epoch 9: Loss(train): 0.058391 Loss(val): 0.058417 acc(val): 0.550850
  632. [21:11:44] Epoch 10: Loss(train): 0.058190 Loss(val): 0.058318 acc(val): 0.549354
  633. [21:12:40] Epoch 11: Loss(train): 0.057913 Loss(val): 0.058267 acc(val): 0.551378
  634. [21:14:00] Epoch 12: Loss(train): 0.057026 Loss(val): 0.057716 acc(val): 0.558044
  635. [21:15:32] Epoch 13: Loss(train): 0.056899 Loss(val): 0.057677 acc(val): 0.556956
  636. [21:16:50] Epoch 14: Loss(train): 0.056948 Loss(val): 0.057710 acc(val): 0.556480
  637. [21:18:38] Epoch 15: Loss(train): 0.056834 Loss(val): 0.057578 acc(val): 0.559133
  638. [21:20:31] Epoch 16: Loss(train): 0.056879 Loss(val): 0.057601 acc(val): 0.558929
  639. [21:22:05] Epoch 17: Loss(train): 0.056644 Loss(val): 0.057316 acc(val): 0.562126
  640. [21:23:53] Epoch 18: Loss(train): 0.056123 Loss(val): 0.056696 acc(val): 0.573963
  641. [21:24:56] Epoch 19: Loss(train): 0.055539 Loss(val): 0.055993 acc(val): 0.589218
  642. [21:25:58] Epoch 20: Loss(train): 0.054817 Loss(val): 0.055192 acc(val): 0.604456
  643. [21:27:20] Epoch 21: Loss(train): 0.054251 Loss(val): 0.054556 acc(val): 0.617109
  644. [21:28:24] Epoch 22: Loss(train): 0.053735 Loss(val): 0.054037 acc(val): 0.628810
  645. [21:29:40] Epoch 23: Loss(train): 0.053590 Loss(val): 0.053807 acc(val): 0.629881
  646. [21:30:46] Epoch 24: Loss(train): 0.053532 Loss(val): 0.053687 acc(val): 0.628997
  647. [21:31:47] Epoch 25: Loss(train): 0.053535 Loss(val): 0.053628 acc(val): 0.628997
  648. [21:32:51] Epoch 26: Loss(train): 0.053524 Loss(val): 0.053611 acc(val): 0.626071
  649. [21:33:53] Epoch 27: Loss(train): 0.053373 Loss(val): 0.053523 acc(val): 0.628384
  650. [21:35:07] Epoch 28: Loss(train): 0.053050 Loss(val): 0.053344 acc(val): 0.634711
  651. [21:36:09] Epoch 29: Loss(train): 0.052943 Loss(val): 0.053259 acc(val): 0.636071
  652. [21:37:08] Epoch 30: Loss(train): 0.052761 Loss(val): 0.053106 acc(val): 0.640153
  653. [21:38:11] Epoch 31: Loss(train): 0.052696 Loss(val): 0.053025 acc(val): 0.642670
  654. [21:39:12] Epoch 32: Loss(train): 0.052689 Loss(val): 0.052930 acc(val): 0.642330
  655. [21:40:15] Epoch 33: Loss(train): 0.052740 Loss(val): 0.052846 acc(val): 0.641514
  656. [21:41:14] Epoch 34: Loss(train): 0.052706 Loss(val): 0.052756 acc(val): 0.642058
  657. [21:42:16] Epoch 35: Loss(train): 0.052765 Loss(val): 0.052704 acc(val): 0.640017
  658. [21:43:14] Epoch 36: Loss(train): 0.052756 Loss(val): 0.052639 acc(val): 0.638180
  659. [21:44:17] Epoch 37: Loss(train): 0.052711 Loss(val): 0.052555 acc(val): 0.640153
  660. [21:45:17] Epoch 38: Loss(train): 0.052549 Loss(val): 0.052426 acc(val): 0.644031
  661. [21:46:26] Epoch 39: Loss(train): 0.052322 Loss(val): 0.052297 acc(val): 0.650969
  662. [21:48:10] Epoch 40: Loss(train): 0.052060 Loss(val): 0.052150 acc(val): 0.658333
  663. [21:50:08] Epoch 41: Loss(train): 0.051793 Loss(val): 0.052026 acc(val): 0.665612
  664. [21:51:53] Epoch 42: Loss(train): 0.051636 Loss(val): 0.051965 acc(val): 0.668197
  665. [21:53:38] Epoch 43: Loss(train): 0.051514 Loss(val): 0.051931 acc(val): 0.670646
  666. [21:55:21] Epoch 44: Loss(train): 0.051438 Loss(val): 0.051905 acc(val): 0.669966
  667. [21:57:04] Epoch 45: Loss(train): 0.051403 Loss(val): 0.051888 acc(val): 0.670714
  668. [21:58:07] Epoch 46: Loss(train): 0.051358 Loss(val): 0.051865 acc(val): 0.670918
  669. [21:59:30] Epoch 47: Loss(train): 0.051337 Loss(val): 0.051848 acc(val): 0.670782
  670. [22:00:47] Epoch 48: Loss(train): 0.051338 Loss(val): 0.051840 acc(val): 0.670714
  671. [22:02:03] Epoch 49: Loss(train): 0.051314 Loss(val): 0.051821 acc(val): 0.670646
  672. [22:03:13] Epoch 50: Loss(train): 0.051277 Loss(val): 0.051795 acc(val): 0.670782
  673. [22:04:19] Epoch 51: Loss(train): 0.051270 Loss(val): 0.051777 acc(val): 0.671599
  674. [22:05:26] Epoch 52: Loss(train): 0.051233 Loss(val): 0.051768 acc(val): 0.671259
  675. [22:06:34] Epoch 53: Loss(train): 0.051212 Loss(val): 0.051756 acc(val): 0.671871
  676. [22:07:37] Epoch 54: Loss(train): 0.051174 Loss(val): 0.051745 acc(val): 0.673163
  677. [22:08:44] Epoch 55: Loss(train): 0.051122 Loss(val): 0.051735 acc(val): 0.673639
  678. [22:09:47] Epoch 56: Loss(train): 0.051073 Loss(val): 0.051735 acc(val): 0.672959
  679. [22:10:53] Epoch 57: Loss(train): 0.051029 Loss(val): 0.051744 acc(val): 0.672959
  680. [22:11:53] Epoch 58: Loss(train): 0.051000 Loss(val): 0.051749 acc(val): 0.674252
  681. [22:12:56] Epoch 59: Loss(train): 0.050973 Loss(val): 0.051769 acc(val): 0.674524
  682. [22:13:57] Epoch 60: Loss(train): 0.050952 Loss(val): 0.051795 acc(val): 0.673231
  683. [22:15:00] Epoch 61: Loss(train): 0.050937 Loss(val): 0.051807 acc(val): 0.673724
  684. [22:16:02] Epoch 62: Loss(train): 0.050928 Loss(val): 0.051828 acc(val): 0.673588
  685. [22:17:06] Epoch 63: Loss(train): 0.050916 Loss(val): 0.051835 acc(val): 0.673861
  686. [22:18:08] Epoch 64: Loss(train): 0.050908 Loss(val): 0.051846 acc(val): 0.673452
  687. [22:19:37] Epoch 65: Loss(train): 0.050891 Loss(val): 0.051827 acc(val): 0.673861
  688. [22:21:30] Epoch 66: Loss(train): 0.050875 Loss(val): 0.051813 acc(val): 0.673861
  689. [22:23:05] Epoch 67: Loss(train): 0.050859 Loss(val): 0.051792 acc(val): 0.674269
  690. [22:24:45] Epoch 68: Loss(train): 0.050848 Loss(val): 0.051785 acc(val): 0.674065
  691. [22:26:41] Epoch 69: Loss(train): 0.050833 Loss(val): 0.051761 acc(val): 0.675085
  692. [22:28:20] Epoch 70: Loss(train): 0.050818 Loss(val): 0.051735 acc(val): 0.675425
  693. [22:29:44] Epoch 71: Loss(train): 0.050805 Loss(val): 0.051714 acc(val): 0.676105
  694. [22:31:19] Epoch 72: Loss(train): 0.050798 Loss(val): 0.051715 acc(val): 0.676037
  695. [22:32:43] Epoch 73: Loss(train): 0.050788 Loss(val): 0.051700 acc(val): 0.676241
  696. [22:33:55] Epoch 74: Loss(train): 0.050779 Loss(val): 0.051684 acc(val): 0.676786
  697. [22:35:13] Epoch 75: Loss(train): 0.050772 Loss(val): 0.051674 acc(val): 0.676582
  698. [22:36:24] Epoch 76: Loss(train): 0.050765 Loss(val): 0.051667 acc(val): 0.676718
  699. [22:37:34] Epoch 77: Loss(train): 0.050759 Loss(val): 0.051653 acc(val): 0.677058
  700. [22:38:41] Epoch 78: Loss(train): 0.050753 Loss(val): 0.051646 acc(val): 0.676990
  701. [22:39:49] Epoch 79: Loss(train): 0.050748 Loss(val): 0.051638 acc(val): 0.677058
  702. [22:41:00] Epoch 80: Loss(train): 0.050743 Loss(val): 0.051630 acc(val): 0.676990
  703. [22:42:08] Epoch 81: Loss(train): 0.050740 Loss(val): 0.051629 acc(val): 0.677058
  704. [22:43:14] Epoch 82: Loss(train): 0.050735 Loss(val): 0.051620 acc(val): 0.676650
  705. [22:44:21] Epoch 83: Loss(train): 0.050732 Loss(val): 0.051618 acc(val): 0.676582
  706. [22:45:26] Epoch 84: Loss(train): 0.050728 Loss(val): 0.051612 acc(val): 0.676922
  707. [22:46:32] Epoch 85: Loss(train): 0.050725 Loss(val): 0.051609 acc(val): 0.676990
  708. [22:47:37] Epoch 86: Loss(train): 0.050722 Loss(val): 0.051603 acc(val): 0.676854
  709. [22:48:44] Epoch 87: Loss(train): 0.050719 Loss(val): 0.051599 acc(val): 0.677126
  710. [22:49:47] Epoch 88: Loss(train): 0.050716 Loss(val): 0.051596 acc(val): 0.676990
  711. [22:50:53] Epoch 89: Loss(train): 0.050714 Loss(val): 0.051596 acc(val): 0.676990
  712. [22:51:58] Epoch 90: Loss(train): 0.050711 Loss(val): 0.051589 acc(val): 0.677398
  713. [22:53:07] Epoch 91: Loss(train): 0.050710 Loss(val): 0.051587 acc(val): 0.677058
  714. [22:54:13] Epoch 92: Loss(train): 0.050708 Loss(val): 0.051584 acc(val): 0.677534
  715. [22:55:22] Epoch 93: Loss(train): 0.050706 Loss(val): 0.051585 acc(val): 0.677534
  716. [22:56:28] Epoch 94: Loss(train): 0.050705 Loss(val): 0.051585 acc(val): 0.677466
  717. [22:57:38] Epoch 95: Loss(train): 0.050703 Loss(val): 0.051586 acc(val): 0.677330
  718. [22:58:44] Epoch 96: Loss(train): 0.050702 Loss(val): 0.051580 acc(val): 0.677466
  719. [22:59:53] Epoch 97: Loss(train): 0.050700 Loss(val): 0.051578 acc(val): 0.677398
  720. [23:00:59] Epoch 98: Loss(train): 0.050699 Loss(val): 0.051579 acc(val): 0.677466
  721. [23:02:09] Epoch 99: Loss(train): 0.050698 Loss(val): 0.051577 acc(val): 0.677602
  722. [23:03:16] Epoch 100: Loss(train): 0.050697 Loss(val): 0.051575
  723. [23:03:25] FINAL(100) Loss(test): 0.051971 Accuracy: 0.586486
  724. Configuration learning_rate=0.003, decay_step=40
  725. [23:03:43] INIT Loss(test): 0.119407 Accuracy: 0.131892
  726. [23:04:57] Epoch 1: Loss(train): 0.086009 Loss(val): 0.084843 acc(val): 0.304133
  727. [23:06:07] Epoch 2: Loss(train): 0.068264 Loss(val): 0.066110 acc(val): 0.453316
  728. [23:07:15] Epoch 3: Loss(train): 0.064122 Loss(val): 0.062628 acc(val): 0.517568
  729. [23:08:25] Epoch 4: Loss(train): 0.060438 Loss(val): 0.059920 acc(val): 0.540153
  730. [23:09:34] Epoch 5: Loss(train): 0.059801 Loss(val): 0.059508 acc(val): 0.544507
  731. [23:10:47] Epoch 6: Loss(train): 0.059259 Loss(val): 0.058924 acc(val): 0.546412
  732. [23:12:21] Epoch 7: Loss(train): 0.058629 Loss(val): 0.058101 acc(val): 0.558673
  733. [23:13:41] Epoch 8: Loss(train): 0.058048 Loss(val): 0.057791 acc(val): 0.564099
  734. [23:14:46] Epoch 9: Loss(train): 0.058454 Loss(val): 0.058416 acc(val): 0.551122
  735. [23:15:55] Epoch 10: Loss(train): 0.058675 Loss(val): 0.058777 acc(val): 0.543724
  736. [23:17:04] Epoch 11: Loss(train): 0.058370 Loss(val): 0.058430 acc(val): 0.546582
  737. [23:19:16] Epoch 12: Loss(train): 0.057999 Loss(val): 0.058021 acc(val): 0.547126
  738. [23:21:09] Epoch 13: Loss(train): 0.057656 Loss(val): 0.057744 acc(val): 0.555425
  739. [23:23:13] Epoch 14: Loss(train): 0.057686 Loss(val): 0.057843 acc(val): 0.555289
  740. [23:24:54] Epoch 15: Loss(train): 0.058182 Loss(val): 0.058384 acc(val): 0.554949
  741. [23:26:00] Epoch 16: Loss(train): 0.058460 Loss(val): 0.058770 acc(val): 0.556037
  742. [23:27:03] Epoch 17: Loss(train): 0.058416 Loss(val): 0.058736 acc(val): 0.558486
  743. [23:28:10] Epoch 18: Loss(train): 0.057461 Loss(val): 0.057750 acc(val): 0.571888
  744. [23:29:45] Epoch 19: Loss(train): 0.056130 Loss(val): 0.056310 acc(val): 0.588895
  745. [23:31:09] Epoch 20: Loss(train): 0.054899 Loss(val): 0.055004 acc(val): 0.607738
  746. [23:32:13] Epoch 21: Loss(train): 0.054311 Loss(val): 0.054304 acc(val): 0.618214
  747. [23:33:22] Epoch 22: Loss(train): 0.054011 Loss(val): 0.053956 acc(val): 0.620323
  748. [23:34:39] Epoch 23: Loss(train): 0.053804 Loss(val): 0.053789 acc(val): 0.625425
  749. [23:35:56] Epoch 24: Loss(train): 0.053912 Loss(val): 0.053940 acc(val): 0.621361
  750. [23:37:02] Epoch 25: Loss(train): 0.053980 Loss(val): 0.054102 acc(val): 0.616105
  751. [23:38:12] Epoch 26: Loss(train): 0.054036 Loss(val): 0.054221 acc(val): 0.612364
  752. [23:39:21] Epoch 27: Loss(train): 0.054032 Loss(val): 0.054174 acc(val): 0.612636
  753. [23:40:32] Epoch 28: Loss(train): 0.053859 Loss(val): 0.053952 acc(val): 0.615374
  754. [23:41:39] Epoch 29: Loss(train): 0.053869 Loss(val): 0.053855 acc(val): 0.616395
  755. [23:42:47] Epoch 30: Loss(train): 0.053897 Loss(val): 0.053789 acc(val): 0.615714
  756. [23:43:53] Epoch 31: Loss(train): 0.053977 Loss(val): 0.053684 acc(val): 0.612789
  757. [23:45:01] Epoch 32: Loss(train): 0.054146 Loss(val): 0.053690 acc(val): 0.608929
  758. [23:46:05] Epoch 33: Loss(train): 0.054114 Loss(val): 0.053585 acc(val): 0.607143
  759. [23:47:12] Epoch 34: Loss(train): 0.053837 Loss(val): 0.053342 acc(val): 0.614286
  760. [23:48:54] Epoch 35: Loss(train): 0.053427 Loss(val): 0.053079 acc(val): 0.621905
  761. [23:51:05] Epoch 36: Loss(train): 0.052834 Loss(val): 0.052732 acc(val): 0.637143
  762. [23:52:37] Epoch 37: Loss(train): 0.052377 Loss(val): 0.052473 acc(val): 0.647075
  763. [23:53:55] Epoch 38: Loss(train): 0.052059 Loss(val): 0.052323 acc(val): 0.655765
  764. [23:55:00] Epoch 39: Loss(train): 0.051822 Loss(val): 0.052194 acc(val): 0.659711
  765. [23:56:37] Epoch 40: Loss(train): 0.051687 Loss(val): 0.052090 acc(val): 0.662432
  766. [23:57:56] Epoch 41: Loss(train): 0.051599 Loss(val): 0.052035 acc(val): 0.663452
  767. [23:59:27] Epoch 42: Loss(train): 0.051543 Loss(val): 0.052002 acc(val): 0.666105
  768. [00:00:40] Epoch 43: Loss(train): 0.051492 Loss(val): 0.051979 acc(val): 0.665289
  769. [00:01:55] Epoch 44: Loss(train): 0.051441 Loss(val): 0.051964 acc(val): 0.666514
  770. [00:03:10] Epoch 45: Loss(train): 0.051421 Loss(val): 0.051940 acc(val): 0.668078
  771. [00:04:31] Epoch 46: Loss(train): 0.051367 Loss(val): 0.051917 acc(val): 0.668827
  772. [00:05:37] Epoch 47: Loss(train): 0.051326 Loss(val): 0.051903 acc(val): 0.669031
  773. [00:06:46] Epoch 48: Loss(train): 0.051282 Loss(val): 0.051888 acc(val): 0.670119
  774. [00:07:54] Epoch 49: Loss(train): 0.051247 Loss(val): 0.051872 acc(val): 0.670391
  775. [00:09:08] Epoch 50: Loss(train): 0.051210 Loss(val): 0.051871 acc(val): 0.670391
  776. [00:10:13] Epoch 51: Loss(train): 0.051175 Loss(val): 0.051883 acc(val): 0.670187
  777. [00:11:22] Epoch 52: Loss(train): 0.051149 Loss(val): 0.051886 acc(val): 0.670051
  778. [00:12:27] Epoch 53: Loss(train): 0.051129 Loss(val): 0.051913 acc(val): 0.669099
  779. [00:13:37] Epoch 54: Loss(train): 0.051116 Loss(val): 0.051936 acc(val): 0.667330
  780. [00:14:45] Epoch 55: Loss(train): 0.051102 Loss(val): 0.051941 acc(val): 0.667602
  781. [00:15:57] Epoch 56: Loss(train): 0.051099 Loss(val): 0.051973 acc(val): 0.666718
  782. [00:17:04] Epoch 57: Loss(train): 0.051085 Loss(val): 0.051980 acc(val): 0.666718
  783. [00:18:26] Epoch 58: Loss(train): 0.051069 Loss(val): 0.051975 acc(val): 0.666650
  784. [00:20:26] Epoch 59: Loss(train): 0.051041 Loss(val): 0.051938 acc(val): 0.667602
  785. [00:21:56] Epoch 60: Loss(train): 0.051020 Loss(val): 0.051919 acc(val): 0.667738
  786. [00:23:31] Epoch 61: Loss(train): 0.050994 Loss(val): 0.051884 acc(val): 0.668963
  787. [00:24:50] Epoch 62: Loss(train): 0.050970 Loss(val): 0.051851 acc(val): 0.669915
  788. [00:26:36] Epoch 63: Loss(train): 0.050952 Loss(val): 0.051824 acc(val): 0.670187
  789. [00:27:56] Epoch 64: Loss(train): 0.050932 Loss(val): 0.051791 acc(val): 0.671207
  790. [00:29:25] Epoch 65: Loss(train): 0.050915 Loss(val): 0.051762 acc(val): 0.671888
  791. [00:30:38] Epoch 66: Loss(train): 0.050900 Loss(val): 0.051739 acc(val): 0.671820
  792. [00:31:47] Epoch 67: Loss(train): 0.050888 Loss(val): 0.051720 acc(val): 0.671956
  793. [00:33:12] Epoch 68: Loss(train): 0.050880 Loss(val): 0.051714 acc(val): 0.671956
  794. [00:34:24] Epoch 69: Loss(train): 0.050870 Loss(val): 0.051703 acc(val): 0.672024
  795. [00:35:42] Epoch 70: Loss(train): 0.050862 Loss(val): 0.051696 acc(val): 0.672636
  796. [00:36:54] Epoch 71: Loss(train): 0.050854 Loss(val): 0.051680 acc(val): 0.673044
  797. [00:38:13] Epoch 72: Loss(train): 0.050847 Loss(val): 0.051672 acc(val): 0.672840
  798. [00:39:21] Epoch 73: Loss(train): 0.050842 Loss(val): 0.051672 acc(val): 0.671888
  799. [00:40:37] Epoch 74: Loss(train): 0.050836 Loss(val): 0.051663 acc(val): 0.672364
  800. [00:41:45] Epoch 75: Loss(train): 0.050830 Loss(val): 0.051650 acc(val): 0.672432
  801. [00:43:00] Epoch 76: Loss(train): 0.050825 Loss(val): 0.051648 acc(val): 0.672840
  802. [00:44:08] Epoch 77: Loss(train): 0.050820 Loss(val): 0.051643 acc(val): 0.672568
  803. [00:45:20] Epoch 78: Loss(train): 0.050817 Loss(val): 0.051644 acc(val): 0.672500
  804. [00:46:30] Epoch 79: Loss(train): 0.050812 Loss(val): 0.051636 acc(val): 0.672364
  805. [00:47:41] Epoch 80: Loss(train): 0.050809 Loss(val): 0.051635 acc(val): 0.672704
  806. [00:48:51] Epoch 81: Loss(train): 0.050805 Loss(val): 0.051629 acc(val): 0.672840
  807. [00:50:33] Epoch 82: Loss(train): 0.050801 Loss(val): 0.051625 acc(val): 0.672704
  808. [00:52:14] Epoch 83: Loss(train): 0.050798 Loss(val): 0.051623 acc(val): 0.672772
  809. [00:53:55] Epoch 84: Loss(train): 0.050795 Loss(val): 0.051622 acc(val): 0.672704
  810. [00:55:43] Epoch 85: Loss(train): 0.050792 Loss(val): 0.051617 acc(val): 0.673112
  811. [00:57:16] Epoch 86: Loss(train): 0.050790 Loss(val): 0.051615 acc(val): 0.673248
  812. [00:58:46] Epoch 87: Loss(train): 0.050787 Loss(val): 0.051613 acc(val): 0.673520
  813. [01:00:14] Epoch 88: Loss(train): 0.050785 Loss(val): 0.051612 acc(val): 0.673248
  814. [01:01:36] Epoch 89: Loss(train): 0.050783 Loss(val): 0.051612 acc(val): 0.673180
  815. [01:02:53] Epoch 90: Loss(train): 0.050781 Loss(val): 0.051609 acc(val): 0.673248
  816. [01:04:05] Epoch 91: Loss(train): 0.050779 Loss(val): 0.051608 acc(val): 0.673316
  817. [01:05:24] Epoch 92: Loss(train): 0.050778 Loss(val): 0.051606 acc(val): 0.673384
  818. [01:06:45] Epoch 93: Loss(train): 0.050776 Loss(val): 0.051605 acc(val): 0.673452
  819. [01:07:59] Epoch 94: Loss(train): 0.050775 Loss(val): 0.051604 acc(val): 0.673588
  820. [01:09:13] Epoch 95: Loss(train): 0.050774 Loss(val): 0.051602 acc(val): 0.673452
  821. [01:10:25] Epoch 96: Loss(train): 0.050772 Loss(val): 0.051599 acc(val): 0.673316
  822. [01:11:35] Epoch 97: Loss(train): 0.050771 Loss(val): 0.051599 acc(val): 0.673520
  823. [01:12:45] Epoch 98: Loss(train): 0.050770 Loss(val): 0.051600 acc(val): 0.673588
  824. [01:13:54] Epoch 99: Loss(train): 0.050769 Loss(val): 0.051597 acc(val): 0.673452
  825. [01:15:04] Epoch 100: Loss(train): 0.050768 Loss(val): 0.051596
  826. [01:15:13] FINAL(100) Loss(test): 0.052151 Accuracy: 0.602703
  827. Configuration learning_rate=0.003, decay_step=60
  828. [01:15:31] INIT Loss(test): 0.118268 Accuracy: 0.130541
  829. [01:16:50] Epoch 1: Loss(train): 0.084326 Loss(val): 0.082111 acc(val): 0.317891
  830. [01:18:01] Epoch 2: Loss(train): 0.067675 Loss(val): 0.066096 acc(val): 0.453265
  831. [01:19:07] Epoch 3: Loss(train): 0.063037 Loss(val): 0.061884 acc(val): 0.512517
  832. [01:20:51] Epoch 4: Loss(train): 0.060235 Loss(val): 0.059597 acc(val): 0.557024
  833. [01:22:50] Epoch 5: Loss(train): 0.059429 Loss(val): 0.059154 acc(val): 0.559099
  834. [01:24:33] Epoch 6: Loss(train): 0.059058 Loss(val): 0.058983 acc(val): 0.559303
  835. [01:26:15] Epoch 7: Loss(train): 0.058302 Loss(val): 0.058147 acc(val): 0.566718
  836. [01:27:41] Epoch 8: Loss(train): 0.058073 Loss(val): 0.057841 acc(val): 0.566241
  837. [01:29:14] Epoch 9: Loss(train): 0.057751 Loss(val): 0.057718 acc(val): 0.568078
  838. [01:30:49] Epoch 10: Loss(train): 0.057868 Loss(val): 0.057935 acc(val): 0.561003
  839. [01:32:08] Epoch 11: Loss(train): 0.057844 Loss(val): 0.058059 acc(val): 0.559439
  840. [01:33:28] Epoch 12: Loss(train): 0.057605 Loss(val): 0.057729 acc(val): 0.561207
  841. [01:34:45] Epoch 13: Loss(train): 0.056757 Loss(val): 0.056798 acc(val): 0.574422
  842. [01:36:09] Epoch 14: Loss(train): 0.056279 Loss(val): 0.056278 acc(val): 0.585034
  843. [01:37:26] Epoch 15: Loss(train): 0.056084 Loss(val): 0.056146 acc(val): 0.587619
  844. [01:38:40] Epoch 16: Loss(train): 0.056607 Loss(val): 0.056665 acc(val): 0.580119
  845. [01:39:54] Epoch 17: Loss(train): 0.057447 Loss(val): 0.057562 acc(val): 0.563656
  846. [01:41:13] Epoch 18: Loss(train): 0.057931 Loss(val): 0.058062 acc(val): 0.555493
  847. [01:42:26] Epoch 19: Loss(train): 0.057932 Loss(val): 0.058026 acc(val): 0.556786
  848. [01:43:42] Epoch 20: Loss(train): 0.056523 Loss(val): 0.056666 acc(val): 0.581956
  849. [01:44:55] Epoch 21: Loss(train): 0.054813 Loss(val): 0.054965 acc(val): 0.613197
  850. [01:46:10] Epoch 22: Loss(train): 0.053830 Loss(val): 0.054018 acc(val): 0.632517
  851. [01:47:20] Epoch 23: Loss(train): 0.053511 Loss(val): 0.053685 acc(val): 0.635442
  852. [01:48:35] Epoch 24: Loss(train): 0.053198 Loss(val): 0.053462 acc(val): 0.640272
  853. [01:49:48] Epoch 25: Loss(train): 0.053054 Loss(val): 0.053336 acc(val): 0.642041
  854. [01:51:41] Epoch 26: Loss(train): 0.052948 Loss(val): 0.053279 acc(val): 0.643265
  855. [01:53:14] Epoch 27: Loss(train): 0.052850 Loss(val): 0.053252 acc(val): 0.644490
  856. [01:54:52] Epoch 28: Loss(train): 0.052809 Loss(val): 0.053145 acc(val): 0.644626
  857. [01:56:30] Epoch 29: Loss(train): 0.052700 Loss(val): 0.053007 acc(val): 0.646735
  858. [01:58:28] Epoch 30: Loss(train): 0.052565 Loss(val): 0.052855 acc(val): 0.650000
  859. [01:59:41] Epoch 31: Loss(train): 0.052525 Loss(val): 0.052728 acc(val): 0.651769
  860. [02:01:14] Epoch 32: Loss(train): 0.052521 Loss(val): 0.052635 acc(val): 0.649660
  861. [02:02:32] Epoch 33: Loss(train): 0.052542 Loss(val): 0.052554 acc(val): 0.649252
  862. [02:04:04] Epoch 34: Loss(train): 0.052591 Loss(val): 0.052510 acc(val): 0.646463
  863. [02:05:17] Epoch 35: Loss(train): 0.052629 Loss(val): 0.052478 acc(val): 0.645731
  864. [02:06:41] Epoch 36: Loss(train): 0.052609 Loss(val): 0.052442 acc(val): 0.647160
  865. [02:07:55] Epoch 37: Loss(train): 0.052505 Loss(val): 0.052344 acc(val): 0.649065
  866. [02:09:16] Epoch 38: Loss(train): 0.052283 Loss(val): 0.052196 acc(val): 0.653827
  867. [02:10:30] Epoch 39: Loss(train): 0.052129 Loss(val): 0.052099 acc(val): 0.656616
  868. [02:11:46] Epoch 40: Loss(train): 0.051865 Loss(val): 0.051973 acc(val): 0.660901
  869. [02:12:59] Epoch 41: Loss(train): 0.051727 Loss(val): 0.051912 acc(val): 0.664643
  870. [02:14:16] Epoch 42: Loss(train): 0.051614 Loss(val): 0.051869 acc(val): 0.666888
  871. [02:15:26] Epoch 43: Loss(train): 0.051569 Loss(val): 0.051843 acc(val): 0.666003
  872. [02:16:41] Epoch 44: Loss(train): 0.051505 Loss(val): 0.051823 acc(val): 0.667024
  873. [02:17:52] Epoch 45: Loss(train): 0.051464 Loss(val): 0.051800 acc(val): 0.667568
  874. [02:19:07] Epoch 46: Loss(train): 0.051440 Loss(val): 0.051774 acc(val): 0.667432
  875. [02:20:20] Epoch 47: Loss(train): 0.051418 Loss(val): 0.051756 acc(val): 0.667976
  876. [02:22:02] Epoch 48: Loss(train): 0.051364 Loss(val): 0.051738 acc(val): 0.669813
  877. [02:23:45] Epoch 49: Loss(train): 0.051345 Loss(val): 0.051721 acc(val): 0.669201
  878. [02:25:46] Epoch 50: Loss(train): 0.051294 Loss(val): 0.051708 acc(val): 0.671037
  879. [02:27:28] Epoch 51: Loss(train): 0.051244 Loss(val): 0.051699 acc(val): 0.670765
  880. [02:29:26] Epoch 52: Loss(train): 0.051217 Loss(val): 0.051687 acc(val): 0.671241
  881. [02:30:49] Epoch 53: Loss(train): 0.051168 Loss(val): 0.051685 acc(val): 0.671582
  882. [02:32:24] Epoch 54: Loss(train): 0.051129 Loss(val): 0.051696 acc(val): 0.672262
  883. [02:33:48] Epoch 55: Loss(train): 0.051096 Loss(val): 0.051717 acc(val): 0.670833
  884. [02:35:18] Epoch 56: Loss(train): 0.051069 Loss(val): 0.051752 acc(val): 0.669609
  885. [02:36:37] Epoch 57: Loss(train): 0.051054 Loss(val): 0.051791 acc(val): 0.668793
  886. [02:38:02] Epoch 58: Loss(train): 0.051046 Loss(val): 0.051823 acc(val): 0.668793
  887. [02:39:22] Epoch 59: Loss(train): 0.051033 Loss(val): 0.051830 acc(val): 0.668180
  888. [02:40:42] Epoch 60: Loss(train): 0.051030 Loss(val): 0.051859 acc(val): 0.667296
  889. [02:41:56] Epoch 61: Loss(train): 0.051019 Loss(val): 0.051869 acc(val): 0.666276
  890. [02:43:18] Epoch 62: Loss(train): 0.051008 Loss(val): 0.051871 acc(val): 0.666003
  891. [02:44:34] Epoch 63: Loss(train): 0.050986 Loss(val): 0.051852 acc(val): 0.665935
  892. [02:45:51] Epoch 64: Loss(train): 0.050960 Loss(val): 0.051816 acc(val): 0.667364
  893. [02:47:05] Epoch 65: Loss(train): 0.050938 Loss(val): 0.051782 acc(val): 0.668316
  894. [02:48:24] Epoch 66: Loss(train): 0.050921 Loss(val): 0.051765 acc(val): 0.668793
  895. [02:49:39] Epoch 67: Loss(train): 0.050905 Loss(val): 0.051737 acc(val): 0.669337
  896. [02:51:00] Epoch 68: Loss(train): 0.050893 Loss(val): 0.051723 acc(val): 0.669745
  897. [02:52:31] Epoch 69: Loss(train): 0.050879 Loss(val): 0.051702 acc(val): 0.670221
  898. [02:54:48] Epoch 70: Loss(train): 0.050866 Loss(val): 0.051684 acc(val): 0.669813
  899. [02:56:36] Epoch 71: Loss(train): 0.050855 Loss(val): 0.051657 acc(val): 0.670425
  900. [02:58:05] Epoch 72: Loss(train): 0.050848 Loss(val): 0.051651 acc(val): 0.670357
  901. [02:59:25] Epoch 73: Loss(train): 0.050842 Loss(val): 0.051645 acc(val): 0.670765
  902. [03:01:09] Epoch 74: Loss(train): 0.050834 Loss(val): 0.051638 acc(val): 0.670901
  903. [03:02:44] Epoch 75: Loss(train): 0.050828 Loss(val): 0.051627 acc(val): 0.671378
  904. [03:04:13] Epoch 76: Loss(train): 0.050821 Loss(val): 0.051613 acc(val): 0.671582
  905. [03:05:28] Epoch 77: Loss(train): 0.050817 Loss(val): 0.051616 acc(val): 0.671446
  906. [03:07:01] Epoch 78: Loss(train): 0.050811 Loss(val): 0.051602 acc(val): 0.671446
  907. [03:08:26] Epoch 79: Loss(train): 0.050808 Loss(val): 0.051602 acc(val): 0.671922
  908. [03:09:55] Epoch 80: Loss(train): 0.050804 Loss(val): 0.051598 acc(val): 0.672262
  909. [03:11:08] Epoch 81: Loss(train): 0.050800 Loss(val): 0.051593 acc(val): 0.671990
  910. [03:12:34] Epoch 82: Loss(train): 0.050796 Loss(val): 0.051591 acc(val): 0.672398
  911. [03:13:52] Epoch 83: Loss(train): 0.050792 Loss(val): 0.051581 acc(val): 0.672534
  912. [03:15:11] Epoch 84: Loss(train): 0.050789 Loss(val): 0.051579 acc(val): 0.672466
  913. [03:16:28] Epoch 85: Loss(train): 0.050786 Loss(val): 0.051579 acc(val): 0.672398
  914. [03:17:47] Epoch 86: Loss(train): 0.050783 Loss(val): 0.051572 acc(val): 0.672398
  915. [03:19:04] Epoch 87: Loss(train): 0.050781 Loss(val): 0.051570 acc(val): 0.672534
  916. [03:20:23] Epoch 88: Loss(train): 0.050779 Loss(val): 0.051571 acc(val): 0.672602
  917. [03:21:39] Epoch 89: Loss(train): 0.050777 Loss(val): 0.051571 acc(val): 0.672330
  918. [03:23:02] Epoch 90: Loss(train): 0.050774 Loss(val): 0.051563 acc(val): 0.672534
  919. [03:24:57] Epoch 91: Loss(train): 0.050773 Loss(val): 0.051561 acc(val): 0.672602
  920. [03:27:00] Epoch 92: Loss(train): 0.050771 Loss(val): 0.051558 acc(val): 0.672670
  921. [03:29:00] Epoch 93: Loss(train): 0.050769 Loss(val): 0.051557 acc(val): 0.672602
  922. [03:31:00] Epoch 94: Loss(train): 0.050767 Loss(val): 0.051552 acc(val): 0.672738
  923. [03:32:35] Epoch 95: Loss(train): 0.050766 Loss(val): 0.051551 acc(val): 0.672874
  924. [03:34:01] Epoch 96: Loss(train): 0.050765 Loss(val): 0.051556 acc(val): 0.672670
  925. [03:35:31] Epoch 97: Loss(train): 0.050764 Loss(val): 0.051553 acc(val): 0.673146
  926. [03:37:11] Epoch 98: Loss(train): 0.050763 Loss(val): 0.051555 acc(val): 0.672942
  927. [03:38:30] Epoch 99: Loss(train): 0.050762 Loss(val): 0.051552 acc(val): 0.673010
  928. [03:39:58] Epoch 100: Loss(train): 0.050761 Loss(val): 0.051550
  929. [03:40:09] FINAL(100) Loss(test): 0.051995 Accuracy: 0.592703