12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939 |
- --------[30_09_2019 14:27:20]--------
- second stage Hyperparameter Tuning with 1 net, reevaluation with fixed epochs now, using testset
- Configuration learning_rate=0.03, decay_step=20
- [14:29:16] INIT Loss(test): 0.191895 Accuracy: 0.061824
- [14:31:42] Epoch 1: Loss(train): 0.093080 Loss(val): 0.092508 acc(val): 0.274541
- [14:32:12] Epoch 2: Loss(train): 0.069031 Loss(val): 0.067629 acc(val): 0.436054
- [14:32:32] Epoch 3: Loss(train): 0.064291 Loss(val): 0.062943 acc(val): 0.508061
- [14:32:52] Epoch 4: Loss(train): 0.060827 Loss(val): 0.059568 acc(val): 0.543418
- [14:33:29] Epoch 5: Loss(train): 0.059301 Loss(val): 0.058460 acc(val): 0.555391
- [14:33:58] Epoch 6: Loss(train): 0.058672 Loss(val): 0.057920 acc(val): 0.556820
- [14:34:30] Epoch 7: Loss(train): 0.057834 Loss(val): 0.057255 acc(val): 0.570561
- [14:34:56] Epoch 8: Loss(train): 0.057435 Loss(val): 0.057099 acc(val): 0.570017
- [14:35:21] Epoch 9: Loss(train): 0.057113 Loss(val): 0.056812 acc(val): 0.573146
- [14:35:39] Epoch 10: Loss(train): 0.057030 Loss(val): 0.056849 acc(val): 0.571531
- [14:35:57] Epoch 11: Loss(train): 0.056916 Loss(val): 0.056893 acc(val): 0.572823
- [14:36:15] Epoch 12: Loss(train): 0.056653 Loss(val): 0.056813 acc(val): 0.571803
- [14:36:34] Epoch 13: Loss(train): 0.056223 Loss(val): 0.056521 acc(val): 0.577925
- [14:36:52] Epoch 14: Loss(train): 0.055907 Loss(val): 0.056257 acc(val): 0.581054
- [14:37:10] Epoch 15: Loss(train): 0.055811 Loss(val): 0.056153 acc(val): 0.581939
- [14:37:29] Epoch 16: Loss(train): 0.055996 Loss(val): 0.056253 acc(val): 0.577585
- [14:37:48] Epoch 17: Loss(train): 0.056509 Loss(val): 0.056749 acc(val): 0.566701
- [14:38:09] Epoch 18: Loss(train): 0.056736 Loss(val): 0.057002 acc(val): 0.561259
- [14:38:28] Epoch 19: Loss(train): 0.056825 Loss(val): 0.057148 acc(val): 0.557993
- [14:38:47] Epoch 20: Loss(train): 0.056349 Loss(val): 0.056709 acc(val): 0.564524
- [14:39:06] Epoch 21: Loss(train): 0.055339 Loss(val): 0.055721 acc(val): 0.583095
- [14:39:25] Epoch 22: Loss(train): 0.054080 Loss(val): 0.054463 acc(val): 0.616633
- [14:39:43] Epoch 23: Loss(train): 0.053393 Loss(val): 0.053672 acc(val): 0.634405
- [14:40:01] Epoch 24: Loss(train): 0.053121 Loss(val): 0.053326 acc(val): 0.641888
- [14:40:21] Epoch 25: Loss(train): 0.053043 Loss(val): 0.053181 acc(val): 0.640799
- [14:40:45] Epoch 26: Loss(train): 0.052858 Loss(val): 0.053029 acc(val): 0.641956
- [14:41:04] Epoch 27: Loss(train): 0.052826 Loss(val): 0.052986 acc(val): 0.641003
- [14:41:23] Epoch 28: Loss(train): 0.052841 Loss(val): 0.053040 acc(val): 0.639337
- [14:41:42] Epoch 29: Loss(train): 0.052777 Loss(val): 0.053036 acc(val): 0.638929
- [14:42:01] Epoch 30: Loss(train): 0.052711 Loss(val): 0.052963 acc(val): 0.639813
- [14:42:20] Epoch 31: Loss(train): 0.052688 Loss(val): 0.052891 acc(val): 0.640765
- [14:42:39] Epoch 32: Loss(train): 0.052576 Loss(val): 0.052782 acc(val): 0.643214
- [14:42:59] Epoch 33: Loss(train): 0.052667 Loss(val): 0.052730 acc(val): 0.642058
- [14:43:17] Epoch 34: Loss(train): 0.052779 Loss(val): 0.052696 acc(val): 0.639490
- [14:43:37] Epoch 35: Loss(train): 0.052854 Loss(val): 0.052653 acc(val): 0.637721
- [14:43:55] Epoch 36: Loss(train): 0.053212 Loss(val): 0.052754 acc(val): 0.630391
- [14:44:14] Epoch 37: Loss(train): 0.053448 Loss(val): 0.052824 acc(val): 0.625561
- [14:44:33] Epoch 38: Loss(train): 0.053491 Loss(val): 0.052787 acc(val): 0.625153
- [14:44:52] Epoch 39: Loss(train): 0.053333 Loss(val): 0.052663 acc(val): 0.629847
- [14:45:11] Epoch 40: Loss(train): 0.052837 Loss(val): 0.052362 acc(val): 0.641480
- [14:45:32] Epoch 41: Loss(train): 0.052389 Loss(val): 0.052117 acc(val): 0.649711
- [14:45:51] Epoch 42: Loss(train): 0.052066 Loss(val): 0.051959 acc(val): 0.657534
- [14:46:10] Epoch 43: Loss(train): 0.051788 Loss(val): 0.051844 acc(val): 0.663503
- [14:46:29] Epoch 44: Loss(train): 0.051640 Loss(val): 0.051785 acc(val): 0.666020
- [14:46:48] Epoch 45: Loss(train): 0.051530 Loss(val): 0.051748 acc(val): 0.668265
- [14:47:07] Epoch 46: Loss(train): 0.051493 Loss(val): 0.051737 acc(val): 0.667857
- [14:47:26] Epoch 47: Loss(train): 0.051452 Loss(val): 0.051726 acc(val): 0.668078
- [14:47:46] Epoch 48: Loss(train): 0.051433 Loss(val): 0.051714 acc(val): 0.668486
- [14:48:05] Epoch 49: Loss(train): 0.051401 Loss(val): 0.051703 acc(val): 0.668690
- [14:48:25] Epoch 50: Loss(train): 0.051384 Loss(val): 0.051694 acc(val): 0.669235
- [14:48:46] Epoch 51: Loss(train): 0.051362 Loss(val): 0.051680 acc(val): 0.669303
- [14:49:05] Epoch 52: Loss(train): 0.051325 Loss(val): 0.051665 acc(val): 0.670527
- [14:49:24] Epoch 53: Loss(train): 0.051259 Loss(val): 0.051651 acc(val): 0.670595
- [14:49:43] Epoch 54: Loss(train): 0.051197 Loss(val): 0.051644 acc(val): 0.670867
- [14:50:02] Epoch 55: Loss(train): 0.051136 Loss(val): 0.051634 acc(val): 0.671276
- [14:50:21] Epoch 56: Loss(train): 0.051068 Loss(val): 0.051633 acc(val): 0.671871
- [14:50:40] Epoch 57: Loss(train): 0.051019 Loss(val): 0.051637 acc(val): 0.674116
- [14:50:59] Epoch 58: Loss(train): 0.050977 Loss(val): 0.051658 acc(val): 0.674456
- [14:51:18] Epoch 59: Loss(train): 0.050942 Loss(val): 0.051691 acc(val): 0.673231
- [14:51:37] Epoch 60: Loss(train): 0.050919 Loss(val): 0.051711 acc(val): 0.673248
- [14:51:56] Epoch 61: Loss(train): 0.050904 Loss(val): 0.051738 acc(val): 0.672704
- [14:52:15] Epoch 62: Loss(train): 0.050891 Loss(val): 0.051758 acc(val): 0.672432
- [14:52:34] Epoch 63: Loss(train): 0.050875 Loss(val): 0.051761 acc(val): 0.672908
- [14:52:53] Epoch 64: Loss(train): 0.050858 Loss(val): 0.051750 acc(val): 0.673248
- [14:53:12] Epoch 65: Loss(train): 0.050837 Loss(val): 0.051725 acc(val): 0.674065
- [14:53:30] Epoch 66: Loss(train): 0.050814 Loss(val): 0.051685 acc(val): 0.674201
- [14:53:49] Epoch 67: Loss(train): 0.050799 Loss(val): 0.051681 acc(val): 0.674133
- [14:54:08] Epoch 68: Loss(train): 0.050780 Loss(val): 0.051648 acc(val): 0.674269
- [14:54:27] Epoch 69: Loss(train): 0.050765 Loss(val): 0.051627 acc(val): 0.674133
- [14:54:46] Epoch 70: Loss(train): 0.050754 Loss(val): 0.051615 acc(val): 0.674133
- [14:55:05] Epoch 71: Loss(train): 0.050742 Loss(val): 0.051595 acc(val): 0.674541
- [14:55:24] Epoch 72: Loss(train): 0.050732 Loss(val): 0.051581 acc(val): 0.674745
- [14:55:42] Epoch 73: Loss(train): 0.050722 Loss(val): 0.051563 acc(val): 0.675561
- [14:56:01] Epoch 74: Loss(train): 0.050714 Loss(val): 0.051547 acc(val): 0.675833
- [14:56:20] Epoch 75: Loss(train): 0.050708 Loss(val): 0.051543 acc(val): 0.675493
- [14:56:38] Epoch 76: Loss(train): 0.050701 Loss(val): 0.051528 acc(val): 0.676446
- [14:56:59] Epoch 77: Loss(train): 0.050696 Loss(val): 0.051524 acc(val): 0.676514
- [14:57:19] Epoch 78: Loss(train): 0.050690 Loss(val): 0.051518 acc(val): 0.676718
- [14:57:39] Epoch 79: Loss(train): 0.050685 Loss(val): 0.051510 acc(val): 0.676990
- [14:57:59] Epoch 80: Loss(train): 0.050680 Loss(val): 0.051509 acc(val): 0.677126
- [14:58:19] Epoch 81: Loss(train): 0.050676 Loss(val): 0.051503 acc(val): 0.677126
- [14:58:39] Epoch 82: Loss(train): 0.050673 Loss(val): 0.051498 acc(val): 0.677398
- [14:58:59] Epoch 83: Loss(train): 0.050669 Loss(val): 0.051496 acc(val): 0.677534
- [14:59:19] Epoch 84: Loss(train): 0.050666 Loss(val): 0.051490 acc(val): 0.677602
- [14:59:40] Epoch 85: Loss(train): 0.050663 Loss(val): 0.051489 acc(val): 0.677330
- [15:00:01] Epoch 86: Loss(train): 0.050660 Loss(val): 0.051486 acc(val): 0.677534
- [15:00:21] Epoch 87: Loss(train): 0.050657 Loss(val): 0.051482 acc(val): 0.677330
- [15:00:41] Epoch 88: Loss(train): 0.050655 Loss(val): 0.051479 acc(val): 0.677602
- [15:01:01] Epoch 89: Loss(train): 0.050653 Loss(val): 0.051477 acc(val): 0.677466
- [15:01:21] Epoch 90: Loss(train): 0.050651 Loss(val): 0.051476 acc(val): 0.677466
- [15:01:41] Epoch 91: Loss(train): 0.050649 Loss(val): 0.051473 acc(val): 0.677534
- [15:02:01] Epoch 92: Loss(train): 0.050647 Loss(val): 0.051470 acc(val): 0.677534
- [15:02:21] Epoch 93: Loss(train): 0.050646 Loss(val): 0.051464 acc(val): 0.677466
- [15:02:41] Epoch 94: Loss(train): 0.050644 Loss(val): 0.051465 acc(val): 0.677534
- [15:03:01] Epoch 95: Loss(train): 0.050643 Loss(val): 0.051466 acc(val): 0.677466
- [15:03:21] Epoch 96: Loss(train): 0.050641 Loss(val): 0.051465 acc(val): 0.677398
- [15:03:42] Epoch 97: Loss(train): 0.050640 Loss(val): 0.051461 acc(val): 0.677330
- [15:04:02] Epoch 98: Loss(train): 0.050639 Loss(val): 0.051463 acc(val): 0.677466
- [15:04:22] Epoch 99: Loss(train): 0.050638 Loss(val): 0.051463 acc(val): 0.677602
- [15:04:43] Epoch 100: Loss(train): 0.050637 Loss(val): 0.051460
- [15:04:46] FINAL(100) Loss(test): 0.051915 Accuracy: 0.600135
- Configuration learning_rate=0.03, decay_step=40
- [15:04:52] INIT Loss(test): 0.133920 Accuracy: 0.117973
- [15:05:10] Epoch 1: Loss(train): 0.085145 Loss(val): 0.084277 acc(val): 0.303997
- [15:05:31] Epoch 2: Loss(train): 0.068091 Loss(val): 0.066436 acc(val): 0.462330
- [15:05:52] Epoch 3: Loss(train): 0.062043 Loss(val): 0.061092 acc(val): 0.528452
- [15:06:12] Epoch 4: Loss(train): 0.059993 Loss(val): 0.059272 acc(val): 0.556088
- [15:06:32] Epoch 5: Loss(train): 0.058545 Loss(val): 0.058204 acc(val): 0.572500
- [15:06:53] Epoch 6: Loss(train): 0.057621 Loss(val): 0.057397 acc(val): 0.584167
- [15:07:14] Epoch 7: Loss(train): 0.057388 Loss(val): 0.057294 acc(val): 0.581582
- [15:07:35] Epoch 8: Loss(train): 0.057485 Loss(val): 0.057341 acc(val): 0.577262
- [15:07:55] Epoch 9: Loss(train): 0.057354 Loss(val): 0.057369 acc(val): 0.581293
- [15:08:16] Epoch 10: Loss(train): 0.056581 Loss(val): 0.056680 acc(val): 0.596803
- [15:08:36] Epoch 11: Loss(train): 0.056511 Loss(val): 0.056548 acc(val): 0.596735
- [15:08:56] Epoch 12: Loss(train): 0.056471 Loss(val): 0.056751 acc(val): 0.591293
- [15:09:16] Epoch 13: Loss(train): 0.056514 Loss(val): 0.056947 acc(val): 0.584286
- [15:09:35] Epoch 14: Loss(train): 0.056266 Loss(val): 0.056668 acc(val): 0.586871
- [15:09:55] Epoch 15: Loss(train): 0.056044 Loss(val): 0.056443 acc(val): 0.589592
- [15:10:16] Epoch 16: Loss(train): 0.056162 Loss(val): 0.056465 acc(val): 0.584524
- [15:10:36] Epoch 17: Loss(train): 0.056294 Loss(val): 0.056613 acc(val): 0.581003
- [15:10:56] Epoch 18: Loss(train): 0.056623 Loss(val): 0.056966 acc(val): 0.573537
- [15:11:16] Epoch 19: Loss(train): 0.056799 Loss(val): 0.057218 acc(val): 0.571514
- [15:11:37] Epoch 20: Loss(train): 0.056388 Loss(val): 0.056934 acc(val): 0.577432
- [15:11:58] Epoch 21: Loss(train): 0.055646 Loss(val): 0.056139 acc(val): 0.588741
- [15:12:19] Epoch 22: Loss(train): 0.054702 Loss(val): 0.055054 acc(val): 0.609184
- [15:12:39] Epoch 23: Loss(train): 0.054107 Loss(val): 0.054288 acc(val): 0.625357
- [15:12:59] Epoch 24: Loss(train): 0.053836 Loss(val): 0.053849 acc(val): 0.634252
- [15:13:19] Epoch 25: Loss(train): 0.053429 Loss(val): 0.053493 acc(val): 0.643112
- [15:13:40] Epoch 26: Loss(train): 0.053345 Loss(val): 0.053398 acc(val): 0.643912
- [15:14:01] Epoch 27: Loss(train): 0.053229 Loss(val): 0.053307 acc(val): 0.645136
- [15:14:21] Epoch 28: Loss(train): 0.053138 Loss(val): 0.053215 acc(val): 0.644388
- [15:14:42] Epoch 29: Loss(train): 0.052939 Loss(val): 0.053058 acc(val): 0.649422
- [15:15:03] Epoch 30: Loss(train): 0.052790 Loss(val): 0.052942 acc(val): 0.651071
- [15:15:23] Epoch 31: Loss(train): 0.052683 Loss(val): 0.052816 acc(val): 0.653112
- [15:15:44] Epoch 32: Loss(train): 0.052602 Loss(val): 0.052753 acc(val): 0.654065
- [15:16:05] Epoch 33: Loss(train): 0.052579 Loss(val): 0.052674 acc(val): 0.654133
- [15:16:26] Epoch 34: Loss(train): 0.052610 Loss(val): 0.052597 acc(val): 0.655085
- [15:16:47] Epoch 35: Loss(train): 0.052611 Loss(val): 0.052530 acc(val): 0.655357
- [15:17:11] Epoch 36: Loss(train): 0.052680 Loss(val): 0.052476 acc(val): 0.653639
- [15:17:34] Epoch 37: Loss(train): 0.052791 Loss(val): 0.052449 acc(val): 0.653095
- [15:17:58] Epoch 38: Loss(train): 0.052758 Loss(val): 0.052363 acc(val): 0.655340
- [15:18:21] Epoch 39: Loss(train): 0.052682 Loss(val): 0.052269 acc(val): 0.656905
- [15:18:43] Epoch 40: Loss(train): 0.052525 Loss(val): 0.052156 acc(val): 0.661003
- [15:19:03] Epoch 41: Loss(train): 0.052267 Loss(val): 0.051996 acc(val): 0.665833
- [15:19:24] Epoch 42: Loss(train): 0.051986 Loss(val): 0.051854 acc(val): 0.671139
- [15:19:44] Epoch 43: Loss(train): 0.051762 Loss(val): 0.051751 acc(val): 0.675017
- [15:20:04] Epoch 44: Loss(train): 0.051592 Loss(val): 0.051681 acc(val): 0.679235
- [15:20:25] Epoch 45: Loss(train): 0.051479 Loss(val): 0.051643 acc(val): 0.680119
- [15:20:45] Epoch 46: Loss(train): 0.051394 Loss(val): 0.051616 acc(val): 0.681071
- [15:21:05] Epoch 47: Loss(train): 0.051367 Loss(val): 0.051600 acc(val): 0.680051
- [15:21:25] Epoch 48: Loss(train): 0.051341 Loss(val): 0.051579 acc(val): 0.680867
- [15:21:46] Epoch 49: Loss(train): 0.051304 Loss(val): 0.051564 acc(val): 0.681344
- [15:22:06] Epoch 50: Loss(train): 0.051285 Loss(val): 0.051563 acc(val): 0.681003
- [15:22:26] Epoch 51: Loss(train): 0.051286 Loss(val): 0.051553 acc(val): 0.681888
- [15:22:50] Epoch 52: Loss(train): 0.051247 Loss(val): 0.051551 acc(val): 0.681548
- [15:23:29] Epoch 53: Loss(train): 0.051221 Loss(val): 0.051543 acc(val): 0.681412
- [15:24:06] Epoch 54: Loss(train): 0.051172 Loss(val): 0.051541 acc(val): 0.680663
- [15:24:47] Epoch 55: Loss(train): 0.051131 Loss(val): 0.051537 acc(val): 0.679711
- [15:25:26] Epoch 56: Loss(train): 0.051072 Loss(val): 0.051525 acc(val): 0.680459
- [15:26:06] Epoch 57: Loss(train): 0.051019 Loss(val): 0.051529 acc(val): 0.679575
- [15:26:46] Epoch 58: Loss(train): 0.050963 Loss(val): 0.051544 acc(val): 0.680816
- [15:27:24] Epoch 59: Loss(train): 0.050920 Loss(val): 0.051560 acc(val): 0.680884
- [15:28:03] Epoch 60: Loss(train): 0.050894 Loss(val): 0.051591 acc(val): 0.681429
- [15:28:39] Epoch 61: Loss(train): 0.050878 Loss(val): 0.051636 acc(val): 0.680408
- [15:29:12] Epoch 62: Loss(train): 0.050867 Loss(val): 0.051668 acc(val): 0.678844
- [15:29:41] Epoch 63: Loss(train): 0.050860 Loss(val): 0.051702 acc(val): 0.676667
- [15:30:06] Epoch 64: Loss(train): 0.050847 Loss(val): 0.051702 acc(val): 0.677619
- [15:30:36] Epoch 65: Loss(train): 0.050832 Loss(val): 0.051699 acc(val): 0.677211
- [15:30:56] Epoch 66: Loss(train): 0.050821 Loss(val): 0.051700 acc(val): 0.677687
- [15:31:17] Epoch 67: Loss(train): 0.050801 Loss(val): 0.051675 acc(val): 0.678435
- [15:31:38] Epoch 68: Loss(train): 0.050779 Loss(val): 0.051646 acc(val): 0.678980
- [15:31:59] Epoch 69: Loss(train): 0.050759 Loss(val): 0.051619 acc(val): 0.679796
- [15:32:20] Epoch 70: Loss(train): 0.050744 Loss(val): 0.051600 acc(val): 0.679796
- [15:32:41] Epoch 71: Loss(train): 0.050733 Loss(val): 0.051590 acc(val): 0.680272
- [15:33:02] Epoch 72: Loss(train): 0.050716 Loss(val): 0.051555 acc(val): 0.680748
- [15:33:23] Epoch 73: Loss(train): 0.050704 Loss(val): 0.051538 acc(val): 0.681156
- [15:33:44] Epoch 74: Loss(train): 0.050693 Loss(val): 0.051523 acc(val): 0.681156
- [15:34:06] Epoch 75: Loss(train): 0.050684 Loss(val): 0.051510 acc(val): 0.681361
- [15:34:29] Epoch 76: Loss(train): 0.050677 Loss(val): 0.051499 acc(val): 0.681905
- [15:35:14] Epoch 77: Loss(train): 0.050668 Loss(val): 0.051481 acc(val): 0.682041
- [15:35:46] Epoch 78: Loss(train): 0.050662 Loss(val): 0.051477 acc(val): 0.681769
- [15:36:15] Epoch 79: Loss(train): 0.050656 Loss(val): 0.051464 acc(val): 0.682041
- [15:36:43] Epoch 80: Loss(train): 0.050651 Loss(val): 0.051460 acc(val): 0.682177
- [15:37:03] Epoch 81: Loss(train): 0.050647 Loss(val): 0.051460 acc(val): 0.681633
- [15:37:24] Epoch 82: Loss(train): 0.050642 Loss(val): 0.051450 acc(val): 0.681633
- [15:37:45] Epoch 83: Loss(train): 0.050638 Loss(val): 0.051445 acc(val): 0.681429
- [15:38:07] Epoch 84: Loss(train): 0.050634 Loss(val): 0.051439 acc(val): 0.681293
- [15:38:29] Epoch 85: Loss(train): 0.050631 Loss(val): 0.051434 acc(val): 0.681224
- [15:38:50] Epoch 86: Loss(train): 0.050628 Loss(val): 0.051434 acc(val): 0.681497
- [15:39:11] Epoch 87: Loss(train): 0.050625 Loss(val): 0.051429 acc(val): 0.681565
- [15:39:32] Epoch 88: Loss(train): 0.050622 Loss(val): 0.051431 acc(val): 0.681633
- [15:39:53] Epoch 89: Loss(train): 0.050619 Loss(val): 0.051418 acc(val): 0.681293
- [15:40:18] Epoch 90: Loss(train): 0.050617 Loss(val): 0.051419 acc(val): 0.681361
- [15:40:47] Epoch 91: Loss(train): 0.050615 Loss(val): 0.051416 acc(val): 0.681429
- [15:41:10] Epoch 92: Loss(train): 0.050613 Loss(val): 0.051413 acc(val): 0.681429
- [15:41:34] Epoch 93: Loss(train): 0.050611 Loss(val): 0.051410 acc(val): 0.681429
- [15:41:55] Epoch 94: Loss(train): 0.050610 Loss(val): 0.051410 acc(val): 0.681497
- [15:42:17] Epoch 95: Loss(train): 0.050608 Loss(val): 0.051405 acc(val): 0.681769
- [15:42:38] Epoch 96: Loss(train): 0.050606 Loss(val): 0.051401 acc(val): 0.681905
- [15:42:59] Epoch 97: Loss(train): 0.050605 Loss(val): 0.051403 acc(val): 0.681429
- [15:43:20] Epoch 98: Loss(train): 0.050604 Loss(val): 0.051404 acc(val): 0.681361
- [15:43:41] Epoch 99: Loss(train): 0.050603 Loss(val): 0.051399 acc(val): 0.681565
- [15:44:02] Epoch 100: Loss(train): 0.050602 Loss(val): 0.051401
- [15:44:05] FINAL(100) Loss(test): 0.051667 Accuracy: 0.604730
- Configuration learning_rate=0.03, decay_step=60
- [15:44:11] INIT Loss(test): 0.142111 Accuracy: 0.116081
- [15:44:29] Epoch 1: Loss(train): 0.085466 Loss(val): 0.083448 acc(val): 0.324014
- [15:44:52] Epoch 2: Loss(train): 0.067009 Loss(val): 0.065906 acc(val): 0.454150
- [15:45:17] Epoch 3: Loss(train): 0.063191 Loss(val): 0.062418 acc(val): 0.506139
- [15:45:38] Epoch 4: Loss(train): 0.059964 Loss(val): 0.059266 acc(val): 0.545680
- [15:46:00] Epoch 5: Loss(train): 0.059229 Loss(val): 0.058537 acc(val): 0.556088
- [15:46:21] Epoch 6: Loss(train): 0.059260 Loss(val): 0.058677 acc(val): 0.551599
- [15:46:42] Epoch 7: Loss(train): 0.059613 Loss(val): 0.058838 acc(val): 0.540663
- [15:47:04] Epoch 8: Loss(train): 0.058372 Loss(val): 0.057957 acc(val): 0.553929
- [15:47:25] Epoch 9: Loss(train): 0.058023 Loss(val): 0.057690 acc(val): 0.559796
- [15:47:47] Epoch 10: Loss(train): 0.058341 Loss(val): 0.057856 acc(val): 0.557891
- [15:48:08] Epoch 11: Loss(train): 0.058291 Loss(val): 0.057867 acc(val): 0.555442
- [15:48:29] Epoch 12: Loss(train): 0.057441 Loss(val): 0.057287 acc(val): 0.566054
- [15:48:51] Epoch 13: Loss(train): 0.056272 Loss(val): 0.056194 acc(val): 0.584490
- [15:49:13] Epoch 14: Loss(train): 0.055851 Loss(val): 0.055663 acc(val): 0.594898
- [15:49:36] Epoch 15: Loss(train): 0.055405 Loss(val): 0.055219 acc(val): 0.603129
- [15:49:58] Epoch 16: Loss(train): 0.055548 Loss(val): 0.055248 acc(val): 0.600952
- [15:50:20] Epoch 17: Loss(train): 0.055654 Loss(val): 0.055321 acc(val): 0.599456
- [15:50:41] Epoch 18: Loss(train): 0.055723 Loss(val): 0.055456 acc(val): 0.598384
- [15:51:03] Epoch 19: Loss(train): 0.055744 Loss(val): 0.055502 acc(val): 0.597228
- [15:51:25] Epoch 20: Loss(train): 0.055564 Loss(val): 0.055416 acc(val): 0.596956
- [15:51:48] Epoch 21: Loss(train): 0.055276 Loss(val): 0.055244 acc(val): 0.600612
- [15:52:11] Epoch 22: Loss(train): 0.054920 Loss(val): 0.054917 acc(val): 0.607024
- [15:52:34] Epoch 23: Loss(train): 0.054450 Loss(val): 0.054397 acc(val): 0.612534
- [15:52:57] Epoch 24: Loss(train): 0.054105 Loss(val): 0.053996 acc(val): 0.619677
- [15:53:19] Epoch 25: Loss(train): 0.053674 Loss(val): 0.053549 acc(val): 0.630493
- [15:53:42] Epoch 26: Loss(train): 0.053361 Loss(val): 0.053270 acc(val): 0.637772
- [15:54:11] Epoch 27: Loss(train): 0.053233 Loss(val): 0.053125 acc(val): 0.640765
- [15:54:50] Epoch 28: Loss(train): 0.053071 Loss(val): 0.053007 acc(val): 0.644643
- [15:55:27] Epoch 29: Loss(train): 0.052948 Loss(val): 0.052935 acc(val): 0.644099
- [15:56:04] Epoch 30: Loss(train): 0.052997 Loss(val): 0.052941 acc(val): 0.643214
- [15:56:40] Epoch 31: Loss(train): 0.053064 Loss(val): 0.052970 acc(val): 0.639133
- [15:57:17] Epoch 32: Loss(train): 0.053172 Loss(val): 0.052973 acc(val): 0.636888
- [15:57:43] Epoch 33: Loss(train): 0.053340 Loss(val): 0.053012 acc(val): 0.632194
- [15:58:05] Epoch 34: Loss(train): 0.053407 Loss(val): 0.053008 acc(val): 0.631173
- [15:58:26] Epoch 35: Loss(train): 0.053488 Loss(val): 0.053011 acc(val): 0.630561
- [15:58:48] Epoch 36: Loss(train): 0.053523 Loss(val): 0.052991 acc(val): 0.626956
- [15:59:11] Epoch 37: Loss(train): 0.053434 Loss(val): 0.052903 acc(val): 0.629473
- [15:59:39] Epoch 38: Loss(train): 0.053004 Loss(val): 0.052646 acc(val): 0.638656
- [16:00:06] Epoch 39: Loss(train): 0.052548 Loss(val): 0.052387 acc(val): 0.650289
- [16:00:30] Epoch 40: Loss(train): 0.052184 Loss(val): 0.052197 acc(val): 0.658656
- [16:00:53] Epoch 41: Loss(train): 0.051921 Loss(val): 0.052086 acc(val): 0.663895
- [16:01:25] Epoch 42: Loss(train): 0.051787 Loss(val): 0.052001 acc(val): 0.667024
- [16:01:57] Epoch 43: Loss(train): 0.051664 Loss(val): 0.051921 acc(val): 0.669269
- [16:02:21] Epoch 44: Loss(train): 0.051557 Loss(val): 0.051856 acc(val): 0.670272
- [16:02:57] Epoch 45: Loss(train): 0.051499 Loss(val): 0.051807 acc(val): 0.671582
- [16:03:22] Epoch 46: Loss(train): 0.051440 Loss(val): 0.051767 acc(val): 0.672058
- [16:03:51] Epoch 47: Loss(train): 0.051405 Loss(val): 0.051744 acc(val): 0.672874
- [16:04:31] Epoch 48: Loss(train): 0.051368 Loss(val): 0.051721 acc(val): 0.673146
- [16:05:07] Epoch 49: Loss(train): 0.051326 Loss(val): 0.051705 acc(val): 0.673690
- [16:05:30] Epoch 50: Loss(train): 0.051288 Loss(val): 0.051689 acc(val): 0.673759
- [16:05:53] Epoch 51: Loss(train): 0.051239 Loss(val): 0.051667 acc(val): 0.674915
- [16:06:16] Epoch 52: Loss(train): 0.051180 Loss(val): 0.051656 acc(val): 0.675595
- [16:06:38] Epoch 53: Loss(train): 0.051117 Loss(val): 0.051658 acc(val): 0.677500
- [16:07:00] Epoch 54: Loss(train): 0.051078 Loss(val): 0.051665 acc(val): 0.678044
- [16:07:22] Epoch 55: Loss(train): 0.051036 Loss(val): 0.051678 acc(val): 0.677432
- [16:07:45] Epoch 56: Loss(train): 0.051011 Loss(val): 0.051702 acc(val): 0.677636
- [16:08:08] Epoch 57: Loss(train): 0.050990 Loss(val): 0.051737 acc(val): 0.676752
- [16:08:31] Epoch 58: Loss(train): 0.050977 Loss(val): 0.051756 acc(val): 0.675323
- [16:08:56] Epoch 59: Loss(train): 0.050971 Loss(val): 0.051791 acc(val): 0.675867
- [16:09:23] Epoch 60: Loss(train): 0.050955 Loss(val): 0.051786 acc(val): 0.675272
- [16:09:49] Epoch 61: Loss(train): 0.050941 Loss(val): 0.051792 acc(val): 0.675544
- [16:10:14] Epoch 62: Loss(train): 0.050929 Loss(val): 0.051794 acc(val): 0.674864
- [16:10:37] Epoch 63: Loss(train): 0.050904 Loss(val): 0.051764 acc(val): 0.674932
- [16:10:59] Epoch 64: Loss(train): 0.050879 Loss(val): 0.051731 acc(val): 0.675476
- [16:11:22] Epoch 65: Loss(train): 0.050859 Loss(val): 0.051707 acc(val): 0.676361
- [16:11:44] Epoch 66: Loss(train): 0.050838 Loss(val): 0.051671 acc(val): 0.677857
- [16:12:06] Epoch 67: Loss(train): 0.050822 Loss(val): 0.051655 acc(val): 0.678741
- [16:12:28] Epoch 68: Loss(train): 0.050804 Loss(val): 0.051619 acc(val): 0.680238
- [16:12:50] Epoch 69: Loss(train): 0.050792 Loss(val): 0.051603 acc(val): 0.680170
- [16:13:14] Epoch 70: Loss(train): 0.050781 Loss(val): 0.051588 acc(val): 0.680510
- [16:13:37] Epoch 71: Loss(train): 0.050770 Loss(val): 0.051567 acc(val): 0.681463
- [16:14:00] Epoch 72: Loss(train): 0.050762 Loss(val): 0.051553 acc(val): 0.681463
- [16:14:23] Epoch 73: Loss(train): 0.050755 Loss(val): 0.051545 acc(val): 0.681531
- [16:14:47] Epoch 74: Loss(train): 0.050748 Loss(val): 0.051538 acc(val): 0.680986
- [16:15:11] Epoch 75: Loss(train): 0.050742 Loss(val): 0.051527 acc(val): 0.681259
- [16:15:33] Epoch 76: Loss(train): 0.050736 Loss(val): 0.051523 acc(val): 0.681054
- [16:15:56] Epoch 77: Loss(train): 0.050731 Loss(val): 0.051517 acc(val): 0.681122
- [16:16:19] Epoch 78: Loss(train): 0.050726 Loss(val): 0.051506 acc(val): 0.681463
- [16:16:41] Epoch 79: Loss(train): 0.050722 Loss(val): 0.051507 acc(val): 0.681463
- [16:17:04] Epoch 80: Loss(train): 0.050717 Loss(val): 0.051499 acc(val): 0.681259
- [16:17:26] Epoch 81: Loss(train): 0.050713 Loss(val): 0.051492 acc(val): 0.681463
- [16:17:49] Epoch 82: Loss(train): 0.050709 Loss(val): 0.051488 acc(val): 0.681122
- [16:18:12] Epoch 83: Loss(train): 0.050706 Loss(val): 0.051491 acc(val): 0.681463
- [16:18:35] Epoch 84: Loss(train): 0.050703 Loss(val): 0.051484 acc(val): 0.681463
- [16:18:57] Epoch 85: Loss(train): 0.050700 Loss(val): 0.051476 acc(val): 0.681939
- [16:19:20] Epoch 86: Loss(train): 0.050698 Loss(val): 0.051476 acc(val): 0.681803
- [16:19:43] Epoch 87: Loss(train): 0.050695 Loss(val): 0.051471 acc(val): 0.682007
- [16:20:05] Epoch 88: Loss(train): 0.050693 Loss(val): 0.051474 acc(val): 0.681735
- [16:20:28] Epoch 89: Loss(train): 0.050691 Loss(val): 0.051469 acc(val): 0.681939
- [16:20:51] Epoch 90: Loss(train): 0.050689 Loss(val): 0.051463 acc(val): 0.681803
- [16:21:14] Epoch 91: Loss(train): 0.050687 Loss(val): 0.051464 acc(val): 0.681803
- [16:21:37] Epoch 92: Loss(train): 0.050685 Loss(val): 0.051462 acc(val): 0.681667
- [16:21:59] Epoch 93: Loss(train): 0.050683 Loss(val): 0.051460 acc(val): 0.681939
- [16:22:22] Epoch 94: Loss(train): 0.050682 Loss(val): 0.051455 acc(val): 0.682007
- [16:22:45] Epoch 95: Loss(train): 0.050681 Loss(val): 0.051457 acc(val): 0.681939
- [16:23:08] Epoch 96: Loss(train): 0.050679 Loss(val): 0.051455 acc(val): 0.682007
- [16:23:31] Epoch 97: Loss(train): 0.050678 Loss(val): 0.051454 acc(val): 0.682075
- [16:23:54] Epoch 98: Loss(train): 0.050677 Loss(val): 0.051455 acc(val): 0.682211
- [16:24:17] Epoch 99: Loss(train): 0.050676 Loss(val): 0.051454 acc(val): 0.682211
- [16:24:40] Epoch 100: Loss(train): 0.050675 Loss(val): 0.051452
- [16:24:43] FINAL(100) Loss(test): 0.051842 Accuracy: 0.601622
- Configuration learning_rate=0.01, decay_step=20
- [16:24:49] INIT Loss(test): 0.123722 Accuracy: 0.127973
- [16:25:10] Epoch 1: Loss(train): 0.086237 Loss(val): 0.086168 acc(val): 0.291888
- [16:25:47] Epoch 2: Loss(train): 0.066817 Loss(val): 0.065911 acc(val): 0.445901
- [16:26:21] Epoch 3: Loss(train): 0.063177 Loss(val): 0.062407 acc(val): 0.499915
- [16:26:54] Epoch 4: Loss(train): 0.061026 Loss(val): 0.060297 acc(val): 0.540425
- [16:27:28] Epoch 5: Loss(train): 0.059943 Loss(val): 0.059473 acc(val): 0.551259
- [16:28:03] Epoch 6: Loss(train): 0.058618 Loss(val): 0.058114 acc(val): 0.565204
- [16:28:45] Epoch 7: Loss(train): 0.057991 Loss(val): 0.057816 acc(val): 0.570017
- [16:29:12] Epoch 8: Loss(train): 0.057960 Loss(val): 0.057804 acc(val): 0.568316
- [16:29:53] Epoch 9: Loss(train): 0.058586 Loss(val): 0.058320 acc(val): 0.558980
- [16:31:02] Epoch 10: Loss(train): 0.058809 Loss(val): 0.058750 acc(val): 0.551156
- [16:32:04] Epoch 11: Loss(train): 0.057933 Loss(val): 0.058105 acc(val): 0.555714
- [16:33:09] Epoch 12: Loss(train): 0.057471 Loss(val): 0.057670 acc(val): 0.557619
- [16:34:03] Epoch 13: Loss(train): 0.057318 Loss(val): 0.057520 acc(val): 0.555918
- [16:34:45] Epoch 14: Loss(train): 0.057432 Loss(val): 0.057578 acc(val): 0.551905
- [16:35:26] Epoch 15: Loss(train): 0.057712 Loss(val): 0.057838 acc(val): 0.549864
- [16:36:24] Epoch 16: Loss(train): 0.058157 Loss(val): 0.058160 acc(val): 0.547075
- [16:37:16] Epoch 17: Loss(train): 0.058045 Loss(val): 0.058048 acc(val): 0.550969
- [16:38:01] Epoch 18: Loss(train): 0.057258 Loss(val): 0.057315 acc(val): 0.562279
- [16:38:48] Epoch 19: Loss(train): 0.056103 Loss(val): 0.056101 acc(val): 0.582143
- [16:39:35] Epoch 20: Loss(train): 0.055167 Loss(val): 0.055090 acc(val): 0.605000
- [16:40:16] Epoch 21: Loss(train): 0.054440 Loss(val): 0.054323 acc(val): 0.621939
- [16:40:59] Epoch 22: Loss(train): 0.053996 Loss(val): 0.053912 acc(val): 0.627585
- [16:41:44] Epoch 23: Loss(train): 0.053813 Loss(val): 0.053733 acc(val): 0.627653
- [16:42:31] Epoch 24: Loss(train): 0.053584 Loss(val): 0.053587 acc(val): 0.629626
- [16:43:12] Epoch 25: Loss(train): 0.053363 Loss(val): 0.053465 acc(val): 0.630238
- [16:43:56] Epoch 26: Loss(train): 0.053242 Loss(val): 0.053413 acc(val): 0.630306
- [16:44:40] Epoch 27: Loss(train): 0.053028 Loss(val): 0.053261 acc(val): 0.634932
- [16:45:23] Epoch 28: Loss(train): 0.052812 Loss(val): 0.053128 acc(val): 0.638197
- [16:46:04] Epoch 29: Loss(train): 0.052652 Loss(val): 0.052984 acc(val): 0.642143
- [16:46:49] Epoch 30: Loss(train): 0.052535 Loss(val): 0.052863 acc(val): 0.645680
- [16:47:31] Epoch 31: Loss(train): 0.052439 Loss(val): 0.052725 acc(val): 0.649694
- [16:48:14] Epoch 32: Loss(train): 0.052366 Loss(val): 0.052586 acc(val): 0.653844
- [16:48:57] Epoch 33: Loss(train): 0.052277 Loss(val): 0.052462 acc(val): 0.655204
- [16:49:39] Epoch 34: Loss(train): 0.052236 Loss(val): 0.052378 acc(val): 0.655340
- [16:50:20] Epoch 35: Loss(train): 0.052206 Loss(val): 0.052301 acc(val): 0.656905
- [16:51:02] Epoch 36: Loss(train): 0.052131 Loss(val): 0.052229 acc(val): 0.657262
- [16:51:43] Epoch 37: Loss(train): 0.052080 Loss(val): 0.052175 acc(val): 0.659235
- [16:52:26] Epoch 38: Loss(train): 0.051932 Loss(val): 0.052086 acc(val): 0.663112
- [16:53:08] Epoch 39: Loss(train): 0.051834 Loss(val): 0.052032 acc(val): 0.663929
- [16:53:52] Epoch 40: Loss(train): 0.051742 Loss(val): 0.051984 acc(val): 0.664745
- [16:54:35] Epoch 41: Loss(train): 0.051577 Loss(val): 0.051924 acc(val): 0.668078
- [16:55:19] Epoch 42: Loss(train): 0.051501 Loss(val): 0.051901 acc(val): 0.667466
- [16:56:06] Epoch 43: Loss(train): 0.051449 Loss(val): 0.051876 acc(val): 0.668078
- [16:57:15] Epoch 44: Loss(train): 0.051393 Loss(val): 0.051869 acc(val): 0.667721
- [16:58:19] Epoch 45: Loss(train): 0.051368 Loss(val): 0.051851 acc(val): 0.666497
- [16:59:26] Epoch 46: Loss(train): 0.051376 Loss(val): 0.051837 acc(val): 0.666241
- [17:00:17] Epoch 47: Loss(train): 0.051365 Loss(val): 0.051807 acc(val): 0.667398
- [17:01:31] Epoch 48: Loss(train): 0.051349 Loss(val): 0.051793 acc(val): 0.667262
- [17:02:32] Epoch 49: Loss(train): 0.051350 Loss(val): 0.051774 acc(val): 0.667262
- [17:03:24] Epoch 50: Loss(train): 0.051308 Loss(val): 0.051759 acc(val): 0.668622
- [17:04:19] Epoch 51: Loss(train): 0.051285 Loss(val): 0.051734 acc(val): 0.669439
- [17:05:10] Epoch 52: Loss(train): 0.051267 Loss(val): 0.051716 acc(val): 0.669847
- [17:05:54] Epoch 53: Loss(train): 0.051197 Loss(val): 0.051707 acc(val): 0.670663
- [17:06:43] Epoch 54: Loss(train): 0.051160 Loss(val): 0.051691 acc(val): 0.672432
- [17:07:32] Epoch 55: Loss(train): 0.051111 Loss(val): 0.051690 acc(val): 0.672772
- [17:08:29] Epoch 56: Loss(train): 0.051065 Loss(val): 0.051692 acc(val): 0.674269
- [17:09:18] Epoch 57: Loss(train): 0.051019 Loss(val): 0.051708 acc(val): 0.672483
- [17:10:05] Epoch 58: Loss(train): 0.050985 Loss(val): 0.051736 acc(val): 0.673231
- [17:10:50] Epoch 59: Loss(train): 0.050964 Loss(val): 0.051761 acc(val): 0.673163
- [17:11:38] Epoch 60: Loss(train): 0.050948 Loss(val): 0.051782 acc(val): 0.671667
- [17:12:21] Epoch 61: Loss(train): 0.050943 Loss(val): 0.051836 acc(val): 0.671684
- [17:13:13] Epoch 62: Loss(train): 0.050930 Loss(val): 0.051840 acc(val): 0.671412
- [17:14:01] Epoch 63: Loss(train): 0.050921 Loss(val): 0.051851 acc(val): 0.671276
- [17:14:46] Epoch 64: Loss(train): 0.050902 Loss(val): 0.051832 acc(val): 0.671956
- [17:15:30] Epoch 65: Loss(train): 0.050885 Loss(val): 0.051809 acc(val): 0.672092
- [17:16:19] Epoch 66: Loss(train): 0.050869 Loss(val): 0.051790 acc(val): 0.672024
- [17:17:03] Epoch 67: Loss(train): 0.050854 Loss(val): 0.051774 acc(val): 0.673044
- [17:17:51] Epoch 68: Loss(train): 0.050836 Loss(val): 0.051741 acc(val): 0.672891
- [17:18:37] Epoch 69: Loss(train): 0.050824 Loss(val): 0.051727 acc(val): 0.673435
- [17:19:23] Epoch 70: Loss(train): 0.050812 Loss(val): 0.051704 acc(val): 0.674116
- [17:20:07] Epoch 71: Loss(train): 0.050800 Loss(val): 0.051679 acc(val): 0.673912
- [17:20:53] Epoch 72: Loss(train): 0.050792 Loss(val): 0.051671 acc(val): 0.674388
- [17:21:38] Epoch 73: Loss(train): 0.050785 Loss(val): 0.051665 acc(val): 0.673776
- [17:22:24] Epoch 74: Loss(train): 0.050776 Loss(val): 0.051649 acc(val): 0.674116
- [17:23:09] Epoch 75: Loss(train): 0.050770 Loss(val): 0.051641 acc(val): 0.674660
- [17:23:55] Epoch 76: Loss(train): 0.050764 Loss(val): 0.051627 acc(val): 0.675544
- [17:24:40] Epoch 77: Loss(train): 0.050759 Loss(val): 0.051624 acc(val): 0.675408
- [17:25:26] Epoch 78: Loss(train): 0.050753 Loss(val): 0.051614 acc(val): 0.675952
- [17:26:11] Epoch 79: Loss(train): 0.050748 Loss(val): 0.051609 acc(val): 0.675612
- [17:27:19] Epoch 80: Loss(train): 0.050744 Loss(val): 0.051604 acc(val): 0.675884
- [17:28:38] Epoch 81: Loss(train): 0.050740 Loss(val): 0.051599 acc(val): 0.675816
- [17:29:45] Epoch 82: Loss(train): 0.050736 Loss(val): 0.051592 acc(val): 0.676020
- [17:30:30] Epoch 83: Loss(train): 0.050733 Loss(val): 0.051590 acc(val): 0.676088
- [17:31:31] Epoch 84: Loss(train): 0.050730 Loss(val): 0.051590 acc(val): 0.676020
- [17:32:34] Epoch 85: Loss(train): 0.050727 Loss(val): 0.051587 acc(val): 0.676293
- [17:33:24] Epoch 86: Loss(train): 0.050724 Loss(val): 0.051580 acc(val): 0.676429
- [17:34:37] Epoch 87: Loss(train): 0.050721 Loss(val): 0.051575 acc(val): 0.676837
- [17:35:41] Epoch 88: Loss(train): 0.050719 Loss(val): 0.051574 acc(val): 0.676769
- [17:36:29] Epoch 89: Loss(train): 0.050717 Loss(val): 0.051573 acc(val): 0.676701
- [17:37:33] Epoch 90: Loss(train): 0.050715 Loss(val): 0.051568 acc(val): 0.676701
- [17:38:23] Epoch 91: Loss(train): 0.050713 Loss(val): 0.051565 acc(val): 0.676905
- [17:39:10] Epoch 92: Loss(train): 0.050712 Loss(val): 0.051564 acc(val): 0.676837
- [17:39:57] Epoch 93: Loss(train): 0.050710 Loss(val): 0.051562 acc(val): 0.677177
- [17:40:43] Epoch 94: Loss(train): 0.050709 Loss(val): 0.051560 acc(val): 0.676973
- [17:41:33] Epoch 95: Loss(train): 0.050707 Loss(val): 0.051559 acc(val): 0.676905
- [17:42:22] Epoch 96: Loss(train): 0.050706 Loss(val): 0.051555 acc(val): 0.677177
- [17:43:18] Epoch 97: Loss(train): 0.050705 Loss(val): 0.051552 acc(val): 0.677177
- [17:44:05] Epoch 98: Loss(train): 0.050704 Loss(val): 0.051554 acc(val): 0.677041
- [17:44:52] Epoch 99: Loss(train): 0.050703 Loss(val): 0.051554 acc(val): 0.677109
- [17:45:40] Epoch 100: Loss(train): 0.050702 Loss(val): 0.051551
- [17:45:47] FINAL(100) Loss(test): 0.052029 Accuracy: 0.599189
- Configuration learning_rate=0.01, decay_step=40
- [17:46:00] INIT Loss(test): 0.176727 Accuracy: 0.092973
- [17:46:50] Epoch 1: Loss(train): 0.089826 Loss(val): 0.087610 acc(val): 0.284201
- [17:47:38] Epoch 2: Loss(train): 0.067577 Loss(val): 0.066210 acc(val): 0.447466
- [17:48:25] Epoch 3: Loss(train): 0.064038 Loss(val): 0.063006 acc(val): 0.497551
- [17:49:13] Epoch 4: Loss(train): 0.060244 Loss(val): 0.059602 acc(val): 0.554031
- [17:49:58] Epoch 5: Loss(train): 0.059227 Loss(val): 0.058822 acc(val): 0.565595
- [17:50:47] Epoch 6: Loss(train): 0.058737 Loss(val): 0.058792 acc(val): 0.559677
- [17:51:34] Epoch 7: Loss(train): 0.058215 Loss(val): 0.058426 acc(val): 0.559405
- [17:52:21] Epoch 8: Loss(train): 0.057274 Loss(val): 0.057502 acc(val): 0.570986
- [17:53:07] Epoch 9: Loss(train): 0.057132 Loss(val): 0.057426 acc(val): 0.570986
- [17:53:55] Epoch 10: Loss(train): 0.056904 Loss(val): 0.057230 acc(val): 0.571310
- [17:54:40] Epoch 11: Loss(train): 0.057148 Loss(val): 0.057484 acc(val): 0.560357
- [17:55:27] Epoch 12: Loss(train): 0.057290 Loss(val): 0.057630 acc(val): 0.556003
- [17:56:12] Epoch 13: Loss(train): 0.057313 Loss(val): 0.057586 acc(val): 0.550935
- [17:56:59] Epoch 14: Loss(train): 0.056691 Loss(val): 0.056890 acc(val): 0.563316
- [17:57:44] Epoch 15: Loss(train): 0.055783 Loss(val): 0.055999 acc(val): 0.579507
- [17:58:36] Epoch 16: Loss(train): 0.055334 Loss(val): 0.055583 acc(val): 0.590187
- [17:59:33] Epoch 17: Loss(train): 0.055496 Loss(val): 0.055625 acc(val): 0.587330
- [18:00:46] Epoch 18: Loss(train): 0.055848 Loss(val): 0.055966 acc(val): 0.582908
- [18:01:46] Epoch 19: Loss(train): 0.056469 Loss(val): 0.056572 acc(val): 0.573180
- [18:02:41] Epoch 20: Loss(train): 0.057220 Loss(val): 0.057206 acc(val): 0.559711
- [18:03:34] Epoch 21: Loss(train): 0.057383 Loss(val): 0.057452 acc(val): 0.557330
- [18:04:45] Epoch 22: Loss(train): 0.056780 Loss(val): 0.056885 acc(val): 0.568622
- [18:05:45] Epoch 23: Loss(train): 0.055630 Loss(val): 0.055793 acc(val): 0.584541
- [18:06:40] Epoch 24: Loss(train): 0.054334 Loss(val): 0.054489 acc(val): 0.612364
- [18:07:32] Epoch 25: Loss(train): 0.053592 Loss(val): 0.053724 acc(val): 0.624881
- [18:08:31] Epoch 26: Loss(train): 0.053303 Loss(val): 0.053389 acc(val): 0.628095
- [18:09:22] Epoch 27: Loss(train): 0.053062 Loss(val): 0.053169 acc(val): 0.633333
- [18:10:13] Epoch 28: Loss(train): 0.053093 Loss(val): 0.053166 acc(val): 0.630663
- [18:11:03] Epoch 29: Loss(train): 0.053281 Loss(val): 0.053265 acc(val): 0.624609
- [18:12:01] Epoch 30: Loss(train): 0.053359 Loss(val): 0.053331 acc(val): 0.620595
- [18:12:51] Epoch 31: Loss(train): 0.053417 Loss(val): 0.053345 acc(val): 0.619507
- [18:13:42] Epoch 32: Loss(train): 0.053442 Loss(val): 0.053321 acc(val): 0.619847
- [18:14:34] Epoch 33: Loss(train): 0.053280 Loss(val): 0.053169 acc(val): 0.622364
- [18:15:26] Epoch 34: Loss(train): 0.053042 Loss(val): 0.052970 acc(val): 0.628282
- [18:16:15] Epoch 35: Loss(train): 0.052677 Loss(val): 0.052694 acc(val): 0.637058
- [18:17:06] Epoch 36: Loss(train): 0.052356 Loss(val): 0.052496 acc(val): 0.643793
- [18:17:55] Epoch 37: Loss(train): 0.052130 Loss(val): 0.052343 acc(val): 0.649915
- [18:18:48] Epoch 38: Loss(train): 0.051967 Loss(val): 0.052270 acc(val): 0.653044
- [18:19:37] Epoch 39: Loss(train): 0.051858 Loss(val): 0.052225 acc(val): 0.655510
- [18:20:28] Epoch 40: Loss(train): 0.051766 Loss(val): 0.052186 acc(val): 0.659609
- [18:21:17] Epoch 41: Loss(train): 0.051688 Loss(val): 0.052134 acc(val): 0.661922
- [18:22:09] Epoch 42: Loss(train): 0.051594 Loss(val): 0.052048 acc(val): 0.664235
- [18:22:57] Epoch 43: Loss(train): 0.051490 Loss(val): 0.051962 acc(val): 0.665799
- [18:23:47] Epoch 44: Loss(train): 0.051394 Loss(val): 0.051891 acc(val): 0.666956
- [18:24:36] Epoch 45: Loss(train): 0.051314 Loss(val): 0.051834 acc(val): 0.670221
- [18:25:27] Epoch 46: Loss(train): 0.051240 Loss(val): 0.051802 acc(val): 0.670544
- [18:26:16] Epoch 47: Loss(train): 0.051188 Loss(val): 0.051788 acc(val): 0.671088
- [18:27:07] Epoch 48: Loss(train): 0.051140 Loss(val): 0.051788 acc(val): 0.670680
- [18:27:55] Epoch 49: Loss(train): 0.051096 Loss(val): 0.051787 acc(val): 0.670816
- [18:28:45] Epoch 50: Loss(train): 0.051063 Loss(val): 0.051781 acc(val): 0.671497
- [18:29:34] Epoch 51: Loss(train): 0.051026 Loss(val): 0.051795 acc(val): 0.670612
- [18:30:36] Epoch 52: Loss(train): 0.050999 Loss(val): 0.051804 acc(val): 0.671769
- [18:31:57] Epoch 53: Loss(train): 0.050980 Loss(val): 0.051816 acc(val): 0.671684
- [18:32:49] Epoch 54: Loss(train): 0.050963 Loss(val): 0.051823 acc(val): 0.672500
- [18:33:40] Epoch 55: Loss(train): 0.050948 Loss(val): 0.051837 acc(val): 0.672296
- [18:34:51] Epoch 56: Loss(train): 0.050931 Loss(val): 0.051833 acc(val): 0.673112
- [18:35:48] Epoch 57: Loss(train): 0.050918 Loss(val): 0.051838 acc(val): 0.672840
- [18:36:51] Epoch 58: Loss(train): 0.050899 Loss(val): 0.051821 acc(val): 0.673520
- [18:37:45] Epoch 59: Loss(train): 0.050875 Loss(val): 0.051788 acc(val): 0.674133
- [18:39:01] Epoch 60: Loss(train): 0.050850 Loss(val): 0.051753 acc(val): 0.673861
- [18:40:02] Epoch 61: Loss(train): 0.050831 Loss(val): 0.051731 acc(val): 0.673997
- [18:40:53] Epoch 62: Loss(train): 0.050811 Loss(val): 0.051693 acc(val): 0.673997
- [18:41:46] Epoch 63: Loss(train): 0.050796 Loss(val): 0.051672 acc(val): 0.674609
- [18:42:42] Epoch 64: Loss(train): 0.050784 Loss(val): 0.051655 acc(val): 0.674813
- [18:43:46] Epoch 65: Loss(train): 0.050771 Loss(val): 0.051625 acc(val): 0.675289
- [18:44:43] Epoch 66: Loss(train): 0.050760 Loss(val): 0.051608 acc(val): 0.674745
- [18:45:35] Epoch 67: Loss(train): 0.050751 Loss(val): 0.051598 acc(val): 0.675221
- [18:46:26] Epoch 68: Loss(train): 0.050744 Loss(val): 0.051597 acc(val): 0.675289
- [18:47:18] Epoch 69: Loss(train): 0.050736 Loss(val): 0.051580 acc(val): 0.675493
- [18:48:13] Epoch 70: Loss(train): 0.050730 Loss(val): 0.051581 acc(val): 0.675425
- [18:49:04] Epoch 71: Loss(train): 0.050723 Loss(val): 0.051565 acc(val): 0.675085
- [18:50:03] Epoch 72: Loss(train): 0.050717 Loss(val): 0.051560 acc(val): 0.674881
- [18:50:57] Epoch 73: Loss(train): 0.050711 Loss(val): 0.051549 acc(val): 0.675646
- [18:51:50] Epoch 74: Loss(train): 0.050706 Loss(val): 0.051550 acc(val): 0.675153
- [18:52:42] Epoch 75: Loss(train): 0.050702 Loss(val): 0.051542 acc(val): 0.675289
- [18:53:33] Epoch 76: Loss(train): 0.050696 Loss(val): 0.051532 acc(val): 0.675714
- [18:54:24] Epoch 77: Loss(train): 0.050693 Loss(val): 0.051534 acc(val): 0.675493
- [18:55:16] Epoch 78: Loss(train): 0.050688 Loss(val): 0.051525 acc(val): 0.675493
- [18:56:07] Epoch 79: Loss(train): 0.050684 Loss(val): 0.051525 acc(val): 0.675561
- [18:57:00] Epoch 80: Loss(train): 0.050681 Loss(val): 0.051526 acc(val): 0.675561
- [18:57:51] Epoch 81: Loss(train): 0.050677 Loss(val): 0.051521 acc(val): 0.675901
- [18:58:44] Epoch 82: Loss(train): 0.050675 Loss(val): 0.051520 acc(val): 0.675629
- [18:59:33] Epoch 83: Loss(train): 0.050671 Loss(val): 0.051510 acc(val): 0.675510
- [19:00:25] Epoch 84: Loss(train): 0.050668 Loss(val): 0.051509 acc(val): 0.675085
- [19:01:14] Epoch 85: Loss(train): 0.050666 Loss(val): 0.051507 acc(val): 0.675289
- [19:02:05] Epoch 86: Loss(train): 0.050664 Loss(val): 0.051506 acc(val): 0.675357
- [19:02:56] Epoch 87: Loss(train): 0.050661 Loss(val): 0.051503 acc(val): 0.675425
- [19:03:52] Epoch 88: Loss(train): 0.050659 Loss(val): 0.051504 acc(val): 0.675289
- [19:05:08] Epoch 89: Loss(train): 0.050657 Loss(val): 0.051499 acc(val): 0.675289
- [19:06:39] Epoch 90: Loss(train): 0.050655 Loss(val): 0.051496 acc(val): 0.675425
- [19:07:35] Epoch 91: Loss(train): 0.050654 Loss(val): 0.051497 acc(val): 0.675289
- [19:08:29] Epoch 92: Loss(train): 0.050652 Loss(val): 0.051498 acc(val): 0.675425
- [19:09:40] Epoch 93: Loss(train): 0.050650 Loss(val): 0.051495 acc(val): 0.675561
- [19:10:33] Epoch 94: Loss(train): 0.050649 Loss(val): 0.051495 acc(val): 0.675425
- [19:11:24] Epoch 95: Loss(train): 0.050648 Loss(val): 0.051491 acc(val): 0.675493
- [19:12:25] Epoch 96: Loss(train): 0.050647 Loss(val): 0.051491 acc(val): 0.675629
- [19:13:50] Epoch 97: Loss(train): 0.050646 Loss(val): 0.051491 acc(val): 0.675561
- [19:14:53] Epoch 98: Loss(train): 0.050645 Loss(val): 0.051490 acc(val): 0.675425
- [19:15:44] Epoch 99: Loss(train): 0.050644 Loss(val): 0.051491 acc(val): 0.675629
- [19:16:42] Epoch 100: Loss(train): 0.050643 Loss(val): 0.051489
- [19:16:51] FINAL(100) Loss(test): 0.051861 Accuracy: 0.601014
- Configuration learning_rate=0.01, decay_step=60
- [19:17:05] INIT Loss(test): 0.146561 Accuracy: 0.112095
- [19:18:05] Epoch 1: Loss(train): 0.085300 Loss(val): 0.083685 acc(val): 0.301548
- [19:19:02] Epoch 2: Loss(train): 0.066953 Loss(val): 0.065985 acc(val): 0.446310
- [19:19:57] Epoch 3: Loss(train): 0.064071 Loss(val): 0.063177 acc(val): 0.500459
- [19:20:56] Epoch 4: Loss(train): 0.061052 Loss(val): 0.060448 acc(val): 0.525357
- [19:21:48] Epoch 5: Loss(train): 0.059383 Loss(val): 0.059186 acc(val): 0.546735
- [19:22:43] Epoch 6: Loss(train): 0.058582 Loss(val): 0.058385 acc(val): 0.555374
- [19:23:36] Epoch 7: Loss(train): 0.057840 Loss(val): 0.057679 acc(val): 0.568571
- [19:24:32] Epoch 8: Loss(train): 0.057490 Loss(val): 0.057312 acc(val): 0.574575
- [19:25:24] Epoch 9: Loss(train): 0.057308 Loss(val): 0.057126 acc(val): 0.578878
- [19:26:21] Epoch 10: Loss(train): 0.056718 Loss(val): 0.056649 acc(val): 0.588265
- [19:27:16] Epoch 11: Loss(train): 0.056284 Loss(val): 0.056397 acc(val): 0.592483
- [19:28:11] Epoch 12: Loss(train): 0.055857 Loss(val): 0.056005 acc(val): 0.598810
- [19:29:04] Epoch 13: Loss(train): 0.055451 Loss(val): 0.055661 acc(val): 0.603044
- [19:29:59] Epoch 14: Loss(train): 0.055363 Loss(val): 0.055531 acc(val): 0.600935
- [19:30:50] Epoch 15: Loss(train): 0.055445 Loss(val): 0.055545 acc(val): 0.597874
- [19:31:43] Epoch 16: Loss(train): 0.055360 Loss(val): 0.055467 acc(val): 0.598963
- [19:32:36] Epoch 17: Loss(train): 0.055204 Loss(val): 0.055398 acc(val): 0.599099
- [19:33:31] Epoch 18: Loss(train): 0.054983 Loss(val): 0.055307 acc(val): 0.599456
- [19:34:24] Epoch 19: Loss(train): 0.054601 Loss(val): 0.055000 acc(val): 0.606395
- [19:35:21] Epoch 20: Loss(train): 0.054591 Loss(val): 0.054917 acc(val): 0.607279
- [19:36:13] Epoch 21: Loss(train): 0.054396 Loss(val): 0.054738 acc(val): 0.610272
- [19:37:07] Epoch 22: Loss(train): 0.054501 Loss(val): 0.054852 acc(val): 0.605850
- [19:37:59] Epoch 23: Loss(train): 0.054861 Loss(val): 0.055244 acc(val): 0.594490
- [19:38:53] Epoch 24: Loss(train): 0.055324 Loss(val): 0.055727 acc(val): 0.583316
- [19:39:44] Epoch 25: Loss(train): 0.055759 Loss(val): 0.056135 acc(val): 0.574099
- [19:40:46] Epoch 26: Loss(train): 0.056071 Loss(val): 0.056407 acc(val): 0.565527
- [19:41:55] Epoch 27: Loss(train): 0.055936 Loss(val): 0.056284 acc(val): 0.567636
- [19:43:12] Epoch 28: Loss(train): 0.055192 Loss(val): 0.055479 acc(val): 0.581173
- [19:44:18] Epoch 29: Loss(train): 0.054194 Loss(val): 0.054437 acc(val): 0.602874
- [19:45:41] Epoch 30: Loss(train): 0.053646 Loss(val): 0.053798 acc(val): 0.615340
- [19:46:36] Epoch 31: Loss(train): 0.053466 Loss(val): 0.053572 acc(val): 0.619558
- [19:47:32] Epoch 32: Loss(train): 0.053537 Loss(val): 0.053568 acc(val): 0.618214
- [19:48:26] Epoch 33: Loss(train): 0.053592 Loss(val): 0.053585 acc(val): 0.616446
- [19:49:32] Epoch 34: Loss(train): 0.053791 Loss(val): 0.053701 acc(val): 0.610850
- [19:50:27] Epoch 35: Loss(train): 0.053868 Loss(val): 0.053710 acc(val): 0.609422
- [19:51:33] Epoch 36: Loss(train): 0.053803 Loss(val): 0.053639 acc(val): 0.611122
- [19:52:48] Epoch 37: Loss(train): 0.053587 Loss(val): 0.053460 acc(val): 0.617041
- [19:53:47] Epoch 38: Loss(train): 0.053151 Loss(val): 0.053148 acc(val): 0.626616
- [19:54:45] Epoch 39: Loss(train): 0.052732 Loss(val): 0.052863 acc(val): 0.636973
- [19:55:43] Epoch 40: Loss(train): 0.052385 Loss(val): 0.052624 acc(val): 0.646854
- [19:56:37] Epoch 41: Loss(train): 0.052123 Loss(val): 0.052437 acc(val): 0.653316
- [19:57:42] Epoch 42: Loss(train): 0.051956 Loss(val): 0.052307 acc(val): 0.656922
- [19:58:41] Epoch 43: Loss(train): 0.051858 Loss(val): 0.052221 acc(val): 0.658776
- [19:59:43] Epoch 44: Loss(train): 0.051767 Loss(val): 0.052147 acc(val): 0.661769
- [20:00:38] Epoch 45: Loss(train): 0.051678 Loss(val): 0.052093 acc(val): 0.663197
- [20:01:37] Epoch 46: Loss(train): 0.051649 Loss(val): 0.052045 acc(val): 0.664422
- [20:02:33] Epoch 47: Loss(train): 0.051587 Loss(val): 0.051998 acc(val): 0.664983
- [20:03:31] Epoch 48: Loss(train): 0.051510 Loss(val): 0.051950 acc(val): 0.665595
- [20:04:27] Epoch 49: Loss(train): 0.051464 Loss(val): 0.051920 acc(val): 0.665595
- [20:05:24] Epoch 50: Loss(train): 0.051407 Loss(val): 0.051894 acc(val): 0.665867
- [20:06:19] Epoch 51: Loss(train): 0.051346 Loss(val): 0.051874 acc(val): 0.665255
- [20:07:15] Epoch 52: Loss(train): 0.051286 Loss(val): 0.051866 acc(val): 0.665391
- [20:08:09] Epoch 53: Loss(train): 0.051240 Loss(val): 0.051856 acc(val): 0.665731
- [20:09:05] Epoch 54: Loss(train): 0.051194 Loss(val): 0.051873 acc(val): 0.666939
- [20:09:59] Epoch 55: Loss(train): 0.051154 Loss(val): 0.051884 acc(val): 0.668316
- [20:10:57] Epoch 56: Loss(train): 0.051128 Loss(val): 0.051895 acc(val): 0.669065
- [20:12:18] Epoch 57: Loss(train): 0.051107 Loss(val): 0.051928 acc(val): 0.669609
- [20:13:30] Epoch 58: Loss(train): 0.051096 Loss(val): 0.051963 acc(val): 0.668571
- [20:15:09] Epoch 59: Loss(train): 0.051089 Loss(val): 0.051990 acc(val): 0.666327
- [20:16:36] Epoch 60: Loss(train): 0.051084 Loss(val): 0.052012 acc(val): 0.667007
- [20:17:35] Epoch 61: Loss(train): 0.051071 Loss(val): 0.052018 acc(val): 0.667075
- [20:18:56] Epoch 62: Loss(train): 0.051056 Loss(val): 0.052011 acc(val): 0.667279
- [20:20:15] Epoch 63: Loss(train): 0.051039 Loss(val): 0.052001 acc(val): 0.667075
- [20:21:25] Epoch 64: Loss(train): 0.051017 Loss(val): 0.051979 acc(val): 0.667347
- [20:22:32] Epoch 65: Loss(train): 0.050996 Loss(val): 0.051954 acc(val): 0.668571
- [20:23:39] Epoch 66: Loss(train): 0.050975 Loss(val): 0.051927 acc(val): 0.668639
- [20:24:43] Epoch 67: Loss(train): 0.050955 Loss(val): 0.051898 acc(val): 0.669048
- [20:25:45] Epoch 68: Loss(train): 0.050940 Loss(val): 0.051876 acc(val): 0.669864
- [20:26:40] Epoch 69: Loss(train): 0.050923 Loss(val): 0.051856 acc(val): 0.670408
- [20:27:41] Epoch 70: Loss(train): 0.050910 Loss(val): 0.051839 acc(val): 0.671361
- [20:28:42] Epoch 71: Loss(train): 0.050899 Loss(val): 0.051830 acc(val): 0.670748
- [20:29:47] Epoch 72: Loss(train): 0.050885 Loss(val): 0.051802 acc(val): 0.671769
- [20:30:46] Epoch 73: Loss(train): 0.050876 Loss(val): 0.051789 acc(val): 0.671905
- [20:31:45] Epoch 74: Loss(train): 0.050868 Loss(val): 0.051780 acc(val): 0.671769
- [20:32:43] Epoch 75: Loss(train): 0.050859 Loss(val): 0.051767 acc(val): 0.672517
- [20:33:43] Epoch 76: Loss(train): 0.050852 Loss(val): 0.051755 acc(val): 0.673061
- [20:34:42] Epoch 77: Loss(train): 0.050847 Loss(val): 0.051750 acc(val): 0.672789
- [20:35:42] Epoch 78: Loss(train): 0.050841 Loss(val): 0.051743 acc(val): 0.672653
- [20:36:39] Epoch 79: Loss(train): 0.050836 Loss(val): 0.051736 acc(val): 0.672585
- [20:37:39] Epoch 80: Loss(train): 0.050831 Loss(val): 0.051728 acc(val): 0.672245
- [20:38:37] Epoch 81: Loss(train): 0.050826 Loss(val): 0.051721 acc(val): 0.672449
- [20:39:38] Epoch 82: Loss(train): 0.050822 Loss(val): 0.051717 acc(val): 0.672517
- [20:40:37] Epoch 83: Loss(train): 0.050818 Loss(val): 0.051712 acc(val): 0.672381
- [20:41:36] Epoch 84: Loss(train): 0.050814 Loss(val): 0.051704 acc(val): 0.672925
- [20:42:32] Epoch 85: Loss(train): 0.050811 Loss(val): 0.051707 acc(val): 0.672449
- [20:44:12] Epoch 86: Loss(train): 0.050808 Loss(val): 0.051702 acc(val): 0.672449
- [20:45:56] Epoch 87: Loss(train): 0.050805 Loss(val): 0.051700 acc(val): 0.672517
- [20:47:19] Epoch 88: Loss(train): 0.050802 Loss(val): 0.051695 acc(val): 0.672653
- [20:48:38] Epoch 89: Loss(train): 0.050800 Loss(val): 0.051694 acc(val): 0.672721
- [20:49:47] Epoch 90: Loss(train): 0.050797 Loss(val): 0.051688 acc(val): 0.672993
- [20:51:04] Epoch 91: Loss(train): 0.050796 Loss(val): 0.051688 acc(val): 0.672993
- [20:52:29] Epoch 92: Loss(train): 0.050794 Loss(val): 0.051688 acc(val): 0.673129
- [20:53:54] Epoch 93: Loss(train): 0.050792 Loss(val): 0.051685 acc(val): 0.673197
- [20:54:59] Epoch 94: Loss(train): 0.050790 Loss(val): 0.051679 acc(val): 0.673129
- [20:56:01] Epoch 95: Loss(train): 0.050788 Loss(val): 0.051678 acc(val): 0.673129
- [20:57:04] Epoch 96: Loss(train): 0.050787 Loss(val): 0.051676 acc(val): 0.673197
- [20:58:13] Epoch 97: Loss(train): 0.050786 Loss(val): 0.051680 acc(val): 0.673537
- [20:59:20] Epoch 98: Loss(train): 0.050784 Loss(val): 0.051672 acc(val): 0.673265
- [21:00:19] Epoch 99: Loss(train): 0.050783 Loss(val): 0.051673 acc(val): 0.673469
- [21:01:21] Epoch 100: Loss(train): 0.050782 Loss(val): 0.051673
- [21:01:28] FINAL(100) Loss(test): 0.051884 Accuracy: 0.596351
- Configuration learning_rate=0.003, decay_step=20
- [21:01:44] INIT Loss(test): 0.158548 Accuracy: 0.098919
- [21:02:53] Epoch 1: Loss(train): 0.083241 Loss(val): 0.081338 acc(val): 0.315714
- [21:03:52] Epoch 2: Loss(train): 0.068011 Loss(val): 0.066275 acc(val): 0.454677
- [21:04:49] Epoch 3: Loss(train): 0.064500 Loss(val): 0.062939 acc(val): 0.506276
- [21:05:51] Epoch 4: Loss(train): 0.062763 Loss(val): 0.061977 acc(val): 0.521054
- [21:06:49] Epoch 5: Loss(train): 0.060735 Loss(val): 0.060340 acc(val): 0.539966
- [21:07:50] Epoch 6: Loss(train): 0.059243 Loss(val): 0.058967 acc(val): 0.554847
- [21:08:47] Epoch 7: Loss(train): 0.058659 Loss(val): 0.058494 acc(val): 0.556156
- [21:09:46] Epoch 8: Loss(train): 0.058254 Loss(val): 0.058243 acc(val): 0.558197
- [21:10:44] Epoch 9: Loss(train): 0.058391 Loss(val): 0.058417 acc(val): 0.550850
- [21:11:44] Epoch 10: Loss(train): 0.058190 Loss(val): 0.058318 acc(val): 0.549354
- [21:12:40] Epoch 11: Loss(train): 0.057913 Loss(val): 0.058267 acc(val): 0.551378
- [21:14:00] Epoch 12: Loss(train): 0.057026 Loss(val): 0.057716 acc(val): 0.558044
- [21:15:32] Epoch 13: Loss(train): 0.056899 Loss(val): 0.057677 acc(val): 0.556956
- [21:16:50] Epoch 14: Loss(train): 0.056948 Loss(val): 0.057710 acc(val): 0.556480
- [21:18:38] Epoch 15: Loss(train): 0.056834 Loss(val): 0.057578 acc(val): 0.559133
- [21:20:31] Epoch 16: Loss(train): 0.056879 Loss(val): 0.057601 acc(val): 0.558929
- [21:22:05] Epoch 17: Loss(train): 0.056644 Loss(val): 0.057316 acc(val): 0.562126
- [21:23:53] Epoch 18: Loss(train): 0.056123 Loss(val): 0.056696 acc(val): 0.573963
- [21:24:56] Epoch 19: Loss(train): 0.055539 Loss(val): 0.055993 acc(val): 0.589218
- [21:25:58] Epoch 20: Loss(train): 0.054817 Loss(val): 0.055192 acc(val): 0.604456
- [21:27:20] Epoch 21: Loss(train): 0.054251 Loss(val): 0.054556 acc(val): 0.617109
- [21:28:24] Epoch 22: Loss(train): 0.053735 Loss(val): 0.054037 acc(val): 0.628810
- [21:29:40] Epoch 23: Loss(train): 0.053590 Loss(val): 0.053807 acc(val): 0.629881
- [21:30:46] Epoch 24: Loss(train): 0.053532 Loss(val): 0.053687 acc(val): 0.628997
- [21:31:47] Epoch 25: Loss(train): 0.053535 Loss(val): 0.053628 acc(val): 0.628997
- [21:32:51] Epoch 26: Loss(train): 0.053524 Loss(val): 0.053611 acc(val): 0.626071
- [21:33:53] Epoch 27: Loss(train): 0.053373 Loss(val): 0.053523 acc(val): 0.628384
- [21:35:07] Epoch 28: Loss(train): 0.053050 Loss(val): 0.053344 acc(val): 0.634711
- [21:36:09] Epoch 29: Loss(train): 0.052943 Loss(val): 0.053259 acc(val): 0.636071
- [21:37:08] Epoch 30: Loss(train): 0.052761 Loss(val): 0.053106 acc(val): 0.640153
- [21:38:11] Epoch 31: Loss(train): 0.052696 Loss(val): 0.053025 acc(val): 0.642670
- [21:39:12] Epoch 32: Loss(train): 0.052689 Loss(val): 0.052930 acc(val): 0.642330
- [21:40:15] Epoch 33: Loss(train): 0.052740 Loss(val): 0.052846 acc(val): 0.641514
- [21:41:14] Epoch 34: Loss(train): 0.052706 Loss(val): 0.052756 acc(val): 0.642058
- [21:42:16] Epoch 35: Loss(train): 0.052765 Loss(val): 0.052704 acc(val): 0.640017
- [21:43:14] Epoch 36: Loss(train): 0.052756 Loss(val): 0.052639 acc(val): 0.638180
- [21:44:17] Epoch 37: Loss(train): 0.052711 Loss(val): 0.052555 acc(val): 0.640153
- [21:45:17] Epoch 38: Loss(train): 0.052549 Loss(val): 0.052426 acc(val): 0.644031
- [21:46:26] Epoch 39: Loss(train): 0.052322 Loss(val): 0.052297 acc(val): 0.650969
- [21:48:10] Epoch 40: Loss(train): 0.052060 Loss(val): 0.052150 acc(val): 0.658333
- [21:50:08] Epoch 41: Loss(train): 0.051793 Loss(val): 0.052026 acc(val): 0.665612
- [21:51:53] Epoch 42: Loss(train): 0.051636 Loss(val): 0.051965 acc(val): 0.668197
- [21:53:38] Epoch 43: Loss(train): 0.051514 Loss(val): 0.051931 acc(val): 0.670646
- [21:55:21] Epoch 44: Loss(train): 0.051438 Loss(val): 0.051905 acc(val): 0.669966
- [21:57:04] Epoch 45: Loss(train): 0.051403 Loss(val): 0.051888 acc(val): 0.670714
- [21:58:07] Epoch 46: Loss(train): 0.051358 Loss(val): 0.051865 acc(val): 0.670918
- [21:59:30] Epoch 47: Loss(train): 0.051337 Loss(val): 0.051848 acc(val): 0.670782
- [22:00:47] Epoch 48: Loss(train): 0.051338 Loss(val): 0.051840 acc(val): 0.670714
- [22:02:03] Epoch 49: Loss(train): 0.051314 Loss(val): 0.051821 acc(val): 0.670646
- [22:03:13] Epoch 50: Loss(train): 0.051277 Loss(val): 0.051795 acc(val): 0.670782
- [22:04:19] Epoch 51: Loss(train): 0.051270 Loss(val): 0.051777 acc(val): 0.671599
- [22:05:26] Epoch 52: Loss(train): 0.051233 Loss(val): 0.051768 acc(val): 0.671259
- [22:06:34] Epoch 53: Loss(train): 0.051212 Loss(val): 0.051756 acc(val): 0.671871
- [22:07:37] Epoch 54: Loss(train): 0.051174 Loss(val): 0.051745 acc(val): 0.673163
- [22:08:44] Epoch 55: Loss(train): 0.051122 Loss(val): 0.051735 acc(val): 0.673639
- [22:09:47] Epoch 56: Loss(train): 0.051073 Loss(val): 0.051735 acc(val): 0.672959
- [22:10:53] Epoch 57: Loss(train): 0.051029 Loss(val): 0.051744 acc(val): 0.672959
- [22:11:53] Epoch 58: Loss(train): 0.051000 Loss(val): 0.051749 acc(val): 0.674252
- [22:12:56] Epoch 59: Loss(train): 0.050973 Loss(val): 0.051769 acc(val): 0.674524
- [22:13:57] Epoch 60: Loss(train): 0.050952 Loss(val): 0.051795 acc(val): 0.673231
- [22:15:00] Epoch 61: Loss(train): 0.050937 Loss(val): 0.051807 acc(val): 0.673724
- [22:16:02] Epoch 62: Loss(train): 0.050928 Loss(val): 0.051828 acc(val): 0.673588
- [22:17:06] Epoch 63: Loss(train): 0.050916 Loss(val): 0.051835 acc(val): 0.673861
- [22:18:08] Epoch 64: Loss(train): 0.050908 Loss(val): 0.051846 acc(val): 0.673452
- [22:19:37] Epoch 65: Loss(train): 0.050891 Loss(val): 0.051827 acc(val): 0.673861
- [22:21:30] Epoch 66: Loss(train): 0.050875 Loss(val): 0.051813 acc(val): 0.673861
- [22:23:05] Epoch 67: Loss(train): 0.050859 Loss(val): 0.051792 acc(val): 0.674269
- [22:24:45] Epoch 68: Loss(train): 0.050848 Loss(val): 0.051785 acc(val): 0.674065
- [22:26:41] Epoch 69: Loss(train): 0.050833 Loss(val): 0.051761 acc(val): 0.675085
- [22:28:20] Epoch 70: Loss(train): 0.050818 Loss(val): 0.051735 acc(val): 0.675425
- [22:29:44] Epoch 71: Loss(train): 0.050805 Loss(val): 0.051714 acc(val): 0.676105
- [22:31:19] Epoch 72: Loss(train): 0.050798 Loss(val): 0.051715 acc(val): 0.676037
- [22:32:43] Epoch 73: Loss(train): 0.050788 Loss(val): 0.051700 acc(val): 0.676241
- [22:33:55] Epoch 74: Loss(train): 0.050779 Loss(val): 0.051684 acc(val): 0.676786
- [22:35:13] Epoch 75: Loss(train): 0.050772 Loss(val): 0.051674 acc(val): 0.676582
- [22:36:24] Epoch 76: Loss(train): 0.050765 Loss(val): 0.051667 acc(val): 0.676718
- [22:37:34] Epoch 77: Loss(train): 0.050759 Loss(val): 0.051653 acc(val): 0.677058
- [22:38:41] Epoch 78: Loss(train): 0.050753 Loss(val): 0.051646 acc(val): 0.676990
- [22:39:49] Epoch 79: Loss(train): 0.050748 Loss(val): 0.051638 acc(val): 0.677058
- [22:41:00] Epoch 80: Loss(train): 0.050743 Loss(val): 0.051630 acc(val): 0.676990
- [22:42:08] Epoch 81: Loss(train): 0.050740 Loss(val): 0.051629 acc(val): 0.677058
- [22:43:14] Epoch 82: Loss(train): 0.050735 Loss(val): 0.051620 acc(val): 0.676650
- [22:44:21] Epoch 83: Loss(train): 0.050732 Loss(val): 0.051618 acc(val): 0.676582
- [22:45:26] Epoch 84: Loss(train): 0.050728 Loss(val): 0.051612 acc(val): 0.676922
- [22:46:32] Epoch 85: Loss(train): 0.050725 Loss(val): 0.051609 acc(val): 0.676990
- [22:47:37] Epoch 86: Loss(train): 0.050722 Loss(val): 0.051603 acc(val): 0.676854
- [22:48:44] Epoch 87: Loss(train): 0.050719 Loss(val): 0.051599 acc(val): 0.677126
- [22:49:47] Epoch 88: Loss(train): 0.050716 Loss(val): 0.051596 acc(val): 0.676990
- [22:50:53] Epoch 89: Loss(train): 0.050714 Loss(val): 0.051596 acc(val): 0.676990
- [22:51:58] Epoch 90: Loss(train): 0.050711 Loss(val): 0.051589 acc(val): 0.677398
- [22:53:07] Epoch 91: Loss(train): 0.050710 Loss(val): 0.051587 acc(val): 0.677058
- [22:54:13] Epoch 92: Loss(train): 0.050708 Loss(val): 0.051584 acc(val): 0.677534
- [22:55:22] Epoch 93: Loss(train): 0.050706 Loss(val): 0.051585 acc(val): 0.677534
- [22:56:28] Epoch 94: Loss(train): 0.050705 Loss(val): 0.051585 acc(val): 0.677466
- [22:57:38] Epoch 95: Loss(train): 0.050703 Loss(val): 0.051586 acc(val): 0.677330
- [22:58:44] Epoch 96: Loss(train): 0.050702 Loss(val): 0.051580 acc(val): 0.677466
- [22:59:53] Epoch 97: Loss(train): 0.050700 Loss(val): 0.051578 acc(val): 0.677398
- [23:00:59] Epoch 98: Loss(train): 0.050699 Loss(val): 0.051579 acc(val): 0.677466
- [23:02:09] Epoch 99: Loss(train): 0.050698 Loss(val): 0.051577 acc(val): 0.677602
- [23:03:16] Epoch 100: Loss(train): 0.050697 Loss(val): 0.051575
- [23:03:25] FINAL(100) Loss(test): 0.051971 Accuracy: 0.586486
- Configuration learning_rate=0.003, decay_step=40
- [23:03:43] INIT Loss(test): 0.119407 Accuracy: 0.131892
- [23:04:57] Epoch 1: Loss(train): 0.086009 Loss(val): 0.084843 acc(val): 0.304133
- [23:06:07] Epoch 2: Loss(train): 0.068264 Loss(val): 0.066110 acc(val): 0.453316
- [23:07:15] Epoch 3: Loss(train): 0.064122 Loss(val): 0.062628 acc(val): 0.517568
- [23:08:25] Epoch 4: Loss(train): 0.060438 Loss(val): 0.059920 acc(val): 0.540153
- [23:09:34] Epoch 5: Loss(train): 0.059801 Loss(val): 0.059508 acc(val): 0.544507
- [23:10:47] Epoch 6: Loss(train): 0.059259 Loss(val): 0.058924 acc(val): 0.546412
- [23:12:21] Epoch 7: Loss(train): 0.058629 Loss(val): 0.058101 acc(val): 0.558673
- [23:13:41] Epoch 8: Loss(train): 0.058048 Loss(val): 0.057791 acc(val): 0.564099
- [23:14:46] Epoch 9: Loss(train): 0.058454 Loss(val): 0.058416 acc(val): 0.551122
- [23:15:55] Epoch 10: Loss(train): 0.058675 Loss(val): 0.058777 acc(val): 0.543724
- [23:17:04] Epoch 11: Loss(train): 0.058370 Loss(val): 0.058430 acc(val): 0.546582
- [23:19:16] Epoch 12: Loss(train): 0.057999 Loss(val): 0.058021 acc(val): 0.547126
- [23:21:09] Epoch 13: Loss(train): 0.057656 Loss(val): 0.057744 acc(val): 0.555425
- [23:23:13] Epoch 14: Loss(train): 0.057686 Loss(val): 0.057843 acc(val): 0.555289
- [23:24:54] Epoch 15: Loss(train): 0.058182 Loss(val): 0.058384 acc(val): 0.554949
- [23:26:00] Epoch 16: Loss(train): 0.058460 Loss(val): 0.058770 acc(val): 0.556037
- [23:27:03] Epoch 17: Loss(train): 0.058416 Loss(val): 0.058736 acc(val): 0.558486
- [23:28:10] Epoch 18: Loss(train): 0.057461 Loss(val): 0.057750 acc(val): 0.571888
- [23:29:45] Epoch 19: Loss(train): 0.056130 Loss(val): 0.056310 acc(val): 0.588895
- [23:31:09] Epoch 20: Loss(train): 0.054899 Loss(val): 0.055004 acc(val): 0.607738
- [23:32:13] Epoch 21: Loss(train): 0.054311 Loss(val): 0.054304 acc(val): 0.618214
- [23:33:22] Epoch 22: Loss(train): 0.054011 Loss(val): 0.053956 acc(val): 0.620323
- [23:34:39] Epoch 23: Loss(train): 0.053804 Loss(val): 0.053789 acc(val): 0.625425
- [23:35:56] Epoch 24: Loss(train): 0.053912 Loss(val): 0.053940 acc(val): 0.621361
- [23:37:02] Epoch 25: Loss(train): 0.053980 Loss(val): 0.054102 acc(val): 0.616105
- [23:38:12] Epoch 26: Loss(train): 0.054036 Loss(val): 0.054221 acc(val): 0.612364
- [23:39:21] Epoch 27: Loss(train): 0.054032 Loss(val): 0.054174 acc(val): 0.612636
- [23:40:32] Epoch 28: Loss(train): 0.053859 Loss(val): 0.053952 acc(val): 0.615374
- [23:41:39] Epoch 29: Loss(train): 0.053869 Loss(val): 0.053855 acc(val): 0.616395
- [23:42:47] Epoch 30: Loss(train): 0.053897 Loss(val): 0.053789 acc(val): 0.615714
- [23:43:53] Epoch 31: Loss(train): 0.053977 Loss(val): 0.053684 acc(val): 0.612789
- [23:45:01] Epoch 32: Loss(train): 0.054146 Loss(val): 0.053690 acc(val): 0.608929
- [23:46:05] Epoch 33: Loss(train): 0.054114 Loss(val): 0.053585 acc(val): 0.607143
- [23:47:12] Epoch 34: Loss(train): 0.053837 Loss(val): 0.053342 acc(val): 0.614286
- [23:48:54] Epoch 35: Loss(train): 0.053427 Loss(val): 0.053079 acc(val): 0.621905
- [23:51:05] Epoch 36: Loss(train): 0.052834 Loss(val): 0.052732 acc(val): 0.637143
- [23:52:37] Epoch 37: Loss(train): 0.052377 Loss(val): 0.052473 acc(val): 0.647075
- [23:53:55] Epoch 38: Loss(train): 0.052059 Loss(val): 0.052323 acc(val): 0.655765
- [23:55:00] Epoch 39: Loss(train): 0.051822 Loss(val): 0.052194 acc(val): 0.659711
- [23:56:37] Epoch 40: Loss(train): 0.051687 Loss(val): 0.052090 acc(val): 0.662432
- [23:57:56] Epoch 41: Loss(train): 0.051599 Loss(val): 0.052035 acc(val): 0.663452
- [23:59:27] Epoch 42: Loss(train): 0.051543 Loss(val): 0.052002 acc(val): 0.666105
- [00:00:40] Epoch 43: Loss(train): 0.051492 Loss(val): 0.051979 acc(val): 0.665289
- [00:01:55] Epoch 44: Loss(train): 0.051441 Loss(val): 0.051964 acc(val): 0.666514
- [00:03:10] Epoch 45: Loss(train): 0.051421 Loss(val): 0.051940 acc(val): 0.668078
- [00:04:31] Epoch 46: Loss(train): 0.051367 Loss(val): 0.051917 acc(val): 0.668827
- [00:05:37] Epoch 47: Loss(train): 0.051326 Loss(val): 0.051903 acc(val): 0.669031
- [00:06:46] Epoch 48: Loss(train): 0.051282 Loss(val): 0.051888 acc(val): 0.670119
- [00:07:54] Epoch 49: Loss(train): 0.051247 Loss(val): 0.051872 acc(val): 0.670391
- [00:09:08] Epoch 50: Loss(train): 0.051210 Loss(val): 0.051871 acc(val): 0.670391
- [00:10:13] Epoch 51: Loss(train): 0.051175 Loss(val): 0.051883 acc(val): 0.670187
- [00:11:22] Epoch 52: Loss(train): 0.051149 Loss(val): 0.051886 acc(val): 0.670051
- [00:12:27] Epoch 53: Loss(train): 0.051129 Loss(val): 0.051913 acc(val): 0.669099
- [00:13:37] Epoch 54: Loss(train): 0.051116 Loss(val): 0.051936 acc(val): 0.667330
- [00:14:45] Epoch 55: Loss(train): 0.051102 Loss(val): 0.051941 acc(val): 0.667602
- [00:15:57] Epoch 56: Loss(train): 0.051099 Loss(val): 0.051973 acc(val): 0.666718
- [00:17:04] Epoch 57: Loss(train): 0.051085 Loss(val): 0.051980 acc(val): 0.666718
- [00:18:26] Epoch 58: Loss(train): 0.051069 Loss(val): 0.051975 acc(val): 0.666650
- [00:20:26] Epoch 59: Loss(train): 0.051041 Loss(val): 0.051938 acc(val): 0.667602
- [00:21:56] Epoch 60: Loss(train): 0.051020 Loss(val): 0.051919 acc(val): 0.667738
- [00:23:31] Epoch 61: Loss(train): 0.050994 Loss(val): 0.051884 acc(val): 0.668963
- [00:24:50] Epoch 62: Loss(train): 0.050970 Loss(val): 0.051851 acc(val): 0.669915
- [00:26:36] Epoch 63: Loss(train): 0.050952 Loss(val): 0.051824 acc(val): 0.670187
- [00:27:56] Epoch 64: Loss(train): 0.050932 Loss(val): 0.051791 acc(val): 0.671207
- [00:29:25] Epoch 65: Loss(train): 0.050915 Loss(val): 0.051762 acc(val): 0.671888
- [00:30:38] Epoch 66: Loss(train): 0.050900 Loss(val): 0.051739 acc(val): 0.671820
- [00:31:47] Epoch 67: Loss(train): 0.050888 Loss(val): 0.051720 acc(val): 0.671956
- [00:33:12] Epoch 68: Loss(train): 0.050880 Loss(val): 0.051714 acc(val): 0.671956
- [00:34:24] Epoch 69: Loss(train): 0.050870 Loss(val): 0.051703 acc(val): 0.672024
- [00:35:42] Epoch 70: Loss(train): 0.050862 Loss(val): 0.051696 acc(val): 0.672636
- [00:36:54] Epoch 71: Loss(train): 0.050854 Loss(val): 0.051680 acc(val): 0.673044
- [00:38:13] Epoch 72: Loss(train): 0.050847 Loss(val): 0.051672 acc(val): 0.672840
- [00:39:21] Epoch 73: Loss(train): 0.050842 Loss(val): 0.051672 acc(val): 0.671888
- [00:40:37] Epoch 74: Loss(train): 0.050836 Loss(val): 0.051663 acc(val): 0.672364
- [00:41:45] Epoch 75: Loss(train): 0.050830 Loss(val): 0.051650 acc(val): 0.672432
- [00:43:00] Epoch 76: Loss(train): 0.050825 Loss(val): 0.051648 acc(val): 0.672840
- [00:44:08] Epoch 77: Loss(train): 0.050820 Loss(val): 0.051643 acc(val): 0.672568
- [00:45:20] Epoch 78: Loss(train): 0.050817 Loss(val): 0.051644 acc(val): 0.672500
- [00:46:30] Epoch 79: Loss(train): 0.050812 Loss(val): 0.051636 acc(val): 0.672364
- [00:47:41] Epoch 80: Loss(train): 0.050809 Loss(val): 0.051635 acc(val): 0.672704
- [00:48:51] Epoch 81: Loss(train): 0.050805 Loss(val): 0.051629 acc(val): 0.672840
- [00:50:33] Epoch 82: Loss(train): 0.050801 Loss(val): 0.051625 acc(val): 0.672704
- [00:52:14] Epoch 83: Loss(train): 0.050798 Loss(val): 0.051623 acc(val): 0.672772
- [00:53:55] Epoch 84: Loss(train): 0.050795 Loss(val): 0.051622 acc(val): 0.672704
- [00:55:43] Epoch 85: Loss(train): 0.050792 Loss(val): 0.051617 acc(val): 0.673112
- [00:57:16] Epoch 86: Loss(train): 0.050790 Loss(val): 0.051615 acc(val): 0.673248
- [00:58:46] Epoch 87: Loss(train): 0.050787 Loss(val): 0.051613 acc(val): 0.673520
- [01:00:14] Epoch 88: Loss(train): 0.050785 Loss(val): 0.051612 acc(val): 0.673248
- [01:01:36] Epoch 89: Loss(train): 0.050783 Loss(val): 0.051612 acc(val): 0.673180
- [01:02:53] Epoch 90: Loss(train): 0.050781 Loss(val): 0.051609 acc(val): 0.673248
- [01:04:05] Epoch 91: Loss(train): 0.050779 Loss(val): 0.051608 acc(val): 0.673316
- [01:05:24] Epoch 92: Loss(train): 0.050778 Loss(val): 0.051606 acc(val): 0.673384
- [01:06:45] Epoch 93: Loss(train): 0.050776 Loss(val): 0.051605 acc(val): 0.673452
- [01:07:59] Epoch 94: Loss(train): 0.050775 Loss(val): 0.051604 acc(val): 0.673588
- [01:09:13] Epoch 95: Loss(train): 0.050774 Loss(val): 0.051602 acc(val): 0.673452
- [01:10:25] Epoch 96: Loss(train): 0.050772 Loss(val): 0.051599 acc(val): 0.673316
- [01:11:35] Epoch 97: Loss(train): 0.050771 Loss(val): 0.051599 acc(val): 0.673520
- [01:12:45] Epoch 98: Loss(train): 0.050770 Loss(val): 0.051600 acc(val): 0.673588
- [01:13:54] Epoch 99: Loss(train): 0.050769 Loss(val): 0.051597 acc(val): 0.673452
- [01:15:04] Epoch 100: Loss(train): 0.050768 Loss(val): 0.051596
- [01:15:13] FINAL(100) Loss(test): 0.052151 Accuracy: 0.602703
- Configuration learning_rate=0.003, decay_step=60
- [01:15:31] INIT Loss(test): 0.118268 Accuracy: 0.130541
- [01:16:50] Epoch 1: Loss(train): 0.084326 Loss(val): 0.082111 acc(val): 0.317891
- [01:18:01] Epoch 2: Loss(train): 0.067675 Loss(val): 0.066096 acc(val): 0.453265
- [01:19:07] Epoch 3: Loss(train): 0.063037 Loss(val): 0.061884 acc(val): 0.512517
- [01:20:51] Epoch 4: Loss(train): 0.060235 Loss(val): 0.059597 acc(val): 0.557024
- [01:22:50] Epoch 5: Loss(train): 0.059429 Loss(val): 0.059154 acc(val): 0.559099
- [01:24:33] Epoch 6: Loss(train): 0.059058 Loss(val): 0.058983 acc(val): 0.559303
- [01:26:15] Epoch 7: Loss(train): 0.058302 Loss(val): 0.058147 acc(val): 0.566718
- [01:27:41] Epoch 8: Loss(train): 0.058073 Loss(val): 0.057841 acc(val): 0.566241
- [01:29:14] Epoch 9: Loss(train): 0.057751 Loss(val): 0.057718 acc(val): 0.568078
- [01:30:49] Epoch 10: Loss(train): 0.057868 Loss(val): 0.057935 acc(val): 0.561003
- [01:32:08] Epoch 11: Loss(train): 0.057844 Loss(val): 0.058059 acc(val): 0.559439
- [01:33:28] Epoch 12: Loss(train): 0.057605 Loss(val): 0.057729 acc(val): 0.561207
- [01:34:45] Epoch 13: Loss(train): 0.056757 Loss(val): 0.056798 acc(val): 0.574422
- [01:36:09] Epoch 14: Loss(train): 0.056279 Loss(val): 0.056278 acc(val): 0.585034
- [01:37:26] Epoch 15: Loss(train): 0.056084 Loss(val): 0.056146 acc(val): 0.587619
- [01:38:40] Epoch 16: Loss(train): 0.056607 Loss(val): 0.056665 acc(val): 0.580119
- [01:39:54] Epoch 17: Loss(train): 0.057447 Loss(val): 0.057562 acc(val): 0.563656
- [01:41:13] Epoch 18: Loss(train): 0.057931 Loss(val): 0.058062 acc(val): 0.555493
- [01:42:26] Epoch 19: Loss(train): 0.057932 Loss(val): 0.058026 acc(val): 0.556786
- [01:43:42] Epoch 20: Loss(train): 0.056523 Loss(val): 0.056666 acc(val): 0.581956
- [01:44:55] Epoch 21: Loss(train): 0.054813 Loss(val): 0.054965 acc(val): 0.613197
- [01:46:10] Epoch 22: Loss(train): 0.053830 Loss(val): 0.054018 acc(val): 0.632517
- [01:47:20] Epoch 23: Loss(train): 0.053511 Loss(val): 0.053685 acc(val): 0.635442
- [01:48:35] Epoch 24: Loss(train): 0.053198 Loss(val): 0.053462 acc(val): 0.640272
- [01:49:48] Epoch 25: Loss(train): 0.053054 Loss(val): 0.053336 acc(val): 0.642041
- [01:51:41] Epoch 26: Loss(train): 0.052948 Loss(val): 0.053279 acc(val): 0.643265
- [01:53:14] Epoch 27: Loss(train): 0.052850 Loss(val): 0.053252 acc(val): 0.644490
- [01:54:52] Epoch 28: Loss(train): 0.052809 Loss(val): 0.053145 acc(val): 0.644626
- [01:56:30] Epoch 29: Loss(train): 0.052700 Loss(val): 0.053007 acc(val): 0.646735
- [01:58:28] Epoch 30: Loss(train): 0.052565 Loss(val): 0.052855 acc(val): 0.650000
- [01:59:41] Epoch 31: Loss(train): 0.052525 Loss(val): 0.052728 acc(val): 0.651769
- [02:01:14] Epoch 32: Loss(train): 0.052521 Loss(val): 0.052635 acc(val): 0.649660
- [02:02:32] Epoch 33: Loss(train): 0.052542 Loss(val): 0.052554 acc(val): 0.649252
- [02:04:04] Epoch 34: Loss(train): 0.052591 Loss(val): 0.052510 acc(val): 0.646463
- [02:05:17] Epoch 35: Loss(train): 0.052629 Loss(val): 0.052478 acc(val): 0.645731
- [02:06:41] Epoch 36: Loss(train): 0.052609 Loss(val): 0.052442 acc(val): 0.647160
- [02:07:55] Epoch 37: Loss(train): 0.052505 Loss(val): 0.052344 acc(val): 0.649065
- [02:09:16] Epoch 38: Loss(train): 0.052283 Loss(val): 0.052196 acc(val): 0.653827
- [02:10:30] Epoch 39: Loss(train): 0.052129 Loss(val): 0.052099 acc(val): 0.656616
- [02:11:46] Epoch 40: Loss(train): 0.051865 Loss(val): 0.051973 acc(val): 0.660901
- [02:12:59] Epoch 41: Loss(train): 0.051727 Loss(val): 0.051912 acc(val): 0.664643
- [02:14:16] Epoch 42: Loss(train): 0.051614 Loss(val): 0.051869 acc(val): 0.666888
- [02:15:26] Epoch 43: Loss(train): 0.051569 Loss(val): 0.051843 acc(val): 0.666003
- [02:16:41] Epoch 44: Loss(train): 0.051505 Loss(val): 0.051823 acc(val): 0.667024
- [02:17:52] Epoch 45: Loss(train): 0.051464 Loss(val): 0.051800 acc(val): 0.667568
- [02:19:07] Epoch 46: Loss(train): 0.051440 Loss(val): 0.051774 acc(val): 0.667432
- [02:20:20] Epoch 47: Loss(train): 0.051418 Loss(val): 0.051756 acc(val): 0.667976
- [02:22:02] Epoch 48: Loss(train): 0.051364 Loss(val): 0.051738 acc(val): 0.669813
- [02:23:45] Epoch 49: Loss(train): 0.051345 Loss(val): 0.051721 acc(val): 0.669201
- [02:25:46] Epoch 50: Loss(train): 0.051294 Loss(val): 0.051708 acc(val): 0.671037
- [02:27:28] Epoch 51: Loss(train): 0.051244 Loss(val): 0.051699 acc(val): 0.670765
- [02:29:26] Epoch 52: Loss(train): 0.051217 Loss(val): 0.051687 acc(val): 0.671241
- [02:30:49] Epoch 53: Loss(train): 0.051168 Loss(val): 0.051685 acc(val): 0.671582
- [02:32:24] Epoch 54: Loss(train): 0.051129 Loss(val): 0.051696 acc(val): 0.672262
- [02:33:48] Epoch 55: Loss(train): 0.051096 Loss(val): 0.051717 acc(val): 0.670833
- [02:35:18] Epoch 56: Loss(train): 0.051069 Loss(val): 0.051752 acc(val): 0.669609
- [02:36:37] Epoch 57: Loss(train): 0.051054 Loss(val): 0.051791 acc(val): 0.668793
- [02:38:02] Epoch 58: Loss(train): 0.051046 Loss(val): 0.051823 acc(val): 0.668793
- [02:39:22] Epoch 59: Loss(train): 0.051033 Loss(val): 0.051830 acc(val): 0.668180
- [02:40:42] Epoch 60: Loss(train): 0.051030 Loss(val): 0.051859 acc(val): 0.667296
- [02:41:56] Epoch 61: Loss(train): 0.051019 Loss(val): 0.051869 acc(val): 0.666276
- [02:43:18] Epoch 62: Loss(train): 0.051008 Loss(val): 0.051871 acc(val): 0.666003
- [02:44:34] Epoch 63: Loss(train): 0.050986 Loss(val): 0.051852 acc(val): 0.665935
- [02:45:51] Epoch 64: Loss(train): 0.050960 Loss(val): 0.051816 acc(val): 0.667364
- [02:47:05] Epoch 65: Loss(train): 0.050938 Loss(val): 0.051782 acc(val): 0.668316
- [02:48:24] Epoch 66: Loss(train): 0.050921 Loss(val): 0.051765 acc(val): 0.668793
- [02:49:39] Epoch 67: Loss(train): 0.050905 Loss(val): 0.051737 acc(val): 0.669337
- [02:51:00] Epoch 68: Loss(train): 0.050893 Loss(val): 0.051723 acc(val): 0.669745
- [02:52:31] Epoch 69: Loss(train): 0.050879 Loss(val): 0.051702 acc(val): 0.670221
- [02:54:48] Epoch 70: Loss(train): 0.050866 Loss(val): 0.051684 acc(val): 0.669813
- [02:56:36] Epoch 71: Loss(train): 0.050855 Loss(val): 0.051657 acc(val): 0.670425
- [02:58:05] Epoch 72: Loss(train): 0.050848 Loss(val): 0.051651 acc(val): 0.670357
- [02:59:25] Epoch 73: Loss(train): 0.050842 Loss(val): 0.051645 acc(val): 0.670765
- [03:01:09] Epoch 74: Loss(train): 0.050834 Loss(val): 0.051638 acc(val): 0.670901
- [03:02:44] Epoch 75: Loss(train): 0.050828 Loss(val): 0.051627 acc(val): 0.671378
- [03:04:13] Epoch 76: Loss(train): 0.050821 Loss(val): 0.051613 acc(val): 0.671582
- [03:05:28] Epoch 77: Loss(train): 0.050817 Loss(val): 0.051616 acc(val): 0.671446
- [03:07:01] Epoch 78: Loss(train): 0.050811 Loss(val): 0.051602 acc(val): 0.671446
- [03:08:26] Epoch 79: Loss(train): 0.050808 Loss(val): 0.051602 acc(val): 0.671922
- [03:09:55] Epoch 80: Loss(train): 0.050804 Loss(val): 0.051598 acc(val): 0.672262
- [03:11:08] Epoch 81: Loss(train): 0.050800 Loss(val): 0.051593 acc(val): 0.671990
- [03:12:34] Epoch 82: Loss(train): 0.050796 Loss(val): 0.051591 acc(val): 0.672398
- [03:13:52] Epoch 83: Loss(train): 0.050792 Loss(val): 0.051581 acc(val): 0.672534
- [03:15:11] Epoch 84: Loss(train): 0.050789 Loss(val): 0.051579 acc(val): 0.672466
- [03:16:28] Epoch 85: Loss(train): 0.050786 Loss(val): 0.051579 acc(val): 0.672398
- [03:17:47] Epoch 86: Loss(train): 0.050783 Loss(val): 0.051572 acc(val): 0.672398
- [03:19:04] Epoch 87: Loss(train): 0.050781 Loss(val): 0.051570 acc(val): 0.672534
- [03:20:23] Epoch 88: Loss(train): 0.050779 Loss(val): 0.051571 acc(val): 0.672602
- [03:21:39] Epoch 89: Loss(train): 0.050777 Loss(val): 0.051571 acc(val): 0.672330
- [03:23:02] Epoch 90: Loss(train): 0.050774 Loss(val): 0.051563 acc(val): 0.672534
- [03:24:57] Epoch 91: Loss(train): 0.050773 Loss(val): 0.051561 acc(val): 0.672602
- [03:27:00] Epoch 92: Loss(train): 0.050771 Loss(val): 0.051558 acc(val): 0.672670
- [03:29:00] Epoch 93: Loss(train): 0.050769 Loss(val): 0.051557 acc(val): 0.672602
- [03:31:00] Epoch 94: Loss(train): 0.050767 Loss(val): 0.051552 acc(val): 0.672738
- [03:32:35] Epoch 95: Loss(train): 0.050766 Loss(val): 0.051551 acc(val): 0.672874
- [03:34:01] Epoch 96: Loss(train): 0.050765 Loss(val): 0.051556 acc(val): 0.672670
- [03:35:31] Epoch 97: Loss(train): 0.050764 Loss(val): 0.051553 acc(val): 0.673146
- [03:37:11] Epoch 98: Loss(train): 0.050763 Loss(val): 0.051555 acc(val): 0.672942
- [03:38:30] Epoch 99: Loss(train): 0.050762 Loss(val): 0.051552 acc(val): 0.673010
- [03:39:58] Epoch 100: Loss(train): 0.050761 Loss(val): 0.051550
- [03:40:09] FINAL(100) Loss(test): 0.051995 Accuracy: 0.592703
|