- --------[training log]--------
- --------[27_09_2019 16:51:25]--------
- second stage Hyperparameter Tuning with 1 net
- Configuration learning_rate=0.03, decay_step=20
- [16:52:47] INIT Loss(val): 0.148451 Accuracy: 0.112245
- [16:54:51] Epoch 1: Loss(train): 0.089663 Loss(val): 0.088737
- [16:55:10] Epoch 2: Loss(train): 0.070667 Loss(val): 0.068928
- [16:55:27] Epoch 3: Loss(train): 0.063542 Loss(val): 0.062128
- [16:55:43] Epoch 4: Loss(train): 0.060417 Loss(val): 0.059723
- [16:56:01] Epoch 5: Loss(train): 0.059493 Loss(val): 0.059026
- [16:56:19] Epoch 6: Loss(train): 0.058942 Loss(val): 0.058448
- [16:56:36] Epoch 7: Loss(train): 0.058415 Loss(val): 0.057891
- [16:56:56] Epoch 8: Loss(train): 0.057964 Loss(val): 0.057296
- [16:57:13] Epoch 9: Loss(train): 0.057504 Loss(val): 0.056888
- [16:57:31] Epoch 10: Loss(train): 0.056998 Loss(val): 0.056621
- [16:57:48] Epoch 11: Loss(train): 0.056863 Loss(val): 0.056618
- [16:58:07] Epoch 12: Loss(train): 0.056772 Loss(val): 0.056483
- [16:58:23] Epoch 13: Loss(train): 0.056706 Loss(val): 0.056282
- [16:58:39] Epoch 14: Loss(train): 0.056430 Loss(val): 0.055929
- [16:58:55] Epoch 15: Loss(train): 0.056260 Loss(val): 0.055614
- [16:59:12] Epoch 16: Loss(train): 0.055839 Loss(val): 0.055432
- [16:59:30] Epoch 17: Loss(train): 0.055648 Loss(val): 0.055394
- [16:59:54] Epoch 18: Loss(train): 0.055889 Loss(val): 0.055650
- [17:00:12] Epoch 19: Loss(train): 0.056102 Loss(val): 0.055804
- [17:00:28] Epoch 20: Loss(train): 0.055990 Loss(val): 0.055733
- [17:00:45] Epoch 21: Loss(train): 0.056002 Loss(val): 0.055650
- [17:01:01] Epoch 22: Loss(train): 0.055604 Loss(val): 0.055251
- [17:01:18] Epoch 23: Loss(train): 0.054814 Loss(val): 0.054507
- [17:01:35] Epoch 24: Loss(train): 0.054289 Loss(val): 0.053961
- [17:01:59] Epoch 25: Loss(train): 0.054040 Loss(val): 0.053693
- [17:02:16] Epoch 26: Loss(train): 0.054076 Loss(val): 0.053621
- [17:02:32] Epoch 27: Loss(train): 0.054120 Loss(val): 0.053601
- [17:02:49] Epoch 28: Loss(train): 0.054555 Loss(val): 0.053849
- [17:03:06] Epoch 29: Loss(train): 0.055195 Loss(val): 0.054283
- [17:03:24] Epoch 30: Loss(train): 0.055647 Loss(val): 0.054590
- [17:03:41] Epoch 31: Loss(train): 0.055832 Loss(val): 0.054655
- [17:03:58] Epoch 32: Loss(train): 0.055375 Loss(val): 0.054319
- [17:04:16] Epoch 33: Loss(train): 0.054487 Loss(val): 0.053729
- [17:04:33] Epoch 34: Loss(train): 0.053432 Loss(val): 0.053076
- [17:04:50] Epoch 35: Loss(train): 0.052729 Loss(val): 0.052693
- [17:05:08] Epoch 36: Loss(train): 0.052390 Loss(val): 0.052524
- [17:05:25] Epoch 37: Loss(train): 0.052178 Loss(val): 0.052437
- [17:05:43] Epoch 38: Loss(train): 0.052099 Loss(val): 0.052385
- [17:06:00] Epoch 39: Loss(train): 0.052070 Loss(val): 0.052320
- [17:06:17] Epoch 40: Loss(train): 0.052017 Loss(val): 0.052243
- [17:06:35] Epoch 41: Loss(train): 0.051925 Loss(val): 0.052159
- [17:06:52] Epoch 42: Loss(train): 0.051824 Loss(val): 0.052056
- [17:07:10] Epoch 43: Loss(train): 0.051765 Loss(val): 0.051970
- [17:07:27] Epoch 44: Loss(train): 0.051683 Loss(val): 0.051933
- [17:07:45] Epoch 45: Loss(train): 0.051606 Loss(val): 0.051889
- [17:08:01] Epoch 46: Loss(train): 0.051533 Loss(val): 0.051864
- [17:08:18] Epoch 47: Loss(train): 0.051464 Loss(val): 0.051846
- [17:08:35] Epoch 48: Loss(train): 0.051401 Loss(val): 0.051838
- [17:08:52] Epoch 49: Loss(train): 0.051325 Loss(val): 0.051843
- [17:09:09] Epoch 50: Loss(train): 0.051266 Loss(val): 0.051860
- [17:09:26] Epoch 51: Loss(train): 0.051222 Loss(val): 0.051899
- [17:09:43] Epoch 52: Loss(train): 0.051193 Loss(val): 0.051918
- [17:10:01] Epoch 53: Loss(train): 0.051176 Loss(val): 0.051968
- [17:10:18] Epoch 54: Loss(train): 0.051162 Loss(val): 0.052006
- [17:10:35] Epoch 55: Loss(train): 0.051134 Loss(val): 0.051998
- [17:10:52] Epoch 56: Loss(train): 0.051111 Loss(val): 0.051992
- [17:11:10] Epoch 57: Loss(train): 0.051075 Loss(val): 0.051958
- [17:11:28] Epoch 58: Loss(train): 0.051050 Loss(val): 0.051941
- [17:11:45] Epoch 59: Loss(train): 0.051010 Loss(val): 0.051883
- [17:12:03] Epoch 60: Loss(train): 0.050980 Loss(val): 0.051837
- [17:12:20] Epoch 61: Loss(train): 0.050952 Loss(val): 0.051785
- [17:12:37] Epoch 62: Loss(train): 0.050934 Loss(val): 0.051770
- [17:12:55] Epoch 63: Loss(train): 0.050916 Loss(val): 0.051740
- [17:13:12] Epoch 64: Loss(train): 0.050899 Loss(val): 0.051707
- [17:13:29] Epoch 65: Loss(train): 0.050887 Loss(val): 0.051699
- [17:13:48] Epoch 66: Loss(train): 0.050875 Loss(val): 0.051674
- [17:14:05] Epoch 67: Loss(train): 0.050866 Loss(val): 0.051669
- [17:14:22] Epoch 68: Loss(train): 0.050856 Loss(val): 0.051652
- [17:14:40] Epoch 69: Loss(train): 0.050847 Loss(val): 0.051641
- [17:14:57] Epoch 70: Loss(train): 0.050839 Loss(val): 0.051635
- Converged at Loss(train): 0.051801, Loss(val): 0.052561 in epoch 70 with accuracy(val): 0.654473
- Configuration learning_rate=0.03, decay_step=40
- [17:15:12] INIT Loss(val): 0.149915 Accuracy: 0.098707
- [17:15:33] Epoch 1: Loss(train): 0.088789 Loss(val): 0.087691
- [17:15:50] Epoch 2: Loss(train): 0.068032 Loss(val): 0.067004
- [17:16:08] Epoch 3: Loss(train): 0.063522 Loss(val): 0.062396
- [17:16:26] Epoch 4: Loss(train): 0.061677 Loss(val): 0.060596
- [17:16:43] Epoch 5: Loss(train): 0.060235 Loss(val): 0.059540
- [17:17:01] Epoch 6: Loss(train): 0.060055 Loss(val): 0.059479
- [17:17:18] Epoch 7: Loss(train): 0.059696 Loss(val): 0.058983
- [17:17:36] Epoch 8: Loss(train): 0.058420 Loss(val): 0.057931
- [17:17:53] Epoch 9: Loss(train): 0.057890 Loss(val): 0.057784
- [17:18:11] Epoch 10: Loss(train): 0.057887 Loss(val): 0.057939
- [17:18:28] Epoch 11: Loss(train): 0.058021 Loss(val): 0.058248
- [17:18:45] Epoch 12: Loss(train): 0.057937 Loss(val): 0.058021
- [17:19:03] Epoch 13: Loss(train): 0.057402 Loss(val): 0.057456
- [17:19:20] Epoch 14: Loss(train): 0.056914 Loss(val): 0.057007
- [17:19:37] Epoch 15: Loss(train): 0.056709 Loss(val): 0.056965
- [17:19:55] Epoch 16: Loss(train): 0.056852 Loss(val): 0.057157
- [17:20:13] Epoch 17: Loss(train): 0.057367 Loss(val): 0.057542
- [17:20:31] Epoch 18: Loss(train): 0.057601 Loss(val): 0.057665
- [17:20:49] Epoch 19: Loss(train): 0.057731 Loss(val): 0.057718
- [17:21:07] Epoch 20: Loss(train): 0.057218 Loss(val): 0.057077
- [17:21:24] Epoch 21: Loss(train): 0.056135 Loss(val): 0.056025
- [17:21:43] Epoch 22: Loss(train): 0.055024 Loss(val): 0.054987
- [17:22:00] Epoch 23: Loss(train): 0.054295 Loss(val): 0.054281
- [17:22:18] Epoch 24: Loss(train): 0.053959 Loss(val): 0.053951
- [17:22:36] Epoch 25: Loss(train): 0.053691 Loss(val): 0.053717
- [17:22:53] Epoch 26: Loss(train): 0.053627 Loss(val): 0.053622
- [17:23:11] Epoch 27: Loss(train): 0.053773 Loss(val): 0.053724
- [17:23:29] Epoch 28: Loss(train): 0.054236 Loss(val): 0.053973
- [17:23:47] Epoch 29: Loss(train): 0.054522 Loss(val): 0.054070
- [17:24:04] Epoch 30: Loss(train): 0.054943 Loss(val): 0.054246
- [17:24:22] Epoch 31: Loss(train): 0.055004 Loss(val): 0.054189
- [17:24:40] Epoch 32: Loss(train): 0.054876 Loss(val): 0.053994
- [17:24:57] Epoch 33: Loss(train): 0.054486 Loss(val): 0.053667
- [17:25:15] Epoch 34: Loss(train): 0.053800 Loss(val): 0.053187
- [17:25:33] Epoch 35: Loss(train): 0.053172 Loss(val): 0.052797
- [17:25:51] Epoch 36: Loss(train): 0.052741 Loss(val): 0.052563
- [17:26:09] Epoch 37: Loss(train): 0.052461 Loss(val): 0.052420
- [17:26:27] Epoch 38: Loss(train): 0.052257 Loss(val): 0.052302
- [17:26:46] Epoch 39: Loss(train): 0.052129 Loss(val): 0.052214
- [17:27:14] Epoch 40: Loss(train): 0.052035 Loss(val): 0.052163
- [17:27:44] Epoch 41: Loss(train): 0.051944 Loss(val): 0.052115
- [17:28:14] Epoch 42: Loss(train): 0.051880 Loss(val): 0.052084
- [17:28:31] Epoch 43: Loss(train): 0.051804 Loss(val): 0.052055
- [17:28:51] Epoch 44: Loss(train): 0.051738 Loss(val): 0.052032
- [17:29:18] Epoch 45: Loss(train): 0.051650 Loss(val): 0.052006
- [17:29:47] Epoch 46: Loss(train): 0.051562 Loss(val): 0.051986
- [17:30:09] Epoch 47: Loss(train): 0.051479 Loss(val): 0.051978
- [17:30:29] Epoch 48: Loss(train): 0.051412 Loss(val): 0.051979
- [17:30:54] Epoch 49: Loss(train): 0.051361 Loss(val): 0.051999
- [17:31:19] Epoch 50: Loss(train): 0.051323 Loss(val): 0.052038
- [17:31:38] Epoch 51: Loss(train): 0.051302 Loss(val): 0.052081
- [17:31:56] Epoch 52: Loss(train): 0.051278 Loss(val): 0.052096
- [17:32:15] Epoch 53: Loss(train): 0.051261 Loss(val): 0.052118
- [17:32:46] Epoch 54: Loss(train): 0.051239 Loss(val): 0.052120
- [17:33:13] Epoch 55: Loss(train): 0.051207 Loss(val): 0.052095
- Converged at Loss(train): 0.052161, Loss(val): 0.053005 in epoch 55 with accuracy(val): 0.645272
- Configuration learning_rate=0.03, decay_step=60
- [17:33:32] INIT Loss(val): 0.165520 Accuracy: 0.073197
- [17:33:53] Epoch 1: Loss(train): 0.090615 Loss(val): 0.089994
- [17:34:11] Epoch 2: Loss(train): 0.069812 Loss(val): 0.068600
- [17:34:30] Epoch 3: Loss(train): 0.064214 Loss(val): 0.063470
- [17:34:49] Epoch 4: Loss(train): 0.061411 Loss(val): 0.061059
- [17:35:10] Epoch 5: Loss(train): 0.060100 Loss(val): 0.059814
- [17:35:31] Epoch 6: Loss(train): 0.059087 Loss(val): 0.059043
- [17:35:52] Epoch 7: Loss(train): 0.058686 Loss(val): 0.058661
- [17:36:10] Epoch 8: Loss(train): 0.058388 Loss(val): 0.058214
- [17:36:30] Epoch 9: Loss(train): 0.058066 Loss(val): 0.057698
- [17:36:51] Epoch 10: Loss(train): 0.057511 Loss(val): 0.057332
- [17:37:16] Epoch 11: Loss(train): 0.057068 Loss(val): 0.057097
- [17:37:35] Epoch 12: Loss(train): 0.057114 Loss(val): 0.057160
- [17:37:55] Epoch 13: Loss(train): 0.056885 Loss(val): 0.056944
- [17:38:15] Epoch 14: Loss(train): 0.056694 Loss(val): 0.056566
- [17:38:35] Epoch 15: Loss(train): 0.056233 Loss(val): 0.056134
- [17:38:55] Epoch 16: Loss(train): 0.055359 Loss(val): 0.055511
- [17:39:15] Epoch 17: Loss(train): 0.055256 Loss(val): 0.055431
- [17:39:35] Epoch 18: Loss(train): 0.055643 Loss(val): 0.055726
- [17:39:54] Epoch 19: Loss(train): 0.056205 Loss(val): 0.056171
- [17:40:14] Epoch 20: Loss(train): 0.056926 Loss(val): 0.056773
- [17:40:34] Epoch 21: Loss(train): 0.057006 Loss(val): 0.056768
- [17:40:53] Epoch 22: Loss(train): 0.056528 Loss(val): 0.056344
- [17:41:12] Epoch 23: Loss(train): 0.055433 Loss(val): 0.055290
- [17:41:31] Epoch 24: Loss(train): 0.054533 Loss(val): 0.054371
- [17:41:50] Epoch 25: Loss(train): 0.054282 Loss(val): 0.054007
- [17:42:10] Epoch 26: Loss(train): 0.054135 Loss(val): 0.053829
- [17:42:29] Epoch 27: Loss(train): 0.054173 Loss(val): 0.053806
- [17:42:48] Epoch 28: Loss(train): 0.054269 Loss(val): 0.053833
- [17:43:07] Epoch 29: Loss(train): 0.054556 Loss(val): 0.054030
- [17:43:27] Epoch 30: Loss(train): 0.054662 Loss(val): 0.054104
- [17:43:47] Epoch 31: Loss(train): 0.054619 Loss(val): 0.054089
- [17:44:06] Epoch 32: Loss(train): 0.054174 Loss(val): 0.053796
- [17:44:25] Epoch 33: Loss(train): 0.053649 Loss(val): 0.053463
- [17:44:44] Epoch 34: Loss(train): 0.052997 Loss(val): 0.053043
- [17:45:03] Epoch 35: Loss(train): 0.052595 Loss(val): 0.052792
- [17:45:22] Epoch 36: Loss(train): 0.052346 Loss(val): 0.052621
- [17:45:41] Epoch 37: Loss(train): 0.052151 Loss(val): 0.052500
- [17:46:00] Epoch 38: Loss(train): 0.052059 Loss(val): 0.052406
- [17:46:19] Epoch 39: Loss(train): 0.052001 Loss(val): 0.052335
- [17:46:38] Epoch 40: Loss(train): 0.051941 Loss(val): 0.052277
- [17:46:57] Epoch 41: Loss(train): 0.051894 Loss(val): 0.052226
- [17:47:17] Epoch 42: Loss(train): 0.051868 Loss(val): 0.052181
- [17:47:36] Epoch 43: Loss(train): 0.051789 Loss(val): 0.052130
- [17:47:55] Epoch 44: Loss(train): 0.051722 Loss(val): 0.052078
- [17:48:13] Epoch 45: Loss(train): 0.051639 Loss(val): 0.052052
- [17:48:33] Epoch 46: Loss(train): 0.051541 Loss(val): 0.052007
- [17:48:52] Epoch 47: Loss(train): 0.051465 Loss(val): 0.052000
- [17:49:12] Epoch 48: Loss(train): 0.051404 Loss(val): 0.051979
- [17:49:31] Epoch 49: Loss(train): 0.051339 Loss(val): 0.051980
- [17:49:51] Epoch 50: Loss(train): 0.051292 Loss(val): 0.051991
- [17:50:11] Epoch 51: Loss(train): 0.051249 Loss(val): 0.051991
- [17:50:30] Epoch 52: Loss(train): 0.051222 Loss(val): 0.052008
- [17:50:50] Epoch 53: Loss(train): 0.051198 Loss(val): 0.052018
- [17:51:09] Epoch 54: Loss(train): 0.051173 Loss(val): 0.052017
- [17:51:29] Epoch 55: Loss(train): 0.051146 Loss(val): 0.052003
- [17:51:48] Epoch 56: Loss(train): 0.051118 Loss(val): 0.051979
- [17:52:08] Epoch 57: Loss(train): 0.051087 Loss(val): 0.051948
- [17:52:27] Epoch 58: Loss(train): 0.051059 Loss(val): 0.051916
- [17:52:48] Epoch 59: Loss(train): 0.051032 Loss(val): 0.051880
- [17:53:08] Epoch 60: Loss(train): 0.051009 Loss(val): 0.051852
- Converged at Loss(train): 0.051977, Loss(val): 0.052759 in epoch 60 with accuracy(val): 0.642109
- Configuration learning_rate=0.01, decay_step=20
- [17:53:25] INIT Loss(val): 0.187323 Accuracy: 0.077789
- [17:53:47] Epoch 1: Loss(train): 0.087576 Loss(val): 0.086033
- [17:54:07] Epoch 2: Loss(train): 0.068133 Loss(val): 0.066643
- [17:54:26] Epoch 3: Loss(train): 0.062122 Loss(val): 0.060931
- [17:54:46] Epoch 4: Loss(train): 0.058892 Loss(val): 0.058234
- [17:55:05] Epoch 5: Loss(train): 0.057786 Loss(val): 0.057269
- [17:55:24] Epoch 6: Loss(train): 0.057601 Loss(val): 0.057197
- [17:55:44] Epoch 7: Loss(train): 0.056949 Loss(val): 0.056568
- [17:56:03] Epoch 8: Loss(train): 0.056481 Loss(val): 0.056139
- [17:56:23] Epoch 9: Loss(train): 0.056194 Loss(val): 0.055886
- [17:56:43] Epoch 10: Loss(train): 0.055974 Loss(val): 0.055750
- [17:57:03] Epoch 11: Loss(train): 0.055800 Loss(val): 0.055777
- [17:57:22] Epoch 12: Loss(train): 0.055692 Loss(val): 0.055731
- [17:57:42] Epoch 13: Loss(train): 0.055691 Loss(val): 0.055779
- [17:58:02] Epoch 14: Loss(train): 0.055478 Loss(val): 0.055565
- [17:58:22] Epoch 15: Loss(train): 0.055101 Loss(val): 0.055146
- [17:58:42] Epoch 16: Loss(train): 0.054910 Loss(val): 0.054922
- [17:59:01] Epoch 17: Loss(train): 0.054790 Loss(val): 0.054875
- [17:59:21] Epoch 18: Loss(train): 0.054960 Loss(val): 0.055119
- [17:59:41] Epoch 19: Loss(train): 0.055063 Loss(val): 0.055270
- [18:00:02] Epoch 20: Loss(train): 0.054944 Loss(val): 0.055221
- [18:00:23] Epoch 21: Loss(train): 0.054798 Loss(val): 0.055048
- [18:00:44] Epoch 22: Loss(train): 0.054379 Loss(val): 0.054582
- [18:01:05] Epoch 23: Loss(train): 0.053794 Loss(val): 0.053956
- [18:01:31] Epoch 24: Loss(train): 0.053113 Loss(val): 0.053302
- [18:01:56] Epoch 25: Loss(train): 0.052780 Loss(val): 0.052874
- [18:02:22] Epoch 26: Loss(train): 0.052543 Loss(val): 0.052660
- [18:02:56] Epoch 27: Loss(train): 0.052278 Loss(val): 0.052454
- [18:03:31] Epoch 28: Loss(train): 0.052146 Loss(val): 0.052352
- [18:03:55] Epoch 29: Loss(train): 0.052045 Loss(val): 0.052292
- [18:04:17] Epoch 30: Loss(train): 0.051909 Loss(val): 0.052243
- [18:04:38] Epoch 31: Loss(train): 0.051838 Loss(val): 0.052235
- [18:05:01] Epoch 32: Loss(train): 0.051793 Loss(val): 0.052183
- [18:05:34] Epoch 33: Loss(train): 0.051737 Loss(val): 0.052164
- [18:06:03] Epoch 34: Loss(train): 0.051672 Loss(val): 0.052101
- [18:06:27] Epoch 35: Loss(train): 0.051629 Loss(val): 0.052036
- [18:07:02] Epoch 36: Loss(train): 0.051564 Loss(val): 0.051960
- [18:07:36] Epoch 37: Loss(train): 0.051538 Loss(val): 0.051894
- [18:08:09] Epoch 38: Loss(train): 0.051462 Loss(val): 0.051794
- [18:08:38] Epoch 39: Loss(train): 0.051404 Loss(val): 0.051718
- [18:09:02] Epoch 40: Loss(train): 0.051335 Loss(val): 0.051661
- [18:09:22] Epoch 41: Loss(train): 0.051274 Loss(val): 0.051597
- [18:09:44] Epoch 42: Loss(train): 0.051173 Loss(val): 0.051555
- [18:10:09] Epoch 43: Loss(train): 0.051112 Loss(val): 0.051520
- [18:10:30] Epoch 44: Loss(train): 0.051043 Loss(val): 0.051497
- [18:10:52] Epoch 45: Loss(train): 0.050990 Loss(val): 0.051464
- [18:11:13] Epoch 46: Loss(train): 0.050939 Loss(val): 0.051442
- [18:11:34] Epoch 47: Loss(train): 0.050889 Loss(val): 0.051419
- [18:11:55] Epoch 48: Loss(train): 0.050855 Loss(val): 0.051395
- [18:12:22] Epoch 49: Loss(train): 0.050825 Loss(val): 0.051377
- [18:12:44] Epoch 50: Loss(train): 0.050809 Loss(val): 0.051350
- [18:13:13] Epoch 51: Loss(train): 0.050787 Loss(val): 0.051347
- [18:13:35] Epoch 52: Loss(train): 0.050774 Loss(val): 0.051326
- [18:13:57] Epoch 53: Loss(train): 0.050761 Loss(val): 0.051317
- [18:14:18] Epoch 54: Loss(train): 0.050748 Loss(val): 0.051308
- [18:14:40] Epoch 55: Loss(train): 0.050737 Loss(val): 0.051294
- Converged at Loss(train): 0.051697, Loss(val): 0.052241 in epoch 55 with accuracy(val): 0.656922
- Configuration learning_rate=0.01, decay_step=40
- [18:15:00] INIT Loss(val): 0.177921 Accuracy: 0.078452
- [18:15:25] Epoch 1: Loss(train): 0.085913 Loss(val): 0.084972
- [18:15:47] Epoch 2: Loss(train): 0.067994 Loss(val): 0.067212
- [18:16:08] Epoch 3: Loss(train): 0.064593 Loss(val): 0.063531
- [18:16:29] Epoch 4: Loss(train): 0.062117 Loss(val): 0.061084
- [18:16:49] Epoch 5: Loss(train): 0.060363 Loss(val): 0.060003
- [18:17:10] Epoch 6: Loss(train): 0.059131 Loss(val): 0.058921
- [18:17:31] Epoch 7: Loss(train): 0.058338 Loss(val): 0.058271
- [18:17:52] Epoch 8: Loss(train): 0.057717 Loss(val): 0.057788
- [18:18:15] Epoch 9: Loss(train): 0.057529 Loss(val): 0.057686
- [18:18:35] Epoch 10: Loss(train): 0.057496 Loss(val): 0.057720
- [18:18:56] Epoch 11: Loss(train): 0.057429 Loss(val): 0.057801
- [18:19:17] Epoch 12: Loss(train): 0.057253 Loss(val): 0.057779
- [18:19:40] Epoch 13: Loss(train): 0.056893 Loss(val): 0.057320
- [18:20:02] Epoch 14: Loss(train): 0.056243 Loss(val): 0.056562
- [18:20:22] Epoch 15: Loss(train): 0.056184 Loss(val): 0.056340
- [18:20:45] Epoch 16: Loss(train): 0.056221 Loss(val): 0.056419
- [18:21:06] Epoch 17: Loss(train): 0.056585 Loss(val): 0.056762
- [18:21:28] Epoch 18: Loss(train): 0.056598 Loss(val): 0.056918
- [18:21:49] Epoch 19: Loss(train): 0.056542 Loss(val): 0.056837
- [18:22:10] Epoch 20: Loss(train): 0.056023 Loss(val): 0.056362
- [18:22:34] Epoch 21: Loss(train): 0.055122 Loss(val): 0.055430
- [18:22:55] Epoch 22: Loss(train): 0.054322 Loss(val): 0.054565
- [18:23:15] Epoch 23: Loss(train): 0.053714 Loss(val): 0.053932
- [18:24:01] Epoch 24: Loss(train): 0.053428 Loss(val): 0.053591
- [18:24:44] Epoch 25: Loss(train): 0.053074 Loss(val): 0.053378
- [18:25:28] Epoch 26: Loss(train): 0.052950 Loss(val): 0.053381
- [18:26:12] Epoch 27: Loss(train): 0.052878 Loss(val): 0.053369
- [18:26:55] Epoch 28: Loss(train): 0.052749 Loss(val): 0.053334
- [18:27:39] Epoch 29: Loss(train): 0.052654 Loss(val): 0.053279
- [18:28:23] Epoch 30: Loss(train): 0.052551 Loss(val): 0.053160
- [18:29:07] Epoch 31: Loss(train): 0.052434 Loss(val): 0.052998
- [18:29:51] Epoch 32: Loss(train): 0.052321 Loss(val): 0.052879
- [18:30:34] Epoch 33: Loss(train): 0.052148 Loss(val): 0.052693
- [18:31:20] Epoch 34: Loss(train): 0.052083 Loss(val): 0.052561
- [18:32:09] Epoch 35: Loss(train): 0.051987 Loss(val): 0.052415
- [18:33:10] Epoch 36: Loss(train): 0.051942 Loss(val): 0.052319
- [18:34:01] Epoch 37: Loss(train): 0.051942 Loss(val): 0.052249
- [18:35:10] Epoch 38: Loss(train): 0.051861 Loss(val): 0.052164
- [18:36:23] Epoch 39: Loss(train): 0.051847 Loss(val): 0.052113
- [18:37:32] Epoch 40: Loss(train): 0.051747 Loss(val): 0.052048
- [18:38:32] Epoch 41: Loss(train): 0.051614 Loss(val): 0.051969
- [18:39:30] Epoch 42: Loss(train): 0.051501 Loss(val): 0.051891
- [18:40:41] Epoch 43: Loss(train): 0.051425 Loss(val): 0.051849
- [18:41:25] Epoch 44: Loss(train): 0.051336 Loss(val): 0.051807
- [18:42:13] Epoch 45: Loss(train): 0.051292 Loss(val): 0.051769
- [18:43:09] Epoch 46: Loss(train): 0.051229 Loss(val): 0.051732
- [18:43:55] Epoch 47: Loss(train): 0.051206 Loss(val): 0.051705
- [18:44:47] Epoch 48: Loss(train): 0.051182 Loss(val): 0.051671
- [18:45:34] Epoch 49: Loss(train): 0.051149 Loss(val): 0.051648
- [18:46:25] Epoch 50: Loss(train): 0.051119 Loss(val): 0.051630
- [18:47:15] Epoch 51: Loss(train): 0.051105 Loss(val): 0.051612
- [18:48:02] Epoch 52: Loss(train): 0.051082 Loss(val): 0.051598
- [18:48:48] Epoch 53: Loss(train): 0.051052 Loss(val): 0.051591
- [18:49:37] Epoch 54: Loss(train): 0.051018 Loss(val): 0.051580
- Converged at Loss(train): 0.051932, Loss(val): 0.052555 in epoch 54 with accuracy(val): 0.645799
- Configuration learning_rate=0.01, decay_step=60
- [18:50:03] INIT Loss(val): 0.131412 Accuracy: 0.101854
- [18:50:48] Epoch 1: Loss(train): 0.080744 Loss(val): 0.079622
- [18:51:38] Epoch 2: Loss(train): 0.066512 Loss(val): 0.064695
- [18:52:26] Epoch 3: Loss(train): 0.062622 Loss(val): 0.061448
- [18:53:13] Epoch 4: Loss(train): 0.060374 Loss(val): 0.059260
- [18:54:00] Epoch 5: Loss(train): 0.058809 Loss(val): 0.058090
- [18:54:47] Epoch 6: Loss(train): 0.057931 Loss(val): 0.057646
- [18:55:35] Epoch 7: Loss(train): 0.057441 Loss(val): 0.057309
- [18:56:22] Epoch 8: Loss(train): 0.057106 Loss(val): 0.056946
- [18:57:08] Epoch 9: Loss(train): 0.056751 Loss(val): 0.056449
- [18:57:55] Epoch 10: Loss(train): 0.056720 Loss(val): 0.056406
- [18:58:42] Epoch 11: Loss(train): 0.056405 Loss(val): 0.056351
- [18:59:30] Epoch 12: Loss(train): 0.056383 Loss(val): 0.056457
- [19:00:17] Epoch 13: Loss(train): 0.056200 Loss(val): 0.056471
- [19:01:04] Epoch 14: Loss(train): 0.055845 Loss(val): 0.056025
- [19:01:52] Epoch 15: Loss(train): 0.055829 Loss(val): 0.055832
- [19:02:39] Epoch 16: Loss(train): 0.055686 Loss(val): 0.055626
- [19:03:25] Epoch 17: Loss(train): 0.055622 Loss(val): 0.055737
- [19:04:14] Epoch 18: Loss(train): 0.055771 Loss(val): 0.055958
- [19:05:05] Epoch 19: Loss(train): 0.055711 Loss(val): 0.056073
- [19:06:15] Epoch 20: Loss(train): 0.055973 Loss(val): 0.056337
- [19:07:04] Epoch 21: Loss(train): 0.055825 Loss(val): 0.056190
- [19:07:52] Epoch 22: Loss(train): 0.055313 Loss(val): 0.055599
- [19:08:41] Epoch 23: Loss(train): 0.054893 Loss(val): 0.055099
- [19:09:42] Epoch 24: Loss(train): 0.054391 Loss(val): 0.054471
- [19:10:44] Epoch 25: Loss(train): 0.053839 Loss(val): 0.053866
- [19:11:36] Epoch 26: Loss(train): 0.053508 Loss(val): 0.053500
- [19:12:40] Epoch 27: Loss(train): 0.053171 Loss(val): 0.053214
- [19:13:37] Epoch 28: Loss(train): 0.052970 Loss(val): 0.053028
- [19:14:26] Epoch 29: Loss(train): 0.052803 Loss(val): 0.052932
- [19:15:16] Epoch 30: Loss(train): 0.052774 Loss(val): 0.052891
- [19:16:07] Epoch 31: Loss(train): 0.052981 Loss(val): 0.052950
- [19:17:00] Epoch 32: Loss(train): 0.053209 Loss(val): 0.053051
- [19:18:01] Epoch 33: Loss(train): 0.053366 Loss(val): 0.053110
- [19:19:03] Epoch 34: Loss(train): 0.053671 Loss(val): 0.053226
- [19:19:51] Epoch 35: Loss(train): 0.053941 Loss(val): 0.053339
- [19:20:42] Epoch 36: Loss(train): 0.053890 Loss(val): 0.053279
- [19:21:33] Epoch 37: Loss(train): 0.053691 Loss(val): 0.053105
- [19:22:22] Epoch 38: Loss(train): 0.053180 Loss(val): 0.052768
- [19:23:23] Epoch 39: Loss(train): 0.052641 Loss(val): 0.052439
- [19:24:15] Epoch 40: Loss(train): 0.052294 Loss(val): 0.052223
- [19:25:05] Epoch 41: Loss(train): 0.052021 Loss(val): 0.052078
- [19:25:57] Epoch 42: Loss(train): 0.051852 Loss(val): 0.051986
- [19:26:46] Epoch 43: Loss(train): 0.051764 Loss(val): 0.051918
- [19:27:36] Epoch 44: Loss(train): 0.051702 Loss(val): 0.051857
- [19:28:29] Epoch 45: Loss(train): 0.051609 Loss(val): 0.051782
- [19:29:18] Epoch 46: Loss(train): 0.051520 Loss(val): 0.051719
- [19:30:08] Epoch 47: Loss(train): 0.051464 Loss(val): 0.051675
- [19:30:57] Epoch 48: Loss(train): 0.051390 Loss(val): 0.051640
- [19:31:47] Epoch 49: Loss(train): 0.051337 Loss(val): 0.051621
- [19:32:37] Epoch 50: Loss(train): 0.051246 Loss(val): 0.051601
- [19:33:26] Epoch 51: Loss(train): 0.051194 Loss(val): 0.051589
- [19:34:16] Epoch 52: Loss(train): 0.051123 Loss(val): 0.051589
- [19:35:06] Epoch 53: Loss(train): 0.051061 Loss(val): 0.051593
- [19:35:56] Epoch 54: Loss(train): 0.051009 Loss(val): 0.051610
- [19:37:03] Epoch 55: Loss(train): 0.050970 Loss(val): 0.051631
- [19:38:13] Epoch 56: Loss(train): 0.050941 Loss(val): 0.051682
- [19:39:31] Epoch 57: Loss(train): 0.050926 Loss(val): 0.051712
- [19:40:44] Epoch 58: Loss(train): 0.050924 Loss(val): 0.051774
- [19:41:48] Epoch 59: Loss(train): 0.050913 Loss(val): 0.051794
- [19:42:38] Epoch 60: Loss(train): 0.050901 Loss(val): 0.051803
- [19:43:59] Epoch 61: Loss(train): 0.050880 Loss(val): 0.051795
- [19:45:01] Epoch 62: Loss(train): 0.050853 Loss(val): 0.051767
- [19:46:02] Epoch 63: Loss(train): 0.050823 Loss(val): 0.051729
- Converged at Loss(train): 0.051745, Loss(val): 0.052544 in epoch 63 with accuracy(val): 0.647007
- Configuration learning_rate=0.003, decay_step=20
- [19:46:27] INIT Loss(val): 0.197653 Accuracy: 0.091241
- [19:47:20] Epoch 1: Loss(train): 0.079464 Loss(val): 0.077783
- [19:48:20] Epoch 2: Loss(train): 0.067149 Loss(val): 0.066139
- [19:49:25] Epoch 3: Loss(train): 0.062640 Loss(val): 0.061764
- [19:50:18] Epoch 4: Loss(train): 0.060908 Loss(val): 0.059928
- [19:51:14] Epoch 5: Loss(train): 0.058845 Loss(val): 0.058380
- [19:52:13] Epoch 6: Loss(train): 0.058177 Loss(val): 0.057776
- [19:53:06] Epoch 7: Loss(train): 0.057540 Loss(val): 0.057122
- [19:53:59] Epoch 8: Loss(train): 0.056814 Loss(val): 0.056477
- [19:54:56] Epoch 9: Loss(train): 0.056360 Loss(val): 0.056050
- [19:55:52] Epoch 10: Loss(train): 0.056028 Loss(val): 0.055813
- [19:56:48] Epoch 11: Loss(train): 0.055597 Loss(val): 0.055646
- [19:57:44] Epoch 12: Loss(train): 0.055225 Loss(val): 0.055342
- [19:58:37] Epoch 13: Loss(train): 0.055302 Loss(val): 0.055394
- [19:59:31] Epoch 14: Loss(train): 0.055238 Loss(val): 0.055283
- [20:00:26] Epoch 15: Loss(train): 0.055095 Loss(val): 0.055047
- [20:01:19] Epoch 16: Loss(train): 0.054809 Loss(val): 0.054673
- [20:02:12] Epoch 17: Loss(train): 0.054337 Loss(val): 0.054301
- [20:03:06] Epoch 18: Loss(train): 0.053958 Loss(val): 0.054079
- [20:03:59] Epoch 19: Loss(train): 0.053873 Loss(val): 0.054106
- [20:04:51] Epoch 20: Loss(train): 0.054026 Loss(val): 0.054221
- [20:05:45] Epoch 21: Loss(train): 0.054201 Loss(val): 0.054455
- [20:06:38] Epoch 22: Loss(train): 0.054305 Loss(val): 0.054560
- [20:07:32] Epoch 23: Loss(train): 0.054394 Loss(val): 0.054562
- [20:08:25] Epoch 24: Loss(train): 0.054172 Loss(val): 0.054326
- [20:09:19] Epoch 25: Loss(train): 0.053743 Loss(val): 0.053848
- [20:10:12] Epoch 26: Loss(train): 0.053142 Loss(val): 0.053302
- [20:11:06] Epoch 27: Loss(train): 0.052741 Loss(val): 0.052896
- [20:11:59] Epoch 28: Loss(train): 0.052561 Loss(val): 0.052737
- [20:13:15] Epoch 29: Loss(train): 0.052489 Loss(val): 0.052637
- [20:14:12] Epoch 30: Loss(train): 0.052407 Loss(val): 0.052581
- [20:15:14] Epoch 31: Loss(train): 0.052487 Loss(val): 0.052619
- [20:16:17] Epoch 32: Loss(train): 0.052642 Loss(val): 0.052717
- [20:17:31] Epoch 33: Loss(train): 0.052745 Loss(val): 0.052786
- [20:18:41] Epoch 34: Loss(train): 0.052957 Loss(val): 0.052931
- [20:19:53] Epoch 35: Loss(train): 0.053042 Loss(val): 0.052976
- [20:20:52] Epoch 36: Loss(train): 0.053068 Loss(val): 0.052970
- [20:21:49] Epoch 37: Loss(train): 0.052881 Loss(val): 0.052853
- [20:22:59] Epoch 38: Loss(train): 0.052606 Loss(val): 0.052675
- [20:24:06] Epoch 39: Loss(train): 0.052210 Loss(val): 0.052439
- [20:25:05] Epoch 40: Loss(train): 0.051897 Loss(val): 0.052271
- [20:26:08] Epoch 41: Loss(train): 0.051670 Loss(val): 0.052125
- [20:27:04] Epoch 42: Loss(train): 0.051492 Loss(val): 0.052033
- [20:28:03] Epoch 43: Loss(train): 0.051401 Loss(val): 0.051973
- [20:28:59] Epoch 44: Loss(train): 0.051340 Loss(val): 0.051914
- [20:30:00] Epoch 45: Loss(train): 0.051286 Loss(val): 0.051879
- [20:31:02] Epoch 46: Loss(train): 0.051234 Loss(val): 0.051821
- [20:31:59] Epoch 47: Loss(train): 0.051182 Loss(val): 0.051773
- [20:32:55] Epoch 48: Loss(train): 0.051136 Loss(val): 0.051725
- [20:33:51] Epoch 49: Loss(train): 0.051084 Loss(val): 0.051684
- [20:34:47] Epoch 50: Loss(train): 0.051037 Loss(val): 0.051649
- [20:35:43] Epoch 51: Loss(train): 0.050997 Loss(val): 0.051624
- [20:36:43] Epoch 52: Loss(train): 0.050962 Loss(val): 0.051611
- [20:37:39] Epoch 53: Loss(train): 0.050926 Loss(val): 0.051608
- [20:38:34] Epoch 54: Loss(train): 0.050892 Loss(val): 0.051609
- [20:39:31] Epoch 55: Loss(train): 0.050861 Loss(val): 0.051612
- [20:40:26] Epoch 56: Loss(train): 0.050835 Loss(val): 0.051619
- [20:41:23] Epoch 57: Loss(train): 0.050810 Loss(val): 0.051647
- [20:42:18] Epoch 58: Loss(train): 0.050793 Loss(val): 0.051664
- [20:43:13] Epoch 59: Loss(train): 0.050780 Loss(val): 0.051697
- [20:44:09] Epoch 60: Loss(train): 0.050769 Loss(val): 0.051714
- [20:45:06] Epoch 61: Loss(train): 0.050759 Loss(val): 0.051723
- [20:46:02] Epoch 62: Loss(train): 0.050747 Loss(val): 0.051723
- [20:47:04] Epoch 63: Loss(train): 0.050739 Loss(val): 0.051730
- [20:48:21] Epoch 64: Loss(train): 0.050730 Loss(val): 0.051738
- [20:49:22] Epoch 65: Loss(train): 0.050708 Loss(val): 0.051697
- Converged at Loss(train): 0.051663, Loss(val): 0.052587 in epoch 65 with accuracy(val): 0.643129
- Configuration learning_rate=0.003, decay_step=40
- [20:49:52] INIT Loss(val): 0.131267 Accuracy: 0.083214
- [20:50:53] Epoch 1: Loss(train): 0.085294 Loss(val): 0.084576
- [20:52:03] Epoch 2: Loss(train): 0.067979 Loss(val): 0.066454
- [20:53:25] Epoch 3: Loss(train): 0.064692 Loss(val): 0.063468
- [20:54:50] Epoch 4: Loss(train): 0.060204 Loss(val): 0.059563
- [20:55:54] Epoch 5: Loss(train): 0.059113 Loss(val): 0.058688
- [20:56:51] Epoch 6: Loss(train): 0.058466 Loss(val): 0.058163
- [20:57:49] Epoch 7: Loss(train): 0.058073 Loss(val): 0.057866
- [20:58:55] Epoch 8: Loss(train): 0.057293 Loss(val): 0.057096
- [20:59:57] Epoch 9: Loss(train): 0.056591 Loss(val): 0.056324
- [21:01:01] Epoch 10: Loss(train): 0.056213 Loss(val): 0.056077
- [21:02:04] Epoch 11: Loss(train): 0.055718 Loss(val): 0.055704
- [21:03:08] Epoch 12: Loss(train): 0.055250 Loss(val): 0.055418
- [21:04:14] Epoch 13: Loss(train): 0.055137 Loss(val): 0.055309
- [21:05:16] Epoch 14: Loss(train): 0.055121 Loss(val): 0.055232
- [21:06:19] Epoch 15: Loss(train): 0.055151 Loss(val): 0.055145
- [21:07:20] Epoch 16: Loss(train): 0.055220 Loss(val): 0.055140
- [21:08:20] Epoch 17: Loss(train): 0.054638 Loss(val): 0.054753
- [21:09:19] Epoch 18: Loss(train): 0.054304 Loss(val): 0.054612
- [21:10:20] Epoch 19: Loss(train): 0.054102 Loss(val): 0.054496
- [21:11:21] Epoch 20: Loss(train): 0.054121 Loss(val): 0.054624
- [21:12:20] Epoch 21: Loss(train): 0.054457 Loss(val): 0.054914
- [21:13:20] Epoch 22: Loss(train): 0.054731 Loss(val): 0.055191
- [21:14:20] Epoch 23: Loss(train): 0.054807 Loss(val): 0.055170
- [21:15:19] Epoch 24: Loss(train): 0.054750 Loss(val): 0.055053
- [21:16:20] Epoch 25: Loss(train): 0.054323 Loss(val): 0.054540
- [21:17:19] Epoch 26: Loss(train): 0.053600 Loss(val): 0.053798
- [21:18:17] Epoch 27: Loss(train): 0.053148 Loss(val): 0.053314
- [21:19:16] Epoch 28: Loss(train): 0.052852 Loss(val): 0.053044
- [21:20:15] Epoch 29: Loss(train): 0.052607 Loss(val): 0.052848
- [21:21:39] Epoch 30: Loss(train): 0.052449 Loss(val): 0.052746
- [21:23:03] Epoch 31: Loss(train): 0.052500 Loss(val): 0.052788
- [21:24:22] Epoch 32: Loss(train): 0.052512 Loss(val): 0.052785
- [21:25:49] Epoch 33: Loss(train): 0.052571 Loss(val): 0.052789
- [21:26:54] Epoch 34: Loss(train): 0.052684 Loss(val): 0.052806
- [21:28:20] Epoch 35: Loss(train): 0.052755 Loss(val): 0.052841
- [21:29:30] Epoch 36: Loss(train): 0.052775 Loss(val): 0.052819
- [21:30:48] Epoch 37: Loss(train): 0.052677 Loss(val): 0.052722
- [21:32:02] Epoch 38: Loss(train): 0.052491 Loss(val): 0.052566
- [21:33:11] Epoch 39: Loss(train): 0.052202 Loss(val): 0.052362
- [21:34:15] Epoch 40: Loss(train): 0.051935 Loss(val): 0.052191
- [21:35:18] Epoch 41: Loss(train): 0.051739 Loss(val): 0.052059
- [21:36:22] Epoch 42: Loss(train): 0.051572 Loss(val): 0.051957
- [21:37:25] Epoch 43: Loss(train): 0.051486 Loss(val): 0.051892
- [21:38:29] Epoch 44: Loss(train): 0.051410 Loss(val): 0.051844
- [21:39:35] Epoch 45: Loss(train): 0.051352 Loss(val): 0.051801
- [21:40:38] Epoch 46: Loss(train): 0.051312 Loss(val): 0.051755
- [21:41:45] Epoch 47: Loss(train): 0.051259 Loss(val): 0.051708
- [21:42:48] Epoch 48: Loss(train): 0.051191 Loss(val): 0.051682
- [21:43:52] Epoch 49: Loss(train): 0.051153 Loss(val): 0.051656
- [21:44:55] Epoch 50: Loss(train): 0.051126 Loss(val): 0.051639
- [21:45:56] Epoch 51: Loss(train): 0.051106 Loss(val): 0.051628
- [21:46:59] Epoch 52: Loss(train): 0.051067 Loss(val): 0.051622
- [21:48:02] Epoch 53: Loss(train): 0.051033 Loss(val): 0.051616
- [21:49:05] Epoch 54: Loss(train): 0.051009 Loss(val): 0.051614
- [21:50:07] Epoch 55: Loss(train): 0.050979 Loss(val): 0.051613
- [21:51:08] Epoch 56: Loss(train): 0.050938 Loss(val): 0.051623
- [21:52:11] Epoch 57: Loss(train): 0.050908 Loss(val): 0.051627
- [21:53:14] Epoch 58: Loss(train): 0.050878 Loss(val): 0.051646
- [21:54:23] Epoch 59: Loss(train): 0.050853 Loss(val): 0.051660
- [21:55:29] Epoch 60: Loss(train): 0.050829 Loss(val): 0.051672
- Converged at Loss(train): 0.051769, Loss(val): 0.052593 in epoch 60 with accuracy(val): 0.647092
- Configuration learning_rate=0.003, decay_step=60
- [21:56:01] INIT Loss(val): 0.128400 Accuracy: 0.091173
- [21:57:04] Epoch 1: Loss(train): 0.087630 Loss(val): 0.086648
- [21:58:09] Epoch 2: Loss(train): 0.068650 Loss(val): 0.067234
- [21:59:13] Epoch 3: Loss(train): 0.064484 Loss(val): 0.062984
- [22:00:15] Epoch 4: Loss(train): 0.062109 Loss(val): 0.060795
- [22:01:20] Epoch 5: Loss(train): 0.059852 Loss(val): 0.059078
- [22:02:26] Epoch 6: Loss(train): 0.059096 Loss(val): 0.058652
- [22:03:28] Epoch 7: Loss(train): 0.058634 Loss(val): 0.058505
- [22:04:30] Epoch 8: Loss(train): 0.058904 Loss(val): 0.058879
- [22:05:33] Epoch 9: Loss(train): 0.058865 Loss(val): 0.058817
- [22:06:40] Epoch 10: Loss(train): 0.058145 Loss(val): 0.058074
- [22:07:45] Epoch 11: Loss(train): 0.057094 Loss(val): 0.057320
- [22:08:57] Epoch 12: Loss(train): 0.056686 Loss(val): 0.057000
- [22:10:03] Epoch 13: Loss(train): 0.056440 Loss(val): 0.056806
- [22:11:07] Epoch 14: Loss(train): 0.056780 Loss(val): 0.056969
- [22:12:15] Epoch 15: Loss(train): 0.057121 Loss(val): 0.057249
- [22:13:19] Epoch 16: Loss(train): 0.057089 Loss(val): 0.057170
- [22:14:23] Epoch 17: Loss(train): 0.056695 Loss(val): 0.056844
- [22:15:28] Epoch 18: Loss(train): 0.056033 Loss(val): 0.056204
- [22:16:41] Epoch 19: Loss(train): 0.055199 Loss(val): 0.055399
- [22:17:51] Epoch 20: Loss(train): 0.054661 Loss(val): 0.054834
- [22:18:59] Epoch 21: Loss(train): 0.054271 Loss(val): 0.054396
- [22:20:05] Epoch 22: Loss(train): 0.054036 Loss(val): 0.054124
- [22:21:12] Epoch 23: Loss(train): 0.054021 Loss(val): 0.054075
- [22:22:18] Epoch 24: Loss(train): 0.053884 Loss(val): 0.054009
- [22:23:23] Epoch 25: Loss(train): 0.053809 Loss(val): 0.053988
- [22:24:28] Epoch 26: Loss(train): 0.053688 Loss(val): 0.053978
- [22:25:32] Epoch 27: Loss(train): 0.053466 Loss(val): 0.053863
- [22:26:35] Epoch 28: Loss(train): 0.053356 Loss(val): 0.053755
- [22:27:42] Epoch 29: Loss(train): 0.053260 Loss(val): 0.053652
- [22:28:47] Epoch 30: Loss(train): 0.053242 Loss(val): 0.053502
- [22:29:52] Epoch 31: Loss(train): 0.053209 Loss(val): 0.053376
- [22:30:55] Epoch 32: Loss(train): 0.053310 Loss(val): 0.053301
- [22:32:02] Epoch 33: Loss(train): 0.053360 Loss(val): 0.053194
- [22:33:11] Epoch 34: Loss(train): 0.053411 Loss(val): 0.053129
- [22:34:18] Epoch 35: Loss(train): 0.053490 Loss(val): 0.053088
- [22:35:25] Epoch 36: Loss(train): 0.053329 Loss(val): 0.052931
- [22:36:35] Epoch 37: Loss(train): 0.053057 Loss(val): 0.052750
- [22:37:46] Epoch 38: Loss(train): 0.052699 Loss(val): 0.052533
- [22:38:57] Epoch 39: Loss(train): 0.052314 Loss(val): 0.052344
- [22:40:06] Epoch 40: Loss(train): 0.052103 Loss(val): 0.052243
- [22:41:26] Epoch 41: Loss(train): 0.051960 Loss(val): 0.052185
- [22:42:35] Epoch 42: Loss(train): 0.051858 Loss(val): 0.052141
- [22:43:43] Epoch 43: Loss(train): 0.051790 Loss(val): 0.052123
- [22:44:48] Epoch 44: Loss(train): 0.051754 Loss(val): 0.052102
- [22:45:55] Epoch 45: Loss(train): 0.051746 Loss(val): 0.052096
- [22:47:15] Epoch 46: Loss(train): 0.051733 Loss(val): 0.052079
- [22:48:27] Epoch 47: Loss(train): 0.051714 Loss(val): 0.052058
- [22:49:37] Epoch 48: Loss(train): 0.051704 Loss(val): 0.052038
- [22:50:50] Epoch 49: Loss(train): 0.051654 Loss(val): 0.052020
- [22:51:58] Epoch 50: Loss(train): 0.051591 Loss(val): 0.051994
- [22:53:08] Epoch 51: Loss(train): 0.051546 Loss(val): 0.051980
- Converged at Loss(train): 0.052474, Loss(val): 0.052877 in epoch 51 with accuracy(val): 0.638793
|