log_18_09_2019.log 9.8 KB

  1. --------[18_09_2019 15:33:43]--------
  2. Random Grid Search
  3. Search 1 of 500
  4. momentum0.92, features=[32, 32, 32], dropout_rate=0.1
  5. kernel=Tuple{Int64,Int64}[(3, 1), (3, 1), (2, 6)], pooldims=Tuple{Int64,Int64}[(3, 1), (3, 1)], learning_rate=0.1
  6. [15:34:55] INIT Loss(val): 0.125317
  7. [15:36:49] Epoch 2: Loss(train): 0.075287 Loss(val): 0.075063
  8. [15:37:26] Epoch 4: Loss(train): 0.068476 Loss(val): 0.069180
  9. [15:37:55] Epoch 6: Loss(train): 0.065515 Loss(val): 0.066542
  10. [15:38:22] Epoch 8: Loss(train): 0.063886 Loss(val): 0.064420
  11. [15:38:53] Epoch 10: Loss(train): 0.063473 Loss(val): 0.062857
  12. [15:39:23] Epoch 12: Loss(train): 0.061911 Loss(val): 0.061451
  13. [15:39:52] Epoch 14: Loss(train): 0.060242 Loss(val): 0.059912
  14. [15:40:22] Epoch 16: Loss(train): 0.058877 Loss(val): 0.058791
  15. [15:40:51] Epoch 18: Loss(train): 0.058132 Loss(val): 0.058110
  16. [15:41:19] Epoch 20: Loss(train): 0.057336 Loss(val): 0.057570
  17. [15:41:49] Epoch 22: Loss(train): 0.056594 Loss(val): 0.056818
  18. Early stopping at 0
  19. Search 2 of 500
  20. momentum0.94, features=[64, 64, 64], dropout_rate=0.6
  21. kernel=Tuple{Int64,Int64}[(3, 1), (3, 1), (2, 6)], pooldims=Tuple{Int64,Int64}[(2, 1), (2, 1)], learning_rate=0.01
  22. [15:41:55] INIT Loss(val): 0.129147
  23. [15:42:23] Epoch 2: Loss(train): 0.078632 Loss(val): 0.078414
  24. [15:42:54] Epoch 4: Loss(train): 0.070202 Loss(val): 0.070887
  25. [15:43:22] Epoch 6: Loss(train): 0.066139 Loss(val): 0.066908
  26. [15:43:51] Epoch 8: Loss(train): 0.063795 Loss(val): 0.063834
  27. [15:44:19] Epoch 10: Loss(train): 0.063544 Loss(val): 0.062947
  28. [15:44:48] Epoch 12: Loss(train): 0.061362 Loss(val): 0.060898
  29. [15:45:18] Epoch 14: Loss(train): 0.060353 Loss(val): 0.060166
  30. [15:45:51] Epoch 16: Loss(train): 0.058804 Loss(val): 0.059007
  31. [15:46:21] Epoch 18: Loss(train): 0.057630 Loss(val): 0.057931
  32. [15:46:51] Epoch 20: Loss(train): 0.056580 Loss(val): 0.056976
  33. [15:47:19] Epoch 22: Loss(train): 0.055983 Loss(val): 0.056418
  34. [15:47:48] Epoch 24: Loss(train): 0.055619 Loss(val): 0.055914
  35. [15:48:20] Epoch 26: Loss(train): 0.054729 Loss(val): 0.055086
  36. [15:48:52] Epoch 28: Loss(train): 0.054222 Loss(val): 0.054575
  37. [15:49:27] FINAL(30) Loss(val): 0.054254
  38. [15:49:30] FINAL(30) Loss(val): 0.054254
  39. Search 3 of 500
  40. momentum0.94, features=[64, 64, 64], dropout_rate=0.1
  41. kernel=Tuple{Int64,Int64}[(7, 1), (7, 1), (2, 6)], pooldims=Tuple{Int64,Int64}[(3, 1), (3, 1)], learning_rate=0.001
  42. [15:49:31] INIT Loss(val): 0.126161
  43. [15:49:59] Epoch 2: Loss(train): 0.077885 Loss(val): 0.078426
  44. [15:50:34] Epoch 4: Loss(train): 0.070761 Loss(val): 0.071715
  45. [15:51:04] Epoch 6: Loss(train): 0.067634 Loss(val): 0.068910
  46. [15:51:33] Epoch 8: Loss(train): 0.065403 Loss(val): 0.066819
  47. [15:52:02] Epoch 10: Loss(train): 0.064137 Loss(val): 0.064249
  48. [15:52:30] Epoch 12: Loss(train): 0.062744 Loss(val): 0.062782
  49. [15:53:00] Epoch 14: Loss(train): 0.061248 Loss(val): 0.061450
  50. [15:53:29] Epoch 16: Loss(train): 0.059991 Loss(val): 0.060319
  51. [15:53:58] Epoch 18: Loss(train): 0.058670 Loss(val): 0.059030
  52. [15:54:29] Epoch 20: Loss(train): 0.057977 Loss(val): 0.058384
  53. [15:55:01] Epoch 22: Loss(train): 0.056949 Loss(val): 0.057303
  54. [15:55:30] Epoch 24: Loss(train): 0.056464 Loss(val): 0.056771
  55. [15:55:59] Epoch 26: Loss(train): 0.055319 Loss(val): 0.055677
  56. [15:56:29] Epoch 28: Loss(train): 0.055020 Loss(val): 0.055222
  57. [15:56:59] FINAL(30) Loss(val): 0.054447
  58. [15:57:02] FINAL(30) Loss(val): 0.054447
  59. Search 4 of 500
  60. momentum0.94, features=[96, 192, 192], dropout_rate=0.1
  61. kernel=Tuple{Int64,Int64}[(5, 1), (3, 1), (2, 6)], pooldims=Tuple{Int64,Int64}[(2, 1), (2, 1)], learning_rate=0.01
  62. [15:57:03] INIT Loss(val): 0.143514
  63. [15:57:29] Epoch 2: Loss(train): 0.077462 Loss(val): 0.078461
  64. [15:57:59] Epoch 4: Loss(train): 0.070687 Loss(val): 0.072219
  65. [15:58:30] Epoch 6: Loss(train): 0.067443 Loss(val): 0.068943
  66. [15:58:58] Epoch 8: Loss(train): 0.064753 Loss(val): 0.064823
  67. [15:59:28] Epoch 10: Loss(train): 0.063935 Loss(val): 0.063728
  68. [15:59:57] Epoch 12: Loss(train): 0.061962 Loss(val): 0.061995
  69. [16:00:27] Epoch 14: Loss(train): 0.061007 Loss(val): 0.061087
  70. [16:00:57] Epoch 16: Loss(train): 0.059649 Loss(val): 0.059899
  71. [16:01:26] Epoch 18: Loss(train): 0.059178 Loss(val): 0.059439
  72. [16:01:56] Epoch 20: Loss(train): 0.058152 Loss(val): 0.058421
  73. [16:02:25] Epoch 22: Loss(train): 0.057042 Loss(val): 0.057437
  74. [16:02:55] Epoch 24: Loss(train): 0.056095 Loss(val): 0.056463
  75. [16:03:25] Epoch 26: Loss(train): 0.055047 Loss(val): 0.055456
  76. [16:03:54] Epoch 28: Loss(train): 0.054421 Loss(val): 0.054925
  77. [16:04:24] FINAL(30) Loss(val): 0.054328
  78. [16:04:27] FINAL(30) Loss(val): 0.054328
  79. Search 5 of 500
  80. momentum0.94, features=[32, 32, 32], dropout_rate=0.3
  81. kernel=Tuple{Int64,Int64}[(3, 1), (3, 1), (3, 6)], pooldims=Tuple{Int64,Int64}[(2, 1), (2, 1)], learning_rate=0.03
  82. [16:04:27] INIT Loss(val): 0.142138
  83. [16:04:55] Epoch 2: Loss(train): 0.078344 Loss(val): 0.079304
  84. [16:05:24] Epoch 4: Loss(train): 0.069910 Loss(val): 0.070895
  85. [16:05:54] Epoch 6: Loss(train): 0.066428 Loss(val): 0.067457
  86. [16:06:23] Epoch 8: Loss(train): 0.063501 Loss(val): 0.064552
  87. [16:06:53] Epoch 10: Loss(train): 0.062729 Loss(val): 0.062416
  88. [16:07:23] Epoch 12: Loss(train): 0.061756 Loss(val): 0.061395
  89. [16:07:53] Epoch 14: Loss(train): 0.059968 Loss(val): 0.059828
  90. [16:08:22] Epoch 16: Loss(train): 0.059002 Loss(val): 0.058909
  91. [16:08:52] Epoch 18: Loss(train): 0.058062 Loss(val): 0.058228
  92. [16:09:22] Epoch 20: Loss(train): 0.057697 Loss(val): 0.057969
  93. [16:09:52] Epoch 22: Loss(train): 0.057117 Loss(val): 0.057452
  94. [16:10:22] Epoch 24: Loss(train): 0.056149 Loss(val): 0.056525
  95. [16:10:53] Epoch 26: Loss(train): 0.055586 Loss(val): 0.055998
  96. [16:11:23] Epoch 28: Loss(train): 0.054670 Loss(val): 0.055118
  97. [16:11:53] FINAL(30) Loss(val): 0.054470
  98. [16:11:56] FINAL(30) Loss(val): 0.054470
  99. Search 6 of 500
  100. momentum0.9, features=[64, 64, 64], dropout_rate=0.1
  101. kernel=Tuple{Int64,Int64}[(7, 1), (5, 1), (3, 6)], pooldims=Tuple{Int64,Int64}[(2, 1), (2, 1)], learning_rate=0.1
  102. [16:11:57] INIT Loss(val): 0.130219
  103. [16:12:25] Epoch 2: Loss(train): 0.077388 Loss(val): 0.077279
  104. [16:12:55] Epoch 4: Loss(train): 0.068726 Loss(val): 0.069355
  105. [16:13:26] Epoch 6: Loss(train): 0.065969 Loss(val): 0.066996
  106. [16:13:56] Epoch 8: Loss(train): 0.064049 Loss(val): 0.065168
  107. [16:14:26] Epoch 10: Loss(train): 0.062873 Loss(val): 0.062917
  108. [16:14:56] Epoch 12: Loss(train): 0.063614 Loss(val): 0.063174
  109. [16:15:26] Epoch 14: Loss(train): 0.061988 Loss(val): 0.061926
  110. [16:15:56] Epoch 16: Loss(train): 0.060946 Loss(val): 0.061109
  111. Early stopping at 0
  112. Search 7 of 500
  113. momentum0.92, features=[96, 192, 192], dropout_rate=0.1
  114. kernel=Tuple{Int64,Int64}[(7, 1), (7, 1), (3, 6)], pooldims=Tuple{Int64,Int64}[(3, 1), (3, 1)], learning_rate=0.01
  115. [16:16:01] INIT Loss(val): 0.158887
  116. [16:16:29] Epoch 2: Loss(train): 0.077085 Loss(val): 0.077593
  117. [16:16:59] Epoch 4: Loss(train): 0.068976 Loss(val): 0.070250
  118. [16:17:28] Epoch 6: Loss(train): 0.065539 Loss(val): 0.066987
  119. [16:17:58] Epoch 8: Loss(train): 0.062972 Loss(val): 0.064571
  120. [16:18:29] Epoch 10: Loss(train): 0.060879 Loss(val): 0.061406
  121. [16:18:58] Epoch 12: Loss(train): 0.060074 Loss(val): 0.060140
  122. [16:19:28] Epoch 14: Loss(train): 0.058958 Loss(val): 0.059320
  123. [16:19:58] Epoch 16: Loss(train): 0.057612 Loss(val): 0.058114
  124. [16:20:29] Epoch 18: Loss(train): 0.056968 Loss(val): 0.057745
  125. [16:20:59] Epoch 20: Loss(train): 0.056104 Loss(val): 0.056927
  126. [16:21:30] Epoch 22: Loss(train): 0.055559 Loss(val): 0.056354
  127. [16:22:20] Epoch 24: Loss(train): 0.055063 Loss(val): 0.055935
  128. [16:23:07] Epoch 26: Loss(train): 0.054734 Loss(val): 0.055537
  129. [16:23:56] Epoch 28: Loss(train): 0.054243 Loss(val): 0.055037
  130. [16:24:42] FINAL(30) Loss(val): 0.054857
  131. [16:24:45] FINAL(30) Loss(val): 0.054857
  132. Search 8 of 500
  133. momentum0.96, features=[64, 64, 64], dropout_rate=0.1
  134. kernel=Tuple{Int64,Int64}[(5, 1), (5, 1), (3, 6)], pooldims=Tuple{Int64,Int64}[(3, 1), (3, 1)], learning_rate=0.001
  135. [16:24:47] INIT Loss(val): 0.138277
  136. [16:25:30] Epoch 2: Loss(train): 0.079859 Loss(val): 0.080827
  137. [16:26:20] Epoch 4: Loss(train): 0.069111 Loss(val): 0.070549
  138. [16:27:11] Epoch 6: Loss(train): 0.065782 Loss(val): 0.067275
  139. [16:28:05] Epoch 8: Loss(train): 0.062852 Loss(val): 0.064066
  140. [16:29:03] Epoch 10: Loss(train): 0.061824 Loss(val): 0.061692
  141. [16:29:56] Epoch 12: Loss(train): 0.061533 Loss(val): 0.060959
  142. [16:30:49] Epoch 14: Loss(train): 0.060054 Loss(val): 0.059820
  143. [16:31:44] Epoch 16: Loss(train): 0.058679 Loss(val): 0.058628
  144. [16:32:37] Epoch 18: Loss(train): 0.057629 Loss(val): 0.057877
  145. [16:33:29] Epoch 20: Loss(train): 0.056418 Loss(val): 0.056854
  146. [16:34:21] Epoch 22: Loss(train): 0.056054 Loss(val): 0.056538
  147. [16:35:13] Epoch 24: Loss(train): 0.055056 Loss(val): 0.055656
  148. [16:36:01] Epoch 26: Loss(train): 0.054567 Loss(val): 0.055133
  149. [16:36:50] Epoch 28: Loss(train): 0.053922 Loss(val): 0.054542
  150. [16:37:37] FINAL(30) Loss(val): 0.053926
  151. [16:37:42] FINAL(30) Loss(val): 0.053926
  152. Search 9 of 500
  153. momentum0.9, features=[32, 64, 128], dropout_rate=0.3
  154. kernel=Tuple{Int64,Int64}[(7, 1), (7, 1), (2, 6)], pooldims=Tuple{Int64,Int64}[(3, 1), (3, 1)], learning_rate=0.1
  155. [16:37:43] INIT Loss(val): 0.149463
  156. [16:38:18] Epoch 2: Loss(train): 0.076049 Loss(val): 0.076722
  157. [16:39:03] Epoch 4: Loss(train): 0.068973 Loss(val): 0.069592
  158. [16:39:53] Epoch 6: Loss(train): 0.065072 Loss(val): 0.066030
  159. [16:40:37] Epoch 8: Loss(train): 0.063404 Loss(val): 0.064415
  160. [16:41:20] Epoch 10: Loss(train): 0.061349 Loss(val): 0.061938
  161. Early stopping at 0
  162. Search 10 of 500
  163. momentum0.98, features=[32, 32, 32], dropout_rate=0.4
  164. kernel=Tuple{Int64,Int64}[(5, 1), (3, 1), (2, 6)], pooldims=Tuple{Int64,Int64}[(3, 1), (3, 1)], learning_rate=0.1
  165. [16:41:41] INIT Loss(val): 0.127935
  166. [16:42:10] Epoch 2: Loss(train): 0.076783 Loss(val): 0.078367
  167. [16:42:43] Epoch 4: Loss(train): 0.069196 Loss(val): 0.070709
  168. [16:43:22] Epoch 6: Loss(train): 0.066136 Loss(val): 0.067725
  169. [16:43:54] Epoch 8: Loss(train): 0.063826 Loss(val): 0.065215
  170. [16:44:26] Epoch 10: Loss(train): 0.062568 Loss(val): 0.063721