소스 검색

bugfixes in autoencoder and verbose

Sebastian Vendt 6 년 전
부모
커밋
0b90655af9
3개의 변경된 파일, 23개의 추가작업 그리고 10개의 삭제작업
  1. +10 −4  julia/autoencoder.jl
  2. +5 −4  julia/dataManager.jl
  3. +8 −2  julia/verbose.jl

+ 10 - 4
julia/autoencoder.jl

@@ -6,12 +6,16 @@ include("./verbose.jl")
 using .verbose
 using .dataManager: make_batch
 
+using FeedbackNets
+
 dataset_folderpath = "../MATLAB/TrainingData/"
 dataset_name = "2019_09_09_1658"
 hidden1 = 150
 hidden2 = 80
 epochs = 50
 
+flatten(x) = reshape(x, :, size(x, ndims(x)))
+
 train = make_batch(dataset_folderpath, "$(dataset_name)_TRAIN.mat", normalize_data=false, truncate_data=false)
 val = make_batch(dataset_folderpath, "$(dataset_name)_VAL.mat", normalize_data=false, truncate_data=false)
 test = make_batch(dataset_folderpath, "$(dataset_name)_TEST.mat", normalize_data=false, truncate_data=false)
@@ -22,7 +26,8 @@ test = gpu.(test)
 
 
 model = Chain(
-	# encoding 
+	# encoding
+	flatten, 
 	Dense(288, hidden1, relu),
 	Dense(hidden1, hidden2, relu),
 	Dense(hidden2, 2),
@@ -35,13 +40,14 @@ model = Chain(
 	
 model = model |> gpu
 
-loss(x) = mse(model(x), x)
+loss(x, y) = Flux.mse(model(x), flatten(x))
+loss(x) = Flux.mse(model(x), flatten(x))
 
 opt = ADAM()
 
 for i in 1:epochs
 	Flux.train!(loss, params(model), train, opt)
-	@tprintf("Epoch %i: Loss: %f", i, loss(train[1]))
+	@tprintf("Epoch %i: Loss: %f\n", i, loss(train[1][1]))
 end
 
-	
+	

+ 5 - 4
julia/dataManager.jl

@@ -38,8 +38,8 @@ Structure of the .mat file:
 where N denotes the number of samples, 50 is the window size and 6 are the number of channels
 """
 function make_batch(filepath, filenames...; batch_size=100, normalize_data=true, truncate_data=false)
-    data = Array{Float64}(undef, 0)
-    labels = Array{Float64}(undef, 0)
+    data = nothing # Array{Float64}(undef, 0)
+    labels = nothing # Array{Float64}(undef, 0)
     for (i, filename) in enumerate(filenames)
         # load the data from the mat file
         file = "$filepath$filename"
@@ -50,10 +50,11 @@ function make_batch(filepath, filenames...; batch_size=100, normalize_data=true,
         # size(bin_targets) = (N, 10)
         labelsPart = read(matfile, "labels")
         close(matfile) 
-
+        if (isnothing(data)) data = dataPart; labels = labelsPart;
+        else
         data = cat(dims=3, data, dataPart)
         labels = cat(dims=2, labels, labelsPart)   
-        
+        end
     end
 	
 	# add singleton dimension and permute dims so it matches the convention of Flux width x height x channels x batchsize(Setsize)   

+ 8 - 2
julia/verbose.jl

@@ -1,8 +1,14 @@
 module verbose
 
-export @tprintf
+using Dates
+
+using Base.Printf: _printf, is_str_expr, fix_dec, DIGITS, DIGITSs, print_fixed, print_fixed_width, decode_dec, decode_hex,
+                   ini_hex, ini_HEX, print_exp_a, decode_0ct, decode_HEX, ini_dec, print_exp_e,
+                   decode_oct, _limit, SmallNumber
 
+export @tprintf
 
+time_format = "HH:MM:SS"
 
 
 """
@@ -21,4 +27,4 @@ macro tprintf(args...)
     end
 end
 
-end # module verbose
+end # module verbose