diff --git a/Project.toml b/Project.toml index 24299db2..c625e0f1 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "ReservoirComputing" uuid = "7c2d2b1e-3dd4-11ea-355a-8f6a8116e294" authors = ["Francesco Martinuzzi"] -version = "0.6.2" +version = "0.6.3" [deps] Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" diff --git a/src/ReservoirComputing.jl b/src/ReservoirComputing.jl index 961679bb..3919757f 100644 --- a/src/ReservoirComputing.jl +++ b/src/ReservoirComputing.jl @@ -28,7 +28,7 @@ include("esn_reservoirs.jl") export init_reservoir_givendeg, init_reservoir_givensp, pseudoSVD, DLR, DLRB, SCR, CRJ include("echostatenetwork.jl") -export ESN, ESNpredict, ESNpredict_h_steps +export ESN, ESNpredict, ESNpredict_h_steps, ESNfitted include("dafesn.jl") export dafESN, dafESNpredict, dafESNpredict_h_steps diff --git a/src/echostatenetwork.jl b/src/echostatenetwork.jl index 5f3d9ac2..72cc2ff2 100644 --- a/src/echostatenetwork.jl +++ b/src/echostatenetwork.jl @@ -225,3 +225,63 @@ function ESNpredict_h_steps(esn::AbstractLeakyESN, end return output end + +""" + ESNfitted(esn::AbstractLeakyESN, W_out::Matrix; autonomous=false) + +Return the prediction for the training data using the trained output layer. The autonomous trigger can be used to have it return an autonomous prediction starting from the first point if true, or a point-by-point prediction if false. 
+""" + +function ESNfitted(esn::AbstractLeakyESN, W_out::Matrix; autonomous=false) + train_len = size(esn.train_data, 2) + output = zeros(Float64, esn.in_size, train_len) + x = zeros(size(esn.states, 1)) + + if autonomous + out = esn.train_data[:,1] + return _fitted!(output, esn, x, train_len, W_out, out) + else + return _fitted!(output, esn, x, train_len, W_out, esn.train_data) + end +end + +function _fitted!(output, esn, state, train_len, W_out, vector::Vector) + if esn.extended_states == false + for i=1:train_len + state = leaky_fixed_rnn(esn.activation, esn.alpha, esn.W, esn.W_in, state, vector) + x_new = nla(esn.nla_type, state) + vector = (W_out*x_new) + output[:, i] = vector + end + elseif esn.extended_states == true + for i=1:train_len + state = vcat(leaky_fixed_rnn(esn.activation, esn.alpha, esn.W, esn.W_in, state[1:esn.res_size], vector), vector) + x_new = nla(esn.nla_type, state) + vector = (W_out*x_new) + output[:, i] = vector + end + end + return output +end + +function _fitted!(output, esn, state, train_len, W_out, vector::Matrix) + if esn.extended_states == false + for i=1:train_len + state = leaky_fixed_rnn(esn.activation, esn.alpha, esn.W, esn.W_in, state, vector[:,i]) + x_new = nla(esn.nla_type, state) + out = (W_out*x_new) + output[:, i] = out + end + elseif esn.extended_states == true + for i=1:train_len + state = vcat(leaky_fixed_rnn(esn.activation, esn.alpha, esn.W, esn.W_in, state[1:esn.res_size], vector[:,i]), vector[:,i]) + x_new = nla(esn.nla_type, state) + out = (W_out*x_new) + output[:, i] = out + end + end + return output +end + + + diff --git a/test/extras/test_extended_states.jl b/test/extras/test_extended_states.jl index 4bf51e4f..d0a5a2c4 100644 --- a/test/extras/test_extended_states.jl +++ b/test/extras/test_extended_states.jl @@ -41,6 +41,13 @@ output = ESNpredict(esn, predict_len, W_out) output = ESNpredict_h_steps(esn, predict_len, h_steps, test, W_out) @test size(output) == (out_size, predict_len) +#test esnfitted +fit1 = 
ESNfitted(esn, W_out; autonomous=false) +@test size(fit1) == size(train) + +fit2 = ESNfitted(esn, W_out; autonomous=true) +@test size(fit2) == size(train) + #test esgp mean = MeanZero() kernel = Lin(1.0) diff --git a/test/runtests.jl b/test/runtests.jl index 22b0f7f7..452003c1 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -17,3 +17,4 @@ using SafeTestsets @time @safetestset "reca gol predict" begin include("training/test_recagol.jl") end @time @safetestset "RMM constructors" begin include("constructors/test_rmm_constructors.jl") end @time @safetestset "GRUESN constructors" begin include("constructors/test_gruesn_constructors.jl") end +@time @safetestset "ESN fitted" begin include("training/test_esnfitted.jl") end diff --git a/test/training/test_esnfitted.jl b/test/training/test_esnfitted.jl new file mode 100644 index 00000000..9fce63de --- /dev/null +++ b/test/training/test_esnfitted.jl @@ -0,0 +1,41 @@ +using ReservoirComputing +using MLJLinearModels +#model parameters +const approx_res_size = 30 +const radius = 1.2 +const activation = tanh +const degree = 6 +const sigma = 0.1 +const beta = 0.0 +const alpha = 1.0 +const nla_type = NLADefault() +const in_size = 3 +const out_size = 3 +const extended_states = false +const delta = 0.5 + + +const train_len = 50 +const predict_len = 12 +data = ones(Float64, in_size, 100) +train = data[:, 1:1+train_len-1] +test = data[:, train_len:train_len+predict_len-1] + +#constructor 1 +esn = ESN(approx_res_size, + train, + degree, + radius, + activation = activation, + sigma = sigma, + alpha = alpha, + nla_type = nla_type, + extended_states = extended_states) + +W_out = ESNtrain(esn, beta) + +fit1 = ESNfitted(esn, W_out; autonomous=false) +@test size(fit1) == size(train) + +fit2 = ESNfitted(esn, W_out; autonomous=true) +@test size(fit2) == size(train)