From c695740907d21b7386fb50cbee31f4a6a1a5b646 Mon Sep 17 00:00:00 2001
From: MartinuzziFrancesco
Date: Thu, 17 Oct 2024 10:33:02 +0200
Subject: [PATCH 1/3] fixing minimal_init

---
 src/esn/esn_input_layers.jl | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/esn/esn_input_layers.jl b/src/esn/esn_input_layers.jl
index c5edf383..6bbb5267 100644
--- a/src/esn/esn_input_layers.jl
+++ b/src/esn/esn_input_layers.jl
@@ -190,10 +190,10 @@ function minimal_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
     return layer_matrix
 end
 
-function _create_bernoulli(p::T,
+function _create_bernoulli(p::Number,
         res_size::Int,
         in_size::Int,
-        weight::T,
+        weight::Number,
         rng::AbstractRNG,
         ::Type{T}) where {T <: Number}
     input_matrix = zeros(T, res_size, in_size)
@@ -210,7 +210,7 @@ function _create_irrational(irrational::Irrational,
         start::Int,
         res_size::Int,
         in_size::Int,
-        weight::T,
+        weight::Number,
         rng::AbstractRNG,
         ::Type{T}) where {T <: Number}
     setprecision(BigFloat, Int(ceil(log2(10) * (res_size * in_size + start + 1))))

From dea70cf6c146eb004147303a2bb95c610aac20f9 Mon Sep 17 00:00:00 2001
From: MartinuzziFrancesco
Date: Thu, 17 Oct 2024 10:33:31 +0200
Subject: [PATCH 2/3] version

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index 98e01660..a7c35268 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "ReservoirComputing"
 uuid = "7c2d2b1e-3dd4-11ea-355a-8f6a8116e294"
 authors = ["Francesco Martinuzzi"]
-version = "0.10.3"
+version = "0.10.4"
 
 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"

From 498f1f4dda772d669c765d813f80db60b85a7c4c Mon Sep 17 00:00:00 2001
From: MartinuzziFrancesco
Date: Sat, 19 Oct 2024 21:02:15 +0200
Subject: [PATCH 3/3] rewrote change_layers.md for new layers api

---
 docs/Project.toml                       |  2 +-
 docs/pages.jl                           |  2 +-
 docs/src/esn_tutorials/change_layers.md | 68 +++++++++++++++++++++++++
 3 files changed, 70 insertions(+), 2 deletions(-)
 create mode 100644 docs/src/esn_tutorials/change_layers.md

diff --git a/docs/Project.toml b/docs/Project.toml
index acac3787..dc7bca9e 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -19,5 +19,5 @@ Documenter = "1"
 OrdinaryDiffEq = "6"
 Plots = "1"
 PredefinedDynamicalSystems = "1"
-ReservoirComputing = "0.9, 0.10"
+ReservoirComputing = "0.10"
 StatsBase = "0.33, 0.34"

diff --git a/docs/pages.jl b/docs/pages.jl
index 32e6c373..28bbfae5 100644
--- a/docs/pages.jl
+++ b/docs/pages.jl
@@ -7,7 +7,7 @@ pages = [
     "Echo State Network Tutorials" => Any[
         "Lorenz System Forecasting" => "esn_tutorials/lorenz_basic.md",
         #"Mackey-Glass Forecasting on GPU" => "esn_tutorials/mackeyglass_basic.md",
-        #"Using Different Layers" => "esn_tutorials/change_layers.md",
+        "Using Different Layers" => "esn_tutorials/change_layers.md",
         "Using Different Reservoir Drivers" => "esn_tutorials/different_drivers.md",
         #"Using Different Training Methods" => "esn_tutorials/different_training.md",
         "Deep Echo State Networks" => "esn_tutorials/deep_esn.md",

diff --git a/docs/src/esn_tutorials/change_layers.md b/docs/src/esn_tutorials/change_layers.md
new file mode 100644
index 00000000..906d693b
--- /dev/null
+++ b/docs/src/esn_tutorials/change_layers.md
@@ -0,0 +1,68 @@
# Using different layers

A great deal of effort in the ESN field is devoted to finding an ideal construction for the reservoir matrices. ReservoirComputing.jl offers multiple implementations of reservoir and input matrix initializations from the literature.
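For instance, the cycle reservoir and the minimal input layer used later in this tutorial can be materialized as plain matrices (a quick sketch; the dimensions and weight values below are illustrative, not defaults):

```julia
using ReservoirComputing

# partially apply the keyword arguments, then pass the dimensions
reservoir_init = simple_cycle(; weight = 0.7)
reservoir_matrix = reservoir_init(300, 300)  # 300×300 cycle reservoir

input_init = minimal_init(; weight = 0.85)
input_matrix = input_init(300, 2)  # res_size × in_size input layer
```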
The API is standardized and follows that of [WeightInitializers.jl](https://github.com/LuxDL/WeightInitializers.jl):

```julia
weights = init(rng, dims...)
# rng is optional
weights = init(dims...)
```

Additional keywords can be added when needed:

```julia
weights_init = init(rng; kwargs...)
weights = weights_init(rng, dims...)
# or
weights_init = init(; kwargs...)
weights = weights_init(dims...)
```

Custom layers only need to follow this API to be compatible with ReservoirComputing.jl; a hypothetical sketch of a custom initializer is given at the end of this page.

## Example of a minimally complex ESN

Using [^rodan2012] and [^rodan2010] as references, this section provides an example of how to change both the input layer and the reservoir of an ESN.

The task for this example is one-step-ahead prediction of the Hénon map. To obtain the data, one can leverage the package [PredefinedDynamicalSystems.jl](https://juliadynamics.github.io/PredefinedDynamicalSystems.jl/dev/). The data is rescaled to lie between -1 and 1.

```@example minesn
using PredefinedDynamicalSystems
train_len = 3000
predict_len = 2000

ds = Systems.henon()
traj, t = trajectory(ds, 7000)
data = Matrix(traj)'
# min-max rescale the values to the interval [-1, 1]
data = 2 .* (data .- minimum(data)) ./ (maximum(data) - minimum(data)) .- 1
shift = 200

training_input = data[:, shift:(shift + train_len - 1)]
training_target = data[:, (shift + 1):(shift + train_len)]
testing_input = data[:, (shift + train_len):(shift + train_len + predict_len - 1)]
testing_target = data[:, (shift + train_len + 1):(shift + train_len + predict_len)]
```

Now it is possible to define the input layers and reservoirs to compare, and to run the comparison in a simple for loop. The accuracy is measured with the mean squared deviation (`msd`) from StatsBase.

```@example minesn
using ReservoirComputing, StatsBase

res_size = 300
input_layer = [minimal_init(; weight = 0.85, sampling_type = :irrational),
    minimal_init(; weight = 0.95, sampling_type = :irrational)]
reservoirs = [simple_cycle(; weight = 0.7),
    cycle_jumps(; cycle_weight = 0.7, jump_weight = 0.2, jump_size = 5)]

for i in 1:length(reservoirs)
    esn = ESN(training_input, 2, res_size;
        input_layer = input_layer[i],
        reservoir = reservoirs[i])
    wout = train(esn, training_target, StandardRidge(0.001))
    output = esn(Predictive(testing_input), wout)
    println(msd(testing_target, output))
end
```

As shown, changing layers in ESN models is straightforward. Be sure to check the API documentation for the full list of reservoirs and input layers.

## Bibliography

[^rodan2012]: Rodan, Ali, and Peter Tiňo. "Simple deterministically constructed cycle reservoirs with regular jumps." Neural Computation 24.7 (2012): 1822-1852.

[^rodan2010]: Rodan, Ali, and Peter Tiňo. "Minimum complexity echo state network." IEEE Transactions on Neural Networks 22.1 (2011): 131-144.
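## A custom initializer sketch

As noted earlier, custom layers only need to follow the calling convention shown at the top of this page. The snippet below is a minimal, hypothetical sketch: the name `my_uniform_init` and its `scaling` keyword are illustrative and not part of the package, and it assumes the ESN constructor invokes the initializer as `init(rng, T, res_size, in_size)`, matching the signature of `minimal_init` in the patch above.

```julia
using Random

# Hypothetical custom input layer: dense weights drawn uniformly
# from [-scaling, scaling], following the (rng, ::Type{T}, dims...) signature.
function my_uniform_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
        scaling = 0.1) where {T <: Number}
    return (rand(rng, T, dims...) .- T(0.5)) .* T(2 * scaling)
end
```

Such a function can then be passed wherever a built-in initializer is accepted, e.g. `input_layer = my_uniform_init` in the ESN constructor.
\ No newline at end of file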