diff --git a/README.md b/README.md
index 76c6ef5..517bd13 100644
--- a/README.md
+++ b/README.md
@@ -38,8 +38,8 @@ prediction = StateSpaceLearning.forecast(output, 12) #Gets a 12 steps ahead pred
 ## Fit Arguments
 * `y::Vector{Fl}`: Vector of data.
-* model_input::Dict: Dictionary containing the model input parameters (default: Dict("level" => true, "stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 12)).
-* estimation_input::Dict: Dictionary containing the estimation input parameters (default: Dict("α" => 0.1, "information_criteria" => "aic", ψ => 0.05, "penalize_exogenous" => true, "penalize_initial_states" => true)).
+* `model_input::Dict`: Dictionary containing the model input parameters (default: Dict("level" => true, "stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 12)).
+* `estimation_input::Dict`: Dictionary containing the estimation input parameters (default: Dict("α" => 0.1, "information_criteria" => "aic", "ψ" => 0.05, "penalize_exogenous" => true, "penalize_initial_states" => true)).
 * `Exogenous_X::Union{Matrix{Fl}, Missing}`: Exogenous variables matrix (default: missing).
 * `outlier::Bool`: Flag for considering outlier component (default: true).
 * `ζ_ω_threshold::Int64`: ζ_ω_threshold parameter (default: 12).
diff --git a/src/estimation_procedure/default_estimation_procedure.jl b/src/estimation_procedure/default_estimation_procedure.jl
index 59a1b4f..de78461 100644
--- a/src/estimation_procedure/default_estimation_procedure.jl
+++ b/src/estimation_procedure/default_estimation_procedure.jl
@@ -65,7 +65,7 @@ end
 """
 function get_path_information_criteria(model::GLMNetPath, Estimation_X::Matrix{Tl}, estimation_y::Vector{Fl}, information_criteria::String; intercept::Bool = true)::Tuple{Vector{Float64}, Vector{Float64}} where {Tl, Fl}
     path_size = length(model.lambda)
-    T, p = size(Estimation_X)
+    T = size(Estimation_X, 1)
     K = count(i->i != 0, model.betas; dims = 1)'

     method_vec = Vector{Float64}(undef, path_size)
@@ -73,7 +73,7 @@ function get_path_information_criteria(model::GLMNetPath, Estimation_X::Matrix{T
         fit = Estimation_X*model.betas[:, i] .+ model.a0[i]
         ϵ = estimation_y - fit

-        method_vec[i] = get_information(T, K[i], ϵ; information_criteria = information_criteria, p = p)
+        method_vec[i] = get_information(T, K[i], ϵ; information_criteria = information_criteria)
     end

     best_model_idx = argmin(method_vec)
diff --git a/src/information_criteria.jl b/src/information_criteria.jl
index 45fd20b..e2c47c9 100644
--- a/src/information_criteria.jl
+++ b/src/information_criteria.jl
@@ -8,22 +8,19 @@
     - `T::Int64`: Number of observations.
     - `K::Int64`: Number of selected predictors.
     - `ϵ::Vector{Float64}`: Vector of residuals.
-    - `information_criteria::String`: Method for hyperparameter selection (default: "bic").
+    - `information_criteria::String`: Method for hyperparameter selection (default: "aic").
     - `p::Int64`: Number of total predictors (default: 0).

     # Returns
     - `Float64`: Information criterion value.
""" -function get_information(T::Int64, K::Int64, ϵ::Vector{Float64}; information_criteria::String = "bic", p::Int64 = 0)::Float64 +function get_information(T::Int64, K::Int64, ϵ::Vector{Float64}; information_criteria::String = "aic")::Float64 if information_criteria == "bic" return T*log(var(ϵ)) + K*log(T) elseif information_criteria == "aic" return 2*K + T*log(var(ϵ)) elseif information_criteria == "aicc" return 2*K + T*log(var(ϵ)) + ((2*K^2 +2*K)/(T - K - 1)) - elseif information_criteria == "EBIC" - EBIC_comb_term = (K <= 1 || p == K) ? 0 : 2*(sum(log(j) for j in 1:p) - (sum(log(j) for j in 1:K) + sum(log(j) for j in 1:(p-K)))) - return T*log(var(ϵ)) + K*log(T) + EBIC_comb_term end end \ No newline at end of file diff --git a/src/models/unobserved_components.jl b/src/models/unobserved_components.jl index d435f32..48b9c3b 100644 --- a/src/models/unobserved_components.jl +++ b/src/models/unobserved_components.jl @@ -133,7 +133,7 @@ end """ function create_initial_states_Matrix(T::Int64, freq_seasonal::Int64, steps_ahead::Int64, trend::Bool, seasonal::Bool)::Matrix - initial_states_matrix = ones(T+steps_ahead) + initial_states_matrix = ones(T+steps_ahead, 1) trend ? initial_states_matrix = hcat(initial_states_matrix, vcat([0], collect(1:T+steps_ahead-1))) : nothing if seasonal diff --git a/test/StateSpaceLearning.jl b/test/StateSpaceLearning.jl index b544abf..3236112 100644 --- a/test/StateSpaceLearning.jl +++ b/test/StateSpaceLearning.jl @@ -13,13 +13,13 @@ @test all(isnan.(output2.ϵ[10:20])) @test !all(isnan.(output2.fitted[10:20])) - output3 = StateSpaceLearning.fit_model(y1; stabilize_ζ = 1) - @test length(output3.coefs) == length(output1.coefs) - 2 + output3 = StateSpaceLearning.fit_model(y1; ζ_ω_threshold = 1) + @test length(output3.coefs) - 22 == length(output1.coefs) - @test_throws AssertionError StateSpaceLearning.fit_model(y1; s = 200) - @test_throws ErrorException StateSpaceLearning.fit_model(y1; model_type = "none") - @test_throws AssertionError StateSpaceLearning.fit_model(y1; α = -0.1) - @test_throws AssertionError StateSpaceLearning.fit_model(y1; α = 1.1) + @test_throws AssertionError StateSpaceLearning.fit_model(y1; model_input = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 1000)) + + @test_throws AssertionError StateSpaceLearning.fit_model(y1; estimation_input = Dict("α" => -0.1, "information_criteria" => "aic", "ψ" => 0.05, "penalize_exogenous" => true, "penalize_initial_states" => true)) + @test_throws AssertionError StateSpaceLearning.fit_model(y1; estimation_input = Dict("α" => 1.1, "information_criteria" => "aic", "ψ" => 0.05, "penalize_exogenous" => true, "penalize_initial_states" => true)) end @@ -33,7 +33,6 @@ end output2 = StateSpaceLearning.fit_model(y2; Exogenous_X = rand(100, 3)) @test length(StateSpaceLearning.forecast(output2, 10; Exogenous_Forecast = rand(10, 3))) == 10 - @test_throws AssertionError StateSpaceLearning.forecast(output1, -1) @test_throws AssertionError StateSpaceLearning.forecast(output1, 10; Exogenous_Forecast = rand(5, 3)) @test_throws AssertionError StateSpaceLearning.forecast(output2, 10) @test_throws AssertionError StateSpaceLearning.forecast(output2, 10; Exogenous_Forecast = rand(5, 3)) diff --git a/test/estimation_procedure/adalasso.jl b/test/estimation_procedure/adalasso.jl deleted file mode 100644 index 44f5fff..0000000 --- a/test/estimation_procedure/adalasso.jl +++ /dev/null @@ -1,22 +0,0 @@ -@testset "Function: fit_adalasso" 
-    Random.seed!(1234)
-    Exogenous_X = hcat(rand(10, 3), vcat(ones(3), zeros(1), ones(6)))
-    components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, Exogenous_X, true, "Basic Structural", 0)
-    Estimation_X = StateSpaceLearning.create_X_unobserved_components("Basic Structural", 10, 3, Exogenous_X, true, 0)
-
-    estimation_y = Estimation_X*rand(size(Estimation_X, 2)) + rand(10).*5
-
-    coefs1, ϵ1 = StateSpaceLearning.fit_adalasso(Estimation_X, estimation_y, 0.1, "aic", components_indexes, 0.1, true, true)
-    @test length(coefs1) == 43
-    @test length(ϵ1) == 10
-
-    coefs1, ϵ1 = StateSpaceLearning.fit_adalasso(Estimation_X, estimation_y, 0.1, "aic", components_indexes, 0.1, true, false)
-    @test length(coefs1) == 43
-    @test length(ϵ1) == 10
-    @test all(coefs1[components_indexes["initial_states"][2:end] .- 1] .!= 0)
-
-    coefs2, ϵ2 = StateSpaceLearning.fit_adalasso(Estimation_X, estimation_y, 0.1, "aic", components_indexes, 10000.0, true, true)
-    coefs_lasso, ϵ_lasso = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; intercept = true)
-    @test all(isapprox.(coefs2, coefs_lasso; atol = 1e-3))
-    @test all(isapprox.(ϵ2, ϵ_lasso; atol = 1e-3))
-end
\ No newline at end of file
diff --git a/test/estimation_procedure/default_estimation_procedure.jl b/test/estimation_procedure/default_estimation_procedure.jl
new file mode 100644
index 0000000..1c81a31
--- /dev/null
+++ b/test/estimation_procedure/default_estimation_procedure.jl
@@ -0,0 +1,112 @@
+Random.seed!(1234)
+Estimation_X = rand(30, 3)
+estimation_y = rand(30)
+α = 0.5
+penalty_factor = ones(3)
+@testset "Function: get_path_information_criteria" begin
+    intercept1 = true
+    intercept2 = false
+
+    model1 = glmnet(Estimation_X, estimation_y, alpha = α, penalty_factor = penalty_factor, intercept = intercept1, dfmax=size(Estimation_X, 2), lambda_min_ratio=0.001)
+    coefs1, ϵ1 = StateSpaceLearning.get_path_information_criteria(model1, Estimation_X, estimation_y, "aic"; intercept = intercept1)
+    @test length(coefs1) == 4
+    @test coefs1[1] != 0
+    @test all(coefs1[2:end] .== 0)
+    @test length(ϵ1) == 30
+
+    model2 = glmnet(Estimation_X, estimation_y, alpha = α, penalty_factor = penalty_factor, intercept = intercept2, dfmax=size(Estimation_X, 2), lambda_min_ratio=0.001)
+    coefs2, ϵ2 = StateSpaceLearning.get_path_information_criteria(model2, Estimation_X, estimation_y, "aic"; intercept = intercept2)
+    @test length(coefs2) == 3
+    @test all(coefs2 .== 0)
+    @test length(ϵ2) == 30
+end
+
+@testset "Function: fit_glmnet" begin
+    coefs, ϵ = StateSpaceLearning.fit_glmnet(Estimation_X, estimation_y, α; information_criteria="aic", penalty_factor=penalty_factor, intercept = true)
+    @test length(coefs) == 4
+    @test length(ϵ) == 30
+end
+
+@testset "Function: fit_lasso" begin
+    Random.seed!(1234)
+    Exogenous_X = hcat(rand(10, 3), vcat(zeros(3), ones(1), zeros(6)))
+    Basic_Structural = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2)
+
+    components_indexes = StateSpaceLearning.get_components_indexes(10, Exogenous_X, Basic_Structural, true, 0)
+
+    Estimation_X = StateSpaceLearning.create_X_unobserved_components(Basic_Structural, Exogenous_X, true, 0, 10)
+    estimation_y = Estimation_X*rand(size(Estimation_X, 2)) + rand(10)
+
+    coefs1, ϵ1 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; intercept = true)
+    @test length(coefs1) == 43
+    @test length(ϵ1) == 10
+
+    coefs2, ϵ2 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; intercept = false)
+    @test coefs2[1] == mean(estimation_y)
+    @test length(coefs2) == 43
+    @test length(ϵ2) == 10
+
+    coefs3, ϵ3 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", false, components_indexes; intercept = true)
+    @test coefs3[components_indexes["o"][4]] == 0
+    @test all(coefs3[components_indexes["Exogenous_X"]] .!= 0)
+    @test length(coefs3) == 43
+    @test length(ϵ3) == 10
+
+    coefs4, ϵ4 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; penalty_factor = vcat(ones(1), ones(size(Estimation_X,2) - 2).*Inf), intercept = true)
+    @test all(coefs4[3:end] .== 0)
+    @test length(coefs4) == 43
+    @test length(ϵ4) == 10
+end
+
+@testset "Function: default_estimation_procedure" begin
+    Random.seed!(1234)
+    Exogenous_X = hcat(rand(10, 3), vcat(ones(3), zeros(1), ones(6)))
+    Basic_Structural = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2)
+
+    components_indexes = StateSpaceLearning.get_components_indexes(10, Exogenous_X, Basic_Structural, true, 0)
+
+    Estimation_X = StateSpaceLearning.create_X_unobserved_components(Basic_Structural, Exogenous_X, true, 0, 10)
+
+    estimation_y = Estimation_X*rand(size(Estimation_X, 2)) + rand(10).*5
+
+    estimation_input1 = Dict("α" => 0.1, "information_criteria" => "aic", "ψ" => 0.05, "penalize_exogenous" => true, "penalize_initial_states" => true)
+    coefs1, ϵ1 = StateSpaceLearning.default_estimation_procedure(Estimation_X, estimation_y, components_indexes, estimation_input1)
+    @test length(coefs1) == 43
+    @test length(ϵ1) == 10
+
+    estimation_input2 = Dict("α" => 0.1, "information_criteria" => "aic", "ψ" => 0.05, "penalize_exogenous" => true, "penalize_initial_states" => false)
+    coefs2, ϵ2 = StateSpaceLearning.default_estimation_procedure(Estimation_X, estimation_y, components_indexes, estimation_input2)
+    @test length(coefs2) == 43
+    @test length(ϵ2) == 10
+    @test all(coefs2[components_indexes["initial_states"][2:end] .- 1] .!= 0)
+end
+
+@testset "Function: get_dummy_indexes" begin
+    Exogenous_X1 = hcat(rand(10, 3), vcat(zeros(3), ones(1), zeros(6)))
+    Exogenous_X2 = hcat(rand(10, 3))
+
+    dummy_indexes1 = StateSpaceLearning.get_dummy_indexes(Exogenous_X1)
+    @test dummy_indexes1 == [4]
+
+    dummy_indexes2 = StateSpaceLearning.get_dummy_indexes(Exogenous_X2)
+    @test dummy_indexes2 == []
+end
+
+@testset "Function: get_outlier_duplicate_columns" begin
+    Random.seed!(1234)
+    Exogenous_X1 = hcat(rand(10, 3), vcat(zeros(3), ones(1), zeros(6)))
+    Exogenous_X2 = rand(10, 3)
+
+    Basic_Structural = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2)
+
+    components_indexes1 = StateSpaceLearning.get_components_indexes(10, Exogenous_X1, Basic_Structural, true, 0)
+    components_indexes2 = StateSpaceLearning.get_components_indexes(10, Exogenous_X2, Basic_Structural, true, 0)
+
+    Estimation_X1 = StateSpaceLearning.create_X_unobserved_components(Basic_Structural, Exogenous_X1, true, 0, 10)
+    outlier_duplicate_columns1 = StateSpaceLearning.get_outlier_duplicate_columns(Estimation_X1, components_indexes1)
+    @test outlier_duplicate_columns1 == [32]
+
+    Estimation_X2 = StateSpaceLearning.create_X_unobserved_components(Basic_Structural, Exogenous_X2, true, 0, 10)
+    outlier_duplicate_columns2 = StateSpaceLearning.get_outlier_duplicate_columns(Estimation_X2, components_indexes2)
+    @test outlier_duplicate_columns2 == []
+end
\ No newline at end of file
diff --git a/test/estimation_procedure/estimation_utils.jl b/test/estimation_procedure/estimation_utils.jl
deleted file mode 100644
index 7fe6988..0000000
--- a/test/estimation_procedure/estimation_utils.jl
+++ /dev/null
@@ -1,42 +0,0 @@
-@testset "Function: get_dummy_indexes" begin
-    Exogenous_X1 = hcat(rand(10, 3), vcat(zeros(3), ones(1), zeros(6)))
-    Exogenous_X2 = hcat(rand(10, 3))
-
-    dummy_indexes1 = StateSpaceLearning.get_dummy_indexes(Exogenous_X1)
-    @test dummy_indexes1 == [4]
-
-    dummy_indexes2 = StateSpaceLearning.get_dummy_indexes(Exogenous_X2)
-    @test dummy_indexes2 == []
-end
-
-@testset "Function: get_outlier_duplicate_columns" begin
-    Random.seed!(1234)
-    Exogenous_X1 = hcat(rand(10, 3), vcat(zeros(3), ones(1), zeros(6)))
-    Exogenous_X2 = rand(10, 3)
-    components_indexes1 = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, Exogenous_X1, true, "Basic Structural", 0)
-    components_indexes2 = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, Exogenous_X2, true, "Basic Structural", 0)
-
-    Estimation_X1 = StateSpaceLearning.create_X_unobserved_components("Basic Structural", 10, 3, Exogenous_X1, true, 0)
-    outlier_duplicate_columns1 = StateSpaceLearning.get_outlier_duplicate_columns(Estimation_X1, components_indexes1)
-    @test outlier_duplicate_columns1 == [32]
-
-    Estimation_X2 = StateSpaceLearning.create_X_unobserved_components("Basic Structural", 10, 3, Exogenous_X2, true, 0)
-    outlier_duplicate_columns2 = StateSpaceLearning.get_outlier_duplicate_columns(Estimation_X2, components_indexes2)
-    @test outlier_duplicate_columns2 == []
-end
-
-@testset "Function: fit_estimation_procedure" begin
-    Random.seed!(1234)
-    Exogenous_X = hcat(rand(10, 3), vcat(ones(3), zeros(1), ones(6)))
-    components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, Exogenous_X, true, "Basic Structural", 0)
-    Estimation_X = StateSpaceLearning.create_X_unobserved_components("Basic Structural", 10, 3, Exogenous_X, true, 0)
-    estimation_y = Estimation_X*rand(size(Estimation_X, 2)) + rand(10).*5
-
-    coefs1, ϵ1 = StateSpaceLearning.fit_estimation_procedure("Lasso", Estimation_X, estimation_y, 0.1, "aic", components_indexes, 0.1, true, true)
-    @test length(coefs1) == 43
-    @test length(ϵ1) == 10
-
-    coefs2, ϵ2 = StateSpaceLearning.fit_estimation_procedure("AdaLasso", Estimation_X, estimation_y, 0.1, "aic", components_indexes, 0.1, true, true)
-    @test length(coefs2) == 43
-    @test length(ϵ2) == 10
-end
\ No newline at end of file
diff --git a/test/estimation_procedure/information_criteria.jl b/test/estimation_procedure/information_criteria.jl
deleted file mode 100644
index 0d827ff..0000000
--- a/test/estimation_procedure/information_criteria.jl
+++ /dev/null
@@ -1,14 +0,0 @@
-@testset "Function: get_information" begin
-    ϵ = [1.1, 2.2, 3.3, 4.4, 5.5]
-    T = 5
-    K = 3
-    p = 10
-    bic = StateSpaceLearning.get_information(T, K, ϵ; hyperparameter_selection = "bic", p = p)
-    aic = StateSpaceLearning.get_information(T, K, ϵ; hyperparameter_selection = "aic", p = p)
-    aicc = StateSpaceLearning.get_information(T, K, ϵ; hyperparameter_selection = "aicc", p = p)
-    EBIC = StateSpaceLearning.get_information(T, K, ϵ; hyperparameter_selection = "EBIC", p = p)
-    @test round(bic, digits = 5) == 10.36287
-    @test round(aic, digits = 5) == 11.53456
-    @test round(aicc, digits = 5) == 35.53456
-    @test round(EBIC, digits = 5) == 19.93785
-end
\ No newline at end of file
diff --git a/test/estimation_procedure/lasso.jl b/test/estimation_procedure/lasso.jl
deleted file mode 100644
index e72ad74..0000000
--- a/test/estimation_procedure/lasso.jl
+++ /dev/null
@@ -1,56 +0,0 @@
-Random.seed!(1234)
-Estimation_X = rand(30, 3)
-estimation_y = rand(30)
-α = 0.5
-penalty_factor = ones(3)
-@testset "Function: get_path_information_criteria" begin
-    intercept1 = true
-    intercept2 = false
-
-    model1 = glmnet(Estimation_X, estimation_y, alpha = α, penalty_factor = penalty_factor, intercept = intercept1, dfmax=size(Estimation_X, 2), lambda_min_ratio=0.001)
-    coefs1, ϵ1 = StateSpaceLearning.get_path_information_criteria(model1, Estimation_X, estimation_y, "aic"; intercept = intercept1)
-    @test length(coefs1) == 4
-    @test coefs1[1] != 0
-    @test all(coefs1[2:end] .== 0)
-    @test length(ϵ1) == 30
-
-    model2 = glmnet(Estimation_X, estimation_y, alpha = α, penalty_factor = penalty_factor, intercept = intercept2, dfmax=size(Estimation_X, 2), lambda_min_ratio=0.001)
-    coefs2, ϵ2 = StateSpaceLearning.get_path_information_criteria(model2, Estimation_X, estimation_y, "aic"; intercept = intercept2)
-    @test length(coefs2) == 3
-    @test all(coefs2 .== 0)
-    @test length(ϵ2) == 30
-end
-
-@testset "Function: fit_glmnet" begin
-    coefs, ϵ = StateSpaceLearning.fit_glmnet(Estimation_X, estimation_y, α; hyperparameter_selection="aic", penalty_factor=penalty_factor, intercept = true)
-    @test length(coefs) == 4
-    @test length(ϵ) == 30
-end
-
-@testset "Function: fit_lasso" begin
-    Random.seed!(1234)
-    Exogenous_X = hcat(rand(10, 3), vcat(zeros(3), ones(1), zeros(6)))
-    components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, Exogenous_X, true, "Basic Structural", 0)
-    Estimation_X = StateSpaceLearning.create_X_unobserved_components("Basic Structural", 10, 3, Exogenous_X, true, 0)
-    estimation_y = Estimation_X*rand(size(Estimation_X, 2)) + rand(10)
-
-    coefs1, ϵ1 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; intercept = true)
-    @test length(coefs1) == 43
-    @test length(ϵ1) == 10
-
-    coefs2, ϵ2 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; intercept = false)
-    @test coefs2[1] == mean(estimation_y)
-    @test length(coefs2) == 43
-    @test length(ϵ2) == 10
-
-    coefs3, ϵ3 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", false, components_indexes; intercept = true)
-    @test coefs3[components_indexes["o"][4]] == 0
-    @test all(coefs3[components_indexes["Exogenous_X"]] .!= 0)
-    @test length(coefs3) == 43
-    @test length(ϵ3) == 10
-
-    coefs4, ϵ4 = StateSpaceLearning.fit_lasso(Estimation_X, estimation_y, 0.1, "aic", true, components_indexes; penalty_factor = vcat(ones(1), ones(size(Estimation_X,2) - 2).*Inf), intercept = true)
-    @test all(coefs4[3:end] .== 0)
-    @test length(coefs4) == 43
-    @test length(ϵ4) == 10
-end
\ No newline at end of file
diff --git a/test/information_criteria.jl b/test/information_criteria.jl
new file mode 100644
index 0000000..e6cfad8
--- /dev/null
+++ b/test/information_criteria.jl
@@ -0,0 +1,11 @@
+@testset "Function: get_information" begin
+    ϵ = [1.1, 2.2, 3.3, 4.4, 5.5]
+    T = 5
+    K = 3
+    bic = StateSpaceLearning.get_information(T, K, ϵ; information_criteria = "bic")
+    aic = StateSpaceLearning.get_information(T, K, ϵ; information_criteria = "aic")
+    aicc = StateSpaceLearning.get_information(T, K, ϵ; information_criteria = "aicc")
+    @test round(bic, digits = 5) == 10.36287
+    @test round(aic, digits = 5) == 11.53456
+    @test round(aicc, digits = 5) == 35.53456
+end
\ No newline at end of file
diff --git a/test/models/unobserved_components.jl b/test/models/unobserved_components.jl
index ee1ad20..cf8539a 100644
--- a/test/models/unobserved_components.jl
+++ b/test/models/unobserved_components.jl
@@ -75,8 +75,8 @@ end

 @testset "Initial State Matrix" begin
-    X1 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 0, "Basic Structural")
-    X2 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 2, "Basic Structural")
+    X1 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 0, true, true)
+    X2 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 2, true, true)

     @test X1 == [1.0 0.0 1.0 0.0;
                  1.0 1.0 0.0 1.0;
@@ -92,8 +92,8 @@ end
                  1.0 5.0 0.0 1.0;
                  1.0 6.0 1.0 0.0]

-    X3 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 0, "Local Linear Trend")
-    X4 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 2, "Local Linear Trend")
+    X3 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 0, true, false)
+    X4 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 2, true, false)

     @test X3 == [1.0 0.0;
                  1.0 1.0;
@@ -109,8 +109,8 @@ end
                  1.0 5.0;
                  1.0 6.0]

-    X5 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 0, "Local Level")
-    X6 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 2, "Local Level")
+    X5 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 0, false, false)
+    X6 = StateSpaceLearning.create_initial_states_Matrix(5, 2, 2, false, false)

     @test X5 == ones(5, 1)
     @test X6 == ones(7, 1)
@@ -131,40 +131,45 @@ end
         Any[false, 2, 2]
     ]

-    size_vec1=[(5, 22), (5, 18), (7, 18), (5, 17), (5, 13), (7, 13), (5, 12), (5, 12), (7, 12), (5, 7), (5, 7), (7, 7), (5, 16), (5, 14), (7, 14), (5, 11), (5, 9), (7, 9)]
-    size_vec2=[(5, 19), (5, 15), (7, 15), (5, 14), (5, 10), (7, 10), (5, 9), (5, 9), (7, 9), (5, 4), (5, 4), (7, 4), (5, 13), (5, 11), (7, 11), (5, 8), (5, 6), (7, 6)]
-    count = 1
-    for model_type in ["Basic Structural", "Local Level", "Local Linear Trend"]
+    size_vec1=[(5, 22), (5, 18), (7, 18), (5, 17), (5, 13), (7, 13), (5, 12), (5, 12), (7, 12), (5, 7), (5, 7), (7, 7), (5, 16), (5, 14), (7, 14), (5, 11), (5, 9), (7, 9), (5, 13), (5, 11), (7, 11), (5, 8), (5, 6), (7, 6)]
+    size_vec2=[(5, 19), (5, 15), (7, 15), (5, 14), (5, 10), (7, 10), (5, 9), (5, 9), (7, 9), (5, 4), (5, 4), (7, 4), (5, 13), (5, 11), (7, 11), (5, 8), (5, 6), (7, 6), (5, 10), (5, 8), (7, 8), (5, 5), (5, 3), (7, 3)]
+    counter = 1
+    for model_input in [Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2),
+                        Dict("stochastic_level" => true, "trend" => false, "stochastic_trend" => false, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2),
+                        Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2),
+                        Dict("stochastic_level" => false, "trend" => true, "stochastic_trend" => true, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)]
         for param in param_combination
             if param[3] != 0
-                X1 = StateSpaceLearning.create_X_unobserved_components(model_type, 5, 2, Exogenous_X1, param[1], param[2], param[3], Exogenous_forecast1)
+                X1 = StateSpaceLearning.create_X_unobserved_components(model_input, Exogenous_X1, param[1], param[2], 5, param[3], Exogenous_forecast1)
             else
-                X1 = StateSpaceLearning.create_X_unobserved_components(model_type, 5, 2, Exogenous_X1, param[1], param[2], param[3])
+                X1 = StateSpaceLearning.create_X_unobserved_components(model_input, Exogenous_X1, param[1], param[2], 5, param[3])
             end
-            X2 = StateSpaceLearning.create_X_unobserved_components(model_type, 5, 2, Exogenous_X2, param[1], param[2], param[3])
-            @test size(X1) == size_vec1[count]
-            @test size(X2) == size_vec2[count]
-            count += 1
+            X2 = StateSpaceLearning.create_X_unobserved_components(model_input, Exogenous_X2, param[1], param[2], 5, param[3])
+            @test size(X1) == size_vec1[counter]
+            @test size(X2) == size_vec2[counter]
+            counter += 1
         end
     end

-    @test_throws ErrorException StateSpaceLearning.create_X_unobserved_components("none", 5, 2, Exogenous_X1, param_combination[1][1], param_combination[1][2], param_combination[1][3], Exogenous_forecast1)
 end

 @testset "Function: get_components_indexes_unobserved_components" begin
     Exogenous_X1 = rand(10, 3)
     Exogenous_X2 = zeros(10, 0)

+    Basic_Structural = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2)
+    Local_Level = Dict("stochastic_level" => true, "trend" => false, "stochastic_trend" => false, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)
+    Local_Linear_Trend = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)
     parameter_combination = [
-        ["Basic Structural", true, Exogenous_X1],
-        ["Local Level", true, Exogenous_X1],
-        ["Local Linear Trend", true, Exogenous_X1],
-        ["Basic Structural", false, Exogenous_X1],
-        ["Basic Structural", true, Exogenous_X2],
+        [Basic_Structural, true, Exogenous_X1],
+        [Local_Level, true, Exogenous_X1],
+        [Local_Linear_Trend, true, Exogenous_X1],
+        [Basic_Structural, false, Exogenous_X1],
+        [Basic_Structural, true, Exogenous_X2],
     ]

     for param in parameter_combination
-        components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, param[3], param[2], param[1], 0)
+        components_indexes = StateSpaceLearning.get_components_indexes(10, param[3], param[1], param[2], 0)

         for key in keys(components_indexes)
             if param[1] == "Basic Structural"
@@ -181,19 +186,23 @@ end
             @test key == "Exogenous_X" ? length(components_indexes[key]) == size(param[3], 2) : true
         end
     end
-    @test_throws ErrorException StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, Exogenous_X1, true, "none", 0)
 end

 @testset "Function: get_variances_unobserved_components" begin
     Exogenous_X2 = zeros(10, 0)
+
+    Basic_Structural = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2)
+    Local_Level = Dict("stochastic_level" => true, "trend" => false, "stochastic_trend" => false, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)
+    Local_Linear_Trend = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)
+
     parameter_combination = [
-        ["Basic Structural", true, Exogenous_X2, ["ξ", "ζ", "ω", "ϵ"]],
-        ["Local Level", true, Exogenous_X2, ["ξ", "ϵ"]],
-        ["Local Linear Trend", true, Exogenous_X2, ["ξ", "ζ", "ϵ"]]
+        [Basic_Structural, true, Exogenous_X2, ["ξ", "ζ", "ω", "ε"]],
+        [Local_Level, true, Exogenous_X2, ["ξ", "ε"]],
+        [Local_Linear_Trend, true, Exogenous_X2, ["ξ", "ζ", "ε"]]
     ]

     for param in parameter_combination
-        components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, param[3], param[2], param[1], 0)
-        variances = StateSpaceLearning.get_variances_unobserved_components(rand(100), rand(39), components_indexes)
+        components_indexes = StateSpaceLearning.get_components_indexes(10, param[3], param[1], param[2], 0)
+        variances = StateSpaceLearning.get_variances(rand(100), rand(39), components_indexes)
         @test all([key in keys(variances) for key in param[4]])
     end
 end
\ No newline at end of file
diff --git a/test/runtests.jl b/test/runtests.jl
index 5841245..abdc3ce 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,9 +1,7 @@
 using StateSpaceLearning, Test, Random, LinearAlgebra, GLMNet, Statistics

 include("models/unobserved_components.jl")
-include("estimation_procedure/information_criteria.jl")
-include("estimation_procedure/lasso.jl")
-include("estimation_procedure/adalasso.jl")
-include("estimation_procedure/estimation_utils.jl")
+include("information_criteria.jl")
+include("estimation_procedure/default_estimation_procedure.jl")
 include("utils.jl")
 include("StateSpaceLearning.jl")
diff --git a/test/utils.jl b/test/utils.jl
index a5bcc0c..7bf3961 100644
--- a/test/utils.jl
+++ b/test/utils.jl
@@ -2,17 +2,21 @@
     Exogenous_X1 = rand(10, 3)
     Exogenous_X2 = zeros(10, 0)

+    Basic_Structural = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => true, "stochastic_seasonal" => true, "freq_seasonal" => 2)
+    Local_Level = Dict("stochastic_level" => true, "trend" => false, "stochastic_trend" => false, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)
+    Local_Linear_Trend = Dict("stochastic_level" => true, "trend" => true, "stochastic_trend" => true, "seasonal" => false, "stochastic_seasonal" => false, "freq_seasonal" => 2)
     parameter_combination = [
-        ["Basic Structural", true, Exogenous_X1, []],
-        ["Local Level", true, Exogenous_X1, ["ω", "γ₁", "ζ", "ν₁"]],
-        ["Local Linear Trend", true, Exogenous_X1, ["ω", "γ₁"]],
-        ["Basic Structural", false, Exogenous_X1, ["o"]],
-        ["Basic Structural", true, Exogenous_X2, ["Exogenous_X"]],
+        [Basic_Structural, true, Exogenous_X1],
+        [Local_Level, true, Exogenous_X1],
+        [Local_Linear_Trend, true, Exogenous_X1],
+        [Basic_Structural, false, Exogenous_X1],
+        [Basic_Structural, true, Exogenous_X2],
     ]

     for param in parameter_combination
-        X = StateSpaceLearning.create_X_unobserved_components(param[1], 10, 3, param[3], param[2], 0)
-        components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(10, 3, param[3], param[2], param[1], 0)
+        X = StateSpaceLearning.create_X_unobserved_components(param[1], param[3], param[2], 0, 10, 0)
+
+        components_indexes = StateSpaceLearning.get_components_indexes(10, param[3], param[1], param[2], 0)
         coefs = rand(size(X, 2))
         components = StateSpaceLearning.build_components(X, coefs, components_indexes)
@@ -20,8 +24,6 @@
             @test "Values" in keys(components[key])
             @test "Coefs" in keys(components[key])
             @test "Indexes" in keys(components[key])
-            @test !(key in param[4]) ? !isempty(components[key]["Coefs"]) : isempty(components[key]["Coefs"])
-            @test !(key in param[4]) ? !isempty(components[key]["Indexes"]) : isempty(components[key]["Indexes"])
             @test key == "Exogenous_X" ? "Selected" in keys(components[key]) : true
         end
     end
@@ -46,15 +48,4 @@ end
     ϵ2, fitted2 = StateSpaceLearning.get_fit_and_residuals(estimation_ϵ2, coefs, X, valid_indexes2, T)
     @test !all(isnan.(ϵ2[valid_indexes2]))
     @test !all(isnan.(fitted2))
-end
-
-@testset "Function: forecast_model" begin
-    Exogenous_X = rand(30, 3)
-    X = StateSpaceLearning.create_X_unobserved_components("Basic Structural", 30, 2, Exogenous_X, true, 0)
-    coefs = rand(size(X, 2))
-    components_indexes = StateSpaceLearning.get_components_indexes_unobserved_components(30, 2, Exogenous_X, true, "Basic Structural", 0)
-    components = StateSpaceLearning.build_components(X, coefs, components_indexes)
-
-    output = StateSpaceLearning.Output("Basic Structural", X, coefs, rand(30), rand(30), components, Dict(), 3, 30, true, collect(1:30), 0, rand(60))
-    @test length(StateSpaceLearning.unobserved_components_dict["forecast"](output, 5, rand(5, 3))) == 5
 end
\ No newline at end of file
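
Usage sketch for the keyword interface described in the README hunk at the top of this patch. It is illustrative only: the series `y` is synthetic, and the two dictionaries simply repeat the defaults quoted in the README; `fit_model` and `forecast` are the calls exercised in test/StateSpaceLearning.jl above.

using StateSpaceLearning

# Synthetic stand-in for real data.
y = randn(144) .+ 10

# Defaults as documented in the README "Fit Arguments" section.
model_input = Dict("level" => true, "stochastic_level" => true, "trend" => true,
                   "stochastic_trend" => true, "seasonal" => true,
                   "stochastic_seasonal" => true, "freq_seasonal" => 12)
estimation_input = Dict("α" => 0.1, "information_criteria" => "aic", "ψ" => 0.05,
                        "penalize_exogenous" => true, "penalize_initial_states" => true)

# Fit the structural model and produce a 12-steps-ahead prediction.
output = StateSpaceLearning.fit_model(y; model_input = model_input,
                                      estimation_input = estimation_input)
prediction = StateSpaceLearning.forecast(output, 12)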