
Commit

Merge pull request #25 from JohannesNaegele/dev
Implemented GROWTH solution with NonlinearSolve.jl, implemented adjoint functionality
JohannesNaegele authored Nov 23, 2023
2 parents ccce36b + 6b8f2b5 commit 1ffc0e9
Showing 14 changed files with 740 additions and 29 deletions.
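The headline change: model solutions can now go through NonlinearSolve.jl, and SciMLSensitivity.jl supplies adjoint rules so that Zygote can differentiate through the solve. A minimal sketch of that mechanism (toy residual and parameter values, not code from this PR):

using NonlinearSolve, SciMLSensitivity, Zygote

# stand-in for model.f!: solve p * x^2 - 2 = 0 for x
f(x, p) = p .* x .^ 2 .- 2.0

function loss(p)
    prob = NonlinearProblem{false}(f, [1.0], p)
    sol = NonlinearSolve.solve(prob, NewtonRaphson())
    return sum(abs2, sol.u)
end

# SciMLSensitivity registers an adjoint for `solve`, so the gradient comes
# from the implicit function theorem rather than unrolled solver iterations.
Zygote.gradient(loss, [2.0])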
8 changes: 8 additions & 0 deletions Project.toml
@@ -8,10 +8,18 @@ Crayons = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Gadfly = "c91e804a-d5a3-530f-b6f0-dfbca275c004"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
NLsolve = "2774e3e8-f4cf-5e23-947b-6d7e65073b56"
NonlinearSolve = "8913a72c-1f9b-4ce2-8d82-65094dcecaec"
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
OptimizationOptimJL = "36348300-93cb-4f02-beb5-3c3902f8871e"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
Pipe = "b98c9c47-44ae-5843-9183-064241ee97a0"
SciMLSensitivity = "1ed8b502-d754-442c-8d5d-10ac956f44a1"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
Crayons = "4"
75 changes: 75 additions & 0 deletions examples/bayesian.jl
@@ -0,0 +1,75 @@
using Consistent
using Distributions
using DataFrames
using Pipe
using Gadfly
using Random
# using Zygote
# using Optimization
# using OptimizationOptimJL
# using SciMLSensitivity

# Given data for exogenous variables
df = DataFrame(:G => fill(20, 60))

# Define model
sim = Consistent.SIM()[:model]
exos = permutedims(Matrix(df[!, sim.exogenous_variables]))
lags = Consistent.SIM()[:lags]
params_dict = @parameters begin
    θ = 0.2
    α_1 = Normal(0.6, 0.01)
    α_2 = Normal(0.4, 0.01)
end
priors_dict = @parameters begin
    α_1 = Uniform()
    α_2 = Uniform()
end

# Let's say we know that some variable is prone to measurement error.
# If the error is just a constant, we can introduce a bias variable.
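# One hedged way to do that (a sketch, not part of this commit): append a bias
# parameter to the model and give it a prior alongside the behavioural ones, e.g.
#
#     biased_sim = add_params(sim, Consistent.Variables([:bias])) # assumes Variables(::Vector{Symbol})
#     bias_prior = Normal(0.0, 0.1)
#
# so that an observed series enters as, say, Y_obs = Y + bias.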

function solve(
    model, lags, exos, params_dict::AbstractDict;
    rng=Random.default_rng(),
    initial=fill(1.0, length(model.endogenous_variables))
)
    function sample_param(x)
        if x isa Number
            return x
        else
            return rand(rng, x)
        end
    end
    param_values = map(x -> sample_param(params_dict[x]), model.parameters)
    return Consistent.solve(
        model, lags, exos, param_values, initial=initial
    )
end

# Solve model for 59 periods
for i in 1:59
    solution = solve(
        sim,
        lags,
        exos[:, begin:i],
        params_dict
    )
    global lags = hcat(lags, solution)
end

# Convert results to DataFrame
df = DataFrame(lags', sim.endogenous_variables)
# Add time column
df[!, :period] = 1:nrow(df)
# Select variables, convert to long format, and plot variables
@pipe df |>
    select(_, [:Y, :C, :YD, :period]) |>
    stack(_, Not(:period), variable_name=:variable) |>
    plot(
        _,
        x=:period,
        y=:value,
        color=:variable,
        Geom.line
    )
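Note that this example is a prior-predictive simulation: every period draws fresh parameter values from the distributions in params_dict, while priors_dict is declared but not yet used for inference (hence the still commented-out Zygote/Optimization imports at the top).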
101 changes: 93 additions & 8 deletions examples/calibrate.jl
@@ -1,15 +1,100 @@
using Consistent
using DataFrames
using ForwardDiff
using Pipe
using Gadfly
using Zygote
using Optimization
using OptimizationOptimJL
using SciMLSensitivity

-function calibrate!(data, model, parameters=Dict())
-    x -> model.f!(y, x, ...) + x
-    function loss(data,)
# Given data for exogenous variables
df = DataFrame(:G => 20 .+ 10 * rand(60))

# Define model
sim = Consistent.SIM()[:model]
exos = permutedims(Matrix(df[!, sim.exogenous_variables]))
lags = Consistent.SIM()[:lags]
params_dict = Consistent.SIM()[:params]
param_values = map(x -> params_dict[x], sim.parameters)

# Solve model for 59 periods
for i in 1:59
    # assume we have some randomness in our parameters
    solution = Consistent.solve(sim, lags, exos[:, begin:i], param_values + 0.05 * rand(3))
    global lags = hcat(lags, solution)
end

# Convert results to DataFrame
df = hcat(df, DataFrame(lags', sim.endogenous_variables))

function calibrate(data, model, opt_vars, opt_params, init_params)
    # Set up variables
    exos = permutedims(Matrix(data[!, model.exogenous_variables]))
    reference_results = permutedims(Matrix(data[!, model.endogenous_variables]))
    results = Matrix(data[!, model.endogenous_variables])' # lags in the first column
    results[:, 1] = Vector(data[1, model.endogenous_variables])'
    param_values = map(x -> init_params[x], model.parameters)
    param_opt_indices = Int[]
    for (i, p) in enumerate(model.parameters)
        if p in opt_params
            push!(param_opt_indices, i)
        end
    end
    var_opt_indices = Int[]
    for (i, var) in enumerate(model.endogenous_variables)
        if var in opt_vars
            push!(var_opt_indices, i)
        end
    end
    # ForwardDiff.derivative(() -> loss, input) # dead leftover: neither `loss` nor `input` is defined at this point
    initial = map(x -> init_params[x], opt_params)

    horizon = 2:(nrow(data))
    # println(horizon)

    # Define loss function
    function loss(p)
        # Zygote cannot differentiate through array mutation, so writes go
        # through Zygote.Buffer and are materialized again with copy(...)
        pvalues = Zygote.Buffer(param_values)
        pvalues[1:length(param_values)] = param_values
        pvalues[param_opt_indices] = p
        bresults = Zygote.Buffer(results)
        bresults[:] = results
        # sol = prognose!(bresults, horizon, model, exos, copy(pvalues); method=:broyden)
        sol = onestep_prognose!(bresults, results, horizon, model, exos, copy(pvalues); method=:broyden)
        # println(copy(bresults))
        # println(copy(pvalues))
        if sol == ReturnCode.Failure
            return Inf
        else
            # println(copy(bresults[:, 2:end]))
            return Consistent.msrmse(reference_results[:, 2:end], copy(bresults[:, 2:end]))
        end
    end

    # Numerical optimization: SAMIN (simulated annealing from Optim.jl) needs
    # the finite box bounds lb/ub and typically many iterations
    adtype = Optimization.AutoZygote()
    optf = Optimization.OptimizationFunction((x, p) -> loss(x), adtype)
    optprob = Optimization.OptimizationProblem(optf, initial, lb = [0.0, 0.0], ub = [1.0, 1.0])
    res = Optimization.solve(optprob, SAMIN(), maxiters = 50000)
    return res
end

model = SIM()[:model]
# df = DataFrame(reverse(lags[:, end-20:end-1], dims=2)', model.endogenous_variables) # would overwrite df and drop :G, which calibrate needs
df
sol = calibrate(df, sim, [:Y, :C], [:α_1, :α_2], Dict(:α_1 => 0.5, :α_2 => 0.5, :θ => 0.2))

# Compare to fit
fitted = deepcopy(lags)
prognose!(fitted, 2:60, sim, exos, vcat(0.2, sol.u); method=:broyden)
# prognose!(fitted, 2:60, sim, exos, vcat(0.2, [0.6, 0.4]); method=:broyden)

df_fitted = DataFrame(fitted', sim.endogenous_variables)
df_fitted[!, :period] = 1:nrow(df)
df_all = hcat(df, df_fitted, makeunique=true)
@pipe df_all |>
    select(_, [:Y, :Y_1, :C, :C_1, :period]) |>
    stack(_, Not(:period), variable_name=:variable) |>
    plot(
        _,
        x=:period,
        y=:value,
        color=:variable,
        Geom.line
    )
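For the comparison, vcat(0.2, sol.u) pins θ to its data-generating value 0.2 and appends the optimized α_1 and α_2 — matching the parameter order of Consistent.SIM() — while the commented-out line runs the same forecast with the true α values.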
24 changes: 22 additions & 2 deletions src/CombineModels.jl
@@ -2,15 +2,35 @@ import Base: +

function +(model1::Model, model2::Model)
    undetermined = findall(in(model2.endogenous_variables), model1.endogenous_variables)
-    @assert isempty(undetermined) "The endogenous variables $(model1[undetermined]) appear twice"
+    @assert isempty(undetermined) "The endogenous variables $(model1.endogenous_variables[undetermined]) appear twice"
    exos1 = filter(x -> !(x in model2.endogenous_variables), model1.exogenous_variables)
    exos2 = filter(
        x -> !((x in model1.endogenous_variables) || (x in exos1)), model2.exogenous_variables
    )
    return model(
        endos=Variables(vcat(model1.endogenous_variables, model2.endogenous_variables)),
        exos=Variables(vcat(exos1, exos2)),
-        params=Variables(vcat(model1.parameters, model2.parameters)),
+        params=Variables(unique(vcat(model1.parameters, model2.parameters))),
        eqs=Equations(vcat(model1.equations, model2.equations))
    )
end

function add_params(model1::Model, params::Variables, verbose=false)
    return model(
        endos=model1.endogenous_variables,
        exos=model1.exogenous_variables,
        params=Variables(unique(vcat(model1.parameters, params))),
        eqs=model1.equations,
        verbose=verbose
    )
end

function add_exos(model1::Model, exos::Variables, verbose=false)
    return model(
        endos=model1.endogenous_variables,
        exos=Variables(unique(vcat(model1.exogenous_variables, exos))),
        params=model1.parameters,
        eqs=model1.equations,
        verbose=verbose
    )
end
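In model1 + model2, any input that the other model determines endogenously is removed from the exogenous lists, duplicated endogenous variables are rejected, and (new in this commit) shared parameters are de-duplicated. A hedged usage sketch — model names are illustrative, and it assumes Variables accepts a vector of symbols and that any added exo actually appears in the equations (the macro layer errors on unused variables):

combined = consumption_block + government_block
extended = add_exos(combined, Consistent.Variables([:G_star]))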
4 changes: 3 additions & 1 deletion src/Consistent.jl
@@ -1,7 +1,7 @@
module Consistent

export @parameters, @equations, @variables
-export model, solve
+export model, solve, operators!, add_params, add_exos, prognose!, onestep_prognose!

include("Helpers.jl")
include("ModelComponents.jl")
@@ -11,6 +11,8 @@ include("ConstructResiduals.jl")
include("Macros.jl")
include("CombineModels.jl")
include("Solve.jl")
include("Loss.jl")
include("Prognose.jl")

# Godley/Lavoie
include("models/SIM.jl")
2 changes: 2 additions & 0 deletions src/Loss.jl
@@ -1,3 +1,5 @@
using Statistics

"""
Root-mean-square error of a time series.
"""
6 changes: 4 additions & 2 deletions src/Macros.jl
@@ -22,7 +22,7 @@ function build_f!(endos, exos, params, args)
    if issubset(variables, found)
        function_body = replace_vars(function_body, endos, exos, params, name[2:5])
    else
-        error("$(setdiff(variables, found)) unused!")
+        error("$(setdiff(variables, found)) unused!\n\nUsed: $found")
    end

    # construct function for residuals of model variables
@@ -91,4 +91,6 @@ function model(;
        eqs,
        deepcopy(Consistent.f!)
    )
end

operators!(x) = union!(math_operators, x)
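A hedged reading of the new one-liner: math_operators is the set of symbols the equation parser treats as operators rather than model variables, so something like operators!([:min, :max]) would register :min and :max before defining a model that uses them.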
2 changes: 1 addition & 1 deletion src/Model.jl
@@ -30,7 +30,7 @@ function Base.show(io::IO, m::Model)
    print(io, Crayon(foreground = :blue), descriptors[3]); println(io, Crayon(reset=true), m.parameters)
    print(io, Crayon(foreground = :red), descriptors[4]); print(io, Crayon(reset=true))
    for i in eachindex(m.equations)
-        additional_space = div(length(m.equations), 10) - div(i, 10)
+        additional_space = Int(floor(log10(length(m.equations)))) - Int(floor(log10(i)))
        print(io, "\n", ' '^(max_width + max(additional_space, 0)), "($i) ", m.equations[i])
    end
end
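The new padding right-aligns equation numbers by digit count — ⌊log10 n⌋ − ⌊log10 i⌋ extra spaces for equation i of n — where the old div(·, 10) arithmetic over-padded once the equation count grew beyond a couple of dozen.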
21 changes: 21 additions & 0 deletions src/Prognose.jl
@@ -0,0 +1,21 @@
function prognose!(results, horizon, model, exos, param_values; method=TrustRegion())
    # NOTE: `method` is accepted for API symmetry but not yet forwarded to solve_nonlinear
    for i in horizon
        sol = Consistent.solve_nonlinear(model, results[:, begin:i-1], exos, param_values, initial=results[:, i-1])
        if sol.retcode == ReturnCode.Failure
            return ReturnCode.Failure
        end
        results[:, i] = sol.u
    end
    return ReturnCode.Success
end

function onestep_prognose!(results, reference_results, horizon, model, exos, param_values; method=TrustRegion())
    for i in horizon
        sol = Consistent.solve_nonlinear(model, reference_results[:, begin:i-1], exos, param_values, initial=reference_results[:, i-1])
        if sol.retcode == ReturnCode.Failure
            return ReturnCode.Failure
        end
        results[:, i] = sol.u
    end
    return ReturnCode.Success
end
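prognose! produces a dynamic forecast: each solved period is written back into results and feeds the next period's lags, so errors compound. onestep_prognose! instead re-anchors every step on the observed reference_results, giving one-step-ahead predictions — which is why the calibration loss in examples/calibrate.jl uses it. A hedged usage sketch, assuming the names from that example are in scope:

fitted = deepcopy(lags)
prognose!(fitted, 2:size(fitted, 2), sim, exos, param_values) == ReturnCode.Success || @warn "solver failed"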
37 changes: 33 additions & 4 deletions src/Solve.jl
@@ -1,9 +1,38 @@
using NLsolve
using LinearAlgebra
import Optimization
using NonlinearSolve

-function solve(model, lags, exos, params, initial=fill(1.0, length(model.endogenous_variables)))
+function solve(model, lags, exos, params; initial=fill(1.0, length(model.endogenous_variables)), method=:newton)
    nlsolve(
-        (x, y) -> model.f!(x, y, lags, exos, params),
+        (F, x) -> model.f!(F, x, lags, exos, params),
        initial,
-        autodiff = :forward,
+        autodiff=:forward,
        method=method,
        # iterations = 500,
        ftol=1e-40,
        xtol=1e-40
    ).zero
end

function solve_nonlinear(model, lags, exos, params; initial=fill(1.0, length(model.endogenous_variables)), method=TrustRegion())
    prob = NonlinearProblem(
        (F, x, p) -> model.f!(F, x, lags, exos, p),
        initial,
        params,
        abstol=1e-40,
        reltol=1e-40
    )
    sol = NonlinearSolve.solve(prob, method)
    return sol
end

# function solve_optim(model, lags, exos, params; initial=fill(1.0, length(model.endogenous_variables)), method=:newton)
#     f = (x, y) -> model.f!(x, y, lags, exos, params)
#     prob = Optimization.OptimizationProblem(
#         x -> max(norm(f(x, y)) + abs(f(x, y)[1] - f(x, y)[1])),
#         initial,
#         Optimization.AutoForwardDiff()
#     )
#     sol = Optimization.solve(prob, Optim.BFGS())
# end
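solve keeps the original NLsolve path (now with a selectable method, :newton by default), while solve_nonlinear builds a SciML NonlinearProblem; only the latter is covered by the SciMLSensitivity adjoint that the Zygote-based calibration differentiates through. A hedged sketch, again assuming the SIM example data is in scope:

sol = Consistent.solve_nonlinear(sim, lags, exos, param_values; initial=lags[:, end])
sol.retcode == ReturnCode.Success && (y_next = sol.u)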
6 changes: 3 additions & 3 deletions src/Variables.jl
@@ -17,8 +17,8 @@ function create_missing_indices(line::Expr, vars::Set, symbs::Set)
    completed_line = deepcopy(line)
    head = completed_line.head
    args = completed_line.args
-    for i in length(args):-1:2 # FIXME: why?
-        if (typeof(args[i]) == Symbol) && (head == :call) && args[i] in vars # create index
+    for i in length(args):-1:1 # FIXME: why backwards?
+        if (typeof(args[i]) == Symbol) && !(head == :ref) && args[i] in vars # create index
            args[i] = :($(args[i])[0])
        elseif typeof(args[i]) == Expr # recursion for nested expressions
            args[i] = create_missing_indices(args[i], vars, symbs)
@@ -74,7 +74,7 @@ function create_vars( # FIXME: read properly
    position = findall(x -> x == args[1], exos)[1]
    if length(args) == 2
        if args[2] <= 0
-            completed_line.args = [name[3], :($position), Expr(:call, :-, :end, args[2])]
+            completed_line.args = [name[3], :($position), Expr(:call, :+, :end, args[2])]
        else
            error("future indices are not allowed!")
        end
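On the second fix: args[2] ≤ 0 holds for a lagged exogenous reference, so the column index must be end + args[2] (e.g. end - 1 for a one-period lag); the old Expr(:call, :-, :end, args[2]) computed end - args[2] ≥ end, a future index of exactly the kind the else branch forbids.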