Commit 1ee3fe3

Updated README, documentation.

samuelsonric committed Jun 29, 2023
1 parent 3e21016 commit 1ee3fe3

Showing 7 changed files with 125 additions and 88 deletions.
14 changes: 10 additions & 4 deletions README.md

@@ -15,23 +15,29 @@ notebooks and an API.
 using AlgebraicInference
 using Catlab.Programs

-wd = @relation (x,) begin
+wd = @relation (x,) where (x::X, y::Y) begin
     prior(x)
     likelihood(x, y)
     evidence(y)
 end

-hm = Dict(
+hom_map = Dict(
     :prior => normal(0, 1),           # x ~ N(0, 1)
     :likelihood => kernel([1], 0, 1), # y | x ~ N(x, 1)
     :evidence => normal(2, 0))        # y = 2

+ob_map = Dict(
+    :X => 1, # x ∈ ℝ¹
+    :Y => 1) # y ∈ ℝ¹
+
+ob_attr = :junction_type
+
 # Solve directly.
-Σ = oapply(wd, hm)
+Σ = oapply(wd, hom_map, ob_map; ob_attr)

 # Solve using belief propagation.
 T = DenseGaussianSystem{Float64}
-Σ = solve(InferenceProblem{T}(wd, hm), MinFill())
+Σ = solve(InferenceProblem{T, Int}(wd, hom_map, ob_map; ob_attr), MinFill())
 ```

 ![inference](./inference.svg)
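
For orientation, here is a minimal sketch (assuming the README example above has been run) of how the returned system is queried; `mean` and `cov` are the same accessors the regression example later in this commit uses.

```julia
# Minimal sketch, assuming the README example above has been run.
# Σ is a GaussianSystem representing the posterior of x given y = 2.
μ = mean(Σ)  # posterior mean vector
V = cov(Σ)   # posterior covariance matrix
```
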
39 changes: 21 additions & 18 deletions docs/literate/kalman.jl

@@ -2,7 +2,6 @@
 using AlgebraicInference
 using BenchmarkTools
 using Catlab.Graphics, Catlab.Programs, Catlab.WiringDiagrams
-using Catlab.WiringDiagrams.MonoidalUndirectedWiringDiagrams: UntypedHypergraphDiagram
 using Distributions
 using FillArrays
 using LinearAlgebra
@@ -27,6 +26,7 @@ B = [
     1.3 0.0
     0.0 0.7
 ]
+
 P = [
     0.05 0.0
     0.0 0.05
@@ -53,21 +53,17 @@ end;
 # observations of ``(z_1, \dots, z_n)``. The function `kalman` constructs a wiring diagram
 # that represents the filtering problem.
 function kalman_step(i)
-    kf = UntypedHypergraphDiagram{String}(2)
-    add_box!(kf, 2; name="state")
-    add_box!(kf, 4; name="predict")
-    add_box!(kf, 4; name="measure")
-    add_box!(kf, 2; name="z$i")
+    kf = HypergraphDiagram{String, String}(["X"])
+    add_box!(kf, ["X"]; name="state")
+    add_box!(kf, ["X", "X"]; name="predict")
+    add_box!(kf, ["X", "Z"]; name="measure")
+    add_box!(kf, ["Z"]; name="z$i")

     add_wires!(kf, [
-        (0, 1) => (2, 3),
-        (0, 2) => (2, 4),
+        (0, 1) => (2, 2),
         (1, 1) => (2, 1),
         (1, 1) => (3, 1),
-        (1, 2) => (2, 2),
-        (1, 2) => (3, 2),
-        (3, 3) => (4, 1),
-        (3, 4) => (4, 2)])
+        (3, 2) => (4, 1)])

     kf
 end
@@ -80,19 +76,26 @@ to_graphviz(kalman(5), box_labels=:name; implicit_junctions=true)
 # We generate ``100`` points of data and solve the filtering problem.
 n = 100; kf = kalman(n); data = generate_data(n)

-dm = Dict("z$i" => normal(data[i], Zeros(2, 2)) for i in 1:n)
+evidence = Dict("z$i" => normal(data[i], Zeros(2, 2)) for i in 1:n)

-hm = Dict(
-    dm...,
+hom_map = Dict(
+    evidence...,
     "state" => normal(Zeros(2), 100I(2)),
     "predict" => kernel(A, Zeros(2), P),
     "measure" => kernel(B, Zeros(2), Q))

-mean(oapply(kf, hm))
+ob_map = Dict(
+    "X" => 2,
+    "Z" => 2)
+
+ob_attr = :junction_type
+
+mean(oapply(kf, hom_map, ob_map; ob_attr))
 #
-@benchmark oapply(kf, hm)
+@benchmark oapply(kf, hom_map, ob_map; ob_attr)
 # Since the filtering problem is large, we may wish to solve it using belief propagation.
-ip = InferenceProblem{DenseGaussianSystem{Float64}}(kf, hm)
+T = DenseGaussianSystem{Float64}
+ip = InferenceProblem{T, Int}(kf, hom_map, ob_map; ob_attr)
 is = init(ip, MinFill())

 mean(solve(is))
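
The helper `generate_data` is collapsed out of this diff. For readers following along: the kernels above encode the state-space model ``x_{i+1} \mid x_i \sim \mathcal{N}(A x_i, P)`` and ``z_i \mid x_i \sim \mathcal{N}(B x_i, Q)`` with prior ``x_1 \sim \mathcal{N}(0, 100I)``, per the `kernel` convention shown in the README section. A hypothetical stand-in for the collapsed helper — an illustration consistent with that model, not the repository's actual implementation — could look like:

```julia
using Distributions

# Hypothetical sketch, not the repository's code: simulate the
# linear-Gaussian model implied by the kernels above.
function generate_data(n; x = zeros(2))
    data = Vector{Vector{Float64}}(undef, n)
    for i in 1:n
        x = rand(MvNormal(A * x, P))        # predict: advance the hidden state
        data[i] = rand(MvNormal(B * x, Q))  # measure: noisy observation z_i
    end
    data
end
```
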
46 changes: 29 additions & 17 deletions docs/literate/regression.jl

@@ -46,28 +46,34 @@ Q = I - X * pinv(X)
 β̂ = pinv(X) * (I - pinv(Q * W * Q) * Q * W)' * y
 # To solve for ``\hat{\beta}`` using AlgebraicInference.jl, we construct an undirected
 # wiring diagram.
-wd = @relation (a₁, a₂) begin
-    X(a₁, a₂, b₁, b₂, b₃)
-    +(b₁, b₂, b₃, c₁, c₂, c₃, d₁, d₂, d₃)
-    ϵ(c₁, c₂, c₃)
-    y(d₁, d₂, d₃)
+wd = @relation (a,) where (a::m, b::n, c::n, d::n) begin
+    X(a, b)
+    +(b, c, d)
+    ϵ(c)
+    y(d)
 end

 to_graphviz(wd; box_labels=:name, implicit_junctions=true)
 # Then we assign values to the boxes in `wd` and compute the result.
-P = [
+P = [
     1 0 0 1 0 0
     0 1 0 0 1 0
     0 0 1 0 0 1
 ]

-hm = Dict(
+hom_map = Dict(
     :X => kernel(X, Zeros(3), Zeros(3, 3)),
     :+ => kernel(P, Zeros(3), Zeros(3, 3)),
     :ϵ => normal(Zeros(3), W),
     :y => normal(y, Zeros(3, 3)))

-β̂ = mean(oapply(wd, hm))
+ob_map = Dict(
+    :m => 2,
+    :n => 3)
+
+ob_attr = :junction_type
+
+β̂ = mean(oapply(wd, hom_map, ob_map; ob_attr))
 # ## Bayesian Linear Regression
 # Let ``\rho = \mathcal{N}(m, V)`` be our prior belief about ``\beta``. Then our posterior
 # belief ``\hat{\rho}`` is a bivariate normal distribution with mean
@@ -93,26 +99,32 @@ m̂ = m - V * X' * pinv(X * V * X' + W) * (X * m - y)
 V̂ = V - V * X' * pinv(X * V * X' + W) * X * V
 # To solve for ``\hat{\rho}`` using AlgebraicInference.jl, we construct an undirected
 # wiring diagram.
-wd = @relation (a₁, a₂) begin
-    ρ(a₁, a₂)
-    X(a₁, a₂, b₁, b₂, b₃)
-    +(b₁, b₂, b₃, c₁, c₂, c₃, d₁, d₂, d₃)
-    ϵ(c₁, c₂, c₃)
-    y(d₁, d₂, d₃)
+wd = @relation (a,) where (a::m, b::n, c::n, d::n) begin
+    ρ(a)
+    X(a, b)
+    +(b, c, d)
+    ϵ(c)
+    y(d)
 end

 to_graphviz(wd; box_labels=:name, implicit_junctions=true)
 # Then we assign values to the boxes in `wd` and compute the result.
-hm = Dict(
+hom_map = Dict(
     :ρ => normal(m, V),
     :X => kernel(X, Zeros(3), Zeros(3, 3)),
     :+ => kernel(P, Zeros(3), Zeros(3, 3)),
     :ϵ => normal(Zeros(3), W),
     :y => normal(y, Zeros(3, 3)))

-m̂ = mean(oapply(wd, hm))
+ob_map = Dict(
+    :m => 2,
+    :n => 3)
+
+ob_attr = :junction_type
+
+m̂ = mean(oapply(wd, hom_map, ob_map; ob_attr))
 #
-V̂ = cov(oapply(wd, hm))
+V̂ = cov(oapply(wd, hom_map, ob_map; ob_attr))
 #
 covellipse!(m, V, aspect_ratio=:equal, label="prior")
 covellipse!(m̂, V̂, aspect_ratio=:equal, label="posterior")
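
One detail worth spelling out (a quick sketch, assuming the definitions above): the matrix `P` realizes the `+` box by sending the concatenated junction vectors for `b` and `c` to their sum, so `kernel(P, Zeros(3), Zeros(3, 3))` is a deterministic addition under the `kernel` convention used in this commit.

```julia
using LinearAlgebra

# P = [I I] maps the concatenated vector (b, c) ∈ ℝ⁶ to b + c ∈ ℝ³.
P = [I(3) I(3)]
b, c = rand(3), rand(3)
@assert P * vcat(b, c) ≈ b + c
```
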
8 changes: 4 additions & 4 deletions docs/src/api.md

@@ -3,7 +3,7 @@

 ```@docs
 GaussianSystem
-GaussianSystem(::AbstractMatrix, ::AbstractMatrix, ::AbstractVector, ::AbstractVector, ::Any)
+GaussianSystem(::AbstractMatrix, ::AbstractMatrix, ::AbstractVector, ::AbstractVector, ::Real)

 normal
 kernel
@@ -14,7 +14,7 @@ invcov(::GaussianSystem)
 var(::GaussianSystem)
 mean(::GaussianSystem)

-oapply(::AbstractUWD, ::AbstractVector{<:GaussianSystem})
+oapply(::AbstractUWD, ::AbstractVector{<:GaussianSystem}, ::AbstractVector)
 ```

 ## Problems
@@ -23,8 +23,8 @@ InferenceProblem
 MinDegree
 MinFill

-InferenceProblem{T}(::AbstractUWD, ::AbstractDict, ::Union{Nothing, AbstractDict}) where T
-InferenceProblem{T}(::AbstractUWD, ::AbstractVector, ::Union{Nothing, AbstractVector}) where T
+InferenceProblem{T₁, T₂}(::AbstractUWD, ::AbstractDict, ::AbstractDict) where {T₁, T₂}
+InferenceProblem{T₁, T₂}(::AbstractUWD, ::AbstractVector, ::AbstractVector) where {T₁, T₂}

 solve(::InferenceProblem, alg)
 init(::InferenceProblem, alg)
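
The constructor signatures above pair with `solve` and `init` as in the examples elsewhere in this commit; here is a minimal sketch of that workflow, reusing the names from the README example (nothing here is new API):

```julia
T = DenseGaussianSystem{Float64}
ip = InferenceProblem{T, Int}(wd, hom_map, ob_map; ob_attr)

Σ = solve(ip, MinFill())  # solve the problem in one shot

is = init(ip, MinFill())  # or construct an inference solver...
Σ = solve(is)             # ...and run belief propagation
```
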
39 changes: 21 additions & 18 deletions docs/src/generated/kalman.md

@@ -8,7 +8,6 @@ EditURL = "<unknown>/literate/kalman.jl"
 using AlgebraicInference
 using BenchmarkTools
 using Catlab.Graphics, Catlab.Programs, Catlab.WiringDiagrams
-using Catlab.WiringDiagrams.MonoidalUndirectedWiringDiagrams: UntypedHypergraphDiagram
 using Distributions
 using FillArrays
 using LinearAlgebra
@@ -37,6 +36,7 @@ B = [
     1.3 0.0
     0.0 0.7
 ]
+
 P = [
     0.05 0.0
     0.0 0.05
@@ -68,21 +68,17 @@ that represents the filtering problem.

 ````@example kalman
 function kalman_step(i)
-    kf = UntypedHypergraphDiagram{String}(2)
-    add_box!(kf, 2; name="state")
-    add_box!(kf, 4; name="predict")
-    add_box!(kf, 4; name="measure")
-    add_box!(kf, 2; name="z$i")
+    kf = HypergraphDiagram{String, String}(["X"])
+    add_box!(kf, ["X"]; name="state")
+    add_box!(kf, ["X", "X"]; name="predict")
+    add_box!(kf, ["X", "Z"]; name="measure")
+    add_box!(kf, ["Z"]; name="z$i")

     add_wires!(kf, [
-        (0, 1) => (2, 3),
-        (0, 2) => (2, 4),
+        (0, 1) => (2, 2),
         (1, 1) => (2, 1),
         (1, 1) => (3, 1),
-        (1, 2) => (2, 2),
-        (1, 2) => (3, 2),
-        (3, 3) => (4, 1),
-        (3, 4) => (4, 2)])
+        (3, 2) => (4, 1)])

     kf
 end
@@ -99,25 +95,32 @@ We generate ``100`` points of data and solve the filtering problem.

 ````@example kalman
 n = 100; kf = kalman(n); data = generate_data(n)

-dm = Dict("z$i" => normal(data[i], Zeros(2, 2)) for i in 1:n)
+evidence = Dict("z$i" => normal(data[i], Zeros(2, 2)) for i in 1:n)

-hm = Dict(
-    dm...,
+hom_map = Dict(
+    evidence...,
     "state" => normal(Zeros(2), 100I(2)),
     "predict" => kernel(A, Zeros(2), P),
     "measure" => kernel(B, Zeros(2), Q))

-mean(oapply(kf, hm))
+ob_map = Dict(
+    "X" => 2,
+    "Z" => 2)
+
+ob_attr = :junction_type
+
+mean(oapply(kf, hom_map, ob_map; ob_attr))
 ````

 ````@example kalman
-@benchmark oapply(kf, hm)
+@benchmark oapply(kf, hom_map, ob_map; ob_attr)
 ````

 Since the filtering problem is large, we may wish to solve it using belief propagation.

 ````@example kalman
-ip = InferenceProblem{DenseGaussianSystem{Float64}}(kf, hm)
+T = DenseGaussianSystem{Float64}
+ip = InferenceProblem{T, Int}(kf, hom_map, ob_map; ob_attr)
 is = init(ip, MinFill())

 mean(solve(is))
44 changes: 28 additions & 16 deletions docs/src/generated/regression.md

@@ -60,11 +60,11 @@ To solve for ``\hat{\beta}`` using AlgebraicInference.jl, we construct an undirected
 wiring diagram.

 ````@example regression
-wd = @relation (a₁, a₂) begin
-    X(a₁, a₂, b₁, b₂, b₃)
-    +(b₁, b₂, b₃, c₁, c₂, c₃, d₁, d₂, d₃)
-    ϵ(c₁, c₂, c₃)
-    y(d₁, d₂, d₃)
+wd = @relation (a,) where (a::m, b::n, c::n, d::n) begin
+    X(a, b)
+    +(b, c, d)
+    ϵ(c)
+    y(d)
 end
 to_graphviz(wd; box_labels=:name, implicit_junctions=true)
@@ -79,13 +79,19 @@ P = [
     0 0 1 0 0 1
 ]

-hm = Dict(
+hom_map = Dict(
     :X => kernel(X, Zeros(3), Zeros(3, 3)),
     :+ => kernel(P, Zeros(3), Zeros(3, 3)),
     :ϵ => normal(Zeros(3), W),
     :y => normal(y, Zeros(3, 3)))

-β̂ = mean(oapply(wd, hm))
+ob_map = Dict(
+    :m => 2,
+    :n => 3)
+
+ob_attr = :junction_type
+
+β̂ = mean(oapply(wd, hom_map, ob_map; ob_attr))
 ````

 ## Bayesian Linear Regression
@@ -121,12 +127,12 @@ To solve for ``\hat{\rho}`` using AlgebraicInference.jl, we construct an undirected
 wiring diagram.

 ````@example regression
-wd = @relation (a₁, a₂) begin
-    ρ(a₁, a₂)
-    X(a₁, a₂, b₁, b₂, b₃)
-    +(b₁, b₂, b₃, c₁, c₂, c₃, d₁, d₂, d₃)
-    ϵ(c₁, c₂, c₃)
-    y(d₁, d₂, d₃)
+wd = @relation (a,) where (a::m, b::n, c::n, d::n) begin
+    ρ(a)
+    X(a, b)
+    +(b, c, d)
+    ϵ(c)
+    y(d)
 end
 to_graphviz(wd; box_labels=:name, implicit_junctions=true)
@@ -135,18 +141,24 @@ to_graphviz(wd; box_labels=:name, implicit_junctions=true)

 Then we assign values to the boxes in `wd` and compute the result.

 ````@example regression
-hm = Dict(
+hom_map = Dict(
     :ρ => normal(m, V),
     :X => kernel(X, Zeros(3), Zeros(3, 3)),
     :+ => kernel(P, Zeros(3), Zeros(3, 3)),
     :ϵ => normal(Zeros(3), W),
     :y => normal(y, Zeros(3, 3)))

-m̂ = mean(oapply(wd, hm))
+ob_map = Dict(
+    :m => 2,
+    :n => 3)
+
+ob_attr = :junction_type
+
+m̂ = mean(oapply(wd, hom_map, ob_map; ob_attr))
 ````

 ````@example regression
-V̂ = cov(oapply(wd, hm))
+V̂ = cov(oapply(wd, hom_map, ob_map; ob_attr))
 ````

 ````@example regression