Skip to content

Commit

Permalink
Small performance tweaks (#772)
Browse files Browse the repository at this point in the history
* minor tweaks to improve type inference

* another type restriction

* fix NEWS and version number

* fixes

* simplify convenience constructors for OptSummary

* fix use of type parameters in defaults

* NEWS

* tweaks

* JuliaFormatter

* abstract near-zero checks into configurable params

* technically we've added a feature....
  • Loading branch information
palday authored Jun 25, 2024
1 parent ce5b604 commit 34899cf
Show file tree
Hide file tree
Showing 6 changed files with 61 additions and 68 deletions.
14 changes: 13 additions & 1 deletion NEWS.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,16 @@
MixedModels v4.25 Release Notes
==============================
- Add type notations in `pwrss(::LinearMixedModel)` and `logdet(::LinearMixedModel)` to enhance type inference. [#773]
- Take advantage of type parameter for `StatsAPI.weights(::LinearMixedModel{T})`. [#772]
- Fix use of kwargs in `fit!(::LinearMixedModel)`: [#772]
- user-specified `σ` is actually used, defaulting to existing value
- `REML` defaults to model's already specified REML value.
- Clean up code of keyword convenience constructor for `OptSummary`. [#772]
- Refactor thresholding parameters for forcing near-zero parameter values into `OptSummary`. [#772]

MixedModels v4.24.1 Release Notes
==============================
Add type notations in `pwrss(::LinearMixedModel)` and `logdet(::LinearMixedModel)` to enhance type inference. [#773]
- Re-export accidentally dropped export `lrtest`. [#769]

MixedModels v4.24.0 Release Notes
==============================
Expand Down Expand Up @@ -525,4 +535,6 @@ Package dependencies
[#755]: https://github.com/JuliaStats/MixedModels.jl/issues/755
[#756]: https://github.com/JuliaStats/MixedModels.jl/issues/756
[#767]: https://github.com/JuliaStats/MixedModels.jl/issues/767
[#769]: https://github.com/JuliaStats/MixedModels.jl/issues/769
[#772]: https://github.com/JuliaStats/MixedModels.jl/issues/772
[#773]: https://github.com/JuliaStats/MixedModels.jl/issues/773
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "MixedModels"
uuid = "ff71e718-51f3-5ec2-a782-8ffcbfa3c316"
authors = ["Phillip Alday <[email protected]>", "Douglas Bates <[email protected]>", "Jose Bayoan Santiago Calderon <[email protected]>"]
version = "4.24.1"
version = "4.25.0"

[deps]
Arrow = "69666777-d1a9-59fb-9406-91d4454c9d45"
Expand Down
2 changes: 1 addition & 1 deletion src/MixedModels.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ using LinearAlgebra: ldiv!, lmul!, logdet, mul!, norm, normalize, normalize!, qr
using LinearAlgebra: rank, rdiv!, rmul!, svd, tril!
using Markdown: Markdown
using MixedModelsDatasets: dataset, datasets
using NLopt: NLopt, Opt, ftol_abs, ftol_rel, initial_step, xtol_abs, xtol_rel
using NLopt: NLopt, Opt
using PooledArrays: PooledArrays, PooledArray
using PrecompileTools: PrecompileTools, @setup_workload, @compile_workload
using ProgressMeter: ProgressMeter, Progress, ProgressUnknown, finish!, next!
Expand Down
4 changes: 2 additions & 2 deletions src/generalizedlinearmixedmodel.jl
Original file line number Diff line number Diff line change
Expand Up @@ -309,13 +309,13 @@ function StatsAPI.fit!(
## check if very small parameter values bounded below by zero can be set to zero
xmin_ = copy(xmin)
for i in eachindex(xmin_)
if iszero(optsum.lowerbd[i]) && zero(T) < xmin_[i] < T(0.001)
if iszero(optsum.lowerbd[i]) && zero(T) < xmin_[i] < optsum.xtol_zero_abs
xmin_[i] = zero(T)
end
end
loglength = length(fitlog)
if xmin ≠ xmin_
if (zeroobj = obj(xmin_, T[])) ≤ (fmin + 1.e-5)
if (zeroobj = obj(xmin_, T[])) ≤ (fmin + optsum.ftol_zero_abs)
fmin = zeroobj
copyto!(xmin, xmin_)
elseif length(fitlog) > loglength
Expand Down
21 changes: 11 additions & 10 deletions src/linearmixedmodel.jl
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@ struct LinearMixedModel{T<:AbstractFloat} <: MixedModel{T}
sqrtwts::Vector{T}
parmap::Vector{NTuple{3,Int}}
dims::NamedTuple{(:n, :p, :nretrms),NTuple{3,Int}}
A::Vector{AbstractMatrix{T}} # cross-product blocks
L::Vector{AbstractMatrix{T}}
A::Vector{<:AbstractMatrix{T}} # cross-product blocks
L::Vector{<:AbstractMatrix{T}}
optsum::OptSummary{T}
end

Expand Down Expand Up @@ -175,7 +175,7 @@ function LinearMixedModel(
A, L = createAL(reterms, Xy)
lbd = foldl(vcat, lowerbd(c) for c in reterms)
θ = foldl(vcat, getθ(c) for c in reterms)
optsum = OptSummary(θ, lbd, :LN_BOBYQA; ftol_rel=T(1.0e-12), ftol_abs=T(1.0e-8))
optsum = OptSummary(θ, lbd)
optsum.sigma = isnothing(σ) ? nothing : T(σ)
fill!(optsum.xtol_abs, 1.0e-10)
return LinearMixedModel(
Expand Down Expand Up @@ -408,7 +408,7 @@ function createAL(reterms::Vector{<:AbstractReMat{T}}, Xy::FeMat{T}) where {T}
end
end
end
return A, L
return identity.(A), identity.(L)
end

StatsAPI.deviance(m::LinearMixedModel) = objective(m)
Expand All @@ -431,8 +431,8 @@ function feL(m::LinearMixedModel)
end

"""
fit!(m::LinearMixedModel; progress::Bool=true, REML::Bool=false,
σ::Union{Real, Nothing}=nothing,
fit!(m::LinearMixedModel; progress::Bool=true, REML::Bool=m.optsum.REML,
σ::Union{Real, Nothing}=m.optsum.sigma,
thin::Int=typemax(Int))
Optimize the objective of a `LinearMixedModel`. When `progress` is `true` a
Expand All @@ -445,8 +445,8 @@ saved in `m.optsum.fitlog`.
function StatsAPI.fit!(
m::LinearMixedModel{T};
progress::Bool=true,
REML::Bool=false,
σ::Union{Real,Nothing}=nothing,
REML::Bool=m.optsum.REML,
σ::Union{Real,Nothing}=m.optsum.sigma,
thin::Int=typemax(Int),
) where {T}
optsum = m.optsum
Expand All @@ -461,6 +461,7 @@ function StatsAPI.fit!(
end
opt = Opt(optsum)
optsum.REML = REML
optsum.sigma = σ
prog = ProgressUnknown(; desc="Minimizing", showspeed=true)
# start from zero for the initial call to obj before optimization
iter = 0
Expand Down Expand Up @@ -511,13 +512,13 @@ function StatsAPI.fit!(
xmin_ = copy(xmin)
lb = optsum.lowerbd
for i in eachindex(xmin_)
if iszero(lb[i]) && zero(T) < xmin_[i] < T(0.001)
if iszero(lb[i]) && zero(T) < xmin_[i] < optsum.xtol_zero_abs
xmin_[i] = zero(T)
end
end
loglength = length(fitlog)
if xmin ≠ xmin_
if (zeroobj = obj(xmin_, T[])) ≤ (fmin + 1.e-5)
if (zeroobj = obj(xmin_, T[])) ≤ (fmin + optsum.ftol_zero_abs)
fmin = zeroobj
copyto!(xmin, xmin_)
elseif length(fitlog) > loglength
Expand Down
86 changes: 33 additions & 53 deletions src/optsummary.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,75 +19,55 @@ Summary of an `NLopt` optimization
* `feval`: the number of function evaluations
* `optimizer`: the name of the optimizer used, as a `Symbol`
* `returnvalue`: the return value, as a `Symbol`
* `xtol_zero_abs`: the tolerance for a near zero parameter to be considered practically zero
* `ftol_zero_abs`: the tolerance for change in the objective for setting a near zero parameter to zero
* `fitlog`: A vector of tuples of parameter and objectives values from steps in the optimization
* `nAGQ`: number of adaptive Gauss-Hermite quadrature points in deviance evaluation for GLMMs
* `REML`: use the REML criterion for LMM fits
* `sigma`: a priori value for the residual standard deviation for LMM
* `fitlog`: A vector of tuples of parameter and objectives values from steps in the optimization
The latter four fields are MixedModels functionality and not related directly to the `NLopt` package or algorithms.
The last three fields are MixedModels functionality and not related directly to the `NLopt` package or algorithms.
!!! note
The internal storage of the parameter values within `fitlog` may change in
the future to use a different subtype of `AbstractVector` (e.g., `StaticArrays.SVector`)
for each snapshot without being considered a breaking change.
"""
mutable struct OptSummary{T<:AbstractFloat}
Base.@kwdef mutable struct OptSummary{T<:AbstractFloat}
initial::Vector{T}
lowerbd::Vector{T}
finitial::T
ftol_rel::T
ftol_abs::T
xtol_rel::T
xtol_abs::Vector{T}
initial_step::Vector{T}
maxfeval::Int
maxtime::T
feval::Int
final::Vector{T}
fmin::T
optimizer::Symbol
returnvalue::Symbol
nAGQ::Integer # don't really belong here but I needed a place to store them
REML::Bool
sigma::Union{T,Nothing}
fitlog::Vector{Tuple{Vector{T},T}} # not SVector because we would need to parameterize on size (which breaks GLMM)
# the @kwdef macro isn't quite smart enough for us to use the type parameter
# for the default values, but we can fake it
finitial::T = Inf * one(eltype(initial))
ftol_rel::T = eltype(initial)(1.0e-12)
ftol_abs::T = eltype(initial)(1.0e-8)
xtol_rel::T = zero(eltype(initial))
xtol_abs::Vector{T} = zero(initial) .+ 1e-10
initial_step::Vector{T} = empty(initial)
maxfeval::Int = -1
maxtime::T = -one(eltype(initial))
feval::Int = -1
final::Vector{T} = copy(initial)
fmin::T = Inf * one(eltype(initial))
optimizer::Symbol = :LN_BOBYQA
returnvalue::Symbol = :FAILURE
xtol_zero_abs::T = eltype(initial)(0.001)
ftol_zero_abs::T = eltype(initial)(1.e-5)
# not SVector because we would need to parameterize on size (which breaks GLMM)
fitlog::Vector{Tuple{Vector{T},T}} = [(initial, fmin)]
# don't really belong here but I needed a place to store them
nAGQ::Int = 1
REML::Bool = false
sigma::Union{T,Nothing} = nothing
end

function OptSummary(
initial::Vector{T},
lowerbd::Vector{T},
optimizer::Symbol;
ftol_rel::T=zero(T),
ftol_abs::T=zero(T),
xtol_rel::T=zero(T),
xtol_abs::Vector{T}=zero(initial) .+ 1e-10,
initial_step::Vector{T}=T[],
maxfeval=-1,
maxtime=T(-1),
) where {T<:AbstractFloat}
fitlog = [(initial, T(Inf))]

return OptSummary(
initial,
lowerbd,
T(Inf),
ftol_rel,
ftol_abs,
xtol_rel,
xtol_abs,
initial_step,
maxfeval,
maxtime,
-1,
copy(initial),
T(Inf),
optimizer,
:FAILURE,
1,
false,
nothing,
fitlog,
)
lowerbd::Vector{S},
optimizer::Symbol=:LN_BOBYQA; kwargs...,
) where {T<:AbstractFloat,S<:AbstractFloat}
TS = promote_type(T, S)
return OptSummary{TS}(; initial, lowerbd, optimizer, kwargs...)
end

"""
Expand Down

2 comments on commit 34899cf

@palday
Copy link
Member Author

@palday palday commented on 34899cf Jun 25, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/109764

Tip: Release Notes

Did you know you can add release notes too? Just add markdown formatted text underneath the comment after the text
"Release notes:" and it will be added to the registry PR, and if TagBot is installed it will also be added to the
release that TagBot creates. i.e.

@JuliaRegistrator register

Release notes:

## Breaking changes

- blah

To add them here just re-invoke and the PR will be updated.

Tagging

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v4.25.0 -m "<description of version>" 34899cf6d95abb2df8c961c9de5a6a500351ebff
git push origin v4.25.0

Please sign in to comment.