Commit

Merge branch 'main' into update-contributor-guide
timmens authored Feb 20, 2024
2 parents 27f89e1 + 28ca664 commit 159bb15
Showing 74 changed files with 89 additions and 20 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -57,7 +57,7 @@ repos:
- id: yamllint
exclude: tests/optimization/fixtures
- repo: https://github.com/psf/black
rev: 23.11.0
rev: 24.1.1
hooks:
- id: black
language_version: python3.10
@@ -79,7 +79,7 @@ repos:
- --blank
exclude: src/estimagic/optimization/algo_options.py
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.6
rev: v0.2.0
hooks:
- id: ruff
- repo: https://github.com/nbQA-dev/nbQA
1 change: 1 addition & 0 deletions src/estimagic/batch_evaluators.py
@@ -4,6 +4,7 @@
can be used used as batch evaluator in estimagic.
"""

from joblib import Parallel, delayed

try:
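Most files in this commit change in the same way as batch_evaluators.py above: with black bumped from 23.11.0 to 24.1.1 in the pre-commit config, the formatter inserts a blank line between a module's docstring and the first import. A minimal sketch of the pattern on a hypothetical module, not code taken from the repository:

"""Hypothetical example module, used only to illustrate the reformatting."""

import numpy as np  # the blank line above the import is what the update adds


def sum_of_squares(x):
    # unchanged body; only the spacing around the module docstring differs
    return np.sum(x**2)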
3 changes: 2 additions & 1 deletion src/estimagic/benchmarking/cartis_roberts.py
@@ -14,6 +14,7 @@
- https://vanderbei.princeton.edu/ampl/nlmodels/cute/index.html
"""

from functools import partial

import numpy as np
@@ -5128,7 +5129,7 @@ def get_start_points_methanl8():
"start_x": [-1] * 98,
"solution_x": None,
"start_criterion": 3.2160e4,
"solution_criterion": None
"solution_criterion": None,
# we found a lower minimum than Cartis and Roberts (2019) at 1651.837;
# according to table 3 in their paper, the minimum is at 4292.197.
# We suspect, however, that the true optimum is even lower.
1 change: 1 addition & 0 deletions src/estimagic/benchmarking/more_wild.py
@@ -20,6 +20,7 @@
- Whether the base start vector is multiplied by a factor of ten or not (column 4).
"""

from functools import partial

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/benchmarking/run_benchmark.py
@@ -7,6 +7,7 @@
- Add option for deterministic noise or wiggle.
"""

import numpy as np

from estimagic import batch_evaluators
1 change: 1 addition & 0 deletions src/estimagic/cli.py
@@ -1,4 +1,5 @@
"""This module comprises all CLI capabilities of estimagic."""

import click

from estimagic.dashboard.run_dashboard import run_dashboard
1 change: 1 addition & 0 deletions src/estimagic/dashboard/dashboard_app.py
@@ -1,4 +1,5 @@
"""Show the development of one database's criterion and parameters over time."""

from functools import partial
from pathlib import Path

1 change: 1 addition & 0 deletions src/estimagic/dashboard/plot_functions.py
@@ -1,4 +1,5 @@
"""Helper functions for the dashboard."""

from bokeh.models import HoverTool, Legend
from bokeh.plotting import figure

1 change: 1 addition & 0 deletions src/estimagic/decorators.py
@@ -23,6 +23,7 @@
https://realpython.com/primer-on-python-decorators/
"""

import functools
import inspect
import warnings
8 changes: 5 additions & 3 deletions src/estimagic/differentiation/derivatives.py
@@ -747,9 +747,11 @@ def _convert_evals_to_numpy(
evals = [val.astype(float) if not _is_scalar_nan(val) else val for val in evals]
else:
evals = [
np.array(tree_leaves(val, registry=registry), dtype=np.float64)
if not _is_scalar_nan(val)
else val
(
np.array(tree_leaves(val, registry=registry), dtype=np.float64)
if not _is_scalar_nan(val)
else val
)
for val in evals
]

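The derivatives.py hunk above shows a second pattern of the formatter update: a conditional expression that must be split over several lines is now wrapped in its own pair of parentheses. A short sketch of the same layout with made-up names, not repository code:

# Hypothetical input: convert non-NaN entries to float, leave NaN values alone.
evals = [1.0, float("nan"), 2.5]

converted = [
    (
        float(val)  # the whole conditional is parenthesized when it is split
        if val == val  # NaN is the only float that is not equal to itself
        else val
    )
    for val in evals
]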
1 change: 1 addition & 0 deletions src/estimagic/differentiation/finite_differences.py
@@ -8,6 +8,7 @@
warnings or errors for that case.
"""

from typing import NamedTuple

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/estimation/estimate_msm.py
@@ -1,4 +1,5 @@
"""Do a method of simlated moments estimation."""

import functools
import warnings
from collections.abc import Callable
1 change: 1 addition & 0 deletions src/estimagic/examples/logit.py
@@ -1,4 +1,5 @@
"""Likelihood functions and derivatives of a logit model."""

import numpy as np
import pandas as pd

1 change: 1 addition & 0 deletions src/estimagic/examples/numdiff_functions.py
@@ -8,6 +8,7 @@
We pickled them so we don't need statsmodels as a dependency.
"""

import numpy as np
from scipy.stats import norm

1 change: 1 addition & 0 deletions src/estimagic/inference/ml_covs.py
@@ -1,4 +1,5 @@
"""Functions for inferences in maximum likelihood models."""

import numpy as np
import pandas as pd

1 change: 1 addition & 0 deletions src/estimagic/logging/read_from_database.py
@@ -9,6 +9,7 @@
``read_log.py`` instead.
"""

import traceback
import warnings

1 change: 1 addition & 0 deletions src/estimagic/logging/read_log.py
@@ -7,6 +7,7 @@
path_or_database. Otherwise, the functions may be very slow.
"""

from dataclasses import dataclass
from pathlib import Path
from typing import Union
1 change: 1 addition & 0 deletions src/estimagic/optimization/bhhh.py
@@ -1,4 +1,5 @@
"""Implement Berndt-Hall-Hall-Hausman (BHHH) algorithm."""

import numpy as np

from estimagic.decorators import mark_minimizer
1 change: 1 addition & 0 deletions src/estimagic/optimization/cyipopt_optimizers.py
@@ -1,4 +1,5 @@
"""Implement cyipopt's Interior Point Optimizer."""

import numpy as np

from estimagic.config import IS_CYIPOPT_INSTALLED
1 change: 1 addition & 0 deletions src/estimagic/optimization/fides_optimizers.py
@@ -1,4 +1,5 @@
"""Implement the fides optimizer."""

import logging

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/optimization/nag_optimizers.py
@@ -9,6 +9,7 @@
and algorithm.
"""

import warnings

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/optimization/neldermead.py
@@ -1,4 +1,5 @@
"""Implementation of parallelosation of Nelder-Mead algorithm."""

import numpy as np

from estimagic.batch_evaluators import process_batch_evaluator
1 change: 1 addition & 0 deletions src/estimagic/optimization/nlopt_optimizers.py
@@ -3,6 +3,7 @@
The documentation is heavily based on (nlopt documentation)[nlopt.readthedocs.io].
"""

import numpy as np

from estimagic.config import IS_NLOPT_INSTALLED
1 change: 1 addition & 0 deletions src/estimagic/optimization/pounders.py
@@ -1,4 +1,5 @@
"""Implement the POUNDERS algorithm."""

import warnings

import numpy as np
7 changes: 4 additions & 3 deletions src/estimagic/optimization/pounders_auxiliary.py
@@ -1,4 +1,5 @@
"""Auxiliary functions for the pounders algorithm."""

from typing import NamedTuple, Union

import numpy as np
@@ -15,9 +16,9 @@
class ResidualModel(NamedTuple):
intercepts: Union[np.ndarray, None] = None # shape (n_residuals,)
linear_terms: Union[np.ndarray, None] = None # shape (n_residuals, n_params)
square_terms: Union[
np.ndarray, None
] = None # shape (n_residuals, n_params, n_params)
square_terms: Union[np.ndarray, None] = (
None # shape (n_residuals, n_params, n_params)
)


class MainModel(NamedTuple):
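The pounders_auxiliary.py hunk shows a third pattern: when an annotated attribute with a default value and a trailing comment no longer fits on one line, the new style parenthesizes the assigned value instead of splitting the type annotation. A minimal sketch with a hypothetical NamedTuple; the class and field names are illustrative, only the layout mirrors the hunk above:

from typing import NamedTuple, Union

import numpy as np


class QuadraticModel(NamedTuple):
    """Hypothetical container used only to illustrate the new layout."""

    intercepts: Union[np.ndarray, None] = None  # shape (n_residuals,)
    square_terms: Union[np.ndarray, None] = (
        None  # shape (n_residuals, n_params, n_params)
    )


# Usage: fields default to None, e.g. QuadraticModel().square_terms is None.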
1 change: 1 addition & 0 deletions src/estimagic/optimization/pounders_history.py
@@ -1,4 +1,5 @@
"""History class for pounders and similar optimizers."""

import numpy as np


1 change: 1 addition & 0 deletions src/estimagic/optimization/pygmo_optimizers.py
@@ -1,4 +1,5 @@
"""Implement pygmo optimizers."""

import contextlib
import warnings

1 change: 1 addition & 0 deletions src/estimagic/optimization/scipy_optimizers.py
@@ -34,6 +34,7 @@
terminate if it returned True.
"""

import functools

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/optimization/simopt_optimizers.py
@@ -4,6 +4,7 @@
However, for the moment they are not supported.
"""

import numpy as np

from estimagic.config import IS_SIMOPT_INSTALLED
1 change: 1 addition & 0 deletions src/estimagic/optimization/subsolvers/_conjugate_gradient.py
@@ -1,4 +1,5 @@
"""Implementation of the Conjugate Gradient algorithm."""

import numpy as np


1 change: 1 addition & 0 deletions src/estimagic/optimization/subsolvers/_steihaug_toint.py
@@ -1,4 +1,5 @@
"""Implementation of the Steihaug-Toint Conjugate Gradient algorithm."""

import numpy as np


1 change: 1 addition & 0 deletions src/estimagic/optimization/subsolvers/_trsbox.py
@@ -1,4 +1,5 @@
"""Implementation of the quadratic trustregion solver TRSBOX."""

import numpy as np


4 changes: 2 additions & 2 deletions src/estimagic/optimization/subsolvers/bntr.py
@@ -1,4 +1,5 @@
"""Auxiliary functions for the quadratic BNTR trust-region subsolver."""

from functools import reduce
from typing import NamedTuple, Union

@@ -583,8 +584,7 @@ def _perform_gradient_descent_step(
square_terms = x_inactive.T @ hessian_inactive @ x_inactive

predicted_reduction = trustregion_radius * (
gradient_norm
- 0.5 * trustregion_radius * square_terms / (gradient_norm**2)
gradient_norm - 0.5 * trustregion_radius * square_terms / (gradient_norm**2)
)
actual_reduction = f_candidate_initial - f_candidate

5 changes: 2 additions & 3 deletions src/estimagic/optimization/subsolvers/gqtpar.py
@@ -1,4 +1,5 @@
"""Auxiliary functions for the quadratic GQTPAR trust-region subsolver."""

from typing import NamedTuple, Union

import numpy as np
@@ -356,9 +357,7 @@ def _update_lambdas_when_factorization_unsuccessful(
)
v_norm = np.linalg.norm(v)

lambda_lower_bound = max(
lambdas.lower_bound, lambdas.candidate + delta / v_norm**2
)
lambda_lower_bound = max(lambdas.lower_bound, lambdas.candidate + delta / v_norm**2)
lambda_new_candidate = _get_new_lambda_candidate(
lower_bound=lambda_lower_bound, upper_bound=lambdas.upper_bound
)
5 changes: 2 additions & 3 deletions src/estimagic/optimization/subsolvers/linear_subsolvers.py
@@ -1,4 +1,5 @@
"""Collection of linear trust-region subsolvers."""

from typing import NamedTuple, Union

import numpy as np
@@ -326,9 +327,7 @@ def _get_distance_to_trustregion_boundary(
else:
distance_to_boundary = (
np.sqrt(
np.maximum(
0, g_dot_x**2 + g_sumsq * (trustregion_radius**2 - x_sumsq)
)
np.maximum(0, g_dot_x**2 + g_sumsq * (trustregion_radius**2 - x_sumsq))
)
- g_dot_x
) / g_sumsq
7 changes: 4 additions & 3 deletions src/estimagic/optimization/tao_optimizers.py
@@ -1,4 +1,5 @@
"""This module implements the POUNDERs algorithm."""

import contextlib
import functools

@@ -257,9 +258,9 @@ def _process_pounders_results(residuals_out, tao):
"n_derivative_evaluations": None,
"n_iterations": None,
"success": bool(convergence_code >= 0),
"reached_convergence_criterion": convergence_reason
if convergence_code >= 0
else None,
"reached_convergence_criterion": (
convergence_reason if convergence_code >= 0 else None
),
"message": convergence_reason,
# Further results.
"solution_criterion_values": residuals_out.array,
1 change: 1 addition & 0 deletions src/estimagic/optimization/tiktak.py
@@ -10,6 +10,7 @@
`GitHub Repository <https://github.com/amckay/TikTak>`_)
"""

import warnings
from functools import partial

1 change: 1 addition & 0 deletions src/estimagic/parameters/block_trees.py
@@ -1,4 +1,5 @@
"""Functions to convert between array and block-tree representations of a matrix."""

import numpy as np
import pandas as pd
from pybaum import tree_flatten, tree_unflatten
1 change: 1 addition & 0 deletions src/estimagic/parameters/check_constraints.py
@@ -3,6 +3,7 @@
See the module docstring of process_constraints for naming conventions.
"""

from functools import partial

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/parameters/consolidate_constraints.py
@@ -6,6 +6,7 @@
Check the module docstring of process_constraints for naming conventions.
"""

import numpy as np
import pandas as pd

1 change: 1 addition & 0 deletions src/estimagic/parameters/conversion.py
@@ -1,4 +1,5 @@
"""Aggregate the multiple parameter and function output conversions into on."""

from typing import NamedTuple

import numpy as np
1 change: 1 addition & 0 deletions src/estimagic/parameters/kernel_transformations.py
@@ -28,6 +28,7 @@
https://google.github.io/styleguide/pyguide.html
"""

import numpy as np

from estimagic.utilities import (
1 change: 1 addition & 0 deletions src/estimagic/parameters/process_constraints.py
@@ -14,6 +14,7 @@
only be done after consolidation.
"""

import numpy as np
import pandas as pd

1 change: 1 addition & 0 deletions src/estimagic/parameters/space_conversion.py
@@ -32,6 +32,7 @@
n_internal the length of the internal parameter vector.
"""

from functools import partial
from typing import NamedTuple

1 change: 1 addition & 0 deletions src/estimagic/parameters/tree_registry.py
@@ -1,4 +1,5 @@
"""Wrapper around pybaum get_registry to tailor it to estimagic."""

from functools import partial
from itertools import product
