Skip to content

Commit

Permalink
add skopt Optimization methods
Browse files Browse the repository at this point in the history
  • Loading branch information
chaoming0625 committed Mar 9, 2024
1 parent 3bf1fb7 commit 9ca2d6c
Show file tree
Hide file tree
Showing 2 changed files with 134 additions and 0 deletions.
36 changes: 36 additions & 0 deletions brainpy/_src/optimizers/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import abc

from tqdm.auto import tqdm

__all__ = ['Optimizer']


class Optimizer(metaclass=abc.ABCMeta):
  """
  Abstract base class for optimization backends.

  Concrete subclasses wrap a specific optimization library and are driven
  by :meth:`minimize`, which repeatedly calls :meth:`one_trial` and returns
  the result of the final trial. To be used with modelfitting ``Fitter``.
  """

  @abc.abstractmethod
  def initialize(self, *args, **kwargs):
    """
    Initialize the instrumentation for the optimization, based on
    parameters, creates bounds for variables and attaches them to the
    optimizer.
    """
    pass

  @abc.abstractmethod
  def one_trial(self, *args, **kwargs):
    """
    Run one round of sampling/evaluation.

    Implementations must accept a ``choice_best`` keyword argument
    (see :meth:`minimize`): when it is true, the trial should return the
    best parameter set found so far.
    """
    pass

  def minimize(self, n_iter):
    """
    Run ``n_iter`` trials and return the result of the last one.

    Parameters
    ----------
    n_iter : int
      Number of trials to run. Must be a positive integer.

    Returns
    -------
    The value returned by the final ``one_trial(choice_best=True)`` call
    (the best parameter set, for optimizers that honor ``choice_best``).

    Raises
    ------
    ValueError
      If ``n_iter`` is not a positive integer (previously this surfaced
      as a cryptic ``IndexError`` on an empty result list).
    """
    if n_iter < 1:
      raise ValueError(f"'n_iter' must be a positive integer, but got {n_iter}")
    results = []
    for i in tqdm(range(n_iter)):
      # Only the last trial is asked to pick the best parameter set.
      r = self.one_trial(choice_best=(i + 1 == n_iter))
      results.append(r)
    return results[-1]
98 changes: 98 additions & 0 deletions brainpy/_src/optimizers/skopt_bayesian.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
from typing import Callable, Optional, Sequence

import numpy as np

from .base import Optimizer

__all__ = ['SkBayesianOptimizer']


class SkBayesianOptimizer(Optimizer):
  """
  Bayesian optimization through the scikit-optimize (``skopt``) library.

  Wraps ``skopt.optimizer.Optimizer`` in the ask/tell interface expected
  by :class:`Optimizer`.

  Parameters
  ----------
  loss_fun : callable
    The loss function to minimize. Called as ``loss_fun(*columns)`` where
    each positional argument is the array of sampled values for one
    parameter (i.e. one column of the drawn sample matrix).
  n_sample : int
    Number of parameter sets drawn and evaluated per trial. Must be positive.
  bounds : sequence of (low, high), optional
    Appropriate bounds for each parameter; each pair becomes a normalized
    ``skopt.space.Real`` dimension. Defaults to an empty sequence.
  method : str or sklearn regressor, optional
    The surrogate model. Possibilities: "GP", "RF", "ET", "GBRT" or a
    scikit-learn regressor instance, default="GP".
  **kwds
    Extra keyword arguments forwarded to ``skopt.optimizer.Optimizer``
    (e.g. ``n_jobs``, acquisition settings).

  Raises
  ------
  ImportError
    If scikit-learn is not installed.
  TypeError
    If ``loss_fun`` is not callable.
  ValueError
    If ``method`` is not a recognized name or regressor, or if
    ``n_sample`` is not positive.
  """

  def __init__(
      self,
      loss_fun: Callable,
      n_sample: int,
      bounds: Optional[Sequence] = None,
      method: str = 'GP',
      **kwds
  ):
    super().__init__()

    try:
      from sklearn.base import RegressorMixin  # noqa
    except (ImportError, ModuleNotFoundError):
      raise ImportError("scikit-learn must be installed to use this class")

    # loss function (explicit raise: `assert` is stripped under `python -O`)
    if not callable(loss_fun):
      raise TypeError("'loss_fun' must be a callable function")
    self.loss_fun = loss_fun

    # method: check `isinstance` FIRST — calling `.upper()` on a regressor
    # instance would raise AttributeError before the check could pass.
    if not (isinstance(method, RegressorMixin)
            or (isinstance(method, str) and method.upper() in ("GP", "RF", "ET", "GBRT"))):
      raise ValueError(f"Provided method: {method} is not an skopt optimization or a regressor")
    self.method = method

    # population size
    if n_sample <= 0:
      raise ValueError("'n_sample' must be a positive integer")
    self.n_sample = n_sample

    # bounds
    if bounds is None:
      bounds = ()
    self.bounds = bounds

    # extra keyword arguments forwarded to skopt's Optimizer
    self.kwds = kwds

  def initialize(self):
    """
    Build the skopt ``Optimizer`` with one normalized ``Real`` dimension
    per bound, and reset the histories of tested parameters and errors.

    Raises
    ------
    ImportError
      If scikit-optimize is not installed.
    """
    try:
      from skopt.optimizer import Optimizer  # noqa
      from skopt.space import Real  # noqa
    except (ImportError, ModuleNotFoundError):
      raise ImportError("scikit-optimize must be installed to use this class")
    self.tested_parameters = []
    self.errors = []
    instruments = []
    for bound in self.bounds:
      instrumentation = Real(*np.asarray(bound), transform='normalize')
      instruments.append(instrumentation)
    self.optim = Optimizer(dimensions=instruments, base_estimator=self.method, **self.kwds)

  def one_trial(self, choice_best: bool = False):
    """
    Draw ``n_sample`` parameter sets, evaluate them, and feed the losses
    back to the surrogate model.

    Parameters
    ----------
    choice_best : bool
      If True, return the parameter set with the lowest observed loss
      over all trials so far; otherwise return ``None``.
    """
    # draw parameters
    parameters = self.optim.ask(n_points=self.n_sample)
    self.tested_parameters.extend(parameters)

    # evaluate: loss_fun receives one array per parameter (the transpose
    # of the (n_sample, n_params) sample matrix)
    errors = self.loss_fun(*np.asarray(parameters).T)
    errors = np.asarray(errors).tolist()
    self.errors.extend(errors)

    # tell the surrogate model about the observed losses
    self.optim.tell(parameters, errors)

    if choice_best:
      xi = self.optim.Xi
      yii = np.array(self.optim.yi)
      return xi[yii.argmin()]

0 comments on commit 9ca2d6c

Please sign in to comment.