Doc: extend scatter search documentation (#1506)
Add example, background, cross-references, ...

---------

Co-authored-by: Dilan Pathirana <59329744+dilpath@users.noreply.github.com>
dweindl and dilpath authored Nov 19, 2024
1 parent e62c2cb commit c5bfd72
Showing 4 changed files with 179 additions and 46 deletions.
120 changes: 96 additions & 24 deletions pypesto/optimize/ess/ess.py
@@ -3,11 +3,12 @@
See papers on ESS :footcite:p:`EgeaBal2009,EgeaMar2010`,
CESS :footcite:p:`VillaverdeEge2012`, and saCeSS :footcite:p:`PenasGon2017`.
"""
from __future__ import annotations

import enum
import logging
import time
from typing import Callable, Optional, Union
from typing import Protocol
from warnings import warn

import numpy as np
@@ -38,15 +39,80 @@ class ESSExitFlag(int, enum.Enum):
MAX_TIME = -3


class OptimizerFactory(Protocol):
def __call__(
self, max_eval: float, max_walltime_s: float
) -> pypesto.optimize.Optimizer:
"""Create a new optimizer instance.
Parameters
----------
max_eval:
Maximum number of objective function evaluations allowed.
max_walltime_s:
Maximum walltime in seconds.
"""
...
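For illustration (not part of this diff), a factory satisfying this protocol might look as follows; the function name is hypothetical, and ``ScipyOptimizer`` with the L-BFGS-B option ``maxfun`` is just one possible choice:

import pypesto.optimize

def local_optimizer_factory(
    max_eval: float, max_walltime_s: float
) -> pypesto.optimize.Optimizer:
    # Cap the local search so it respects the remaining ESS evaluation budget;
    # `max_walltime_s` could similarly be forwarded to an optimizer that
    # supports a time limit.
    return pypesto.optimize.ScipyOptimizer(
        method="L-BFGS-B",
        options={"maxfun": int(min(max_eval, 1000))},
    )

Such a factory can then be passed to :class:`ESSOptimizer` as ``local_optimizer=local_optimizer_factory``.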


class ESSOptimizer:
"""Enhanced Scatter Search (ESS) global optimization.
See papers on ESS :footcite:p:`EgeaBal2009,EgeaMar2010`,
CESS :footcite:p:`VillaverdeEge2012`, and saCeSS :footcite:p:`PenasGon2017`.
Scatter search is a meta-heuristic for global optimization. A set of points
(the reference set, RefSet) is iteratively adapted to explore the parameter
space and to follow promising directions.
.. footbibliography::
This implementation is based on :footcite:p:`EgeaBal2009,EgeaMar2010`,
but does not implement any constraint handling beyond box constraints.
The basic steps of ESS are:
* Initialization: Generate a diverse set of points (RefSet) in the
parameter space.
* Recombination: Generate new points by recombining the RefSet points.
* Improvement: Improve the RefSet by replacing points with better ones.
The steps are repeated until a stopping criterion is met.
ESS is gradient-free, unless a gradient-based local optimizer is used
(``local_optimizer``).
.. note: Does not implement any constraint handling beyond box constraints
Hyperparameters
---------------
Various hyperparameters control the behavior of ESS.
Initialization is controlled by ``dim_refset`` and ``n_diverse``.
Local optimizations are controlled by ``local_optimizer``, ``local_n1``,
``local_n2``, and ``balance``.
Exit criteria
-------------
The optimization stops if any of the following criteria are met:
* The maximum number of iterations is reached (``max_iter``).
* The maximum number of objective function evaluations is reached
(``max_eval``).
* The maximum wall-time is reached (``max_walltime_s``).
At least one of these criteria needs to be provided.
Note that the wall-time and function-evaluation criteria are not checked
after every single function evaluation, and thus the actual number of
function evaluations may slightly exceed the given value.
Parallelization
---------------
Objective function evaluations inside :class:`ESSOptimizer` can be
parallelized using multiprocessing or multithreading by passing a value
>1 for ``n_procs`` or ``n_threads``, respectively.
.. seealso::
:class:`pypesto.optimize.ess.sacess.SacessOptimizer`
.. footbibliography::
"""

def __init__(
@@ -57,18 +123,17 @@ def __init__(
local_n1: int = 1,
local_n2: int = 10,
balance: float = 0.5,
local_optimizer: Union[
"pypesto.optimize.Optimizer",
Callable[..., "pypesto.optimize.Optimizer"],
] = None,
local_optimizer: pypesto.optimize.Optimizer
| OptimizerFactory
| None = None,
max_eval=None,
n_diverse: int = None,
n_procs=None,
n_threads=None,
max_walltime_s=None,
result_includes_refset: bool = False,
):
"""Construct new ESS instance.
r"""Construct new ESS instance.
For plausible values of hyperparameters, see :footcite:t:`VillaverdeEge2012`.
@@ -81,10 +146,11 @@ def __init__(
Maximum number of ESS iterations.
local_n1:
Minimum number of iterations before first local search.
Ignored if ``local_optimizer=None``.
local_n2:
Minimum number of iterations between consecutive local
searches. At most one local search is performed in each
iteration.
iteration. Ignored if ``local_optimizer=None``.
local_optimizer:
Local optimizer for refinement, a callable that creates an
:class:`pypesto.optimize.Optimizer`, or ``None`` to skip local searches.
@@ -104,8 +170,14 @@ def __init__(
optimizations and other simulations, and thus, may be exceeded by
the duration of a local search.
balance:
Quality vs diversity balancing factor [0, 1];
0 = only quality; 1 = only diversity
Quality vs. diversity balancing factor with
:math:`0 \leq balance \leq 1`; ``0`` = only quality,
``1`` = only diversity.
Affects the choice of starting points for local searches, i.e.,
whether local optimization should focus on improving the best
solutions found so far (quality), or on exploring new regions of
the parameter space (diversity).
Ignored if ``local_optimizer=None``.
n_procs:
Number of parallel processes to use for parallel function
evaluation. Mutually exclusive with `n_threads`.
@@ -144,8 +216,8 @@ def __init__(
raise ValueError(
"`n_procs` and `n_threads` are mutually exclusive."
)
self.n_procs: Optional[int] = n_procs
self.n_threads: Optional[int] = n_threads
self.n_procs: int | None = n_procs
self.n_threads: int | None = n_threads
self.balance: float = balance
# After how many iterations a stagnated solution is to be replaced by
# a random one. Default value taken from [EgeaMar2010]_
@@ -162,9 +234,9 @@ def __init__(
def _initialize(self):
"""(Re-)Initialize."""
# RefSet
self.refset: Optional[RefSet] = None
self.refset: RefSet | None = None
# Overall best parameters found so far
self.x_best: Optional[np.array] = None
self.x_best: np.ndarray | None = None
# Overall best function value found so far
self.fx_best: float = np.inf
# Results from local searches (only those with finite fval)
@@ -177,15 +249,15 @@ def _initialize(self):
# Whether self.x_best has changed in the current iteration
self.x_best_has_changed: bool = False
self.exit_flag: ESSExitFlag = ESSExitFlag.DID_NOT_RUN
self.evaluator: Optional[FunctionEvaluator] = None
self.starttime: Optional[float] = None
self.evaluator: FunctionEvaluator | None = None
self.starttime: float | None = None
self.history: MemoryHistory = MemoryHistory()

def _initialize_minimize(
self,
problem: Problem = None,
startpoint_method: StartpointMethod = None,
refset: Optional[RefSet] = None,
refset: RefSet | None = None,
):
"""Initialize for optimizations.
@@ -242,7 +314,7 @@ def minimize(
self,
problem: Problem = None,
startpoint_method: StartpointMethod = None,
refset: Optional[RefSet] = None,
refset: RefSet | None = None,
) -> pypesto.Result:
"""Minimize the given objective.
@@ -384,7 +456,7 @@ def _get_remaining_eval(self):
return np.inf
return self.max_eval - self.evaluator.n_eval

def _combine_solutions(self) -> tuple[np.array, np.array]:
def _combine_solutions(self) -> tuple[np.ndarray, np.ndarray]:
"""Combine solutions and evaluate.
Creates the next generation from the RefSet by pair-wise combination
@@ -418,7 +490,7 @@ def _combine_solutions(self) -> tuple[np.array, np.array]:
break
return y, fy

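As a conceptual aside (simplified sketch, not the implementation in this diff): pair-wise recombination samples an offspring from the hyper-rectangle spanned by two RefSet members. The actual ``_combine`` below uses a biased hyper-rectangle, whereas this sketch samples uniformly:

import numpy as np

def combine_pair(
    x_i: np.ndarray, x_j: np.ndarray, rng: np.random.Generator
) -> np.ndarray:
    lower = np.minimum(x_i, x_j)  # element-wise lower corner of the rectangle
    upper = np.maximum(x_i, x_j)  # element-wise upper corner of the rectangle
    return rng.uniform(lower, upper)  # uniform sample inside the rectangle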
def _combine(self, i, j) -> np.array:
def _combine(self, i, j) -> np.ndarray:
"""Combine RefSet members ``i`` and ``j``.
Samples a new point from a biased hyper-rectangle derived from the
Expand Down Expand Up @@ -463,7 +535,7 @@ def _combine(self, i, j) -> np.array:
)

def _do_local_search(
self, x_best_children: np.array, fx_best_children: np.array
self, x_best_children: np.ndarray, fx_best_children: np.ndarray
) -> None:
"""
Perform a local search to refine the next generation.
14 changes: 8 additions & 6 deletions pypesto/optimize/ess/refset.py
@@ -29,8 +29,8 @@ def __init__(
self,
dim: int,
evaluator: FunctionEvaluator,
x: Optional[np.array] = None,
fx: Optional[np.array] = None,
x: Optional[np.ndarray] = None,
fx: Optional[np.ndarray] = None,
):
"""Construct.
@@ -65,7 +65,7 @@ def __init__(
self.fx = fx

self.n_stuck = np.zeros(shape=[dim])
self.attributes: dict[Any, np.array] = {}
self.attributes: dict[Any, np.ndarray] = {}

def __repr__(self):
fx = (
@@ -97,7 +97,9 @@ def initialize_random(
x_diverse, fx_diverse = self.evaluator.multiple_random(n_diverse)
self.initialize_from_array(x_diverse=x_diverse, fx_diverse=fx_diverse)

def initialize_from_array(self, x_diverse: np.array, fx_diverse: np.array):
def initialize_from_array(
self, x_diverse: np.ndarray, fx_diverse: np.ndarray
):
"""Create an initial reference set using the provided points.
Populate half of the RefSet using the best given solutions and fill the
@@ -168,7 +170,7 @@ def normalize(x):
x[j], self.fx[j] = self.evaluator.single_random()
self.sort()

def update(self, i: int, x: np.array, fx: float):
def update(self, i: int, x: np.ndarray, fx: float):
"""Update a RefSet entry."""
self.x[i] = x
self.fx[i] = fx
@@ -179,7 +181,7 @@ def replace_by_random(self, i: int):
self.x[i], self.fx[i] = self.evaluator.single_random()
self.n_stuck[i] = 0

def add_attribute(self, name: str, values: np.array):
def add_attribute(self, name: str, values: np.ndarray):
"""
Add an attribute array to the refset members.
(Remainder of the diff, including the two further changed files, is not shown here.)
