Pr/1.5 #266

Merged (17 commits, May 29, 2024)
changes in hyperparameters and defaults
mpvanderschelling committed May 24, 2024
commit a275814c82c4d26a3dd7667cbdf02c7978d320c5
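
The commit switches optimizer hyperparameters from an OptimizerParameters dataclass to a plain dictionary, so every call site in this diff moves from attribute access to key access. A minimal before/after sketch (illustrative stand-ins, not lines from the PR):

class OldStyleParameters:  # rough stand-in for the removed dataclass
    population: int = 1
    force_bounds: bool = True

old_style = OldStyleParameters()
print(old_style.population)         # attribute access, the pattern being removed

new_style = {'population': 1, 'force_bounds': True}
print(new_style['population'])      # dict access, the pattern the diff moves to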
23 changes: 12 additions & 11 deletions src/f3dasm/_src/experimentdata/experimentdata.py
@@ -1517,17 +1517,17 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
if isinstance(x0_selection, str):
if x0_selection == 'new':

if iterations < optimizer.hyperparameters.population:
if iterations < optimizer.hyperparameters["population"]:
raise ValueError(
f'For creating new samples, the total number of '
f'requested iterations ({iterations}) cannot be '
f'smaller than the population size '
f'({optimizer.hyperparameters.population})')
f'({optimizer.hyperparameters["population"]})')

init_samples = ExperimentData.from_sampling(
domain=self.domain,
sampler=sampler,
n_samples=optimizer.hyperparameters.population,
n_samples=optimizer.hyperparameters["population"],
seed=optimizer.seed)

init_samples.evaluate(
@@ -1548,10 +1548,10 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
self.add_experiments(init_samples)

x0_selection = 'last'
iterations -= optimizer.hyperparameters.population
iterations -= optimizer.hyperparameters["population"]

x0 = x0_factory(experiment_data=self, mode=x0_selection,
n_samples=optimizer.hyperparameters.population)
n_samples=optimizer.hyperparameters["population"])
optimizer.set_data(x0)

optimizer._check_number_of_datapoints()
@@ -1560,7 +1560,7 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,

for _ in range(number_of_updates(
iterations,
population=optimizer.hyperparameters.population)):
population=optimizer.hyperparameters["population"])):
new_samples = optimizer.update_step(data_generator)

# If new_samples is a tuple of input_data and output_data
@@ -1591,7 +1591,8 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
if not overwrite:
# Remove overiterations
self.remove_rows_bottom(number_of_overiterations(
iterations, population=optimizer.hyperparameters.population))
iterations,
population=optimizer.hyperparameters["population"]))

# Reset the optimizer
optimizer.reset(ExperimentData(domain=self.domain))
@@ -1657,17 +1658,17 @@ def _iterate_scipy(self, optimizer: Optimizer,
if isinstance(x0_selection, str):
if x0_selection == 'new':

if iterations < optimizer.hyperparameters.population:
if iterations < optimizer.hyperparameters["population"]:
raise ValueError(
f'For creating new samples, the total number of '
f'requested iterations ({iterations}) cannot be '
f'smaller than the population size '
f'({optimizer.hyperparameters.population})')
f'({optimizer.hyperparameters["population"]})')

init_samples = ExperimentData.from_sampling(
domain=self.domain,
sampler=sampler,
n_samples=optimizer.hyperparameters.population,
n_samples=optimizer.hyperparameters["population"],
seed=optimizer.seed)

init_samples.evaluate(
@@ -1690,7 +1691,7 @@ def _iterate_scipy(self, optimizer:
x0_selection = 'last'

x0 = x0_factory(experiment_data=self, mode=x0_selection,
n_samples=optimizer.hyperparameters.population)
n_samples=optimizer.hyperparameters["population"])
optimizer.set_data(x0)

optimizer._check_number_of_datapoints()
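
Only the lookups change in _iterate and _iterate_scipy; the budget bookkeeping stays the same: with x0_selection='new', the requested iterations must cover at least one population of initial samples, which is then subtracted from the budget. A small worked sketch of the guard visible in this hunk (numbers are illustrative):

population = 10      # optimizer.hyperparameters["population"]
iterations = 35      # total evaluations requested by the caller

if iterations < population:
    raise ValueError('cannot request fewer iterations than the population size')

iterations -= population   # the initial samples consume one population's worth
print(iterations)          # 25 evaluations remain for optimizer update steps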
@@ -57,15 +57,15 @@ def fun(x):
_, y = sample.to_numpy()
return float(y)

self.hyperparameters.maxiter = iterations
self.hyperparameters['maxiter'] = iterations

minimize(
fun=fun,
method=self.method,
jac=data_generator.dfdx,
x0=self.data.get_n_best_output(1).to_numpy()[0].ravel(),
callback=self._callback,
options=self.hyperparameters.__dict__,
options=self.hyperparameters,
bounds=self.domain.get_bounds(),
tol=0.0,
)
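
Since hyperparameters is now already a dict, the adapter can hand it to scipy.optimize.minimize as options directly instead of unpacking a dataclass via __dict__. A hedged, standalone sketch of that call shape, using SciPy's built-in Rosenbrock helpers in place of f3dasm's data generator:

from scipy.optimize import minimize, rosen, rosen_der

hyperparameters = {'gtol': 0.0, 'maxiter': 50}   # stand-in for self.hyperparameters

result = minimize(
    fun=rosen,
    x0=[1.3, 0.7, 0.8],
    method='CG',
    jac=rosen_der,
    options=hyperparameters,   # the dict is passed through untouched
)
print(result.x)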
19 changes: 5 additions & 14 deletions src/f3dasm/_src/optimization/numpy_implementations.py
@@ -6,15 +6,14 @@
# =============================================================================

# Standard
from dataclasses import dataclass
from typing import List, Tuple

# Third-party core
import autograd.numpy as np
import numpy as np

# Locals
from ..datageneration.datagenerator import DataGenerator
from .optimizer import Optimizer, OptimizerParameters
from .optimizer import Optimizer

# Authorship & Credits
# =============================================================================
@@ -26,27 +25,19 @@
# =============================================================================


@dataclass
class RandomSearch_Parameters(OptimizerParameters):
"""Hyperparameters for RandomSearch optimizer"""

pass


class RandomSearch(Optimizer):
"""Naive random search"""
require_gradients: bool = False
hyperparameters: RandomSearch_Parameters = RandomSearch_Parameters()

def set_seed(self):
np.random.seed(self.seed)
def set_algorithm(self):
self.algorithm = np.random.default_rng(self.seed)

def update_step(
self, data_generator: DataGenerator
) -> Tuple[np.ndarray, np.ndarray]:
x_new = np.atleast_2d(
[
np.random.uniform(
self.algorithm.uniform(
low=self.domain.get_bounds()[d, 0],
high=self.domain.get_bounds()[d, 1])
for d in range(len(self.domain))
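
RandomSearch previously reseeded NumPy's global state in set_seed; it now keeps a private, seeded Generator created in set_algorithm and samples from that. A short sketch of the difference (values are illustrative):

import numpy as np

# Old pattern: seeding the module-level legacy RandomState affects every caller
np.random.seed(42)
x_old = np.random.uniform(low=0.0, high=1.0, size=3)

# New pattern: an isolated Generator owned by the optimizer instance
rng = np.random.default_rng(42)
x_new = rng.uniform(low=0.0, high=1.0, size=3)

print(x_old, x_new)   # different streams; only the second is instance-local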
39 changes: 16 additions & 23 deletions src/f3dasm/_src/optimization/optimizer.py
@@ -9,7 +9,8 @@

# Standard
from dataclasses import dataclass
from typing import ClassVar, Iterable, List, Optional, Protocol, Tuple
from typing import (Any, ClassVar, Dict, Iterable, List, Optional, Protocol,
Tuple)

# Third-party core
import numpy as np
@@ -48,27 +49,13 @@ def to_numpy() -> Tuple[np.ndarray, np.ndarray]:
def select(self, indices: int | slice | Iterable[int]) -> ExperimentData:
...


@dataclass
class OptimizerParameters:
"""Interface of a continuous benchmark function

Parameters
----------
population : int
population of the optimizer update step
force_bounds : bool
force the optimizer to not exceed the boundaries of the domain
"""

population: int = 1
force_bounds: bool = True
# =============================================================================


class Optimizer:
type: ClassVar[str] = 'any'
require_gradients: ClassVar[bool] = False
hyperparameters: OptimizerParameters = OptimizerParameters()
default_hyperparameters: Dict[str, Any] = None

def __init__(
self, domain: Domain, seed: Optional[int] = None,
@@ -98,8 +85,17 @@ def __init__(
if not hyperparameters:
hyperparameters = {}

# Set the default hyperparameters to an empty dictionary if not set
if not self.default_hyperparameters:
self.default_hyperparameters = {}

# Overwrite the default hyperparameters with the given hyperparameters
self.hyperparameters.__init__(**hyperparameters)
self.hyperparameters = self.default_hyperparameters.copy()
self.hyperparameters.update(hyperparameters)

# Set the default population to 1
if 'population' not in self.hyperparameters:
self.hyperparameters['population'] = 1

# Set the name of the optimizer to the class name if no name is given
if name is None:
@@ -134,10 +130,10 @@ def _check_number_of_datapoints(self):
ValueError
Raises then the number of datapoints is insufficient
"""
if len(self.data) < self.hyperparameters.population:
if len(self.data) < self.hyperparameters['population']:
raise ValueError(
f'There are {len(self.data)} datapoints available, \
need {self.hyperparameters.population} for initial \
need {self.hyperparameters["population"]} for initial \
population!'
)

@@ -154,9 +150,6 @@ def set_data(self, data: ExperimentData):
"""Set the data attribute to the given data"""
self.data = data

def add_experiments(self, experiments: ExperimentData):
...

def get_name(self) -> str:
"""Get the name of the optimizer

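
The reworked __init__ merges the class-level default_hyperparameters with whatever the caller passes, copying the defaults so the class attribute is never mutated, and falls back to a population of 1. A standalone sketch of that merge (the function name and signature here are illustrative, not the real API):

from typing import Any, Dict, Optional

def merge_hyperparameters(defaults: Optional[Dict[str, Any]],
                          given: Optional[Dict[str, Any]]) -> Dict[str, Any]:
    merged = dict(defaults or {})   # copy so the class-level dict stays untouched
    merged.update(given or {})      # caller-supplied values win
    merged.setdefault('population', 1)
    return merged

print(merge_hyperparameters({'gtol': 0.0}, {'gtol': 1e-8}))
# {'gtol': 1e-08, 'population': 1}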
31 changes: 7 additions & 24 deletions src/f3dasm/_src/optimization/scipy_implementations.py
@@ -6,12 +6,10 @@
# =============================================================================

# Standard
from dataclasses import dataclass
from typing import List

# Locals
from .adapters.scipy_implementations import _SciPyOptimizer
from .optimizer import OptimizerParameters

# Authorship & Credits
# =============================================================================
@@ -22,60 +20,45 @@
#
# =============================================================================


@dataclass
class CG_Parameters(OptimizerParameters):
"""CG Parameters"""

gtol: float = 0.0
CG_DEFAULTS = {'gtol': 0.0}


class CG(_SciPyOptimizer):
"""CG"""
require_gradients: bool = True
method: str = "CG"
hyperparameters: CG_Parameters = CG_Parameters()
default_hyperparameters = CG_DEFAULTS

def get_info(self) -> List[str]:
return ['Stable', 'First-Order', 'Single-Solution']

# =============================================================================


@dataclass
class LBFGSB_Parameters(OptimizerParameters):
"""Hyperparameters for LBFGSB optimizer"""

ftol: float = 0.0
gtol: float = 0.0
LBFGSB_DEFAULTS = {'ftol': 0.0, 'gtol': 0.0}


class LBFGSB(_SciPyOptimizer):
"""L-BFGS-B"""
require_gradients: bool = True
method: str = "L-BFGS-B"
hyperparameters: LBFGSB_Parameters = LBFGSB_Parameters()
default_hyperparameters = LBFGSB_DEFAULTS

def get_info(self) -> List[str]:
return ['Stable', 'First-Order', 'Single-Solution']

# =============================================================================


@dataclass
class NelderMead_Parameters(OptimizerParameters):
"""Hyperparameters for NelderMead optimizer"""

xatol: float = 0.0
fatol: float = 0.0
adaptive: bool = False
NelderMead_DEFAULTS = {'xatol': 0.0, 'fatol': 0.0,
'adaptive': False}


class NelderMead(_SciPyOptimizer):
"""Nelder-Mead"""
require_gradients: bool = False
method: str = "Nelder-Mead"
hyperparameters: NelderMead_Parameters = NelderMead_Parameters()
default_hyperparameters = NelderMead_DEFAULTS

def get_info(self) -> List[str]:
return ['Fast', 'Global', 'First-Order', 'Single-Solution']
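
Each concrete optimizer now exposes its defaults as a module-level dict assigned to default_hyperparameters instead of a dedicated dataclass. A hypothetical subclass written in the same style (BFGS is only an illustration and is not added by this PR; _SciPyOptimizer is the adapter base class already imported at the top of the file):

from typing import List

BFGS_DEFAULTS = {'gtol': 0.0}


class BFGS(_SciPyOptimizer):
    """BFGS (illustrative sketch only)"""
    require_gradients: bool = True
    method: str = "BFGS"
    default_hyperparameters = BFGS_DEFAULTS

    def get_info(self) -> List[str]:
        return ['Stable', 'First-Order', 'Single-Solution']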
3 changes: 1 addition & 2 deletions src/f3dasm/optimization.py
@@ -5,7 +5,7 @@
# =============================================================================

# Local
from ._src.optimization.optimizer import Optimizer, OptimizerParameters
from ._src.optimization.optimizer import Optimizer
from ._src.optimization.optimizer_factory import OPTIMIZERS

# Authorship & Credits
@@ -19,6 +19,5 @@

__all__ = [
'Optimizer',
'OptimizerParameters',
'OPTIMIZERS',
]
2 changes: 1 addition & 1 deletion tests/optimization/test_all_optimizers.py
@@ -115,7 +115,7 @@ def test_optimizer_iterations(iterations: int, data_generator: str,

_optimizer = _optimizer_factory(optimizer, domain=domain)

if x0_selection == "new" and iterations < _optimizer.hyperparameters.population:
if x0_selection == "new" and iterations < _optimizer.hyperparameters['population']:
with pytest.raises(ValueError):
data.optimize(optimizer=optimizer, data_generator=data_generator,
iterations=iterations, kwargs={'seed': seed, 'noise': None,