Merge pull request #140 from bamboo1997/feature/neldermead
Feature/neldermead
y0z authored Aug 21, 2024
2 parents 8efedb6 + aeab964 commit a5a6585
Showing 8 changed files with 567 additions and 0 deletions.
22 changes: 22 additions & 0 deletions package/samplers/nelder_mead/LICENSE
@@ -0,0 +1,22 @@

MIT License

Copyright (c) 2024 Shintaro Takenaga

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
87 changes: 87 additions & 0 deletions package/samplers/nelder_mead/README.md
@@ -0,0 +1,87 @@
---
author: Shintaro Takenaga
title: NelderMead Sampler
description: Local search heuristic using a simplex method with effective initialization.
tags: [sampler, heuristic, local search, Nelder-Mead]
optuna_versions: [3.6.1]
license: MIT License
---

## Abstract

This Nelder-Mead method implementation employs the effective initialization method proposed by Takenaga et al. (2023).

![The search view](images/nm.png)

## Class or Function Names

- NelderMeadSampler

## Installation

```bash
pip install -r https://raw.githubusercontent.com/optuna/optunahub-registry/main/package/samplers/nelder_mead/requirements.txt
```

## Example

```python
from __future__ import annotations

import optuna
from optuna.distributions import BaseDistribution
from optuna.distributions import FloatDistribution
import optuna.study.study
import optunahub


def objective(x: float, y: float) -> float:
    return x**2 + y**2


def optuna_objective(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_float("x", -5, 5)
    y = trial.suggest_float("y", -5, 5)
    return objective(x, y)


if __name__ == "__main__":
    # You can specify the search space before optimization.
    # This allows the sampler to generate the initial simplex based on the specified search space at the first trial.
    search_space: dict[str, BaseDistribution] = {
        "x": FloatDistribution(-5, 5),
        "y": FloatDistribution(-5, 5),
    }
    module = optunahub.load_module(
        package="samplers/nelder_mead",
    )

    # study.optimize can be used with an Optuna-style objective function.
    sampler = module.NelderMeadSampler(search_space, seed=123)
    study = optuna.create_study(sampler=sampler)
    study.optimize(optuna_objective, n_trials=100)
    print(study.best_params, study.best_value)
```
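
The sampler can also be driven in ask-and-tell style, as shown in `example.py` bundled with this package. A minimal sketch, assuming the `search_space`, `module`, and `objective` defined in the example above:

```python
sampler = module.NelderMeadSampler(search_space, seed=123)
study = optuna.create_study(sampler=sampler)
for _ in range(100):
    # Pass the search space to study.ask so the sampler proposes all parameters.
    trial = study.ask(search_space)
    value = objective(**trial.params)
    study.tell(trial, value)
print(study.best_params, study.best_value)
```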

## Others

### Reference

Takenaga, Shintaro, Yoshihiko Ozaki, and Masaki Onishi. "Practical initialization of the Nelder–Mead method for computationally expensive optimization problems." Optimization Letters 17.2 (2023): 283-297.

See the [paper](https://doi.org/10.1007/s11590-022-01953-y) for more details.

### BibTeX

```bibtex
@article{takenaga2023practical,
title={Practical initialization of the Nelder--Mead method for computationally expensive optimization problems},
author={Takenaga, Shintaro and Ozaki, Yoshihiko and Onishi, Masaki},
journal={Optimization Letters},
volume={17},
number={2},
pages={283--297},
year={2023},
publisher={Springer}
}
```
4 changes: 4 additions & 0 deletions package/samplers/nelder_mead/__init__.py
@@ -0,0 +1,4 @@
from .nelder_mead import NelderMeadSampler


__all__ = ["NelderMeadSampler"]
55 changes: 55 additions & 0 deletions package/samplers/nelder_mead/example.py
@@ -0,0 +1,55 @@
from __future__ import annotations

import optuna
from optuna.distributions import BaseDistribution
from optuna.distributions import FloatDistribution
import optuna.study.study
import optunahub


def objective(x: float, y: float) -> float:
    return x**2 + y**2


def optuna_objective(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_float("x", -5, 5)
    y = trial.suggest_float("y", -5, 5)
    return objective(x, y)


if __name__ == "__main__":
    # You can specify the search space before optimization.
    # This allows the sampler to generate the initial simplex based on the specified search space at the first trial.
    search_space: dict[str, BaseDistribution] = {
        "x": FloatDistribution(-5, 5),
        "y": FloatDistribution(-5, 5),
    }
    module = optunahub.load_module(
        package="samplers/nelder_mead",
    )

    sampler = module.NelderMeadSampler(search_space, seed=123)
    study = optuna.create_study(sampler=sampler)
    # Ask-and-Tell style optimization.
    for i in range(100):
        trial = study.ask(search_space)
        value = objective(**trial.params)
        study.tell(trial, value)
        print(
            f"Trial {trial.number} finished with value: {value} and parameters: {trial.params}. "
            f"Best is trial {study.best_trial.number} with value: {study.best_value}"
        )
    print(study.best_params, study.best_value)

    # study.optimize can be used with an Optuna-style objective function.
    sampler = module.NelderMeadSampler(search_space, seed=123)
    study = optuna.create_study(sampler=sampler)
    study.optimize(optuna_objective, n_trials=100)
    print(study.best_params, study.best_value)

    # Without the search_space argument, the search space is estimated during the first trial.
    # In this case, independent_sampler (default: RandomSampler) will be used instead of the Nelder-Mead algorithm for the first trial.
    sampler = module.NelderMeadSampler(seed=123)
    study = optuna.create_study(sampler=sampler)
    study.optimize(optuna_objective, n_trials=100)
    print(study.best_params, study.best_value)
104 changes: 104 additions & 0 deletions package/samplers/nelder_mead/generate_initial_simplex.py
@@ -0,0 +1,104 @@
# This code is taken from aiaccel (https://github.com/aistairc/aiaccel) distributed under the MIT license.
#
# MIT License
#
# Copyright (c) 2022 National Institute of Advanced Industrial Science and Technology (AIST), Japan, All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import annotations

import numpy as np


def generate_initial_simplex(
    dim: int,
    edge: float = 0.5,
    centroid: float = 0.5,
    rng: np.random.RandomState | None = None,
) -> np.ndarray:
    """
    Generate an initial simplex with a regular shape.
    """

    assert 0.0 <= centroid <= 1.0, "The centroid must lie in the unit hypercube."

    assert (
        0.0 < edge <= max(centroid, 1 - centroid)
    ), f"Maximum edge length is {max(centroid, 1 - centroid)}"

    # Our implementation normalizes the search space to the unit hypercube [0, 1]^n.
    bdrys = np.array([[0, 1] for _ in range(dim)])

    # Generate a regular simplex.
    initial_simplex = np.zeros((dim + 1, dim))
    b = 0.0
    for i in range(dim):
        c = np.sqrt(1 - b)
        initial_simplex[i][i] = c
        r = ((-1 / dim) - b) / c
        for j in range(i + 1, dim + 1):
            initial_simplex[j][i] = r
        b = b + r**2

    # Rotate the generated initial simplex.
    if rng is not None:
        V = rng.random((dim, dim))
    else:
        V = np.random.random((dim, dim))

    # Orthonormalize the random matrix (Gram-Schmidt) and apply it as a rotation.
    for i in range(dim):
        for j in range(0, i):
            V[i] = V[i] - np.dot(V[i], V[j]) * V[j]
        V[i] = V[i] / (np.sum(V[i] ** 2) ** 0.5)
    for i in range(dim + 1):
        initial_simplex[i] = np.dot(initial_simplex[i], V)

    # Scale up or down and move the generated initial simplex.
    for i in range(dim + 1):
        initial_simplex[i] = edge * initial_simplex[i]
    Matrix_centroid = initial_simplex.mean(axis=0)
    initial_simplex = initial_simplex + (centroid - Matrix_centroid)

    # Check the condition of the generated initial simplex and regenerate it if necessary.
    if check_initial_simplex(initial_simplex, bdrys):
        initial_simplex = generate_initial_simplex(dim, edge, centroid, rng)
    y = np.array(initial_simplex)

    return y


def check_initial_simplex(initial_simplex: np.ndarray, bdrys: np.ndarray) -> bool:
    """
    Return True if no vertex of the generated simplex lies in the search space,
    i.e. the simplex must be regenerated.
    """
    dim = len(initial_simplex) - 1
    if dim + 1 > sum([out_of_boundary(vertex, bdrys) for vertex in initial_simplex]):
        return False
    return True


def out_of_boundary(y: np.ndarray, bdrys: np.ndarray) -> bool:
    """
    Return True if the input vertex lies outside the search space.
    """
    for yi, b in zip(y, bdrys):
        if float(b[0]) <= float(yi) <= float(b[1]):
            pass
        else:
            return True
    return False
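
For illustration, a minimal sketch of calling the helper above on its own (the sampler normally invokes it internally; the rescaling of the unit-hypercube vertices to bounds of `[-5, 5]` below is only an assumed example):

```python
import numpy as np

rng = np.random.RandomState(123)
# Vertices of a regular 2-simplex inside the unit hypercube [0, 1]^2.
simplex = generate_initial_simplex(dim=2, edge=0.5, centroid=0.5, rng=rng)
print(simplex.shape)  # (3, 2): dim + 1 vertices, each of dimension dim

# Map the normalized vertices onto hypothetical bounds, e.g. x, y in [-5, 5].
low, high = -5.0, 5.0
print(low + simplex * (high - low))
```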
Binary file added package/samplers/nelder_mead/images/nm.png
