Skip to content

Commit e0bd32a

Browse files
committed
add ScipySHGO optimizer
1 parent 484df61 commit e0bd32a

File tree

1 file changed

+257
-0
lines changed

1 file changed

+257
-0
lines changed

src/hyperactive/opt/scipy/_shgo.py

Lines changed: 257 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,257 @@
1+
"""SHGO (Simplicial Homology Global Optimization) from scipy.optimize."""
2+
3+
# copyright: hyperactive developers, MIT License (see LICENSE file)
4+
5+
from hyperactive.opt._adapters import _BaseScipyAdapter
6+
7+
__all__ = ["ScipySHGO"]
8+
9+
10+
class ScipySHGO(_BaseScipyAdapter):
    """Scipy SHGO (Simplicial Homology Global Optimization).

    SHGO is designed to find all local minima of a function, not just
    the global minimum. It is effective for:

    * Problems where finding multiple local minima is valuable
    * Continuous optimization with bounds
    * Low to moderate dimensional problems

    Parameters
    ----------
    param_space : dict[str, tuple]
        The search space to explore. Dictionary with parameter names as keys.
        Values must be tuples ``(low, high)`` for continuous ranges.

    n_iter : int, default=100
        Number of sampling iterations.

    max_time : float, optional
        Maximum optimization time in seconds.

    initialize : dict, optional
        Initialization configuration (not used by SHGO).

    random_state : int, optional
        Random seed (not directly supported by SHGO).

    n : int, default=100
        Number of sampling points per iteration.

    sampling_method : str, default="simplicial"
        Sampling method for generating points:

        * ``"simplicial"``: simplicial complex based sampling (default)
        * ``"halton"``: Halton sequence
        * ``"sobol"``: Sobol sequence

    experiment : BaseExperiment, optional
        The experiment to optimize.

    Attributes
    ----------
    best_params_ : dict
        Best parameters found after calling ``solve()``.

    best_score_ : float
        Score of the best parameters found.

    See Also
    --------
    ScipyDirect : Another deterministic global optimizer.
    ScipyDifferentialEvolution : Stochastic global optimizer.

    References
    ----------
    .. [1] Endres, S. C., Sandrock, C., & Focke, W. W. (2018). A simplicial
       homology algorithm for Lipschitz optimisation. Journal of Global
       Optimization, 72(2), 181-217.

    Examples
    --------
    >>> from hyperactive.experiment.bench import Ackley
    >>> from hyperactive.opt.scipy import ScipySHGO

    >>> ackley = Ackley.create_test_instance()
    >>> optimizer = ScipySHGO(
    ...     param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)},
    ...     n_iter=3,
    ...     n=50,
    ...     experiment=ackley,
    ... )
    >>> best_params = optimizer.solve()  # doctest: +SKIP
    """

    _tags = {
        "info:name": "Scipy SHGO",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "middle",
        "python_dependencies": ["scipy"],
    }

    def __init__(
        self,
        param_space=None,
        n_iter=100,
        max_time=None,
        initialize=None,
        random_state=None,
        n=100,
        sampling_method="simplicial",
        experiment=None,
    ):
        # SHGO-specific settings; remaining arguments are handled by the
        # shared scipy adapter base class.
        self.n = n
        self.sampling_method = sampling_method

        super().__init__(
            param_space=param_space,
            n_iter=n_iter,
            max_time=max_time,
            initialize=initialize,
            random_state=random_state,
            experiment=experiment,
        )

    def _get_scipy_func(self):
        """Get the shgo function.

        Returns
        -------
        callable
            The ``scipy.optimize.shgo`` function.
        """
        # Imported lazily so scipy is only required when actually optimizing.
        from scipy.optimize import shgo

        return shgo

    def _get_iteration_param_name(self):
        """Get iteration parameter name.

        Returns
        -------
        str
            "iters" for shgo.
        """
        return "iters"

    def _get_optimizer_kwargs(self):
        """Get SHGO specific arguments.

        Returns
        -------
        dict
            Configuration arguments for shgo.
        """
        kwargs = {
            "n": self.n,
            "sampling_method": self.sampling_method,
        }
        return kwargs

    def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs):
        """Run the SHGO optimization.

        Overrides base class to handle SHGO's different API
        (no seed, no callback).

        Parameters
        ----------
        experiment : BaseExperiment
            The experiment to optimize.
        param_space : dict
            The parameter space to search.
        n_iter : int
            Number of sampling iterations.
        max_time : float, optional
            Maximum time in seconds (not supported by SHGO).
        **kwargs
            Additional parameters.

        Returns
        -------
        dict
            Best parameters found.
        """
        # Reuse the class's own accessor for consistency with the adapter
        # pattern used elsewhere, instead of importing shgo directly here.
        shgo = self._get_scipy_func()

        # Convert search space to scipy-style bounds plus an ordered name list.
        bounds, param_names = self._convert_to_scipy_space(param_space)

        # Create objective function (negated, because experiments score
        # higher-is-better while shgo minimizes).
        def objective(x):
            params = self._array_to_dict(x, param_names)
            score = experiment(params)
            return -score

        # Run optimization. NOTE(review): max_time is intentionally ignored —
        # scipy's shgo has no time-limit hook.
        result = shgo(
            objective,
            bounds,
            n=self.n,
            iters=n_iter,
            sampling_method=self.sampling_method,
        )

        # Extract best parameters; undo the negation for the stored score.
        best_params = self._array_to_dict(result.x, param_names)
        self.best_score_ = -result.fun

        return best_params

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the parameter set to return (only "default" is defined).

        Returns
        -------
        list of dict
            List of parameter configurations for testing.
        """
        from hyperactive.experiment.bench import Ackley

        params = []

        ackley_exp = Ackley.create_test_instance()

        # Test 1: Default configuration
        params.append(
            {
                "param_space": {
                    "x0": (-5.0, 5.0),
                    "x1": (-5.0, 5.0),
                },
                "n_iter": 2,
                "n": 30,
                "experiment": ackley_exp,
            }
        )

        # Test 2: Halton sampling
        params.append(
            {
                "param_space": {
                    "x0": (-5.0, 5.0),
                    "x1": (-5.0, 5.0),
                },
                "n_iter": 2,
                "n": 30,
                "sampling_method": "halton",
                "experiment": ackley_exp,
            }
        )

        # Test 3: More sampling points
        params.append(
            {
                "param_space": {
                    "x0": (-3.0, 3.0),
                    "x1": (-3.0, 3.0),
                },
                "n_iter": 3,
                "n": 50,
                "experiment": ackley_exp,
            }
        )

        return params

0 commit comments

Comments
 (0)