Skip to content

Commit f3afd56

Browse files
committed
add ScipyDirect algorithm
1 parent f5d5357 commit f3afd56

1 file changed

Lines changed: 253 additions & 0 deletions

File tree

Lines changed: 253 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,253 @@
1+
"""DIRECT (DIviding RECTangles) optimizer from scipy.optimize."""
2+
3+
# copyright: hyperactive developers, MIT License (see LICENSE file)
4+
5+
from hyperactive.opt._adapters import _BaseScipyAdapter
6+
7+
__all__ = ["ScipyDirect"]
8+
9+
10+
class ScipyDirect(_BaseScipyAdapter):
    """Scipy DIRECT (DIviding RECTangles) optimizer.

    DIRECT is a deterministic derivative-free global optimization algorithm.
    It is effective for:

    * Problems where deterministic behavior is required
    * Lipschitz-continuous objective functions
    * Low to moderate dimensional problems
    * Finding approximate global optima efficiently

    Parameters
    ----------
    param_space : dict[str, tuple]
        The search space to explore. Dictionary with parameter names as keys.
        Values must be tuples ``(low, high)`` for continuous ranges.

    n_iter : int, default=100
        Maximum number of function evaluations.

    max_time : float, optional
        Maximum optimization time in seconds (not supported by DIRECT).

    initialize : dict, optional
        Initialization configuration (not used by DIRECT).

    random_state : int, optional
        Random seed (not used, DIRECT is deterministic).

    eps : float, default=1e-4
        Minimal required difference of the objective function values
        between the current best and potential global minima.

    locally_biased : bool, default=True
        If True, use locally biased DIRECT (more local refinement).
        If False, use original DIRECT (more global exploration).

    experiment : BaseExperiment, optional
        The experiment to optimize.

    Attributes
    ----------
    best_params_ : dict
        Best parameters found after calling ``solve()``.

    best_score_ : float
        Score of the best parameters found.

    See Also
    --------
    ScipySHGO : Another deterministic global optimizer.
    ScipyDifferentialEvolution : Stochastic global optimizer.

    References
    ----------
    .. [1] Jones, D. R., Perttunen, C. D., & Stuckman, B. E. (1993).
       Lipschitzian optimization without the Lipschitz constant.
       Journal of optimization Theory and Applications, 79(1), 157-181.

    Examples
    --------
    >>> from hyperactive.experiment.bench import Ackley
    >>> from hyperactive.opt.scipy import ScipyDirect

    >>> ackley = Ackley.create_test_instance()
    >>> optimizer = ScipyDirect(
    ...     param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)},
    ...     n_iter=200,
    ...     experiment=ackley,
    ... )
    >>> best_params = optimizer.solve()  # doctest: +SKIP
    """

    _tags = {
        "info:name": "Scipy DIRECT",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "low",
        "python_dependencies": ["scipy"],
    }

    def __init__(
        self,
        param_space=None,
        n_iter=100,
        max_time=None,
        initialize=None,
        random_state=None,
        eps=1e-4,
        locally_biased=True,
        experiment=None,
    ):
        # DIRECT-specific knobs; everything else is handled by the adapter.
        self.eps = eps
        self.locally_biased = locally_biased

        super().__init__(
            param_space=param_space,
            n_iter=n_iter,
            max_time=max_time,
            initialize=initialize,
            random_state=random_state,
            experiment=experiment,
        )

    def _get_scipy_func(self):
        """Get the direct function.

        Returns
        -------
        callable
            The ``scipy.optimize.direct`` function.
        """
        from scipy.optimize import direct

        return direct

    def _get_iteration_param_name(self):
        """Get iteration parameter name.

        Returns
        -------
        str
            "maxfun" for direct (controls function evaluations).
        """
        return "maxfun"

    def _get_optimizer_kwargs(self):
        """Get DIRECT specific arguments.

        Returns
        -------
        dict
            Configuration arguments for direct.
        """
        return {
            "eps": self.eps,
            "locally_biased": self.locally_biased,
        }

    def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs):
        """Run the DIRECT optimization.

        Overrides base class to handle DIRECT's different API
        (no seed, no callback, no x0).

        Parameters
        ----------
        experiment : BaseExperiment
            The experiment to optimize.
        param_space : dict
            The parameter space to search.
        n_iter : int
            Maximum number of function evaluations.
        max_time : float, optional
            Maximum time (not supported by DIRECT).
        **kwargs
            Additional parameters (ignored by DIRECT).

        Returns
        -------
        dict
            Best parameters found.
        """
        # Reuse the class helpers instead of re-importing / re-building the
        # configuration inline, so a subclass overriding them takes effect.
        direct = self._get_scipy_func()

        # Convert search space to scipy bounds + ordered parameter names.
        bounds, param_names = self._convert_to_scipy_space(param_space)

        # scipy minimizes; the experiment score is maximized, so negate.
        def objective(x):
            params = self._array_to_dict(x, param_names)
            score = experiment(params)
            return -score

        # Run optimization; "maxfun" caps function evaluations (see
        # _get_iteration_param_name).
        result = direct(
            objective,
            bounds,
            maxfun=n_iter,
            **self._get_optimizer_kwargs(),
        )

        # Extract best parameters and record both documented attributes.
        best_params = self._array_to_dict(result.x, param_names)
        self.best_params_ = best_params
        self.best_score_ = -result.fun  # undo the negation above

        return best_params

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the parameter set to return (currently unused; all
            configurations are always returned).

        Returns
        -------
        list of dict
            List of parameter configurations for testing.
        """
        from hyperactive.experiment.bench import Ackley

        ackley_exp = Ackley.create_test_instance()

        square = {
            "x0": (-5.0, 5.0),
            "x1": (-5.0, 5.0),
        }

        return [
            # Test 1: Default configuration (locally biased)
            {
                "param_space": dict(square),
                "n_iter": 100,
                "experiment": ackley_exp,
            },
            # Test 2: Original DIRECT (not locally biased)
            {
                "param_space": dict(square),
                "n_iter": 100,
                "locally_biased": False,
                "experiment": ackley_exp,
            },
            # Test 3: Higher precision on a tighter box
            {
                "param_space": {
                    "x0": (-3.0, 3.0),
                    "x1": (-3.0, 3.0),
                },
                "n_iter": 150,
                "eps": 1e-6,
                "experiment": ackley_exp,
            },
        ]

0 commit comments

Comments
 (0)