|
| 1 | +"""Dual Annealing optimizer from scipy.optimize.""" |
| 2 | + |
| 3 | +# copyright: hyperactive developers, MIT License (see LICENSE file) |
| 4 | + |
| 5 | +from hyperactive.opt._adapters import _BaseScipyAdapter |
| 6 | + |
| 7 | +__all__ = ["ScipyDualAnnealing"] |
| 8 | + |
| 9 | + |
class ScipyDualAnnealing(_BaseScipyAdapter):
    """Scipy Dual Annealing optimizer.

    Dual Annealing combines Classical Simulated Annealing with a fast
    local search method. It is effective for:

    * Global optimization with many local minima
    * Continuous optimization problems
    * Problems where local refinement improves solutions

    Parameters
    ----------
    param_space : dict[str, tuple]
        The search space to explore. Dictionary with parameter names as keys.
        Values must be tuples ``(low, high)`` for continuous ranges.
    n_iter : int, default=100
        Maximum number of global iterations.
    max_time : float, optional
        Maximum optimization time in seconds.
    initialize : dict, optional
        Initialization configuration. Supports:

        * ``{"warm_start": [{"param1": val1, ...}, ...]}``: Start with
          known good configurations (uses first point as x0)
    random_state : int, optional
        Random seed for reproducibility.
    initial_temp : float, default=5230.0
        Initial temperature for the annealing schedule.
    restart_temp_ratio : float, default=2e-5
        When temperature falls below ``initial_temp * restart_temp_ratio``,
        the annealing restarts.
    visit : float, default=2.62
        Parameter for the visiting distribution. Higher values lead to
        heavier tails (more global exploration).
    accept : float, default=-5.0
        Parameter for the acceptance distribution. More negative values
        make acceptance stricter.
    no_local_search : bool, default=False
        If True, disable local search refinement.
    experiment : BaseExperiment, optional
        The experiment to optimize.

    Attributes
    ----------
    best_params_ : dict
        Best parameters found after calling ``solve()``.
    best_score_ : float
        Score of the best parameters found.

    See Also
    --------
    ScipyDifferentialEvolution : Population-based global optimizer.
    ScipyBasinhopping : Another global-local hybrid approach.

    References
    ----------
    .. [1] Tsallis, C. (1988). Possible generalization of Boltzmann-Gibbs
       statistics. Journal of Statistical Physics, 52(1-2), 479-487.

    .. [2] Xiang, Y., Gubian, S., Suomela, B., & Hoeng, J. (2013).
       Generalized simulated annealing for efficient global optimization:
       the GenSA package for R. The R Journal, 5(1), 13-28.

    Examples
    --------
    >>> from hyperactive.experiment.bench import Ackley
    >>> from hyperactive.opt.scipy import ScipyDualAnnealing

    >>> ackley = Ackley.create_test_instance()
    >>> optimizer = ScipyDualAnnealing(
    ...     param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)},
    ...     n_iter=100,
    ...     random_state=42,
    ...     experiment=ackley,
    ... )
    >>> best_params = optimizer.solve()  # doctest: +SKIP
    """

    _tags = {
        "info:name": "Scipy Dual Annealing",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "mixed",
        "info:compute": "low",
        "python_dependencies": ["scipy"],
    }

    def __init__(
        self,
        param_space=None,
        n_iter=100,
        max_time=None,
        initialize=None,
        random_state=None,
        initial_temp=5230.0,
        restart_temp_ratio=2e-5,
        visit=2.62,
        accept=-5.0,
        no_local_search=False,
        experiment=None,
    ):
        # scikit-base convention: store constructor args verbatim, unvalidated,
        # so get_params/set_params round-trip correctly.
        self.initial_temp = initial_temp
        self.restart_temp_ratio = restart_temp_ratio
        self.visit = visit
        self.accept = accept
        self.no_local_search = no_local_search

        super().__init__(
            param_space=param_space,
            n_iter=n_iter,
            max_time=max_time,
            initialize=initialize,
            random_state=random_state,
            experiment=experiment,
        )

    def _get_scipy_func(self):
        """Get the dual_annealing function.

        Returns
        -------
        callable
            The ``scipy.optimize.dual_annealing`` function.
        """
        # Imported lazily so scipy is only required when the optimizer runs.
        from scipy.optimize import dual_annealing

        return dual_annealing

    def _get_iteration_param_name(self):
        """Get iteration parameter name.

        Returns
        -------
        str
            ``"maxiter"``, the keyword dual_annealing uses for its
            global iteration budget.
        """
        return "maxiter"

    def _get_optimizer_kwargs(self):
        """Get dual annealing specific arguments.

        Returns
        -------
        dict
            Configuration arguments forwarded to
            ``scipy.optimize.dual_annealing``.
        """
        return {
            "initial_temp": self.initial_temp,
            "restart_temp_ratio": self.restart_temp_ratio,
            "visit": self.visit,
            "accept": self.accept,
            "no_local_search": self.no_local_search,
        }

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return. Currently ignored;
            all parameter sets are returned regardless of value.

        Returns
        -------
        list of dict
            List of parameter configurations for testing.
        """
        from hyperactive.experiment.bench import Ackley

        ackley_exp = Ackley.create_test_instance()

        # Test 1: Default configuration
        # Test 2: No local search
        # Test 3: Custom temperature settings
        return [
            {
                "param_space": {
                    "x0": (-5.0, 5.0),
                    "x1": (-5.0, 5.0),
                },
                "n_iter": 50,
                "experiment": ackley_exp,
                "random_state": 42,
            },
            {
                "param_space": {
                    "x0": (-5.0, 5.0),
                    "x1": (-5.0, 5.0),
                },
                "n_iter": 50,
                "no_local_search": True,
                "experiment": ackley_exp,
                "random_state": 42,
            },
            {
                "param_space": {
                    "x0": (-3.0, 3.0),
                    "x1": (-3.0, 3.0),
                },
                "n_iter": 30,
                "initial_temp": 10000.0,
                "visit": 2.8,
                "experiment": ackley_exp,
                "random_state": 123,
            },
        ]
0 commit comments