1- """Hill climbing optimizer from gfo."""
1+ """Adapter for gfo package ."""
22# copyright: hyperactive developers, MIT License (see LICENSE file)
33
4- from gradient_free_optimizers import HillClimbingOptimizer
54from hyperactive .base import BaseOptimizer
65from skbase .utils .stdout_mute import StdoutMute
76
7+ __all__ = ["_BaseGFOadapter" ]
88
9- class HillClimbing (BaseOptimizer ):
10- """Hill climbing optimizer.
11-
12- Parameters
13- ----------
14- search_space : dict[str, list]
15- The search space to explore. A dictionary with parameter
16- names as keys and a numpy array as values.
17- Optional, can be passed later in ``add_search``.
18- initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
19- The method to generate initial positions. A dictionary with
20- the following key literals and the corresponding value type:
21- {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
22- constraints : list[callable], default=[]
23- A list of constraints, where each constraint is a callable.
24- The callable returns `True` or `False` dependent on the input parameters.
25- random_state : None, int, default=None
26- If None, create a new random state. If int, create a new random state
27- seeded with the value.
28- rand_rest_p : float, default=0.1
29- The probability of a random iteration during the search process.
30- epsilon : float, default=0.01
31- The step-size for the climbing.
32- distribution : str, default="normal"
33- The type of distribution to sample from.
34- n_neighbours : int, default=10
35- The number of neighbours to sample and evaluate before moving to the best
36- of those neighbours.
37- n_iter : int, default=100
38- The number of iterations to run the optimizer.
39- verbose : bool, default=False
40- If True, print the progress of the optimization process.
41- experiment : BaseExperiment, optional
42- The experiment to optimize parameters for.
43- Optional, can be passed later in ``add_search``.
44-
45- Examples
46- --------
47- Hill climbing applied to scikit-learn parameter tuning:
48-
49- 1. defining the experiment to optimize:
50- >>> from hyperactive.experiment.integrations import SklearnCvExperiment
51- >>> from sklearn.datasets import load_iris
52- >>> from sklearn.svm import SVC
53- >>>
54- >>> X, y = load_iris(return_X_y=True)
55- >>>
56- >>> sklearn_exp = SklearnCvExperiment(
57- ... estimator=SVC(),
58- ... X=X,
59- ... y=y,
60- ... )
61-
62- 2. setting up the hill climbing optimizer:
63- >>> from hyperactive.opt import HillClimbing
64- >>> import numpy as np
65- >>>
66- >>> hillclimbing_config = {
67- ... "search_space": {
68- ... "C": np.array([0.01, 0.1, 1, 10]),
69- ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
70- ... },
71- ... "n_iter": 100,
72- ... }
73- >>> hillclimbing = HillClimbing(experiment=sklearn_exp, **hillclimbing_config)
74-
75- 3. running the hill climbing search:
76- >>> best_params = hillclimbing.run()
77-
78- Best parameters can also be accessed via the attributes:
79- >>> best_params = hillclimbing.best_params_
9+
10+ class _BaseGFOadapter (BaseOptimizer ):
11+ """Adapter base class for gradient-free-optimizers.
12+
13+ * default tag setting
14+ * default _run method
15+ * default get_search_config
16+ * default get_test_params
17+ * Handles defaults for "initialize" parameter
18+ * extension interface: _get_gfo_class, docstring, tags
8019 """
8120
8221 _tags = {
22+ "authors" : "SimonBlanke" ,
8323 "python_dependencies" : ["gradient-free-optimizers>=1.5.0" ],
8424 }
8525
86- def __init__ (
87- self ,
88- search_space = None ,
89- initialize = None ,
90- constraints = None ,
91- random_state = None ,
92- rand_rest_p = 0.1 ,
93- epsilon = 0.01 ,
94- distribution = "normal" ,
95- n_neighbours = 10 ,
96- n_iter = 100 ,
97- verbose = False ,
98- experiment = None ,
99- ):
100- self .random_state = random_state
101- self .rand_rest_p = rand_rest_p
102- self .epsilon = epsilon
103- self .distribution = distribution
104- self .n_neighbours = n_neighbours
105- self .search_space = search_space
106- self .initialize = initialize
107- self .constraints = constraints
108- self .n_iter = n_iter
109- self .experiment = experiment
110- self .verbose = verbose
26+ def __init__ (self ):
11127
11228 super ().__init__ ()
11329
114- if initialize is None :
30+ if self . initialize is None :
11531 self ._initialize = {"grid" : 4 , "random" : 2 , "vertices" : 4 }
11632 else :
117- self ._initialize = initialize
33+ self ._initialize = self .initialize
34+
35+ def _get_gfo_class (self ):
36+ """Get the GFO class to use.
37+
38+ Returns
39+ -------
40+ class
41+ The GFO class to use. One of the concrete GFO classes
42+ """
43+ raise NotImplementedError (
44+ "This method should be implemented in a subclass."
45+ )
11846
11947 def get_search_config (self ):
12048 """Get the search configuration.
@@ -131,14 +59,12 @@ def get_search_config(self):
13159
13260 def _run (self , experiment , ** search_config ):
13361 """Run the optimization search process.
134-
13562 Parameters
13663 ----------
13764 experiment : BaseExperiment
13865 The experiment to optimize parameters for.
13966 search_config : dict with str keys
14067 identical to return of ``get_search_config``.
141-
14268 Returns
14369 -------
14470 dict with str keys
@@ -148,7 +74,8 @@ def _run(self, experiment, **search_config):
14874 n_iter = search_config .pop ("n_iter" , 100 )
14975 max_time = search_config .pop ("max_time" , None )
15076
151- hcopt = HillClimbingOptimizer (** search_config )
77+ gfo_cls = self ._get_gfo_class ()
78+ hcopt = gfo_cls (** search_config )
15279
15380 with StdoutMute (active = not self .verbose ):
15481 hcopt .search (
0 commit comments