Skip to content

Commit e6e7ce0

Browse files
authored
Merge pull request #121 from fkiraly/gfo-proto-and-hillclimbing
[ENH] prototype `gfo` adapter, more hill climbing classes
2 parents 613b45c + 837d1da commit e6e7ce0

19 files changed

Lines changed: 583 additions & 131 deletions

File tree

examples/hyperactive_intro.ipynb

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -392,7 +392,7 @@
392392
},
393393
{
394394
"cell_type": "code",
395-
"execution_count": 12,
395+
"execution_count": null,
396396
"id": "e9a07a73",
397397
"metadata": {},
398398
"outputs": [
@@ -408,7 +408,7 @@
408408
}
409409
],
410410
"source": [
411-
"from hyperactive.opt import GridSearch\n",
411+
"from hyperactive.opt import GridSearchSk as GridSearch\n",
412412
"\n",
413413
"param_grid = {\n",
414414
" \"C\": [0.01, 0.1, 1, 10],\n",
@@ -492,7 +492,7 @@
492492
},
493493
{
494494
"cell_type": "code",
495-
"execution_count": 15,
495+
"execution_count": null,
496496
"id": "9a13b4f3",
497497
"metadata": {},
498498
"outputs": [
@@ -516,7 +516,7 @@
516516
],
517517
"source": [
518518
"import numpy as np\n",
519-
"from hyperactive.opt.gfo import HillClimbing\n",
519+
"from hyperactive.opt import HillClimbing\n",
520520
"\n",
521521
"hillclimbing_config = {\n",
522522
" \"search_space\": {\n",
@@ -532,7 +532,7 @@
532532
},
533533
{
534534
"cell_type": "code",
535-
"execution_count": 16,
535+
"execution_count": null,
536536
"id": "5aa7ca80",
537537
"metadata": {},
538538
"outputs": [
@@ -556,7 +556,7 @@
556556
],
557557
"source": [
558558
"import numpy as np\n",
559-
"from hyperactive.opt.gfo import HillClimbing\n",
559+
"from hyperactive.opt import HillClimbing\n",
560560
"\n",
561561
"hill_climbing_config = {\n",
562562
" \"search_space\": {\n",
@@ -602,15 +602,15 @@
602602
},
603603
{
604604
"cell_type": "code",
605-
"execution_count": 17,
605+
"execution_count": null,
606606
"id": "4bdf2d49",
607607
"metadata": {},
608608
"outputs": [],
609609
"source": [
610610
"# 1. defining the tuned estimator\n",
611611
"from sklearn.svm import SVC\n",
612612
"from hyperactive.integrations.sklearn import OptCV\n",
613-
"from hyperactive.opt import GridSearch\n",
613+
"from hyperactive.opt import GridSearchSk as GridSearch\n",
614614
"\n",
615615
"param_grid = {\"kernel\": [\"linear\", \"rbf\"], \"C\": [1, 10]}\n",
616616
"tuned_svc = OptCV(SVC(), optimizer=GridSearch(param_grid))\n",
@@ -1084,7 +1084,7 @@
10841084
},
10851085
{
10861086
"cell_type": "code",
1087-
"execution_count": 20,
1087+
"execution_count": null,
10881088
"id": "f606284b",
10891089
"metadata": {},
10901090
"outputs": [
@@ -1100,7 +1100,7 @@
11001100
"# 1. defining the tuned estimator\n",
11011101
"from sklearn.svm import SVC\n",
11021102
"from hyperactive.integrations.sklearn import OptCV\n",
1103-
"from hyperactive.opt.gfo import HillClimbing\n",
1103+
"from hyperactive.opt import HillClimbing\n",
11041104
"\n",
11051105
"# picking the optimizer is the only part that changes!\n",
11061106
"hill_climbing_config = {\n",

examples/integrations/sklearn_example.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
from sklearn import svm, datasets
22

33
from hyperactive.integrations import HyperactiveSearchCV
4-
from sklearn.model_selection import GridSearchCV
54
from hyperactive.optimizers import RandomSearchOptimizer
65

76
iris = datasets.load_iris()

src/hyperactive/base/_optimizer.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,13 @@ class BaseOptimizer(BaseObject):
1010
_tags = {
1111
"object_type": "optimizer",
1212
"python_dependencies": None,
13+
# properties of the optimizer
14+
"info:name": None, # str
15+
"info:local_vs_global": "mixed", # "local", "mixed", "global"
16+
"info:explore_vs_exploit": "mixed", # "explore", "exploit", "mixed"
17+
"info:compute": "middle", # "low", "middle", "high"
18+
# see here for explanation of the tags:
19+
# https://simonblanke.github.io/gradient-free-optimizers-documentation/1.5/optimizers/ # noqa: E501
1320
}
1421

1522
def __init__(self):
@@ -18,6 +25,9 @@ def __init__(self):
1825
search_config = self.get_params()
1926
self._experiment = search_config.pop("experiment", None)
2027

28+
if self.get_tag("info:name") is None:
29+
self.set_tags(**{"info:name": self.__class__.__name__})
30+
2131
def get_search_config(self):
2232
"""Get the search configuration.
2333

src/hyperactive/base/tests/test_endtoend.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ def test_endtoend_hillclimbing():
2626

2727
# 2. set up the HillClimbing optimizer
2828
import numpy as np
29-
from hyperactive.opt.gfo import HillClimbing
29+
from hyperactive.opt import HillClimbing
3030

3131
hillclimbing_config = {
3232
"search_space": {

src/hyperactive/integrations/sklearn/opt_cv.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ class OptCV(BaseEstimator, _BestEstimator_, Checks):
4444
1. defining the tuned estimator:
4545
>>> from sklearn.svm import SVC
4646
>>> from hyperactive.integrations.sklearn import OptCV
47-
>>> from hyperactive.opt import GridSearch
47+
>>> from hyperactive.opt import GridSearchSk as GridSearch
4848
>>>
4949
>>> param_grid = {"kernel": ["linear", "rbf"], "C": [1, 10]}
5050
>>> tuned_svc = OptCV(SVC(), GridSearch(param_grid))

src/hyperactive/opt/__init__.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,14 @@
11
"""Individual optimization algorithms."""
22
# copyright: hyperactive developers, MIT License (see LICENSE file)
33

4-
from hyperactive.opt.gfo import HillClimbing
5-
from hyperactive.opt.gridsearch import GridSearch
4+
from hyperactive.opt.gridsearch import GridSearchSk
5+
from hyperactive.opt.hillclimbing import HillClimbing
6+
from hyperactive.opt.hillclimbing_repulsing import HillClimbingRepulsing
7+
from hyperactive.opt.hillclimbing_stochastic import HillClimbingStochastic
68

79
__all__ = [
8-
"GridSearch",
10+
"GridSearchSk",
911
"HillClimbing",
12+
"HillClimbingRepulsing",
13+
"HillClimbingStochastic",
1014
]
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
"""Adapters for individual packages."""
2+
# copyright: hyperactive developers, MIT License (see LICENSE file)
Lines changed: 30 additions & 103 deletions
Original file line numberDiff line numberDiff line change
@@ -1,120 +1,48 @@
1-
"""Hill climbing optimizer from gfo."""
1+
"""Adapter for gfo package."""
22
# copyright: hyperactive developers, MIT License (see LICENSE file)
33

4-
from gradient_free_optimizers import HillClimbingOptimizer
54
from hyperactive.base import BaseOptimizer
65
from skbase.utils.stdout_mute import StdoutMute
76

7+
__all__ = ["_BaseGFOadapter"]
88

9-
class HillClimbing(BaseOptimizer):
10-
"""Hill climbing optimizer.
11-
12-
Parameters
13-
----------
14-
search_space : dict[str, list]
15-
The search space to explore. A dictionary with parameter
16-
names as keys and a numpy array as values.
17-
Optional, can be passed later in ``add_search``.
18-
initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
19-
The method to generate initial positions. A dictionary with
20-
the following key literals and the corresponding value type:
21-
{"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
22-
constraints : list[callable], default=[]
23-
A list of constraints, where each constraint is a callable.
24-
The callable returns `True` or `False` depending on the input parameters.
25-
random_state : None, int, default=None
26-
If None, create a new random state. If int, create a new random state
27-
seeded with the value.
28-
rand_rest_p : float, default=0.1
29-
The probability of a random iteration during the search process.
30-
epsilon : float, default=0.01
31-
The step-size for the climbing.
32-
distribution : str, default="normal"
33-
The type of distribution to sample from.
34-
n_neighbours : int, default=10
35-
The number of neighbours to sample and evaluate before moving to the best
36-
of those neighbours.
37-
n_iter : int, default=100
38-
The number of iterations to run the optimizer.
39-
verbose : bool, default=False
40-
If True, print the progress of the optimization process.
41-
experiment : BaseExperiment, optional
42-
The experiment to optimize parameters for.
43-
Optional, can be passed later in ``add_search``.
44-
45-
Examples
46-
--------
47-
Hill climbing applied to scikit-learn parameter tuning:
48-
49-
1. defining the experiment to optimize:
50-
>>> from hyperactive.experiment.integrations import SklearnCvExperiment
51-
>>> from sklearn.datasets import load_iris
52-
>>> from sklearn.svm import SVC
53-
>>>
54-
>>> X, y = load_iris(return_X_y=True)
55-
>>>
56-
>>> sklearn_exp = SklearnCvExperiment(
57-
... estimator=SVC(),
58-
... X=X,
59-
... y=y,
60-
... )
61-
62-
2. setting up the hill climbing optimizer:
63-
>>> from hyperactive.opt import HillClimbing
64-
>>> import numpy as np
65-
>>>
66-
>>> hillclimbing_config = {
67-
... "search_space": {
68-
... "C": np.array([0.01, 0.1, 1, 10]),
69-
... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
70-
... },
71-
... "n_iter": 100,
72-
... }
73-
>>> hillclimbing = HillClimbing(experiment=sklearn_exp, **hillclimbing_config)
74-
75-
3. running the hill climbing search:
76-
>>> best_params = hillclimbing.run()
77-
78-
Best parameters can also be accessed via the attributes:
79-
>>> best_params = hillclimbing.best_params_
9+
10+
class _BaseGFOadapter(BaseOptimizer):
11+
"""Adapter base class for gradient-free-optimizers.
12+
13+
* default tag setting
14+
* default _run method
15+
* default get_search_config
16+
* default get_test_params
17+
* Handles defaults for "initialize" parameter
18+
* extension interface: _get_gfo_class, docstring, tags
8019
"""
8120

8221
_tags = {
22+
"authors": "SimonBlanke",
8323
"python_dependencies": ["gradient-free-optimizers>=1.5.0"],
8424
}
8525

86-
def __init__(
87-
self,
88-
search_space=None,
89-
initialize=None,
90-
constraints=None,
91-
random_state=None,
92-
rand_rest_p=0.1,
93-
epsilon=0.01,
94-
distribution="normal",
95-
n_neighbours=10,
96-
n_iter=100,
97-
verbose=False,
98-
experiment=None,
99-
):
100-
self.random_state = random_state
101-
self.rand_rest_p = rand_rest_p
102-
self.epsilon = epsilon
103-
self.distribution = distribution
104-
self.n_neighbours = n_neighbours
105-
self.search_space = search_space
106-
self.initialize = initialize
107-
self.constraints = constraints
108-
self.n_iter = n_iter
109-
self.experiment = experiment
110-
self.verbose = verbose
26+
def __init__(self):
11127

11228
super().__init__()
11329

114-
if initialize is None:
30+
if self.initialize is None:
11531
self._initialize = {"grid": 4, "random": 2, "vertices": 4}
11632
else:
117-
self._initialize = initialize
33+
self._initialize = self.initialize
34+
35+
def _get_gfo_class(self):
36+
"""Get the GFO class to use.
37+
38+
Returns
39+
-------
40+
class
41+
The GFO class to use. One of the concrete GFO classes
42+
"""
43+
raise NotImplementedError(
44+
"This method should be implemented in a subclass."
45+
)
11846

11947
def get_search_config(self):
12048
"""Get the search configuration.
@@ -131,14 +59,12 @@ def get_search_config(self):
13159

13260
def _run(self, experiment, **search_config):
13361
"""Run the optimization search process.
134-
13562
Parameters
13663
----------
13764
experiment : BaseExperiment
13865
The experiment to optimize parameters for.
13966
search_config : dict with str keys
14067
identical to return of ``get_search_config``.
141-
14268
Returns
14369
-------
14470
dict with str keys
@@ -148,7 +74,8 @@ def _run(self, experiment, **search_config):
14874
n_iter = search_config.pop("n_iter", 100)
14975
max_time = search_config.pop("max_time", None)
15076

151-
hcopt = HillClimbingOptimizer(**search_config)
77+
gfo_cls = self._get_gfo_class()
78+
hcopt = gfo_cls(**search_config)
15279

15380
with StdoutMute(active=not self.verbose):
15481
hcopt.search(

src/hyperactive/opt/gfo/__init__.py

Lines changed: 0 additions & 6 deletions
This file was deleted.
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
"""Grid search with sklearn style grid and backends."""
2+
# copyright: hyperactive developers, MIT License (see LICENSE file)
3+
4+
from hyperactive.opt.gridsearch._sk import GridSearchSk
5+
6+
__all__ = ["GridSearchSk"]

0 commit comments

Comments
 (0)