
Commit a473070

Merge pull request #57 from GPflow/enhance_optimizers
Optimizer cleanup
2 parents 64d3c4f + f3a02c0

2 files changed: 122 additions & 71 deletions
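In brief: this commit inverts the optimizer hierarchy in GPflowOpt/optim.py. MCOptimizer becomes the base class and obtains its evaluation points from an overridable _get_eval_points() hook, CandidateOptimizer becomes the subclass that returns a fixed candidate set, and StagedOptimizer gains a propagating domain setter plus a _best_x() helper so the final result reports the best point across all successful stages. A minimal sketch of the resulting relationship (bodies elided, names as in the diff below):

    class MCOptimizer(Optimizer):
        # draws a fresh random design on every optimize() call
        def _get_eval_points(self):
            return RandomDesign(self._nsamples, self.domain).generate()

    class CandidateOptimizer(MCOptimizer):
        # evaluates a fixed, pre-defined candidate set instead
        def _get_eval_points(self):
            return self.candidates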

GPflowOpt/optim.py
Lines changed: 66 additions & 48 deletions
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import numpy as np
-from scipy.optimize import OptimizeResult, minimize
-from GPflow import settings
 import contextlib
-import sys
 import os
+import sys
+import warnings
+
+import numpy as np
+from GPflow import settings
+from scipy.optimize import OptimizeResult, minimize
 
 from .design import RandomDesign
 from .objective import ObjectiveWrapper
@@ -126,47 +128,31 @@ def silent(self):
             sys.stdout = save_stdout
 
 
-class CandidateOptimizer(Optimizer):
+class MCOptimizer(Optimizer):
     """
-    Optimization of an objective function by evaluating a set of pre-defined candidate points.
-
-    Returns the point with minimal objective value.
+    Optimization of an objective function by evaluating a set of random points.
 
-    For compatibility with the StagedOptimizer, the candidate points are concatenated with
-    the initial points and evaluated.
+    Note: each call to optimize, a different set of random points is evaluated.
     """
 
-    def __init__(self, domain, candidates, batch=False):
+    def __init__(self, domain, nsamples):
         """
-        :param domain: Optimization :class:`.domain.Domain`.
-        :param candidates: candidate points, should be within the optimization domain.
-        :param batch: bool, if true evaluates the objective function on all points at once
+        :param domain: Optimization :class:`~.domain.Domain`.
+        :param nsamples: number of random points to use
         """
-        super(CandidateOptimizer, self).__init__(domain, exclude_gradient=True)
-        assert(candidates in domain)
-        self.candidates = candidates
-        self._batch_mode = batch
+        super(MCOptimizer, self).__init__(domain, exclude_gradient=True)
+        self._nsamples = nsamples
 
     @Optimizer.domain.setter
     def domain(self, dom):
-        # Attempt to transform candidates
-        t = self.domain >> dom
-        self.candidates = t.forward(self.candidates)
         self._domain = dom
-        self.set_initial(dom.value)
 
-    def get_initial(self):
-        return np.vstack((super(CandidateOptimizer, self).get_initial(), self.candidates))
-
-    def _evaluate_one_by_one(self, objective, X):
-        """
-        Evaluates each row of X individually.
-        """
-        return np.vstack(map(lambda x: objective(x), X))
+    def _get_eval_points(self):
+        return RandomDesign(self._nsamples, self.domain).generate()
 
     def _optimize(self, objective):
-        points = self.get_initial()
-        evaluations = objective(points) if self._batch_mode else self._evaluate_one_by_one(objective, points)
+        points = self._get_eval_points()
+        evaluations = objective(points)
         idx_best = np.argmin(evaluations, axis=0)
 
         return OptimizeResult(x=points[idx_best, :],
@@ -175,21 +161,39 @@ def _optimize(self, objective):
                               nfev=points.shape[0],
                               message="OK")
 
+    def set_initial(self, initial):
+        initial = np.atleast_2d(initial)
+        super(MCOptimizer, self).set_initial(initial)
+        if initial.size > 0:
+            warnings.warn("Initial points set in {0} are ignored.".format(self.__class__.__name__), UserWarning)
+
 
-class MCOptimizer(CandidateOptimizer):
+class CandidateOptimizer(MCOptimizer):
     """
-    Optimization of an objective function by evaluating a set of random points.
+    Optimization of an objective function by evaluating a set of pre-defined candidate points.
 
-    Note: each call to optimize, a different set of random points is evaluated.
+    Returns the point with minimal objective value.
     """
 
-    def __init__(self, domain, nsamples, batch=False):
-        super(MCOptimizer, self).__init__(domain, np.empty((0, domain.size)), batch=batch)
-        self._nsamples = nsamples
+    def __init__(self, domain, candidates):
+        """
+        :param domain: Optimization :class:`~.domain.Domain`.
+        :param candidates: candidate points, should be within the optimization domain.
+        """
+        super(CandidateOptimizer, self).__init__(domain, candidates.shape[0])
+        assert (candidates in domain)
+        self.candidates = candidates
+        # Clear the initial data points
+        self.set_initial(np.empty((0, self.domain.size)))
 
-    def _optimize(self, objective):
-        self.candidates = RandomDesign(self._nsamples, self.domain).generate()
-        return super(MCOptimizer, self)._optimize(objective)
+    def _get_eval_points(self):
+        return self.candidates
+
+    @MCOptimizer.domain.setter
+    def domain(self, dom):
+        t = self.domain >> dom
+        super(CandidateOptimizer, self.__class__).domain.fset(self, dom)
+        self.candidates = t.forward(self.candidates)
 
 
 class SciPyOptimizer(Optimizer):
@@ -231,6 +235,17 @@ def __init__(self, optimizers):
         no_gradient = any(map(lambda opt: not opt.gradient_enabled(), optimizers))
         super(StagedOptimizer, self).__init__(optimizers[0].domain, exclude_gradient=no_gradient)
         self.optimizers = optimizers
+        self.set_initial(np.empty((0, self.domain.size)))
+
+    @Optimizer.domain.setter
+    def domain(self, domain):
+        super(StagedOptimizer, self.__class__).domain.fset(self, domain)
+        for optimizer in self.optimizers:
+            optimizer.domain = domain
+
+    def _best_x(self, results):
+        best_idx = np.argmin([r.fun for r in results if r.success])
+        return results[best_idx].x, results[best_idx].fun
 
     def optimize(self, objectivefx):
         """
@@ -240,18 +255,21 @@
         """
 
         self.optimizers[0].set_initial(self.get_initial())
-        fun_evals = []
-        for current, next in zip(self.optimizers[:-1], self.optimizers[1:]):
+        results = []
+        for current, following in zip(self.optimizers[:-1], self.optimizers[1:]):
             result = current.optimize(objectivefx)
-            fun_evals.append(result.nfev)
+            results.append(result)
             if not result.success:
                 result.message += " StagedOptimizer interrupted after {0}.".format(current.__class__.__name__)
                 break
-            next.set_initial(result.x)
+            following.set_initial(self._best_x(results)[0])
 
         if result.success:
             result = self.optimizers[-1].optimize(objectivefx)
-            fun_evals.append(result.nfev)
-        result.nfev = sum(fun_evals)
-        result.nstages = len(fun_evals)
+            results.append(result)
+
+        result.nfev = sum(r.nfev for r in results)
+        result.nstages = len(results)
+        if any(r.success for r in results):
+            result.x, result.fun = self._best_x(results)
         return result
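For orientation, a hedged usage sketch of the refactored classes follows. It relies only on names appearing elsewhere in this commit (UnitCube, FactorialDesign, MCOptimizer, CandidateOptimizer, StagedOptimizer, SciPyOptimizer); the parabola2d helper is an assumed stand-in mirroring the test objective, returning one value per row together with its gradient, so treat this as illustrative rather than canonical:

    import numpy as np
    import GPflowOpt

    def parabola2d(X):
        # assumed objective signature: values (one per row) and gradients
        return np.sum(np.square(X), axis=1, keepdims=True), 2 * X

    domain = GPflowOpt.domain.UnitCube(2)

    # MCOptimizer: a fresh random design of 100 points per optimize() call.
    mc = GPflowOpt.optim.MCOptimizer(domain, 100)
    print(mc.optimize(parabola2d).fun)

    # CandidateOptimizer: the same fixed candidate set on every call.
    candidates = GPflowOpt.design.FactorialDesign(4, domain).generate()
    co = GPflowOpt.optim.CandidateOptimizer(domain, candidates)
    print(co.optimize(parabola2d).x)

    # StagedOptimizer: each stage is seeded with the best point found so far;
    # the final result aggregates nfev/nstages and the best x across stages.
    staged = GPflowOpt.optim.StagedOptimizer([GPflowOpt.optim.MCOptimizer(domain, 200),
                                              GPflowOpt.optim.SciPyOptimizer(domain, maxiter=10)])
    result = staged.optimize(parabola2d)
    print(result.x, result.fun, result.nfev, result.nstages)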

testing/test_optimizers.py
Lines changed: 56 additions & 23 deletions
@@ -5,6 +5,7 @@
 import six
 import sys
 import os
+import warnings
 from contextlib import contextmanager
 from scipy.optimize import OptimizeResult
 
@@ -31,18 +32,20 @@ def __init__(self, iters_to_raise, f):
         self.iters_to_raise, self.f = iters_to_raise, f
         self.count = 0
 
-    def __call__(self, *a, **kw):
-        self.count += 1
+    def __call__(self, X):
         if self.count >= self.iters_to_raise:
             raise KeyboardInterrupt
-        return self.f(*a, **kw)
+        val = self.f(X)
+        self.count += X.shape[0]
+        return val
 
 
 class _TestOptimizer(object):
     _multiprocess_can_split_ = True
 
     def setUp(self):
         self.optimizer = None
+        warnings.simplefilter("once", category=UserWarning)
 
     @property
     def domain(self):
@@ -71,9 +74,21 @@ def setUp(self):
         design = GPflowOpt.design.FactorialDesign(4, self.domain)
         self.optimizer = GPflowOpt.optim.CandidateOptimizer(self.domain, design.generate())
 
+    def test_default_initial(self):
+        self.assertTupleEqual(self.optimizer._initial.shape, (0, 2), msg="Invalid shape of initial points array")
+
+    def test_set_initial(self):
+        # When run separately this test works, however when calling nose to run all tests on python 2.7 this records
+        # no warnings
+        with warnings.catch_warnings(record=True) as w:
+            super(TestCandidateOptimizer, self).test_set_initial()
+            assert len(w) == 1
+            assert issubclass(w[-1].category, UserWarning)
+
     def test_object_integrity(self):
         self.assertTupleEqual(self.optimizer.candidates.shape, (16, 2), msg="Invalid shape of candidate property.")
-        self.assertTupleEqual(self.optimizer.get_initial().shape, (17, 2), msg="Invalid shape of initial points")
+        self.assertTupleEqual(self.optimizer._get_eval_points().shape, (16, 2))
+        self.assertTupleEqual(self.optimizer.get_initial().shape, (0, 2), msg="Invalid shape of initial points")
         self.assertFalse(self.optimizer.gradient_enabled(), msg="CandidateOptimizer supports no gradients.")
 
     def test_set_domain(self):
@@ -83,17 +98,17 @@ def test_set_domain(self):
         self.assertNotEqual(self.optimizer.domain, self.domain)
         self.assertEqual(self.optimizer.domain, GPflowOpt.domain.UnitCube(2))
         rescaled_candidates = GPflowOpt.design.FactorialDesign(4, GPflowOpt.domain.UnitCube(2)).generate()
-        self.assertTrue(np.allclose(self.optimizer.get_initial(), np.vstack((0.5*np.ones((1,2)), rescaled_candidates))))
+        self.assertTrue(np.allclose(self.optimizer.candidates, rescaled_candidates))
 
     def test_optimize(self):
+        self.optimizer.candidates = np.vstack((self.optimizer.candidates, np.zeros((1,2))))
         result = self.optimizer.optimize(parabola2d)
         self.assertTrue(result.success, msg="Optimization should succeed.")
         self.assertTrue(np.allclose(result.x, 0), msg="Optimum should be identified")
         self.assertTrue(np.allclose(result.fun, 0), msg="Function value in optimum is 0")
         self.assertEqual(result.nfev, 17, msg="Number of function evaluations equals candidates + initial points")
 
     def test_optimize_second(self):
-        self.optimizer.set_initial([0.67, 0.67])
         result = self.optimizer.optimize(parabola2d)
         self.assertGreater(result.fun, 0, msg="Optimum is not amongst candidates and initial points")
         self.assertLess(result.fun, 2, msg="Function value not reachable within domain")
@@ -131,33 +146,51 @@ class TestStagedOptimizer(_TestOptimizer, unittest.TestCase):
     def setUp(self):
         super(TestStagedOptimizer, self).setUp()
         self.optimizer = GPflowOpt.optim.StagedOptimizer([GPflowOpt.optim.MCOptimizer(self.domain, 5),
+                                                          GPflowOpt.optim.MCOptimizer(self.domain, 5),
                                                           GPflowOpt.optim.SciPyOptimizer(self.domain, maxiter=10)])
 
+    def test_default_initial(self):
+        self.assertTupleEqual(self.optimizer._initial.shape, (0, 2))
+
     def test_object_integrity(self):
-        self.assertEqual(len(self.optimizer.optimizers), 2, msg="Two optimizers expected in optimizerlist")
+        self.assertEqual(len(self.optimizer.optimizers), 3, msg="Three optimizers expected in optimizer list")
         self.assertFalse(self.optimizer.gradient_enabled(), msg="MCOptimizer supports no gradients => neither "
                                                                 "does stagedoptimizer.")
 
     def test_optimize(self):
-        result = self.optimizer.optimize(parabola2d)
-        self.assertTrue(result.success)
-        self.assertLessEqual(result.nfev, 20, "Only 10 Iterations permitted")
-        self.assertTrue(np.allclose(result.x, 0), msg="Optimizer failed to find optimum")
-        self.assertTrue(np.allclose(result.fun, 0), msg="Incorrect function value returned")
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=UserWarning)
+            result = self.optimizer.optimize(parabola2d)
+        self.assertTrue(result.success)
+        self.assertLessEqual(result.nfev, 20, "Only 20 iterations permitted")
+        self.assertTrue(np.allclose(result.x, 0), msg="Optimizer failed to find optimum")
+        self.assertTrue(np.allclose(result.fun, 0), msg="Incorrect function value returned")
 
     def test_optimizer_interrupt(self):
-        self.optimizer.set_initial([-1, -1])
-        result = self.optimizer.optimize(KeyboardRaiser(3, parabola2d))
-        self.assertFalse(result.success, msg="After two evaluations, a keyboard interrupt is raised, "
-                                             "non-succesfull result expected.")
-        self.assertFalse(np.allclose(result.x, 0.0), msg="After one iteration, the optimum will not be found")
-        self.assertEqual(result.nstages, 1, msg="Stage 1 should be in progress during interrupt")
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=UserWarning)
+            result = self.optimizer.optimize(KeyboardRaiser(0, parabola2d))
+        self.assertFalse(result.success, msg="non-successful result expected.")
+        self.assertEqual(result.nstages, 1, msg="Stage 1 should be in progress during interrupt")
+        self.assertEqual(result.nfev, 0)
+
+        result = self.optimizer.optimize(KeyboardRaiser(3, parabola2d))
+        self.assertFalse(result.success, msg="non-successful result expected.")
+        self.assertFalse(np.allclose(result.x, 0.0), msg="The optimum will not be found")
+        self.assertEqual(result.nstages, 2, msg="Stage 2 should be in progress during interrupt")
+        self.assertEqual(result.nfev, 5)
+
+        result = self.optimizer.optimize(KeyboardRaiser(12, parabola2d))
+        print(result)
+        self.assertFalse(result.success, msg="non-successful result expected.")
+        self.assertEqual(result.nfev, 12)
+        self.assertFalse(np.allclose(result.x[0, :], 0.0), msg="The optimum should not be found yet")
+        self.assertEqual(result.nstages, 3, msg="Stage 3 should be in progress during interrupt")
 
-        result = self.optimizer.optimize(KeyboardRaiser(8, parabola2d))
-        self.assertFalse(result.success, msg="After 7 evaluations, a keyboard interrupt is raised, "
-                                             "non-succesfull result expected.")
-        self.assertFalse(np.allclose(result.x[0, :], 0.0), msg="The optimum should not be found yet")
-        self.assertEqual(result.nstages, 2, msg="Stage 2 should be in progress during interrupt")
+    def test_set_domain(self):
+        super(TestStagedOptimizer, self).test_set_domain()
+        for opt in self.optimizer.optimizers:
+            self.assertEqual(opt.domain, GPflowOpt.domain.UnitCube(3))
 
 
 class TestBayesianOptimizer(_TestOptimizer, unittest.TestCase):
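One detail worth noting in the test changes above: KeyboardRaiser now counts evaluated rows instead of calls. The counter advances by X.shape[0] only after a successful batch evaluation, so the interrupt fires at an exact evaluation budget, which is what makes the nfev assertions (0, 5, 12) in test_optimizer_interrupt deterministic. A standalone sketch of that counting behaviour, using a stand-in objective:

    import numpy as np

    class KeyboardRaiser(object):
        """Raises KeyboardInterrupt once a given evaluation budget is spent."""
        def __init__(self, iters_to_raise, f):
            self.iters_to_raise, self.f = iters_to_raise, f
            self.count = 0

        def __call__(self, X):
            if self.count >= self.iters_to_raise:
                raise KeyboardInterrupt
            val = self.f(X)
            self.count += X.shape[0]  # one evaluation per row of the batch
            return val

    f = KeyboardRaiser(5, lambda X: np.sum(np.square(X), axis=1, keepdims=True))
    f(np.zeros((5, 2)))      # first batch of 5 rows is evaluated; budget now spent
    try:
        f(np.zeros((5, 2)))  # second batch raises before evaluating anything
    except KeyboardInterrupt:
        print("interrupted after 5 evaluations")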
