Skip to content

Commit a159767

Browse files
committed
Fixed package updating in the Travis pip environment, made minor fixes in StagedOptimizer to avoid warnings with BayesianOptimizer, and solved instability of the constrained_bo notebook
1 parent 44b4473 commit a159767

6 files changed

Lines changed: 62 additions & 58 deletions

File tree

.travis.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@ cache: pip
99
install:
1010
- pip install -U pip wheel
1111
- pip install tensorflow==1.0.1
12-
- pip install --process-dependency-links .
13-
- pip install .[test]
14-
- pip install codecov
12+
- pip install -U --process-dependency-links .
13+
- pip install -U .[test]
14+
- pip install -U codecov
1515
script:
16-
- nosetests --nocapture testing
16+
- nosetests testing
1717
after_success:
1818
- codecov

GPflowOpt/optim.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,8 @@ def __init__(self, domain, nsamples):
142142
"""
143143
super(MCOptimizer, self).__init__(domain, exclude_gradient=True)
144144
self._nsamples = nsamples
145+
# Clear the initial data points
146+
self.set_initial(np.empty((0, self.domain.size)))
145147

146148
@Optimizer.domain.setter
147149
def domain(self, dom):
@@ -163,9 +165,11 @@ def _optimize(self, objective):
163165

164166
def set_initial(self, initial):
165167
initial = np.atleast_2d(initial)
166-
super(MCOptimizer, self).set_initial(initial)
167168
if initial.size > 0:
168169
warnings.warn("Initial points set in {0} are ignored.".format(self.__class__.__name__), UserWarning)
170+
return
171+
172+
super(MCOptimizer, self).set_initial(initial)
169173

170174

171175
class CandidateOptimizer(MCOptimizer):
@@ -183,8 +187,6 @@ def __init__(self, domain, candidates):
183187
super(CandidateOptimizer, self).__init__(domain, candidates.shape[0])
184188
assert (candidates in domain)
185189
self.candidates = candidates
186-
# Clear the initial data points
187-
self.set_initial(np.empty((0, self.domain.size)))
188190

189191
def _get_eval_points(self):
190192
return self.candidates
@@ -235,11 +237,11 @@ def __init__(self, optimizers):
235237
no_gradient = any(map(lambda opt: not opt.gradient_enabled(), optimizers))
236238
super(StagedOptimizer, self).__init__(optimizers[0].domain, exclude_gradient=no_gradient)
237239
self.optimizers = optimizers
238-
self.set_initial(np.empty((0, self.domain.size)))
240+
del self._initial
239241

240242
@Optimizer.domain.setter
241243
def domain(self, domain):
242-
super(StagedOptimizer, self.__class__).domain.fset(self, domain)
244+
self._domain = domain
243245
for optimizer in self.optimizers:
244246
optimizer.domain = domain
245247

@@ -254,7 +256,6 @@ def optimize(self, objectivefx):
254256
is returned.
255257
"""
256258

257-
self.optimizers[0].set_initial(self.get_initial())
258259
results = []
259260
for current, following in zip(self.optimizers[:-1], self.optimizers[1:]):
260261
result = current.optimize(objectivefx)
@@ -273,3 +274,9 @@ def optimize(self, objectivefx):
273274
if any(r.success for r in results):
274275
result.x, result.fun = self._best_x(results)
275276
return result
277+
278+
def get_initial(self):
279+
return self.optimizers[0].get_initial()
280+
281+
def set_initial(self, initial):
282+
self.optimizers[0].set_initial(initial)

doc/source/notebooks/constrained_bo.ipynb

Lines changed: 36 additions & 41 deletions
Large diffs are not rendered by default.

testing/test_acquisition.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
def parabola2d(X):
1010
return np.atleast_2d(np.sum(X ** 2, axis=1)).T
1111

12-
1312
def plane(X):
1413
return X[:, [0]] - 0.5
1514

@@ -125,7 +124,6 @@ def test_enable_scaling(self):
125124
self.assertFalse(
126125
any(m.wrapped.X.value in GPflowOpt.domain.UnitCube(self.domain.size) for m in self.acquisition.models))
127126
self.acquisition.enable_scaling(self.domain)
128-
print(self.acquisition.models[0].wrapped.X.value)
129127
self.assertTrue(
130128
all(m.wrapped.X.value in GPflowOpt.domain.UnitCube(self.domain.size) for m in self.acquisition.models))
131129

testing/test_datascaler.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,4 @@ def test_predict_scaling(self):
103103
Yt = parabola2d(Xt) #+ np.random.rand(20, 1) * 0.05
104104
fr = m.predict_density(Xt, Yt)
105105
fs = n.predict_density(Xt, Yt)
106-
print(fr)
107-
print(fs)
108106
np.testing.assert_allclose(fr, fs, rtol=1e-3)

testing/test_optimizers.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ def test_set_initial(self):
8181
# When run separately this test works, however when calling nose to run all tests on python 2.7 this records
8282
# no warnings
8383
with warnings.catch_warnings(record=True) as w:
84-
super(TestCandidateOptimizer, self).test_set_initial()
84+
self.optimizer.set_initial([1, 1])
8585
assert len(w) == 1
8686
assert issubclass(w[-1].category, UserWarning)
8787

@@ -150,7 +150,14 @@ def setUp(self):
150150
GPflowOpt.optim.SciPyOptimizer(self.domain, maxiter=10)])
151151

152152
def test_default_initial(self):
153-
self.assertTupleEqual(self.optimizer._initial.shape, (0,2))
153+
self.assertTupleEqual(self.optimizer.optimizers[0]._initial.shape, (0,2))
154+
155+
def test_set_initial(self):
156+
self.optimizer.set_initial([1, 1])
157+
self.assertTupleEqual(self.optimizer.optimizers[0]._initial.shape, (0, 2))
158+
self.assertTupleEqual(self.optimizer.optimizers[1]._initial.shape, (0, 2))
159+
self.assertTupleEqual(self.optimizer.optimizers[2]._initial.shape, (1, 2))
160+
self.assertTupleEqual(self.optimizer.get_initial().shape, (0, 2))
154161

155162
def test_object_integrity(self):
156163
self.assertEqual(len(self.optimizer.optimizers), 3, msg="Two optimizers expected in optimizerlist")
@@ -181,7 +188,6 @@ def test_optimizer_interrupt(self):
181188
self.assertEqual(result.nfev, 5)
182189

183190
result = self.optimizer.optimize(KeyboardRaiser(12, parabola2d))
184-
print(result)
185191
self.assertFalse(result.success, msg="non-succesfull result expected.")
186192
self.assertEqual(result.nfev, 12)
187193
self.assertFalse(np.allclose(result.x[0, :], 0.0), msg="The optimum should not be found yet")

0 commit comments

Comments (0)