2424import tensorflow as tf
2525
2626import copy
27+ from functools import wraps
2728
2829float_type = settings .dtypes .float_type
2930
3031
def setup_required(method):
    """
    Mark an :class:`Acquisition` method as requiring up-to-date models and setup.

    Before the wrapped method runs, the _needs_setup flag of the highest parent in
    the acquisition tree is checked; if it is set, all models are optimized and
    :meth:`_setup` is run first.

    :param method: acquisition method to wrap
    :return: the wrapped method
    """
    @wraps(method)
    def wrapper(instance, *args, **kwargs):
        assert isinstance(instance, Acquisition)
        root = instance.highest_parent
        if root._needs_setup:
            # Clear the flag before doing any work: setup() may itself invoke a
            # decorated method on another acquisition (e.g. through
            # feasible_data_index), which would otherwise recurse indefinitely.
            root._needs_setup = False

            # Step 1: re-optimize the model hyperparameters.
            root._optimize_models()

            # Step 2: recompute candidate-independent quantities.
            root._setup()
        return method(instance, *args, **kwargs)

    return wrapper
55+
56+
3157class Acquisition (Parameterized ):
3258 """
3359 An acquisition function maps the belief represented by a Bayesian model into a
3460 score indicating how promising a point is for evaluation.
3561
3662 In Bayesian Optimization this function is typically optimized over the optimization domain
37- to determine the next point for evaluation.
38-
39- An object of this class holds a list of GPflow models. Subclasses implement a build_acquisition function
40- which computes the acquisition function (usually from the predictive distribution) using TensorFlow.
41- Each model is automatically optimized when an acquisition object is constructed or when set_data is called.
42-
43- Acquisition functions can be combined through addition or multiplication to construct joint criteria.
44- For instance, for constrained optimization.
63+ to determine the next point for evaluation. An object of this class holds a list of GPflow models. Subclasses
64+ implement a build_acquisition function which computes the acquisition function (usually from the predictive
65+ distribution) using TensorFlow. Optionally, a method setup can be implemented which computes some quantities which
66+ are used to compute the acquisition, but do not depend on candidate points.
67+
68+ Acquisition functions can be combined through addition or multiplication to construct joint criteria. For instance,
69+ for constrained optimization. The objects then form a tree hierarchy.
70+
71+ Acquisition models implement a lazy strategy to optimize models and run setup. This is implemented by a _needs_setup
72+ attribute (similar to the _needs_recompile in GPflow). Calling :meth:`set_data` sets this flag to True. Calling methods
73+ marked with the setup_required decorator (such as evaluate) optimize all models, then call setup if this flag is set.
74+ In hierarchies, first acquisition objects handling constraint objectives are set up, then the objects handling
75+ objectives.
4576 """
4677
4778 def __init__ (self , models = [], optimize_restarts = 5 ):
@@ -56,14 +87,14 @@ def __init__(self, models=[], optimize_restarts=5):
5687
5788 assert (optimize_restarts >= 0 )
5889 self .optimize_restarts = optimize_restarts
59- self ._optimize_models ()
90+ self ._needs_setup = True
6091
6192 def _optimize_models (self ):
6293 """
6394 Optimizes the hyperparameters of all models that the acquisition function is based on.
6495
65- It is called automatically during initialization and each time set_data() is called.
66- When using the high-level :class:`..BayesianOptimizer` class calling set_data() is taken care of.
96+ It is called automatically during initialization and each time :meth:`set_data` is called.
97+ When using the high-level :class:`..BayesianOptimizer` class calling :meth:`set_data` is taken care of.
6798
6899 For each model the hyperparameters of the model at the time it was passed to __init__() are used as initial
69100 point and optimized. If optimize_restarts is set to >1, additional randomization
@@ -97,6 +128,9 @@ def enable_scaling(self, domain):
97128 """
98129 Enables and configures the :class:`.DataScaler` objects wrapping the GP models.
99130
131+ Sets the _needs_setup attribute to True so the contained models are optimized and :meth:`setup` is run again
132+ right before evaluating the :class:`Acquisition` function.
133+
100134 :param domain: :class:`.Domain` object, the input transform of the data scalers is configured as a transform
101135 from domain to the unit cube with the same dimensionality.
102136 """
@@ -105,12 +139,14 @@ def enable_scaling(self, domain):
105139 for m in self .models :
106140 m .input_transform = domain >> UnitCube (n_inputs )
107141 m .normalize_output = True
108- self ._optimize_models ()
142+ self .highest_parent . _needs_setup = True
109143
110144 def set_data (self , X , Y ):
111145 """
112- Update the training data of the contained models. Automatically triggers a hyperparameter optimization
113- step by calling _optimize_all() and an update of pre-computed quantities by calling setup().
146+ Update the training data of the contained models
147+
148+ Sets the _needs_setup attribute to True so the contained models are optimized and :meth:`setup` is run again
149+ right before evaluating the :class:`Acquisition` function.
114150
115151 Let Q be the the sum of the output dimensions of all contained models, Y should have a minimum of
116152 Q columns. Only the first Q columns of Y are used while returning the scalar Q
@@ -128,11 +164,7 @@ def set_data(self, X, Y):
128164 model .X = X
129165 model .Y = Ypart
130166
131- self ._optimize_models ()
132-
133- # Only call setup for the high-level acquisition function
134- if self .highest_parent == self :
135- self .setup ()
167+ self .highest_parent ._needs_setup = True
136168 return num_outputs_sum
137169
138170 @property
@@ -187,14 +219,31 @@ def feasible_data_index(self):
187219 """
188220 return np .ones (self .data [0 ].shape [0 ], dtype = bool )
189221
190- def setup (self ):
222+ def _setup (self ):
191223 """
192224 Pre-calculation of quantities used later in the evaluation of the acquisition function for candidate points.
193225
194- Automatically triggered by :meth:`~.Acquisition.set_data`.
226+ Subclasses can implement this method to compute quantities (such as fmin). The decision when to run this function
227+ is governed by :class:`Acquisition`, based on the setup_required decorator on methods which require
228+ setup to be run (e.g. set_data).
195229 """
196230 pass
197231
232+ def _setup_constraints (self ):
233+ """
234+ Run only if some outputs handled by this acquisition are constraints. Used in aggregation.
235+ """
236+ if self .constraint_indices ().size > 0 :
237+ self ._setup ()
238+
239+ def _setup_objectives (self ):
240+ """
241+ Run only if all outputs handled by this acquisition are objectives. Used in aggregation.
242+ """
243+ if self .constraint_indices ().size == 0 :
244+ self ._setup ()
245+
246+ @setup_required
198247 @AutoFlow ((float_type , [None , None ]))
199248 def evaluate_with_gradients (self , Xcand ):
200249 """
@@ -206,6 +255,7 @@ def evaluate_with_gradients(self, Xcand):
206255 acq = self .build_acquisition (Xcand )
207256 return acq , tf .gradients (acq , [Xcand ], name = "acquisition_gradient" )[0 ]
208257
258+ @setup_required
209259 @AutoFlow ((float_type , [None , None ]))
210260 def evaluate (self , Xcand ):
211261 """
@@ -241,6 +291,11 @@ def __mul__(self, other):
241291 return AcquisitionProduct ([self ] + other .operands .sorted_params )
242292 return AcquisitionProduct ([self , other ])
243293
294+ def __setattr__ (self , key , value ):
295+ super (Acquisition , self ).__setattr__ (key , value )
296+ if key is '_parent' :
297+ self .highest_parent ._needs_setup = True
298+
244299
245300class AcquisitionAggregation (Acquisition ):
246301 """
@@ -256,10 +311,10 @@ def __init__(self, operands, oper):
256311 assert (all ([isinstance (x , Acquisition ) for x in operands ]))
257312 self .operands = ParamList (operands )
258313 self ._oper = oper
259- self .setup ()
260314
261315 def _optimize_models (self ):
262- pass
316+ for oper in self .operands :
317+ oper ._optimize_models ()
263318
264319 @Acquisition .models .getter
265320 def models (self ):
@@ -273,13 +328,22 @@ def set_data(self, X, Y):
273328 offset = 0
274329 for operand in self .operands :
275330 offset += operand .set_data (X , Y [:, offset :])
276- if self .highest_parent == self :
277- self .setup ()
278331 return offset
279332
280- def setup (self ):
333+ def _setup_constraints (self ):
281334 for oper in self .operands :
282- oper .setup ()
335+ if oper .constraint_indices ().size > 0 : # Small optimization, skip subtrees with objectives only
336+ oper ._setup_constraints ()
337+
338+ def _setup_objectives (self ):
339+ for oper in self .operands :
340+ oper ._setup_objectives ()
341+
342+ def _setup (self ):
343+ # Important: First setup acquisitions involving constraints
344+ self ._setup_constraints ()
345+ # Then objectives as these might depend on the constraint acquisition
346+ self ._setup_objectives ()
283347
284348 def constraint_indices (self ):
285349 offset = [0 ]
@@ -304,7 +368,6 @@ class AcquisitionSum(AcquisitionAggregation):
304368 """
305369 Sum of acquisition functions
306370 """
307-
    def __init__(self, operands):
        """
        :param operands: list of acquisition objects whose scores are summed
        """
        super(AcquisitionSum, self).__init__(operands, tf.reduce_sum)
310373
@@ -319,7 +382,6 @@ class AcquisitionProduct(AcquisitionAggregation):
319382 """
320383 Product of acquisition functions
321384 """
322-
    def __init__(self, operands):
        """
        :param operands: list of acquisition objects whose scores are multiplied
        """
        super(AcquisitionProduct, self).__init__(operands, tf.reduce_prod)
325387
@@ -350,14 +412,16 @@ def __init__(self, acquisition, n_slices, **kwargs):
350412 # the call to the constructor of the parent classes, will optimize acquisition, so it obtains the MLE solution.
351413 super (MCMCAcquistion , self ).__init__ ([acquisition ] + copies )
352414 self ._sample_opt = kwargs
353- # Update the hyperparameters of the copies using HMC
354- self ._update_hyper_draws ()
355415
356- def _update_hyper_draws (self ):
416+ def _optimize_models (self ):
417+ # Optimize model #1
418+ self .operands [0 ]._optimize_models ()
419+
420+ # Draw samples using HMC
357421 # Sample each model of the acquisition function - results in a list of 2D ndarrays.
358422 hypers = np .hstack ([model .sample (len (self .operands ), ** self ._sample_opt ) for model in self .models ])
359423
360- # Now visit all copies, and set state
424+ # Now visit all acquisition copies, and set state
361425 for idx , draw in enumerate (self .operands ):
362426 draw .set_state (hypers [idx , :])
363427
@@ -371,9 +435,6 @@ def set_data(self, X, Y):
371435 # This triggers model.optimize() on self.operands[0]
372436 # All copies have optimization disabled, but must have update data.
373437 offset = operand .set_data (X , Y )
374- self ._update_hyper_draws ()
375- if self .highest_parent == self :
376- self .setup ()
377438 return offset
378439
379440 def build_acquisition (self , Xcand ):
0 commit comments