@@ -142,6 +142,8 @@ def __init__(self, domain, nsamples):
         """
         super(MCOptimizer, self).__init__(domain, exclude_gradient=True)
         self._nsamples = nsamples
+        # Clear the initial data points
+        self.set_initial(np.empty((0, self.domain.size)))
 
     @Optimizer.domain.setter
     def domain(self, dom):
@@ -163,9 +165,11 @@ def _optimize(self, objective):
 
     def set_initial(self, initial):
         initial = np.atleast_2d(initial)
-        super(MCOptimizer, self).set_initial(initial)
         if initial.size > 0:
             warnings.warn("Initial points set in {0} are ignored.".format(self.__class__.__name__), UserWarning)
+            return
+
+        super(MCOptimizer, self).set_initial(initial)
 
 
 class CandidateOptimizer(MCOptimizer):
@@ -183,8 +187,6 @@ def __init__(self, domain, candidates):
         super(CandidateOptimizer, self).__init__(domain, candidates.shape[0])
         assert (candidates in domain)
         self.candidates = candidates
-        # Clear the initial data points
-        self.set_initial(np.empty((0, self.domain.size)))
 
     def _get_eval_points(self):
         return self.candidates
@@ -235,11 +237,11 @@ def __init__(self, optimizers):
         no_gradient = any(map(lambda opt: not opt.gradient_enabled(), optimizers))
         super(StagedOptimizer, self).__init__(optimizers[0].domain, exclude_gradient=no_gradient)
         self.optimizers = optimizers
-        self.set_initial(np.empty((0, self.domain.size)))
+        del self._initial
 
     @Optimizer.domain.setter
     def domain(self, domain):
-        super(StagedOptimizer, self.__class__).domain.fset(self, domain)
+        self._domain = domain
         for optimizer in self.optimizers:
             optimizer.domain = domain
 
@@ -254,7 +256,6 @@ def optimize(self, objectivefx):
         is returned.
         """
 
-        self.optimizers[0].set_initial(self.get_initial())
         results = []
         for current, following in zip(self.optimizers[:-1], self.optimizers[1:]):
             result = current.optimize(objectivefx)
@@ -273,3 +274,9 @@ def optimize(self, objectivefx):
         if any(r.success for r in results):
             result.x, result.fun = self._best_x(results)
         return result
+
+    def get_initial(self):
+        return self.optimizers[0].get_initial()
+
+    def set_initial(self, initial):
+        self.optimizers[0].set_initial(initial)
0 commit comments