@@ -19,7 +19,7 @@ class StabilityReport(NamedTuple):
1919 unstable : bool
2020 candidate : np .ndarray
2121 tpd : float
22- stable_sample : bool
22+ early_exit : bool = False
2323
2424def edge_points_simplex_masked (z : np .ndarray ,
2525 points_per_edge : int = 12 ,
@@ -90,9 +90,10 @@ def edge_points_simplex_masked(z: np.ndarray,
9090 return np .array (pts )
9191
9292# Light weight
93- def lle_tangential_plane_analysis (gamma , z , T , P , sample = None ):
93+ def lle_tangential_plane_analysis (gamma , z , T , P , samples = None ):
9494 MW = np .array ([i .MW for i in gamma .chemicals ])
9595 logfz = np .log (z * gamma (z , T , P ) + 1e-30 )
96+ if samples is None : samples = ()
9697
9798 def objective (w , T , P , logfz , softmax = False ):
9899 if softmax :
@@ -101,29 +102,20 @@ def objective(w, T, P, logfz, softmax=False):
101102 return np .dot (w , np .log (w * gamma (w , T , P ) + 1e-30 ) - logfz )
102103
103104 args = (T , P , logfz )
104- if sample is None :
105- best_val = np .inf
106- best_result = None
107- stable_sample = False
108- else :
109- best_result = sample
110- best_val = objective (sample , * args )
111- stable_sample = best_val < 0 and np .abs (best_result - z ).sum () > 1e-9
112- if not stable_sample :
113- result = minimize (
114- objective ,
115- best_result ,
116- method = "L-BFGS-B" ,
117- options = dict (maxiter = 20 ),
118- args = (* args , True ),
105+ best_val = np .inf
106+ best_result = None
107+ for sample in samples :
108+ val = objective (sample , * args )
109+ if val < best_val :
110+ best_result = sample
111+ best_val = val
112+ if best_val < 0 and np .abs (best_result - z ).sum () > 1e-9 :
113+ return StabilityReport (
114+ unstable = True ,
115+            candidate = best_result ,
116+ tpd = best_val ,
117+ early_exit = True ,
119118 )
120- value = result .fun
121- if value < best_val :
122- w = result .x
123- w = np .exp (w - np .max (w )) # Softmax for unconstrained optimization
124- w /= w .sum ()
125- best_result = w
126- best_val = value
127119 w = z * MW
128120 w /= w .sum ()
129121 samples = edge_points_simplex_masked (w )
@@ -152,7 +144,6 @@ def objective(w, T, P, logfz, softmax=False):
152144 unstable = best_val < 0 and np .abs (best_result - z ).sum () > 1e-9 ,
153145 candidate = best_result ,
154146 tpd = best_val ,
155- stable_sample = stable_sample ,
156147 )
157148
158149class TangentPlaneStabilityAnalysis :
@@ -171,36 +162,28 @@ def objective(self, w, T, P, model, logfz, reduce, softmax=False):
171162 w /= w .sum ()
172163 return np .dot (w , np .log (model (w , T , P , reduce ) + 1e-30 ) - logfz )
173164
174- def __call__ (self , z , T , P , reference_phase = 'l' , potential_phase = 'L' , sample = None ):
165+ def __call__ (self , z , T , P , reference_phase = 'l' , potential_phase = 'L' , samples = None ):
175166 reference_model = self .fugacity_models [reference_phase ]
176167 same_phase = reference_phase .lower () == potential_phase .lower ()
177168 logfz = np .log (reference_model (z , T , P , same_phase ) + 1e-30 )
178169 objective = self .objective
179170 model = self .fugacity_models [potential_phase ]
180171 args = (T , P , model , logfz , same_phase )
181- if sample is None :
182- best_val = np .inf
183- best_result = None
184- stable_sample = False
185- else :
186- best_result = sample
187- best_val = objective (sample , * args )
188- stable_sample = best_val < 0 and np .abs (best_result - z ).sum () > 1e-9
189- if not stable_sample :
190- result = minimize (
191- objective ,
192- best_result ,
193- method = "L-BFGS-B" ,
194- options = dict (maxiter = 20 ),
195- args = (* args , True ),
172+ if samples is None : samples = ()
173+ best_val = np .inf
174+ best_result = None
175+ for sample in samples :
176+ val = objective (sample , * args )
177+ if val < best_val :
178+ best_result = sample
179+ best_val = val
180+ if best_val < 0 and np .abs (best_result - z ).sum () > 1e-9 :
181+ return StabilityReport (
182+ unstable = True ,
183+            candidate = best_result ,
184+ tpd = best_val ,
185+ early_exit = True ,
196186 )
197- value = result .fun
198- if value < best_val :
199- w = result .x
200- w = np .exp (w - np .max (w )) # Softmax for unconstrained optimization
201- w /= w .sum ()
202- best_result = w
203- best_val = value
204187 MW = self .MW
205188 w = z * MW
206189 w /= w .sum ()
@@ -230,7 +213,6 @@ def __call__(self, z, T, P, reference_phase='l', potential_phase='L', sample=Non
230213 unstable = best_val < 0 and np .abs (best_result - z ).sum () > 1e-9 ,
231214 candidate = best_result ,
232215 tpd = best_val ,
233- stable_sample = stable_sample ,
234216 )
235217
236218
0 commit comments