Skip to content

Commit da31f0e

Browse files
saitcakmak authored and facebook-github-bot committed
Deprecate OptimizationConfig.__init__(objective=), remove setter & clone_with_args(objective=)
Summary:

Goal: We added `objectives` input; we're now migrating all usage to `objectives` and eliminating the `objective` input. This will make it easier to eliminate `MultiObjective` and `MultiObjectiveOptimizationConfig`.

Phase 1 of the OptimizationConfig simplification migration:
- Add DeprecationWarning when passing objective= to OptimizationConfig.__init__
- Remove the objective setter on OptimizationConfig (MOOC keeps its own)
- Remove objective= param from OptimizationConfig.clone_with_args (MOOC/PreferenceOC keep theirs)
- Migrate all callers of the removed setter and clone_with_args(objective=)

Differential Revision: D99491494
1 parent 9e384e0 commit da31f0e

9 files changed

Lines changed: 85 additions & 86 deletions

File tree

ax/adapter/tests/test_torch_moo_adapter.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -333,7 +333,6 @@ def test_hypervolume(self, _, cuda: bool = False) -> None:
333333
)
334334
for trial in exp.trials.values():
335335
trial.mark_running(no_runner_required=True).mark_completed()
336-
# pyre-fixme[16]: Optional type has no attribute `metrics`.
337336
metrics_dict = exp.metrics
338337
# Objective thresholds and synthetic observations chosen to have closed-form
339338
# hypervolumes to test.
@@ -464,9 +463,15 @@ def test_infer_objective_thresholds(self, _, cuda: bool = False) -> None:
464463
first = sub_exprs[0]
465464
if not first.startswith("-"):
466465
sub_exprs[0] = f"-{first}"
467-
oc.objective = Objective(
468-
expression=", ".join(sub_exprs),
469-
metric_name_to_signature={s.lstrip("-"): s.lstrip("-") for s in sub_exprs},
466+
oc = oc.clone_with_args(
467+
objectives=[
468+
Objective(
469+
expression=", ".join(sub_exprs),
470+
metric_name_to_signature={
471+
s.lstrip("-"): s.lstrip("-") for s in sub_exprs
472+
},
473+
)
474+
]
470475
)
471476

472477
for use_partial_thresholds in (False, True):

ax/adapter/transforms/relativize.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -180,7 +180,8 @@ def transform_optimization_config(
180180
)
181181
else:
182182
new_optimization_config = optimization_config.clone_with_args(
183-
objective=optimization_config.objective, outcome_constraints=constraints
183+
objectives=[optimization_config.objective],
184+
outcome_constraints=constraints,
184185
)
185186

186187
return new_optimization_config

ax/adapter/transforms/standardize_y.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -133,10 +133,12 @@ def transform_optimization_config(
133133
(name, new_w)
134134
for (name, _), new_w in zip(objective.metric_weights, new_weights)
135135
]
136-
optimization_config.objective = _build_objective_from_metric_weights(
137-
new_metric_weights,
138-
metric_name_to_signature=objective.metric_name_to_signature,
139-
)
136+
optimization_config._objectives = [
137+
_build_objective_from_metric_weights(
138+
new_metric_weights,
139+
metric_name_to_signature=objective.metric_name_to_signature,
140+
)
141+
]
140142

141143
new_constraints = self._transform_constraints(
142144
optimization_config.outcome_constraints, adapter

ax/adapter/transforms/stratified_standardize_y.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -196,10 +196,12 @@ def transform_optimization_config(
196196
(name, new_w)
197197
for (name, _), new_w in zip(objective.metric_weights, new_weights)
198198
]
199-
optimization_config.objective = _build_objective_from_metric_weights(
200-
new_metric_weights,
201-
metric_name_to_signature=objective.metric_name_to_signature,
202-
)
199+
optimization_config._objectives = [
200+
_build_objective_from_metric_weights(
201+
new_metric_weights,
202+
metric_name_to_signature=objective.metric_name_to_signature,
203+
)
204+
]
203205

204206
optimization_config.outcome_constraints = self._transform_constraints(
205207
optimization_config.outcome_constraints, strata, adapter

ax/core/optimization_config.py

Lines changed: 19 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
from __future__ import annotations
1010

11+
import warnings
1112
from collections.abc import Mapping
1213
from itertools import groupby
1314
from typing import Self
@@ -80,11 +81,17 @@ def __init__(
8081
consideration, and if not, the parameter value will be replaced with
8182
the corresponding value in the target arm.
8283
"""
84+
if objective is not None:
85+
warnings.warn(
86+
"Passing `objective` to OptimizationConfig is deprecated. "
87+
"Use `objectives=[objective]` instead.",
88+
DeprecationWarning,
89+
stacklevel=2,
90+
)
8391
if objective is not None and objectives is not None:
8492
raise UserInputError(
8593
"Cannot specify both `objective` and `objectives`. "
86-
"Use `objective` for single-objective optimization or "
87-
"`objectives` for multi-objective optimization."
94+
"Use `objectives=[objective]` instead."
8895
)
8996
if objective is None and objectives is None:
9097
raise UserInputError("Must specify either `objective` or `objectives`.")
@@ -113,7 +120,6 @@ def clone(self) -> Self:
113120
def clone_with_args(
114121
self,
115122
*,
116-
objective: Objective | None = None,
117123
objectives: list[Objective] | None = None,
118124
outcome_constraints: None | (list[OutcomeConstraint]) = _NO_OUTCOME_CONSTRAINTS,
119125
pruning_target_parameterization: Arm
@@ -122,21 +128,12 @@ def clone_with_args(
122128
"""Make a copy of this optimization config.
123129
124130
Args:
125-
objective: Replace with a single objective. Mutually exclusive
126-
with ``objectives``.
127-
objectives: Replace with a list of objectives. Mutually exclusive
128-
with ``objective``.
131+
objectives: Replace with a list of objectives.
129132
outcome_constraints: Replace outcome constraints. Pass ``None``
130133
to clear them.
131134
pruning_target_parameterization: Replace the pruning target.
132135
"""
133-
if objective is not None and objectives is not None:
134-
raise UserInputError(
135-
"Cannot specify both `objective` and `objectives` in clone_with_args."
136-
)
137-
if objective is not None:
138-
cloned_objectives = [objective]
139-
elif objectives is not None:
136+
if objectives is not None:
140137
cloned_objectives = objectives
141138
else:
142139
cloned_objectives = [obj.clone() for obj in self._objectives]
@@ -182,15 +179,6 @@ def objective(self) -> Objective:
182179
)
183180
return self._objectives[0]
184181

185-
@objective.setter
186-
def objective(self, objective: Objective) -> None:
187-
"""Set objective. Only valid for single-objective configs."""
188-
self._validate_transformed_optimization_config(
189-
objectives=[objective],
190-
outcome_constraints=self.outcome_constraints,
191-
)
192-
self._objectives = [objective]
193-
194182
@property
195183
def all_constraints(self) -> list[OutcomeConstraint]:
196184
"""Get outcome constraints."""
@@ -449,7 +437,6 @@ def __init__(
449437
def clone_with_args(
450438
self,
451439
*,
452-
objective: Objective | None = None,
453440
objectives: list[Objective] | None = None,
454441
outcome_constraints: None | (list[OutcomeConstraint]) = _NO_OUTCOME_CONSTRAINTS,
455442
objective_thresholds: None
@@ -458,13 +445,7 @@ def clone_with_args(
458445
| None = _NO_PRUNING_TARGET_PARAMETERIZATION,
459446
) -> "MultiObjectiveOptimizationConfig":
460447
"""Make a copy of this optimization config."""
461-
if objective is not None and objectives is not None:
462-
raise UserInputError(
463-
"Cannot specify both `objective` and `objectives` in clone_with_args."
464-
)
465-
if objective is not None:
466-
cloned_objectives = [objective]
467-
elif objectives is not None:
448+
if objectives is not None:
468449
cloned_objectives = objectives
469450
else:
470451
cloned_objectives = [obj.clone() for obj in self._objectives]
@@ -702,15 +683,19 @@ def is_bope_problem(self) -> bool:
702683
def clone_with_args(
703684
self,
704685
*,
705-
objective: Objective | None = None,
686+
objectives: list[Objective] | None = None,
706687
preference_profile_name: str | None = None,
707688
outcome_constraints: list[OutcomeConstraint] | None = _NO_OUTCOME_CONSTRAINTS,
708689
expect_relativized_outcomes: bool | None = None,
709690
pruning_target_parameterization: Arm
710691
| None = _NO_PRUNING_TARGET_PARAMETERIZATION,
711692
) -> PreferenceOptimizationConfig:
712693
"""Make a copy of this optimization config."""
713-
objective = self._objectives[0].clone() if objective is None else objective
694+
cloned_objectives = (
695+
[obj.clone() for obj in self._objectives]
696+
if objectives is None
697+
else objectives
698+
)
714699

715700
preference_profile_name = (
716701
self.preference_profile_name
@@ -734,7 +719,7 @@ def clone_with_args(
734719
)
735720

736721
return PreferenceOptimizationConfig(
737-
objective=objective,
722+
objectives=cloned_objectives,
738723
preference_profile_name=preference_profile_name,
739724
outcome_constraints=outcome_constraints,
740725
expect_relativized_outcomes=expect_relativized_outcomes,

ax/core/tests/test_optimization_config.py

Lines changed: 2 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -84,8 +84,6 @@ def test_Init(self) -> None:
8484
objective=self.objective, outcome_constraints=self.outcome_constraints
8585
)
8686
self.assertEqual(str(config1), OC_STR)
87-
with self.assertRaises(ValueError):
88-
config1.objective = self.alt_objective # constrained Objective.
8987
# updating constraints is fine.
9088
config1.outcome_constraints = [self.outcome_constraint]
9189
self.assertEqual(len(config1.metric_names), 2)
@@ -94,10 +92,6 @@ def test_Init(self) -> None:
9492
config2 = OptimizationConfig(objective=self.objective)
9593
self.assertEqual(config2.outcome_constraints, [])
9694

97-
# setting objective is fine too, if it's compatible with constraints..
98-
config2.objective = self.m2_objective
99-
# setting constraints on objectives is fine for MultiObjective components.
100-
10195
config2.outcome_constraints = self.outcome_constraints
10296
self.assertEqual(config2.outcome_constraints, self.outcome_constraints)
10397

@@ -355,8 +349,8 @@ def test_objectives_kwarg_clone_and_repr(self) -> None:
355349
self.assertEqual(cloned.objectives[1].expression, "-m2")
356350
self.assertTrue(cloned.is_moo_problem)
357351

358-
# clone_with_args(objective=) replaces the list with a single objective
359-
cloned = config.clone_with_args(objective=self.obj1)
352+
# clone_with_args(objectives=) replaces the list with a single objective
353+
cloned = config.clone_with_args(objectives=[self.obj1])
360354
self.assertEqual(len(cloned.objectives), 1)
361355
self.assertFalse(cloned.is_moo_problem)
362356

@@ -366,10 +360,6 @@ def test_objectives_kwarg_clone_and_repr(self) -> None:
366360
self.assertEqual(len(cloned.objectives), 2)
367361
self.assertEqual(cloned.objectives[1].expression, "m3")
368362

369-
# objective= and objectives= are mutually exclusive in clone_with_args
370-
with self.assertRaisesRegex(UserInputError, "Cannot specify both"):
371-
config.clone_with_args(objective=self.obj1, objectives=[self.obj1])
372-
373363
# repr always uses "objectives="
374364
self.assertIn("objectives=", repr(config))
375365
single_config = OptimizationConfig(objectives=[self.obj1])

ax/service/tests/test_best_point.py

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -60,12 +60,14 @@ def test_get_trace(self) -> None:
6060
self.assertEqual(get_trace(exp), [11, 10, 9, 9, 5])
6161

6262
# Same experiment with maximize via new optimization config.
63-
opt_conf = none_throws(exp.optimization_config).clone()
64-
opt_conf.objective = Objective(
65-
expression=opt_conf.objective.metric_names[0],
66-
metric_name_to_signature={
67-
opt_conf.objective.metric_names[0]: opt_conf.objective.metric_names[0]
68-
},
63+
metric_name = none_throws(exp.optimization_config).objective.metric_names[0]
64+
opt_conf = none_throws(exp.optimization_config).clone_with_args(
65+
objectives=[
66+
Objective(
67+
expression=metric_name,
68+
metric_name_to_signature={metric_name: metric_name},
69+
)
70+
],
6971
)
7072
self.assertEqual(get_trace(exp, opt_conf), [11, 11, 11, 15, 15])
7173

@@ -441,12 +443,14 @@ def test_get_best_observed_value(self) -> None:
441443
)
442444
self.assertEqual(get_best(exp), 5)
443445
# Same experiment with maximize via new optimization config.
444-
opt_conf = none_throws(exp.optimization_config).clone()
445-
opt_conf.objective = Objective(
446-
expression=opt_conf.objective.metric_names[0],
447-
metric_name_to_signature={
448-
opt_conf.objective.metric_names[0]: opt_conf.objective.metric_names[0]
449-
},
446+
metric_name = none_throws(exp.optimization_config).objective.metric_names[0]
447+
opt_conf = none_throws(exp.optimization_config).clone_with_args(
448+
objectives=[
449+
Objective(
450+
expression=metric_name,
451+
metric_name_to_signature={metric_name: metric_name},
452+
)
453+
],
450454
)
451455
self.assertEqual(get_best(exp, opt_conf), 15)
452456

ax/service/tests/test_best_point_utils.py

Lines changed: 20 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -615,7 +615,9 @@ def test_best_raw_objective_point_scalarized(self) -> None:
615615
exp = get_branin_experiment()
616616
gs = choose_generation_strategy_legacy(search_space=exp.search_space)
617617
exp.optimization_config = OptimizationConfig(
618-
objective=ScalarizedObjective(metrics=[get_branin_metric()], minimize=True)
618+
objectives=[
619+
ScalarizedObjective(metrics=[get_branin_metric()], minimize=True)
620+
],
619621
)
620622
with self.assertRaisesRegex(ValueError, "Cannot identify best "):
621623
get_best_raw_objective_point_with_trial_index(exp)
@@ -637,11 +639,16 @@ def test_best_raw_objective_point_scalarized_multi(self) -> None:
637639
exp = get_branin_experiment()
638640
gs = choose_generation_strategy_legacy(search_space=exp.search_space)
639641
exp.optimization_config = OptimizationConfig(
640-
objective=ScalarizedObjective(
641-
metrics=[get_branin_metric(), get_branin_metric(lower_is_better=False)],
642-
weights=[0.1, -0.9],
643-
minimize=True,
644-
)
642+
objectives=[
643+
ScalarizedObjective(
644+
metrics=[
645+
get_branin_metric(),
646+
get_branin_metric(lower_is_better=False),
647+
],
648+
weights=[0.1, -0.9],
649+
minimize=True,
650+
)
651+
],
645652
)
646653
with self.assertRaisesRegex(ValueError, "Cannot identify best "):
647654
get_best_raw_objective_point_with_trial_index(experiment=exp)
@@ -1037,11 +1044,13 @@ def test_best_parameters_from_model_predictions_scalarized(self) -> None:
10371044
)
10381045
exp.add_tracking_metric(metric2)
10391046
exp.optimization_config = OptimizationConfig(
1040-
objective=ScalarizedObjective(
1041-
metrics=[metric1, metric2],
1042-
weights=[0.5, 0.5],
1043-
minimize=True,
1044-
)
1047+
objectives=[
1048+
ScalarizedObjective(
1049+
metrics=[metric1, metric2],
1050+
weights=[0.5, 0.5],
1051+
minimize=True,
1052+
)
1053+
],
10451054
)
10461055

10471056
# Run trials and generate data

ax/storage/sqa_store/tests/test_sqa_store.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1270,9 +1270,10 @@ def test_experiment_objective_updates(self) -> None:
12701270

12711271
# update objective
12721272
# (should perform update in place)
1273-
optimization_config = get_optimization_config()
12741273
objective = get_objective(minimize=True)
1275-
optimization_config.objective = objective
1274+
optimization_config = get_optimization_config().clone_with_args(
1275+
objectives=[objective]
1276+
)
12761277
experiment.optimization_config = optimization_config
12771278
save_experiment(experiment)
12781279
self.assertEqual(
@@ -1282,8 +1283,8 @@ def test_experiment_objective_updates(self) -> None:
12821283
# replace objective
12831284
# (old one should become tracking metric)
12841285
experiment.add_tracking_metric(Metric(name="objective"))
1285-
optimization_config.objective = Objective(
1286-
metric=Metric(name="objective"), minimize=False
1286+
optimization_config = optimization_config.clone_with_args(
1287+
objectives=[Objective(metric=Metric(name="objective"), minimize=False)]
12871288
)
12881289
experiment.optimization_config = optimization_config
12891290
save_experiment(experiment)

0 commit comments

Comments (0)