ax/adapter/tests/test_base_adapter.py (14 changes: 8 additions & 6 deletions)

@@ -214,7 +214,7 @@ def test_fit_tracking_metrics(self) -> None:
             fit_tracking_metrics=False,
         )
         new_oc = OptimizationConfig(
-            objective=Objective(metric=Metric(name="test_metric2"), minimize=False),
+            objectives=[Objective(metric=Metric(name="test_metric2"), minimize=False)],
         )
         with self.assertRaisesRegex(UnsupportedError, "fit_tracking_metrics"):
             adapter.gen(n=1, optimization_config=new_oc)
@@ -301,7 +301,7 @@ def test_gen_base(self, mock_fit: Mock, mock_gen_arms: Mock) -> None:
 
         # Gen with a different optimization config.
         oc2 = OptimizationConfig(
-            objective=Objective(metric=Metric(name="branin"), minimize=True)
+            objectives=[Objective(metric=Metric(name="branin"), minimize=True)]
         )
         with mock.patch(ADAPTER__GEN_PATH, return_value=mock_return_value) as mock_gen:
             adapter.gen(n=1, search_space=search_space, optimization_config=oc2)
@@ -1369,10 +1369,12 @@ def test_untransform_observation_features_derived_parameter_with_digits(
             is_test=True,
             tracking_metrics=[metric],
             optimization_config=OptimizationConfig(
-                objective=Objective(
-                    metric=metric,
-                    minimize=True,
-                )
+                objectives=[
+                    Objective(
+                        metric=metric,
+                        minimize=True,
+                    )
+                ]
             ),
             runner=SyntheticRunner(),
         )
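Judging from the hunks above (and repeated in every file below), this PR migrates OptimizationConfig from a singular objective= constructor argument to a list-valued objectives=, even in the single-objective case. A minimal before/after sketch; the metric name "m1" and the import paths are assumptions based on Ax's usual core layout, not taken from this diff:

from ax.core.metric import Metric
from ax.core.objective import Objective
from ax.core.optimization_config import OptimizationConfig

# Pre-PR style: a single Objective passed via the `objective=` keyword.
# oc = OptimizationConfig(
#     objective=Objective(metric=Metric(name="m1"), minimize=True),
# )

# Post-PR style: a one-element list passed via `objectives=`.
oc = OptimizationConfig(
    objectives=[Objective(metric=Metric(name="m1"), minimize=True)],
)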
ax/adapter/tests/test_cross_validation.py (4 changes: 2 additions & 2 deletions)

@@ -475,7 +475,7 @@ def test_has_good_opt_config_model_fit(self) -> None:
 
         # Test single objective
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=True)
+            objectives=[Objective(metric=Metric("m1"), minimize=True)]
         )
         has_good_fit = has_good_opt_config_model_fit(
             optimization_config=optimization_config,
@@ -500,7 +500,7 @@ def test_has_good_opt_config_model_fit(self) -> None:
 
         # Test constraints
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=False),
+            objectives=[Objective(metric=Metric("m1"), minimize=False)],
             outcome_constraints=[
                 OutcomeConstraint(metric=Metric("m2"), op=ComparisonOp.GEQ, bound=0.1)
             ],
ax/adapter/tests/test_discrete_adapter.py (4 changes: 2 additions & 2 deletions)

@@ -143,7 +143,7 @@ def test_predict(self) -> None:
     def test_gen(self) -> None:
         # Test with constraints
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=True),
+            objectives=[Objective(metric=Metric("m1"), minimize=True)],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m2"), op=ComparisonOp.GEQ, bound=2, relative=False
@@ -231,7 +231,7 @@ def test_gen(self) -> None:
 
         # Test validation
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=False),
+            objectives=[Objective(metric=Metric("m1"), minimize=False)],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m2"), op=ComparisonOp.GEQ, bound=2, relative=True
ax/adapter/tests/test_hierarchical_search_space.py (10 changes: 6 additions & 4 deletions)

@@ -142,10 +142,12 @@ def _test_gen_base(
             search_space=hss,
             tracking_metrics=[metric],
             optimization_config=OptimizationConfig(
-                objective=Objective(
-                    metric=metric,
-                    minimize=True,
-                )
+                objectives=[
+                    Objective(
+                        metric=metric,
+                        minimize=True,
+                    )
+                ]
             ),
             runner=SyntheticRunner(),
         )
ax/adapter/tests/test_torch_adapter.py (20 changes: 11 additions & 9 deletions)

@@ -95,7 +95,7 @@ def test_TorchAdapter(self, device: torch.device | None = None) -> None:
             min=0.0, max=5.0, parameter_names=feature_names
         )
         opt_config = OptimizationConfig(
-            objective=Objective(metric=Metric("y1"), minimize=True),
+            objectives=[Objective(metric=Metric("y1"), minimize=True)],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("y2"), op=ComparisonOp.GEQ, bound=0.0, relative=False
@@ -212,7 +212,7 @@ def test_TorchAdapter(self, device: torch.device | None = None) -> None:
         )
         best_point_return_value = torch.tensor([1.0, 2.0, 3.0], **tkwargs)
         opt_config = OptimizationConfig(
-            objective=Objective(metric=Metric("y1"), minimize=False),
+            objectives=[Objective(metric=Metric("y1"), minimize=False)],
         )
         pending_observations = {
             "y2": [ObservationFeatures(parameters={"x1": 1.0, "x2": 2.0, "x3": 3.0})]
@@ -406,7 +406,7 @@ def test_evaluate_acquisition_function(self) -> None:
     def test_best_point(self) -> None:
         search_space = get_search_space_for_range_value()
         oc = OptimizationConfig(
-            objective=Objective(metric=Metric("a"), minimize=False),
+            objectives=[Objective(metric=Metric("a"), minimize=False)],
             outcome_constraints=[],
         )
         exp = Experiment(search_space=search_space, optimization_config=oc, name="test")
@@ -469,7 +469,7 @@ def test_best_point(self) -> None:
         adapter.gen(
             n=1,
             optimization_config=OptimizationConfig(
-                objective=Objective(metric=Metric("a"), minimize=False),
+                objectives=[Objective(metric=Metric("a"), minimize=False)],
                 outcome_constraints=[
                     ScalarizedOutcomeConstraint(
                         metrics=[Metric("wrong_metric_name")],
@@ -738,7 +738,7 @@ def test_convert_contextual_observations(self) -> None:
         )
         # Make an optimization config that includes all metrics.
         opt_config = OptimizationConfig(
-            objective=Objective(metric=Metric("y"), minimize=True),
+            objectives=[Objective(metric=Metric("y"), minimize=True)],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric(f"y:c{i}"), op=ComparisonOp.GEQ, bound=0
@@ -1205,10 +1205,12 @@ def test_pairwise_preference_generator(self) -> None:
                 surrogate=surrogate,
             ),
             optimization_config=OptimizationConfig(
-                Objective(
-                    metric=Metric(Keys.PAIRWISE_PREFERENCE_QUERY.value),
-                    minimize=False,
-                )
+                objectives=[
+                    Objective(
+                        metric=Metric(Keys.PAIRWISE_PREFERENCE_QUERY.value),
+                        minimize=False,
+                    )
+                ]
             ),
             fit_tracking_metrics=False,
         )
ax/adapter/tests/test_torch_moo_adapter.py (13 changes: 9 additions & 4 deletions)

@@ -333,7 +333,6 @@ def test_hypervolume(self, _, cuda: bool = False) -> None:
         )
         for trial in exp.trials.values():
             trial.mark_running(no_runner_required=True).mark_completed()
-        # pyre-fixme[16]: Optional type has no attribute `metrics`.
         metrics_dict = exp.metrics
         # Objective thresholds and synthetic observations chosen to have closed-form
         # hypervolumes to test.
@@ -464,9 +463,15 @@ def test_infer_objective_thresholds(self, _, cuda: bool = False) -> None:
            first = sub_exprs[0]
            if not first.startswith("-"):
                sub_exprs[0] = f"-{first}"
-           oc.objective = Objective(
-               expression=", ".join(sub_exprs),
-               metric_name_to_signature={s.lstrip("-"): s.lstrip("-") for s in sub_exprs},
+           oc = oc.clone_with_args(
+               objectives=[
+                   Objective(
+                       expression=", ".join(sub_exprs),
+                       metric_name_to_signature={
+                           s.lstrip("-"): s.lstrip("-") for s in sub_exprs
+                       },
+                   )
+               ]
            )
 
        for use_partial_thresholds in (False, True):
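Note that the @@ -464 hunk changes more than a keyword: the old test mutated oc.objective in place, while the new one rebuilds the config through clone_with_args, which evidently also accepts the plural objectives argument (see relativize.py below). A sketch of the copy-style update, under the same assumed imports and metric name as the sketch above:

# Pre-PR style mutated the config directly:
#     oc.objective = Objective(...)
# Post-PR style derives a fresh config, overriding only the fields that
# change; every other field is carried over by the clone.
oc = oc.clone_with_args(
    objectives=[Objective(metric=Metric(name="m1"), minimize=True)],
)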
ax/adapter/transforms/relativize.py (7 changes: 4 additions & 3 deletions)

@@ -157,7 +157,7 @@ def transform_optimization_config(
                     "Expected multi-objective, got single-objective"
                 )
             new_optimization_config = optimization_config.clone_with_args(
-                objective=objective,
+                objectives=[objective],
                 outcome_constraints=constraints,
             )
         elif isinstance(optimization_config, MultiObjectiveOptimizationConfig):
@@ -174,13 +174,14 @@
             )
 
             new_optimization_config = optimization_config.clone_with_args(
-                objective=optimization_config.objective,
+                objectives=[optimization_config.objective],
                 outcome_constraints=constraints,
                 objective_thresholds=obj_thresholds,
             )
         else:
             new_optimization_config = optimization_config.clone_with_args(
-                objective=optimization_config.objective, outcome_constraints=constraints
+                objectives=[optimization_config.objective],
+                outcome_constraints=constraints,
             )
 
         return new_optimization_config
ax/adapter/transforms/standardize_y.py (10 changes: 6 additions & 4 deletions)

@@ -133,10 +133,12 @@ def transform_optimization_config(
                 (name, new_w)
                 for (name, _), new_w in zip(objective.metric_weights, new_weights)
             ]
-            optimization_config.objective = _build_objective_from_metric_weights(
-                new_metric_weights,
-                metric_name_to_signature=objective.metric_name_to_signature,
-            )
+            optimization_config._objectives = [
+                _build_objective_from_metric_weights(
+                    new_metric_weights,
+                    metric_name_to_signature=objective.metric_name_to_signature,
+                )
+            ]
 
         new_constraints = self._transform_constraints(
             optimization_config.outcome_constraints, adapter
ax/adapter/transforms/stratified_standardize_y.py (10 changes: 6 additions & 4 deletions)

@@ -196,10 +196,12 @@ def transform_optimization_config(
                 (name, new_w)
                 for (name, _), new_w in zip(objective.metric_weights, new_weights)
             ]
-            optimization_config.objective = _build_objective_from_metric_weights(
-                new_metric_weights,
-                metric_name_to_signature=objective.metric_name_to_signature,
-            )
+            optimization_config._objectives = [
+                _build_objective_from_metric_weights(
+                    new_metric_weights,
+                    metric_name_to_signature=objective.metric_name_to_signature,
+                )
+            ]
 
         optimization_config.outcome_constraints = self._transform_constraints(
             optimization_config.outcome_constraints, strata, adapter
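Unlike relativize.py, both standardization transforms mutate the config in place, now writing to the private _objectives attribute. Presumably the public objective setter is gone and _objectives is the list-valued backing field of the new API; a sketch of the apparent pattern, reusing the module's own helper from the hunks above:

# Assumption: `_objectives` backs a read-only, list-valued `objectives`
# property, so an in-place transform replaces the whole list at once.
optimization_config._objectives = [
    _build_objective_from_metric_weights(
        new_metric_weights,
        metric_name_to_signature=objective.metric_name_to_signature,
    )
]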
ax/adapter/transforms/tests/test_base_transform.py (6 changes: 3 additions & 3 deletions)

@@ -116,7 +116,7 @@ def test_transform_optimization_config_with_pruning_target_parameterization(
         # modifies parameters
         pruning_target_parameterization = Arm(parameters={"x1": 2.5, "x2": 7.5})
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=False),
+            objectives=[Objective(metric=Metric("m1"), minimize=False)],
             pruning_target_parameterization=pruning_target_parameterization,
         )
 
@@ -142,7 +142,7 @@ def test_transform_optimization_config_without_pruning_target_parameterization(
     ) -> None:
         # Setup: create optimization config without target arm
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=False),
+            objectives=[Objective(metric=Metric("m1"), minimize=False)],
             pruning_target_parameterization=None,
         )
 
@@ -169,7 +169,7 @@ def test_transform_optimization_config_preserves_other_fields(self) -> None:
             )
         ]
         optimization_config = OptimizationConfig(
-            objective=Objective(metric=Metric("m1"), minimize=True),
+            objectives=[Objective(metric=Metric("m1"), minimize=True)],
             outcome_constraints=outcome_constraints,
             pruning_target_parameterization=pruning_target_parameterization,
         )
ax/adapter/transforms/tests/test_derelativize_transform.py (12 changes: 6 additions & 6 deletions)

@@ -106,7 +106,7 @@ def _test_DerelativizeTransform(
         # Test with no relative constraints
         objective = Objective(metric=Metric("c"), minimize=True)
         oc = OptimizationConfig(
-            objective=objective,
+            objectives=[objective],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m1"), op=ComparisonOp.LEQ, bound=2, relative=False
@@ -126,7 +126,7 @@ def _test_DerelativizeTransform(
         # Test with relative constraint, in-design status quo
         relative_bound = -10
         oc = OptimizationConfig(
-            objective=objective,
+            objectives=[objective],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m1"), op=ComparisonOp.LEQ, bound=2, relative=False
@@ -192,7 +192,7 @@ def _test_DerelativizeTransform(
         )
         g = Adapter(experiment=experiment_2, generator=Generator())
         oc = OptimizationConfig(
-            objective=objective,
+            objectives=[objective],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m1"), op=ComparisonOp.LEQ, bound=2, relative=False
@@ -250,7 +250,7 @@ def _test_DerelativizeTransform(
         )
         g = Adapter(experiment=experiment_3, generator=Generator())
         oc = OptimizationConfig(
-            objective=objective,
+            objectives=[objective],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m1"), op=ComparisonOp.LEQ, bound=2, relative=False
@@ -282,7 +282,7 @@ def _test_DerelativizeTransform(
 
         # Same for scalarized constraint only.
         oc_scalarized_only = OptimizationConfig(
-            objective=objective,
+            objectives=[objective],
             outcome_constraints=[
                 ScalarizedOutcomeConstraint(
                     metrics=[Metric("m1"), Metric("m2")],
@@ -319,7 +319,7 @@ def _test_DerelativizeTransform(
     def test_errors(self) -> None:
         t = Derelativize(search_space=None)
         oc = OptimizationConfig(
-            objective=Objective(metric=Metric("c"), minimize=False),
+            objectives=[Objective(metric=Metric("c"), minimize=False)],
             outcome_constraints=[
                 OutcomeConstraint(
                     metric=Metric("m1"), op=ComparisonOp.LEQ, bound=2, relative=True
ax/adapter/transforms/tests/test_log_y_transform.py (12 changes: 6 additions & 6 deletions)

@@ -138,14 +138,14 @@ def test_TransformOptimizationConfig(self) -> None:
         # basic test
         m1 = Metric(name="m1")
         objective_m1 = Objective(metric=m1, minimize=False)
-        oc = OptimizationConfig(objective=objective_m1, outcome_constraints=[])
+        oc = OptimizationConfig(objectives=[objective_m1], outcome_constraints=[])
         tf = LogY(search_space=None, config={"metrics": ["m1"]})
         oc_tf = tf.transform_optimization_config(deepcopy(oc), None, None)
         self.assertEqual(oc_tf, oc)
         # output constraint on a different metric should work
         m2 = Metric(name="m2")
         oc = OptimizationConfig(
-            objective=objective_m1,
+            objectives=[objective_m1],
             outcome_constraints=[
                 get_outcome_constraint(metric=m2, bound=-1, relative=False)
             ],
@@ -155,7 +155,7 @@ def test_TransformOptimizationConfig(self) -> None:
         # output constraint with a negative bound should fail
         objective_m2 = Objective(metric=m2, minimize=False)
         oc = OptimizationConfig(
-            objective=objective_m2,
+            objectives=[objective_m2],
             outcome_constraints=[
                 get_outcome_constraint(metric=m1, bound=-1.234, relative=False)
             ],
@@ -170,7 +170,7 @@ def test_TransformOptimizationConfig(self) -> None:
         )
         # output constraint with a zero bound should also fail
         oc = OptimizationConfig(
-            objective=objective_m2,
+            objectives=[objective_m2],
             outcome_constraints=[
                 get_outcome_constraint(metric=m1, bound=0, relative=False)
             ],
@@ -185,7 +185,7 @@ def test_TransformOptimizationConfig(self) -> None:
         )
         # output constraint with a positive bound should work
         oc = OptimizationConfig(
-            objective=objective_m2,
+            objectives=[objective_m2],
             outcome_constraints=[
                 get_outcome_constraint(metric=m1, bound=2.345, relative=False)
             ],
@@ -200,7 +200,7 @@ def test_TransformOptimizationConfig(self) -> None:
         self.assertEqual(oc_tf, oc)
         # output constraint with a relative bound should fail
         oc = OptimizationConfig(
-            objective=objective_m2,
+            objectives=[objective_m2],
             outcome_constraints=[
                 get_outcome_constraint(metric=m1, bound=2.345, relative=True)
             ],