2828from ..agents .common_configs import CodeConfig
2929from ..evaluation .eval_metrics import EvalMetric
3030from .eval_metrics import BaseCriterion
31+ from .eval_metrics import MetricInfo
3132from .eval_metrics import Threshold
3233from .simulation .user_simulator import BaseUserSimulatorConfig
3334
3435logger = logging .getLogger ("google_adk." + __name__ )
3536
3637
class CustomMetricConfig(BaseModel):
  """Configuration for a custom metric.

  Bundles the code location of a user-supplied metric function
  (`code_config`) with optional metric metadata (`metric_info`) and a
  free-form description. Field aliases are generated in camelCase, and
  snake_case field names are accepted as well (populate_by_name=True).
  """

  model_config = ConfigDict(
      alias_generator=alias_generators.to_camel,
      populate_by_name=True,
  )

  # Locates the custom metric implementation. Its `name` should hold the
  # fully qualified path to the metric function; `args` must be empty
  # (enforced by the validator below).
  code_config: CodeConfig = Field(
      description=(
          "Code config for the custom metric, used to locate the custom metric"
          " function."
      )
  )
  # Optional metadata for the metric (e.g. value range). NOTE(review): when
  # omitted, consumers presumably build a default MetricInfo using the
  # `description` field below — confirm at the use site.
  metric_info: Optional[MetricInfo] = Field(
      default=None,
      description="Metric info for the custom metric.",
  )
  # Human-readable description, intended for a default MetricInfo when
  # `metric_info` is not provided.
  description: str = Field(
      default="",
      description="Description for the custom metric info.",
  )

  @model_validator(mode="after")
  def check_code_config_args(self) -> "CustomMetricConfig":
    """Checks that the code config does not have args.

    Raises:
      ValueError: If `code_config.args` is non-empty; pre-bound args are not
        supported for custom metric functions, so reject them at
        validation time rather than ignoring them later.
    """
    if self.code_config.args:
      raise ValueError(
          "args field in CodeConfig for custom metric is not supported."
      )
    return self
69+
70+
3771class EvalConfig (BaseModel ):
3872 """Configurations needed to run an Eval.
3973
@@ -74,24 +108,43 @@ class EvalConfig(BaseModel):
74108""" ,
75109 )
76110
77- custom_metrics : Optional [dict [str , CodeConfig ]] = Field (
111+ custom_metrics : Optional [dict [str , CustomMetricConfig ]] = Field (
78112 default = None ,
79- description = """A dictionary mapping custom metric names to CodeConfig
80- objects, which specify the path to the function for each custom metric .
113+ description = """A dictionary mapping custom metric names to
114+ a CustomMetricConfig object .
81115
82116If a metric name in `criteria` is also present in `custom_metrics`, the
83- corresponding `CodeConfig`'s `name` field will be used to locate the custom
84- metric implementation. The `name` field should contain the fully qualified
85- path to the custom metric function, e.g., `my.custom.metrics.metric_function`.
117+ `code_config` in `CustomMetricConfig` will be used to locate the custom metric
118+ implementation.
119+
120+ The `metric` field in `CustomMetricConfig` can be used to provide metric
121+ information like `min_value`, `max_value`, and `description`. If `metric`
122+ is not provided, a default `MetricInfo` will be created, using
123+ `description` from `CustomMetricConfig` if provided, and default values
124+ for `min_value` (0.0) and `max_value` (1.0).
86125
87126Example:
88127{
89128 "criteria": {
90- "my_custom_metric": 0.5
129+ "my_custom_metric": 0.5,
130+ "my_simple_metric": 0.8
91131 },
92132 "custom_metrics": {
133+ "my_simple_metric": {
134+ "code_config": {
135+ "name": "path.to.my.simple.metric.function"
136+ }
137+ },
93138 "my_custom_metric": {
94- "name": "path.to.my.custom.metric.function"
139+ "code_config": {
140+ "name": "path.to.my.custom.metric.function"
141+ },
142+ "metric": {
143+ "metric_name": "my_custom_metric",
144+ "min_value": -10.0,
145+ "max_value": 10.0,
146+ "description": "My custom metric."
147+ }
95148 }
96149 }
97150}
@@ -103,17 +156,6 @@ class EvalConfig(BaseModel):
103156 description = "Config to be used by the user simulator." ,
104157 )
105158
106- @model_validator (mode = "after" )
107- def check_custom_metrics_code_config_args (self ) -> "EvalConfig" :
108- if self .custom_metrics :
109- for metric_name , metric_config in self .custom_metrics .items ():
110- if metric_config .args :
111- raise ValueError (
112- f"args field in CodeConfig for custom metric '{ metric_name } ' is"
113- " not supported."
114- )
115- return self
116-
117159
118160_DEFAULT_EVAL_CONFIG = EvalConfig (
119161 criteria = {"tool_trajectory_avg_score" : 1.0 , "response_match_score" : 0.8 }
@@ -144,11 +186,10 @@ def get_eval_metrics_from_config(eval_config: EvalConfig) -> list[EvalMetric]:
144186 if eval_config .criteria :
145187 for metric_name , criterion in eval_config .criteria .items ():
146188 custom_function_path = None
147- if (
148- eval_config .custom_metrics
149- and metric_name in eval_config .custom_metrics
189+ if eval_config .custom_metrics and (
190+ config := eval_config .custom_metrics .get (metric_name )
150191 ):
151- custom_function_path = eval_config . custom_metrics [ metric_name ] .name
192+ custom_function_path = config . code_config .name
152193
153194 if isinstance (criterion , float ):
154195 eval_metric_list .append (
0 commit comments