Skip to content

Commit 2fa2177

Browse files
committed
Merge branch 'main' into fix/anthropic-thinking-streaming-continuity
Resolved conflicts in contributing/samples/gepa/{experiment,run_experiment}.py by taking main's version: utils.py was renamed to gepa_utils.py upstream, which supersedes the local blank-line-only formatting fix.
2 parents 38ec957 + 7de5bc5 commit 2fa2177

42 files changed

Lines changed: 2975 additions & 152 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.github/workflows/pre-commit.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
name: pre-commit
15+
name: Pre-commit Checks
1616

1717
on:
1818
push:

contributing/samples/gepa/experiment.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@
3131
import gepa
3232
from gepa.core.adapter import EvaluationBatch
3333
from gepa.core.adapter import GEPAAdapter
34+
import gepa_utils
3435
from litellm import provider_list
3536
import rater_lib
3637
from retry import retry
@@ -43,7 +44,6 @@
4344
from tau_bench.types import EnvRunResult
4445
from tau_bench.types import RunConfig
4546
import tau_bench_agent as tau_bench_agent_lib
46-
import utils
4747

4848

4949
def run_tau_bench_rollouts(
@@ -582,7 +582,7 @@ def run_gepa(
582582
task_lm=None, # this must be None when a custom adapter is used
583583
adapter=tau_bench_adapter,
584584
max_metric_calls=config.max_metric_calls,
585-
reflection_lm=utils.reflection_inference_fn(config.reflection_model),
585+
reflection_lm=gepa_utils.reflection_inference_fn(config.reflection_model),
586586
reflection_minibatch_size=config.reflection_minibatch_size,
587587
run_dir=output_dir,
588588
)

contributing/samples/gepa/run_experiment.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,8 @@
2424
from absl import app
2525
from absl import flags
2626
import experiment
27+
import gepa_utils
2728
from google.genai import types
28-
import utils
2929

3030
_OUTPUT_DIR = flags.DEFINE_string(
3131
'output_dir',
@@ -105,7 +105,7 @@ def main(argv: Sequence[str]) -> None:
105105
for logger in loggers:
106106
logger.setLevel(logging.WARNING)
107107

108-
types.logger.addFilter(utils.FilterInferenceWarnings())
108+
types.logger.addFilter(gepa_utils.FilterInferenceWarnings())
109109
output_dir = os.path.join(
110110
_OUTPUT_DIR.value, datetime.now().strftime('%Y%m%d%H%M%S%f')
111111
)

src/google/adk/a2a/converters/part_converter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -179,7 +179,7 @@ def convert_genai_part_to_a2a_part(
179179
) -> Optional[a2a_types.Part]:
180180
"""Convert a Google GenAI Part to an A2A Part."""
181181

182-
if part.text:
182+
if part.text is not None:
183183
a2a_part = a2a_types.TextPart(text=part.text)
184184
if part.thought is not None:
185185
a2a_part.metadata = {_get_adk_metadata_key('thought'): part.thought}

src/google/adk/agents/__init__.py

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,9 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import importlib
16+
from typing import TYPE_CHECKING
17+
1518
from .base_agent import BaseAgent
1619
from .context import Context
1720
from .invocation_context import InvocationContext
@@ -20,11 +23,13 @@
2023
from .llm_agent import Agent
2124
from .llm_agent import LlmAgent
2225
from .loop_agent import LoopAgent
23-
from .mcp_instruction_provider import McpInstructionProvider
2426
from .parallel_agent import ParallelAgent
2527
from .run_config import RunConfig
2628
from .sequential_agent import SequentialAgent
2729

30+
if TYPE_CHECKING:
31+
from .mcp_instruction_provider import McpInstructionProvider
32+
2833
__all__ = [
2934
'Agent',
3035
'BaseAgent',
@@ -39,3 +44,16 @@
3944
'LiveRequestQueue',
4045
'RunConfig',
4146
]
47+
48+
49+
def __getattr__(name: str):
50+
if name == 'McpInstructionProvider':
51+
try:
52+
module = importlib.import_module(f'{__name__}.mcp_instruction_provider')
53+
except ImportError as e:
54+
raise ImportError(
55+
'`McpInstructionProvider` requires the `mcp` package.'
56+
' Install with: pip install google-adk[extensions]'
57+
) from e
58+
return module.McpInstructionProvider
59+
raise AttributeError(f'module {__name__!r} has no attribute {name!r}')

src/google/adk/apps/compaction.py

Lines changed: 44 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,13 +22,47 @@
2222
from ..events.event import Event
2323
from ..sessions.base_session_service import BaseSessionService
2424
from ..sessions.session import Session
25+
from ..telemetry.tracing import _build_compaction_attributes
26+
from ..telemetry.tracing import _build_compaction_result_attributes
27+
from ..telemetry.tracing import tracer
2528
from .app import App
2629
from .app import EventsCompactionConfig
2730
from .llm_event_summarizer import LlmEventSummarizer
2831

2932
logger = logging.getLogger('google_adk.' + __name__)
3033

3134

35+
async def _summarize_events_with_trace(
36+
*,
37+
session: Session,
38+
config: EventsCompactionConfig,
39+
events_to_compact: list[Event],
40+
trigger: str,
41+
) -> Event | None:
42+
"""Summarizes events within a trace span labeled for compaction."""
43+
if config.summarizer is None:
44+
return None
45+
46+
attributes = _build_compaction_attributes(
47+
session_id=session.id,
48+
trigger=trigger,
49+
summarizer_type=type(config.summarizer).__name__,
50+
event_count=len(events_to_compact),
51+
token_threshold=config.token_threshold,
52+
event_retention_size=config.event_retention_size,
53+
compaction_interval=config.compaction_interval,
54+
overlap_size=config.overlap_size,
55+
)
56+
57+
with tracer.start_as_current_span(f'compact_events {trigger}') as span:
58+
span.set_attributes(attributes)
59+
compaction_event = await config.summarizer.maybe_summarize_events(
60+
events=events_to_compact
61+
)
62+
span.set_attributes(_build_compaction_result_attributes(compaction_event))
63+
return compaction_event
64+
65+
3266
def _count_text_chars_in_content(content: types.Content | None) -> int:
3367
"""Returns the number of text characters in a content object."""
3468
total_chars = 0
@@ -383,8 +417,11 @@ async def _run_compaction_for_token_threshold_config(
383417
if config.summarizer is None:
384418
return False
385419

386-
compaction_event = await config.summarizer.maybe_summarize_events(
387-
events=events_to_compact
420+
compaction_event = await _summarize_events_with_trace(
421+
session=session,
422+
config=config,
423+
events_to_compact=events_to_compact,
424+
trigger='token_threshold',
388425
)
389426
if compaction_event:
390427
await session_service.append_event(session=session, event=compaction_event)
@@ -602,8 +639,11 @@ async def _run_compaction_for_sliding_window(
602639
if config.summarizer is None:
603640
return None
604641

605-
compaction_event = await config.summarizer.maybe_summarize_events(
606-
events=events_to_compact
642+
compaction_event = await _summarize_events_with_trace(
643+
session=session,
644+
config=config,
645+
events_to_compact=events_to_compact,
646+
trigger='sliding_window',
607647
)
608648
if compaction_event:
609649
await session_service.append_event(session=session, event=compaction_event)

src/google/adk/apps/llm_event_summarizer.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,11 +104,13 @@ async def maybe_summarize_events(
104104
contents=[Content(role='user', parts=[Part(text=prompt)])],
105105
)
106106
summary_content = None
107+
summary_usage_metadata = None
107108
async for llm_response in self._llm.generate_content_async(
108109
llm_request, stream=False
109110
):
110111
if llm_response.content:
111112
summary_content = llm_response.content
113+
summary_usage_metadata = llm_response.usage_metadata
112114
break
113115

114116
if summary_content is None:
@@ -132,4 +134,5 @@ async def maybe_summarize_events(
132134
author='user',
133135
actions=actions,
134136
invocation_id=Event.new_id(),
137+
usage_metadata=summary_usage_metadata,
135138
)

src/google/adk/auth/__init__.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,25 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
from __future__ import annotations
16+
17+
import importlib
18+
from typing import TYPE_CHECKING
19+
1520
from .auth_credential import AuthCredential
1621
from .auth_credential import AuthCredentialTypes
1722
from .auth_credential import OAuth2Auth
18-
from .auth_handler import AuthHandler
1923
from .auth_schemes import AuthScheme
2024
from .auth_schemes import AuthSchemeType
2125
from .auth_schemes import OpenIdConnectWithConfig
2226
from .auth_tool import AuthConfig
2327
from .base_auth_provider import BaseAuthProvider
28+
29+
if TYPE_CHECKING:
30+
from .auth_handler import AuthHandler
31+
32+
33+
def __getattr__(name: str):
34+
if name == 'AuthHandler':
35+
return importlib.import_module(f'{__name__}.auth_handler').AuthHandler
36+
raise AttributeError(f'module {__name__!r} has no attribute {name!r}')

src/google/adk/flows/llm_flows/base_llm_flow.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,9 +36,7 @@
3636
from ...agents.live_request_queue import LiveRequestQueue
3737
from ...agents.readonly_context import ReadonlyContext
3838
from ...agents.run_config import StreamingMode
39-
from ...auth.auth_handler import AuthHandler
4039
from ...auth.auth_tool import AuthConfig
41-
from ...auth.credential_manager import CredentialManager
4240
from ...events.event import Event
4341
from ...models.base_llm_connection import BaseLlmConnection
4442
from ...models.llm_request import LlmRequest
@@ -144,6 +142,8 @@ async def _resolve_toolset_auth(
144142
continue
145143

146144
auth_config_copy = auth_config.model_copy(deep=True)
145+
from ...auth.credential_manager import CredentialManager
146+
147147
try:
148148
credential = await CredentialManager(
149149
auth_config_copy
@@ -173,7 +173,8 @@ async def _resolve_toolset_auth(
173173
if not pending_auth_requests:
174174
return
175175

176-
# Build auth requests dict with generated auth requests
176+
from ...auth.auth_handler import AuthHandler
177+
177178
auth_requests = {
178179
credential_id: AuthHandler(auth_config).generate_auth_request()
179180
for credential_id, auth_config in pending_auth_requests.items()

0 commit comments

Comments (0)