From 820294bb67759d578e52c01663f0334363d49b70 Mon Sep 17 00:00:00 2001
From: nikhowlettMicrosoft
<97988752+nikhowlettMicrosoft@users.noreply.github.com>
Date: Fri, 27 Mar 2026 15:34:16 -0400
Subject: [PATCH 1/4] Connection Migration for Tools (Grounding, AI Search,
Fabric) (#103)
* initial commit
* save derived arm name
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* use display name in connection map
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* connection fixes
* always update display name
* update source->target dictionary
* update connection map language
* use correct token
---------
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
migration/v1_to_v2_migration.py | 1158 ++++++++++++++++++++++++++++++-
1 file changed, 1140 insertions(+), 18 deletions(-)
diff --git a/migration/v1_to_v2_migration.py b/migration/v1_to_v2_migration.py
index d1e7bdac6..5b12a545d 100644
--- a/migration/v1_to_v2_migration.py
+++ b/migration/v1_to_v2_migration.py
@@ -40,6 +40,7 @@
PRODUCTION_SUBSCRIPTION = os.getenv("PRODUCTION_SUBSCRIPTION") # e.g., "b1615458-c1ea-49bc-8526-cafc948d3c25"
PRODUCTION_TENANT = os.getenv("PRODUCTION_TENANT") # e.g., "33e577a9-b1b8-4126-87c0-673f197bf624"
PRODUCTION_TOKEN = os.getenv("PRODUCTION_TOKEN") # Production token from PowerShell script
+PRODUCTION_ENDPOINT_OVERRIDE = os.getenv("PRODUCTION_ENDPOINT") # Optional: full endpoint URL override
# v1 API base URL
BASE_V1 = f"https://{HOST}/agents/v1.0/subscriptions/{SUBSCRIPTION_ID}/resourceGroups/{RESOURCE_GROUP}/providers/Microsoft.MachineLearningServices/workspaces/{WORKSPACE}"
@@ -91,8 +92,13 @@ def get_production_v2_base_url(resource_name: str, subscription_id: str, project
Returns:
The production v2 API base URL
"""
- # Production format: https://{resource}-resource.services.ai.azure.com/api/projects/{project}/agents/{agent}/versions
- return f"https://{resource_name}-resource.services.ai.azure.com/api/projects/{project_name}"
+ # Production format: https://{resource}.services.ai.azure.com/api/projects/{project}/agents/{agent}/versions
+ # Avoid double "-resource" suffix if the resource name already ends with it
+ if resource_name.endswith("-resource"):
+ hostname = f"{resource_name}.services.ai.azure.com"
+ else:
+ hostname = f"{resource_name}-resource.services.ai.azure.com"
+ return f"https://{hostname}/api/projects/{project_name}"
# Production token handling removed - now handled by PowerShell wrapper
# which provides PRODUCTION_TOKEN environment variable
@@ -437,6 +443,577 @@ def list_assistants_from_api() -> List[Dict[str, Any]]:
print(f"Warning: Unexpected API response format: {type(response_data)}")
return []
+# Platform tool types that require project-level connections to function
+PLATFORM_TOOL_TYPES = {
+ "bing_grounding": "Bing Search",
+ "bing_custom_search": "Bing Custom Search",
+ "azure_ai_search": "Azure AI Search",
+ "openapi": "OpenAPI",
+ "fabric_dataagent": "Microsoft Fabric Data Agent",
+ "sharepoint_grounding": "SharePoint",
+}
+
+
+def list_connections_from_project(project_endpoint: str, token: Optional[str] = None) -> List[Dict[str, Any]]:
+ """
+ List all connections configured in a project.
+ Uses the project endpoint connections API.
+
+ Args:
+ project_endpoint: The project endpoint URL (e.g., "https://...services.ai.azure.com/api/projects/proj")
+ token: Optional bearer token. If not provided, uses global TOKEN.
+
+ Returns:
+ List of connection objects
+ """
+ api_url = project_endpoint.rstrip('/') + '/connections'
+ params = {"api-version": API_VERSION}
+
+ print(f"š Listing connections from: {api_url}")
+
+ try:
+ if token:
+ response = do_api_request_with_token("GET", api_url, token, params=params)
+ else:
+ response = do_api_request("GET", api_url, params=params)
+
+ data = response.json()
+
+ # Handle different response envelope formats
+ if isinstance(data, dict):
+ connections = data.get("value", data.get("data", data.get("connections", [])))
+ # If none of the known keys worked and data looks like a single connection, wrap it
+ if not connections and "name" in data:
+ connections = [data]
+ elif isinstance(data, list):
+ connections = data
+ else:
+ connections = []
+
+ print(f" Found {len(connections)} connections")
+ return connections
+ except Exception as e:
+ print(f" ā ļø Failed to list connections: {e}")
+ return []
+
+
+def get_connection_detail(project_endpoint: str, connection_name: str, token: Optional[str] = None) -> Optional[Dict[str, Any]]:
+ """
+ Get detailed information about a specific connection.
+
+ Args:
+ project_endpoint: The project endpoint URL
+ connection_name: Name of the connection to retrieve
+ token: Optional bearer token
+
+ Returns:
+ Connection detail dict, or None on failure
+ """
+ api_url = project_endpoint.rstrip('/') + f'/connections/{connection_name}'
+ params = {"api-version": API_VERSION}
+
+ try:
+ if token:
+ response = do_api_request_with_token("GET", api_url, token, params=params)
+ else:
+ response = do_api_request("GET", api_url, params=params)
+ return response.json()
+ except Exception as e:
+ print(f" ā ļø Failed to get connection '{connection_name}': {e}")
+ return None
+
+
+def get_agent_required_connections(v1_assistant: Dict[str, Any]) -> List[Dict[str, str]]:
+ """
+ Identify which platform connections a v1 assistant requires.
+ Returns a list of dicts with tool_type, friendly_name, and any connection_id hint.
+ """
+ tools = v1_assistant.get("tools", [])
+ if isinstance(tools, str):
+ try:
+ tools = json.loads(tools)
+ except:
+ tools = []
+ if not isinstance(tools, list):
+ tools = []
+
+ required = []
+ for tool in tools:
+ if not isinstance(tool, dict):
+ continue
+ tool_type = tool.get("type", "")
+ if tool_type in PLATFORM_TOOL_TYPES:
+ entry = {
+ "tool_type": tool_type,
+ "friendly_name": PLATFORM_TOOL_TYPES[tool_type],
+ }
+ # Extract any connection hints from the tool object itself
+ for key in ["connection_id", "connection_name", "project_connection_id"]:
+ if key in tool and tool[key]:
+ entry["connection_id"] = str(tool[key])
+ break
+ # For azure_ai_search, capture index config
+ if tool_type == "azure_ai_search":
+ for key in ["index_asset_id", "index_connection_id", "index_name"]:
+ if key in tool:
+ entry[key] = str(tool[key])
+ # For openapi, capture spec info
+ if tool_type == "openapi":
+ for key in ["spec", "auth", "connection_id"]:
+ if key in tool:
+ entry[key] = str(tool[key]) if not isinstance(tool[key], dict) else json.dumps(tool[key])
+ required.append(entry)
+ return required
+
+
+def create_connection_in_target(target_endpoint: str, connection_data: Dict[str, Any], token: Optional[str] = None) -> Optional[Dict[str, Any]]:
+ """
+ Attempt to create a connection in the target project.
+
+ Args:
+ target_endpoint: The target project endpoint URL
+ connection_data: Connection definition (from source)
+ token: Optional bearer token
+
+ Returns:
+ Created connection data, or None on failure
+ """
+ connection_name = connection_data.get("name", "")
+ if not connection_name:
+ print(" ā Connection has no name, cannot create")
+ return None
+
+ api_url = target_endpoint.rstrip('/') + f'/connections/{connection_name}'
+ params = {"api-version": API_VERSION}
+
+ # Build the creation payload ā strip read-only fields
+ payload = {}
+ for key in ["name", "properties", "type", "target", "metadata", "credentials"]:
+ if key in connection_data:
+ payload[key] = connection_data[key]
+
+ print(f" š Creating connection '{connection_name}' in target...")
+
+ try:
+ if token:
+ response = do_api_request_with_token("PUT", api_url, token, params=params, json=payload)
+ else:
+ response = do_api_request("PUT", api_url, params=params, json=payload)
+
+ result = response.json()
+ print(f" ā
Connection '{connection_name}' created/updated")
+ return result
+ except Exception as e:
+ print(f" ā Failed to create connection '{connection_name}': {e}")
+ print(f" š” You may need to create this connection manually in the target project")
+ return None
+
+
+def _extract_arm_info_from_endpoint(project_endpoint: str) -> Optional[Dict[str, str]]:
+ """
+ Extract ARM routing info from a project endpoint URL.
+ E.g., "https://myresource-resource.services.ai.azure.com/api/projects/myproject"
+ -> {"account_host": "myresource-resource.services.ai.azure.com", "project_name": "myproject"}
+
+ Returns None if the URL can't be parsed.
+ """
+ import re
+ m = re.match(r'https://([^/]+\.services\.ai\.azure\.com)/api/projects/([^/]+)', project_endpoint)
+ if m:
+ return {"account_host": m.group(1), "project_name": m.group(2)}
+ return None
+
+
+def _derive_connection_display_name(connection: Dict[str, Any]) -> Optional[str]:
+ """
+ Derive an appropriate displayName for a connection from its metadata.
+ Uses the resource name from the ResourceId if available, otherwise
+ falls back to the connection name.
+
+ NOTE: The derived displayName uses hyphens (not underscores) since the v2
+ agent runtime enforces that project_connection_id values contain only
+ alphanumerics and hyphens.
+
+ Supported resource types:
+ - Microsoft.Bing/accounts/{name}
+ - Microsoft.Search/searchServices/{name}
+ - Microsoft.Fabric/capacities/{name}
+
+ Returns the displayName string, or None if unable to derive one.
+ """
+ metadata = connection.get('metadata', {})
+ resource_id = metadata.get('ResourceId', '')
+
+ # Extract resource name from common ARM ResourceId patterns
+ raw_name = None
+ for pattern in ['/accounts/', '/searchServices/', '/capacities/']:
+ if pattern in resource_id:
+ raw_name = resource_id.split(pattern)[-1]
+ break
+
+ # Fallback: use the last segment of ResourceId
+ if not raw_name and resource_id and '/' in resource_id:
+ raw_name = resource_id.split('/')[-1]
+
+ # Final fallback: use connection name as-is
+ if not raw_name:
+ raw_name = connection.get('name', '')
+
+ # Replace underscores with hyphens (v2 project_connection_id validation requires hyphens)
+ return raw_name.replace('_', '-') if raw_name else raw_name
+
+
+def ensure_connection_display_names(
+ connections: List[Dict[str, Any]],
+ subscription_id: str,
+ resource_group: str,
+ account_name: str,
+ token: Optional[str] = None,
+) -> Dict[str, str]:
+ """
+ Ensure all connections have a metadata.displayName set.
+ The v2 agent runtime resolves project_connection_id by displayName,
+ not by the raw connection name. Connections without a displayName
+ will fail with "connection ID not found" in v2 portal.
+
+ For connections that lack a displayName, this function:
+ 1. Derives an appropriate displayName (from the Bing account ResourceId or connection name)
+ 2. PATCHes the connection via the ARM API to set the displayName
+
+ Args:
+ connections: List of connection objects from the data-plane API
+ subscription_id: Azure subscription ID for the target project
+ resource_group: Resource group name
+ account_name: AI Services account name (e.g., "nikhowlett-1194-resource")
+ token: Optional ARM bearer token. If not provided, tries to acquire one.
+
+ Returns:
+ Dict mapping connection name -> v2 displayName (after any fixes)
+ """
+ result_map: Dict[str, str] = {}
+ arm_api_version = "2025-04-01-preview"
+
+ # Get ARM token
+ arm_token = token
+ if not arm_token:
+ try:
+ from azure.identity import DefaultAzureCredential
+ credential = DefaultAzureCredential()
+ arm_token = credential.get_token("https://management.azure.com/.default").token
+ except Exception as e:
+ print(f" ā ļø Could not acquire ARM token for displayName patching: {e}")
+ # Return what we can without patching
+ for c in connections:
+ cname = c.get('name', '')
+ dn = c.get('metadata', {}).get('displayName', '')
+ result_map[cname] = dn if dn else cname
+ return result_map
+
+ arm_headers = {"Authorization": f"Bearer {arm_token}", "Content-Type": "application/json"}
+
+ for conn in connections:
+ conn_name = conn.get('name', '')
+ conn_type = conn.get('type', '')
+ metadata = conn.get('metadata', {})
+ existing_dn = metadata.get('displayName', '')
+
+ if existing_dn and '_' not in existing_dn:
+ # Already has a valid displayName (no underscores) ā no action needed
+ result_map[conn_name] = existing_dn # Use the existing displayName
+ continue
+
+ if existing_dn and '_' in existing_dn:
+ print(f" š§ Connection '{conn_name}' has displayName with underscores ('{existing_dn}') ā re-patching with hyphens")
+
+ # Only patch Bing/tool connections that need displayName for v2
+ tool_type = metadata.get('type', '')
+ if tool_type not in ('bing_grounding', 'bing_custom_search', 'microsoft_fabric', 'sharepoint_grounding', 'azure_ai_search'):
+ result_map[conn_name] = conn_name
+ continue
+
+ # Derive a displayName
+ derived_dn = _derive_connection_display_name(conn)
+ if not derived_dn and existing_dn and '_' in existing_dn:
+ # Fallback: fix underscores in the existing displayName directly
+ derived_dn = existing_dn.replace('_', '-')
+ print(f" š§ Falling back to underscore-fixed displayName: '{existing_dn}' -> '{derived_dn}'")
+ if not derived_dn:
+ print(f" ā ļø Cannot derive displayName for '{conn_name}', using name as-is")
+ result_map[conn_name] = conn_name
+ continue
+
+ # PATCH via ARM to set displayName
+ arm_conn_url = (
+ f"https://management.azure.com/subscriptions/{subscription_id}"
+ f"/resourceGroups/{resource_group}"
+ f"/providers/Microsoft.CognitiveServices/accounts/{account_name}"
+ f"/connections/{conn_name}"
+ )
+
+ patch_body = {
+ "properties": {
+ "authType": metadata.get('authType', conn.get('credentials', {}).get('type', 'ApiKey')),
+ "category": conn_type,
+ "target": conn.get('target', ''),
+ "metadata": {**metadata, "displayName": derived_dn}
+ }
+ }
+
+ try:
+ resp = requests.patch(
+ f"{arm_conn_url}?api-version={arm_api_version}",
+ headers=arm_headers,
+ json=patch_body,
+ timeout=30
+ )
+ if resp.ok:
+ # Prefer the displayName returned by ARM if present; otherwise fall back to derived_dn
+ final_display_name = derived_dn
+ try:
+ resp_body = resp.json()
+ final_display_name = (
+ resp_body.get("properties", {})
+ .get("metadata", {})
+ .get("displayName", derived_dn)
+ )
+ except Exception:
+ # If the response body cannot be parsed, keep using derived_dn
+ final_display_name = derived_dn
+ result_map[conn_name] = final_display_name
+ print(
+ f" ✅ Set displayName on '{conn_name}' -> '{final_display_name}' "
+ f"(raw name '{conn_name}' is the project_connection_id)"
+ )
+ else:
+ print(f" ā ļø Failed to set displayName on '{conn_name}': {resp.status_code} {resp.text[:200]}")
+ print(f" Connection will not be resolvable in v2 portal")
+ result_map[conn_name] = conn_name
+ except Exception as e:
+ print(f" ā ļø Failed to PATCH displayName on '{conn_name}': {e}")
+ result_map[conn_name] = conn_name
+
+ return result_map
+
+
+def _set_target_arm_prefix(target_endpoint: str, subscription_id: Optional[str] = None) -> bool:
+ """
+ Parse the target project endpoint URL and set TARGET_PROJECT_ARM_PREFIX globally.
+
+ The ARM prefix takes the form:
+ /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.CognitiveServices
+ /accounts/{acc}/projects/{proj}
+
+ This is required so that get_v2_connection_id() and resolve_connection_id() can
+ build full ARM paths for project_connection_id values (required by portal agent runner).
+
+ Returns True if the prefix was set successfully.
+ """
+ global TARGET_PROJECT_ARM_PREFIX
+ import re
+
+ m = re.match(r'https://([^.]+)\.services\.ai\.azure\.com/api/projects/([^/?]+)', target_endpoint)
+ if not m:
+ return False
+
+ account_name = m.group(1) # e.g., "nikhowlett-1194-resource"
+ project_name = m.group(2) # e.g., "nikhowlett-1194"
+
+ # Try to get RG from connection ARM IDs or fall back to deriving from account name pattern
+ rg_name = None
+ sub_id = subscription_id
+
+ if not rg_name and account_name:
+ # Common pattern: account name contains a suffix like "-resource"; RG often prefixed "rg-"
+ # We'll fill in when we have a connection list ā leave partial prefix for now
+ pass
+
+ if sub_id and rg_name:
+ TARGET_PROJECT_ARM_PREFIX = (
+ f"/subscriptions/{sub_id}/resourceGroups/{rg_name}"
+ f"/providers/Microsoft.CognitiveServices/accounts/{account_name}"
+ f"/projects/{project_name}"
+ )
+ print(f" š Target ARM prefix set: {TARGET_PROJECT_ARM_PREFIX}")
+ return True
+
+ return False
+
+
+def _set_target_arm_prefix_from_connections(
+ target_endpoint: str,
+ target_connections: List[Dict[str, Any]],
+ subscription_id: Optional[str] = None,
+) -> bool:
+ """
+ Set TARGET_PROJECT_ARM_PREFIX by parsing the target endpoint + extracting RG from
+ connection ARM IDs. Returns True if prefix was set.
+ """
+ global TARGET_PROJECT_ARM_PREFIX
+ import re
+
+ m = re.match(r'https://([^.]+)\.services\.ai\.azure\.com/api/projects/([^/?]+)', target_endpoint)
+ if not m:
+ print(" ā ļø Could not parse target endpoint, ARM prefix not set")
+ return False
+
+ account_name = m.group(1)
+ project_name = m.group(2)
+ sub_id = subscription_id
+ rg_name = None
+
+ # Extract sub/RG from connection ARM IDs
+ for c in target_connections:
+ cid = c.get('id', '')
+ arm_match = re.match(r'/subscriptions/([^/]+)/resourceGroups/([^/]+)/', cid)
+ if arm_match:
+ if not sub_id:
+ sub_id = arm_match.group(1)
+ rg_name = arm_match.group(2)
+ break
+
+ if not sub_id or not rg_name:
+ print(" ā ļø Could not determine subscription/RG, ARM prefix not set (project_connection_id will use raw name fallback)")
+ return False
+
+ TARGET_PROJECT_ARM_PREFIX = (
+ f"/subscriptions/{sub_id}/resourceGroups/{rg_name}"
+ f"/providers/Microsoft.CognitiveServices/accounts/{account_name}"
+ f"/projects/{project_name}"
+ )
+ print(f" š Target ARM prefix: {TARGET_PROJECT_ARM_PREFIX}")
+ return True
+
+
+def _try_ensure_display_names(
+ target_endpoint: str,
+ target_connections: List[Dict[str, Any]],
+ subscription_id: Optional[str] = None,
+) -> None:
+ """
+ Best-effort attempt to ensure all target connections have displayName set.
+ Parses the target endpoint URL to extract ARM routing info, then calls
+ ensure_connection_display_names to patch any connections that lack one.
+
+ Also sets TARGET_PROJECT_ARM_PREFIX so that project_connection_id values are built
+ as full ARM paths (required by portal agent runner).
+
+ This modifies the target_connections list in-place (updating metadata).
+ """
+ import re
+
+ # Parse endpoint to get account name and try to determine resource group
+ m = re.match(r'https://([^.]+)\.services\.ai\.azure\.com/api/projects/([^/]+)', target_endpoint)
+ if not m:
+ print(" ā ļø Could not parse target endpoint for ARM routing, skipping displayName enforcement")
+ return
+
+ account_name = m.group(1) # e.g., "nikhowlett-1194-resource"
+
+ # Try to determine subscription and resource group from connection ARM IDs
+ sub_id = subscription_id
+ rg_name = None
+ for c in target_connections:
+ cid = c.get('id', '')
+ arm_match = re.match(r'/subscriptions/([^/]+)/resourceGroups/([^/]+)/', cid)
+ if arm_match:
+ if not sub_id:
+ sub_id = arm_match.group(1)
+ rg_name = arm_match.group(2)
+ break
+
+ # Always set TARGET_PROJECT_ARM_PREFIX if we have enough info (needed for project_connection_id)
+ _set_target_arm_prefix_from_connections(target_endpoint, target_connections, sub_id)
+
+ # Check if any connections actually need fixing
+ needs_fix = [c for c in target_connections
+ if (not c.get('metadata', {}).get('displayName')
+ or '_' in c.get('metadata', {}).get('displayName', ''))
+ and c.get('metadata', {}).get('type') in ('bing_grounding', 'bing_custom_search', 'microsoft_fabric', 'sharepoint_grounding', 'azure_ai_search')]
+
+ if not needs_fix:
+ return
+
+ print(f"\n š§ {len(needs_fix)} connection(s) need displayName fix (required for v2 runtime)")
+
+ if not sub_id or not rg_name:
+ print(f" ā ļø Could not determine subscription/RG from connection IDs, skipping displayName enforcement")
+ print(f" Connections without displayName may fail in v2 portal. Fix manually or use --connection-map.")
+ return
+
+ print(f" š” Patching via ARM: subscription={sub_id}, RG={rg_name}, account={account_name}")
+ display_name_map = ensure_connection_display_names(
+ needs_fix, sub_id, rg_name, account_name
+ )
+
+ # Update the connection objects in-place so build_connection_map sees the displayNames
+ for c in target_connections:
+ cname = c.get('name', '')
+ if cname in display_name_map:
+ if 'metadata' not in c or not isinstance(c['metadata'], dict):
+ c['metadata'] = {}
+ c['metadata']['displayName'] = display_name_map[cname]
+
+
+def print_connection_migration_report(assistants: List[Dict[str, Any]], source_connections: List[Dict[str, Any]]):
+ """
+ Print a summary report of which connections each agent needs and whether they exist in source.
+ """
+ print("\n" + "=" * 60)
+ print("š CONNECTION MIGRATION REPORT")
+ print("=" * 60)
+
+ # Build a lookup of source connections by name and type
+ conn_by_name = {c.get("name", ""): c for c in source_connections}
+ conn_by_type = {}
+ for c in source_connections:
+ ctype = c.get("properties", {}).get("category", c.get("type", c.get("metadata", {}).get("type", "unknown")))
+ conn_by_type.setdefault(ctype, []).append(c)
+
+ all_needed_connections = set()
+
+ for assistant in assistants:
+ name = assistant.get("name", "unknown")
+ required = get_agent_required_connections(assistant)
+ if not required:
+ continue
+
+ print(f"\n š¤ {name} (ID: {assistant.get('id', 'unknown')})")
+ for req in required:
+ tool_type = req["tool_type"]
+ friendly = req["friendly_name"]
+ conn_id = req.get("connection_id", "(not specified in tool)")
+ print(f" ⢠{friendly} ({tool_type})")
+ print(f" Connection ref: {conn_id}")
+ if conn_id != "(not specified in tool)" and conn_id in conn_by_name:
+ src_conn = conn_by_name[conn_id]
+ print(f" ✅ Found in source: {src_conn.get('name', 'N/A')} (type: {src_conn.get('type', 'N/A')})")
+ all_needed_connections.add(conn_id)
+ else:
+ # Try matching by tool type
+ matched = False
+ for c in source_connections:
+ c_meta_type = c.get("metadata", {}).get("type", "")
+ c_category = c.get("properties", {}).get("category", "")
+ if tool_type in [c_meta_type, c_category]:
+ print(f" š Possible match in source: '{c.get('name', 'N/A')}' (type: {c.get('type', 'N/A')})")
+ all_needed_connections.add(c.get("name", ""))
+ matched = True
+ if not matched:
+ print(f" ā ļø No matching connection found in source")
+
+ # Print extra config hints
+ for extra_key in ["index_asset_id", "index_connection_id", "index_name", "spec"]:
+ if extra_key in req:
+ print(f" Config: {extra_key} = {req[extra_key]}")
+
+ if all_needed_connections:
+ print(f"\n š Connections that should exist in target project:")
+ for cn in sorted(all_needed_connections):
+ print(f" - {cn}")
+
+ print("\n" + "=" * 60)
+
+
def ensure_project_connection_package():
"""Ensure the correct azure-ai-projects version is installed for project connection string functionality."""
try:
@@ -764,7 +1341,11 @@ def create_agent_version_via_api(agent_name: str, agent_version_data: Dict[str,
# Ensure agent name is lowercase for API compliance
agent_name = agent_name.lower()
- if production_resource and production_subscription:
+ if PRODUCTION_ENDPOINT_OVERRIDE:
+ # Direct endpoint override ā use as-is
+ url = f"{PRODUCTION_ENDPOINT_OVERRIDE.rstrip('/')}/agents/{agent_name}/versions"
+ print(f"š Using PRODUCTION endpoint (direct override)")
+ elif production_resource and production_subscription:
# Production mode: use Azure AI services endpoint format
base_url = get_production_v2_base_url(production_resource, production_subscription, production_resource)
url = f"{base_url}/agents/{agent_name}/versions"
@@ -799,9 +1380,10 @@ def create_agent_version_via_api(agent_name: str, agent_version_data: Dict[str,
try:
# Make the POST request to create the agent version with appropriate token
# Use production token from environment if available and production resource is specified
- if production_resource and PRODUCTION_TOKEN:
+ effective_token = production_token or PRODUCTION_TOKEN
+ if production_resource and effective_token:
print(f" š Using production token for authentication")
- response = do_api_request_with_token("POST", url, PRODUCTION_TOKEN, params=params, json=agent_version_data)
+ response = do_api_request_with_token("POST", url, effective_token, params=params, json=agent_version_data)
else:
print(f" š Using standard token for authentication")
response = do_api_request("POST", url, params=params, json=agent_version_data)
@@ -916,6 +1498,198 @@ def determine_agent_kind(v1_assistant: Dict[str, Any]) -> str:
# Default to prompt agent for all assistants (test assumption: all are prompt agents)
return "prompt"
+# Global connection mapping: source connection name -> target project_connection_id
+# Populated by --connection-map arg or auto-discovery
+# IMPORTANT: The v2 portal agent runner resolves project_connection_id by FULL ARM PATH, e.g.:
+# /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.CognitiveServices
+# /accounts/{acc}/projects/{proj}/connections/{conn_name}
+# Short names and displayNames only work for the Responses API, NOT the portal agent runner.
+CONNECTION_MAP: Dict[str, str] = {}
+
+# Target project ARM path prefix ā set by _build_target_arm_prefix() when target endpoint is known.
+# Used to build full ARM paths for project_connection_id values.
+TARGET_PROJECT_ARM_PREFIX: str = "" # e.g. '/subscriptions/.../projects/proj'
+
+
+def extract_connection_name_from_arm_path(arm_path: str) -> str:
+ """
+ Extract the connection name from a full ARM resource path.
+ E.g., '/subscriptions/.../connections/hengylbinggrounding' -> 'hengylbinggrounding'
+ """
+ if '/connections/' in arm_path:
+ return arm_path.split('/connections/')[-1]
+ return arm_path
+
+
+def get_v2_connection_id(connection: Dict[str, Any]) -> str:
+ """
+ Get the correct v2 project_connection_id for a connection object.
+
+ The v2 portal agent runner resolves project_connection_id by FULL ARM PATH:
+ /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.CognitiveServices
+ /accounts/{acc}/projects/{proj}/connections/{conn_name}
+
+ Short names and displayNames only work for the Responses API (different code path).
+ This function builds the full ARM path using TARGET_PROJECT_ARM_PREFIX if set,
+ otherwise falls back to the raw connection name with a warning.
+
+ Args:
+ connection: A connection object from the connections API
+
+ Returns:
+ The full ARM path to use as project_connection_id in v2 agent definitions
+ """
+ conn_name = connection.get('name', '')
+
+ if TARGET_PROJECT_ARM_PREFIX:
+ return f"{TARGET_PROJECT_ARM_PREFIX}/connections/{conn_name}"
+
+ # Fallback: no prefix set ā warn and return raw name
+ print(f" ā ļø TARGET_PROJECT_ARM_PREFIX not set; returning raw name '{conn_name}' as project_connection_id (may fail in portal agent runner)")
+ return conn_name
+
+
+def resolve_connection_id(v1_connection_id: str) -> str:
+ """
+ Resolve a v1 connection_id (full ARM path) to a v2 project_connection_id.
+
+ 1. If the connection name is in CONNECTION_MAP, use the mapped target connection name
+ and build a full ARM path via TARGET_PROJECT_ARM_PREFIX.
+ 2. Otherwise, extract the connection name from the ARM path and build the ARM path.
+
+ The v2 portal agent runner requires a full ARM path as project_connection_id.
+ """
+ source_name = extract_connection_name_from_arm_path(v1_connection_id)
+
+ if source_name in CONNECTION_MAP:
+ target_conn_name = CONNECTION_MAP[source_name]
+ # Strip any existing ARM prefix from the mapped value (it should be just a name)
+ target_conn_name = extract_connection_name_from_arm_path(target_conn_name)
+ if TARGET_PROJECT_ARM_PREFIX:
+ full_path = f"{TARGET_PROJECT_ARM_PREFIX}/connections/{target_conn_name}"
+ print(f" š Connection mapped: '{source_name}' -> '{full_path}'")
+ return full_path
+ print(f" š Connection mapped: '{source_name}' -> '{target_conn_name}' (no ARM prefix set)")
+ return target_conn_name
+
+ # No mapping ā build ARM path from source name directly
+ if TARGET_PROJECT_ARM_PREFIX:
+ full_path = f"{TARGET_PROJECT_ARM_PREFIX}/connections/{source_name}"
+ print(f" ā ļø No mapping for '{source_name}', using ARM path: '{full_path}'")
+ return full_path
+
+ print(f" ā ļø Connection '{source_name}' (no mapping, no ARM prefix ā may fail in v2 portal)")
+ return source_name
+
+
+def remap_connection_ids_in_tool(tool_data: Any) -> Any:
+ """
+ Recursively walk a tool data structure and:
+ - Rename 'connection_id' keys to 'project_connection_id'
+ - Resolve ARM paths to short connection names via CONNECTION_MAP
+ """
+ if isinstance(tool_data, dict):
+ result = {}
+ for key, value in tool_data.items():
+ if key == 'connection_id':
+ # Rename and resolve
+ resolved = resolve_connection_id(str(value)) if value else value
+ result['project_connection_id'] = resolved
+ else:
+ result[key] = remap_connection_ids_in_tool(value)
+ return result
+ elif isinstance(tool_data, list):
+ return [remap_connection_ids_in_tool(item) for item in tool_data]
+ return tool_data
+
+
+def build_connection_map_from_projects(
+ source_connections: List[Dict[str, Any]],
+ target_connections: List[Dict[str, Any]]
+) -> Dict[str, str]:
+ """
+ Auto-build a connection map by matching source connections to target connections
+ by type/category. Returns a dict of source_name -> target_v2_id.
+
+ The values are full ARM paths (when TARGET_PROJECT_ARM_PREFIX is set), as returned
+ by get_v2_connection_id(). They are used directly as project_connection_id in v2
+ agent definitions and do not need further processing by resolve_connection_id().
+ """
+ mapping: Dict[str, str] = {}
+
+ # Build target lookup by type
+ target_by_type: Dict[str, List[Dict[str, Any]]] = {}
+ for tc in target_connections:
+ ctype = tc.get('properties', {}).get('category', tc.get('type', 'unknown'))
+ meta_type = tc.get('metadata', {}).get('type', '')
+ for t in [ctype, meta_type]:
+ if t:
+ target_by_type.setdefault(t, []).append(tc)
+
+ # Log all target connections with their v2 IDs for debugging
+ print(" š Target connections (v2 resolution):")
+ for tc in target_connections:
+ tc_name = tc.get('name', '')
+ tc_v2_id = get_v2_connection_id(tc)
+ tc_type = tc.get('properties', {}).get('category', tc.get('type', 'unknown'))
+ display_note = f" (v2_id: '{tc_v2_id}')" if tc_v2_id != tc_name else " (v2_id: same as name)"
+ print(f" ⢠{tc_name}{display_note} [{tc_type}]")
+
+ # Match source to target by type
+ for sc in source_connections:
+ src_name = sc.get('name', '')
+ src_type = sc.get('properties', {}).get('category', sc.get('type', 'unknown'))
+ src_meta_type = sc.get('metadata', {}).get('type', '')
+
+ # Try matching by category first, then metadata type
+ for match_type in [src_type, src_meta_type]:
+ if match_type and match_type in target_by_type:
+ candidates = target_by_type[match_type]
+
+ # Filter to candidates that have a displayName set (preferred for v2)
+ candidates_with_display = [c for c in candidates if c.get('metadata', {}).get('displayName')]
+
+ if len(candidates_with_display) == 1:
+ # Prefer the candidate with a displayName (known to work in v2 runtime)
+ tgt = candidates_with_display[0]
+ tgt_v2_id = get_v2_connection_id(tgt)
+ mapping[src_name] = tgt_v2_id
+ print(f" š Auto-mapped: '{src_name}' ({match_type}) -> '{tgt_v2_id}' (displayName of '{tgt.get('name', '')}')")
+ break
+ elif len(candidates) == 1:
+ # Only one candidate total ā use its v2 ID
+ tgt = candidates[0]
+ tgt_v2_id = get_v2_connection_id(tgt)
+ mapping[src_name] = tgt_v2_id
+ print(f" š Auto-mapped: '{src_name}' ({match_type}) -> '{tgt_v2_id}'")
+ break
+ elif len(candidates) > 1:
+ # Multiple candidates ā prefer one with displayName, then exact name match
+ if len(candidates_with_display) > 0:
+ # Use the first candidate with a displayName
+ tgt = candidates_with_display[0]
+ tgt_v2_id = get_v2_connection_id(tgt)
+ mapping[src_name] = tgt_v2_id
+ print(f" š Auto-mapped (displayName preferred): '{src_name}' ({match_type}) -> '{tgt_v2_id}' (from {len(candidates)} candidates)")
+ else:
+ # No displayNames ā try exact name match
+ exact = [c for c in candidates if c.get('name', '') == src_name]
+ if exact:
+ tgt_v2_id = get_v2_connection_id(exact[0])
+ mapping[src_name] = tgt_v2_id
+ print(f" š Auto-mapped (exact name): '{src_name}' ({match_type}) -> '{tgt_v2_id}'")
+ else:
+ tgt = candidates[0]
+ tgt_v2_id = get_v2_connection_id(tgt)
+ mapping[src_name] = tgt_v2_id
+ print(f" ā ļø Auto-mapped (first of {len(candidates)}, no displayName): '{src_name}' ({match_type}) -> '{tgt_v2_id}'")
+ break
+ else:
+ print(f" ā ļø No target match for source connection '{src_name}' (type: {src_type})")
+
+ return mapping
+
+
def v1_assistant_to_v2_agent(v1_assistant: Dict[str, Any], agent_name: Optional[str] = None, version: str = "1") -> Dict[str, Any]:
"""
Transform a v1 assistant object to v2 agent structure.
@@ -1092,6 +1866,10 @@ def v1_assistant_to_v2_agent(v1_assistant: Dict[str, Any], agent_name: Optional[
# Copy function definition if present
if "function" in tool:
transformed_tool["function"] = tool["function"]
+ # v2 API requires a top-level 'name' on the tool object
+ fn_name = tool["function"].get("name", "")
+ if fn_name:
+ transformed_tool["name"] = fn_name
# Handle MCP tools
elif tool_type == "mcp":
@@ -1119,11 +1897,155 @@ def v1_assistant_to_v2_agent(v1_assistant: Dict[str, Any], agent_name: Optional[
# Handle azure_function tools
elif tool_type == "azure_function":
- # Copy all azure function specific properties
- for key in ["name", "description", "parameters", "input_queue", "output_queue"]:
+ # v2 API requires 'azure_function' sub-object with:
+ # - function: {name, description, parameters}
+ # - input_binding: {type: "storage_queue", storage_queue: {queue_service_endpoint, queue_name}}
+ # - output_binding: {type: "storage_queue", storage_queue: {queue_service_endpoint, queue_name}}
+ af_config: Dict[str, Any] = {}
+
+ # Build the 'function' sub-object from v1's top-level name/description/parameters
+ fn_def: Dict[str, Any] = {}
+ for key in ["name", "description", "parameters"]:
if key in tool:
- transformed_tool[key] = tool[key]
- print(f" Added Azure Function tool properties: {[k for k in tool.keys() if k != 'type']}")
+ fn_def[key] = tool[key]
+ if fn_def:
+ af_config["function"] = fn_def
+
+ # Map input_queue -> input_binding with storage_queue wrapper
+ if "input_queue" in tool:
+ iq = tool["input_queue"]
+ af_config["input_binding"] = {
+ "type": "storage_queue",
+ "storage_queue": {
+ "queue_service_endpoint": iq.get("storage_service_endpoint", ""),
+ "queue_name": iq.get("queue_name", ""),
+ }
+ }
+ # Map output_queue -> output_binding with storage_queue wrapper
+ if "output_queue" in tool:
+ oq = tool["output_queue"]
+ af_config["output_binding"] = {
+ "type": "storage_queue",
+ "storage_queue": {
+ "queue_service_endpoint": oq.get("storage_service_endpoint", ""),
+ "queue_name": oq.get("queue_name", ""),
+ }
+ }
+
+ transformed_tool["azure_function"] = af_config
+ print(f" Added Azure Function tool properties (nested): {list(af_config.keys())}")
+
+ # Handle azure_ai_search tools — merge tool_resources inline
+ elif tool_type == "azure_ai_search":
+ # In v1, azure_ai_search config can be:
+ # a) Inline in the tool object (tool.azure_ai_search.indexes)
+ # b) In tool_resources.azure_ai_search.indexes (tool is bare {"type": "azure_ai_search"})
+ # In v2, it must always be inline in the tool.
+ search_config = tool.get("azure_ai_search", {})
+
+ # If no inline config, pull from tool_resources
+ if not search_config or not search_config.get("indexes"):
+ tr_search = v1_tool_resources.get("azure_ai_search", {})
+ if tr_search and tr_search.get("indexes"):
+ search_config = tr_search
+ print(f" Merged azure_ai_search config from tool_resources")
+
+ if search_config:
+ # Remap any connection_id fields in the search config
+ remapped = remap_connection_ids_in_tool(search_config)
+ transformed_tool["azure_ai_search"] = remapped
+
+ # Log what we found
+ indexes = remapped.get("indexes", [])
+ for idx in indexes:
+ if idx.get("index_asset_id"):
+ print(f" azure_ai_search index: asset_id='{idx['index_asset_id']}'")
+ elif idx.get("project_connection_id") or idx.get("index_project_connection_id"):
+ conn_id = idx.get("project_connection_id", idx.get("index_project_connection_id", ""))
+ print(f" azure_ai_search index: connection='{conn_id}', name='{idx.get('index_name', '')}'")
+ else:
+ print(f" ā ļø azure_ai_search tool has no config in tool or tool_resources")
+
+ # Handle fabric_dataagent tools — v1 uses connections[], v2 uses fabric_dataagent_preview.project_connections[]
+ elif tool_type == "fabric_dataagent":
+ # v1 format:
+ # fabric_dataagent.connections[].connection_id (full ARM path already)
+ # v2 format:
+ # fabric_dataagent_preview.project_connections[].project_connection_id
+ v1_fabric = tool.get("fabric_dataagent", {})
+ v1_connections = v1_fabric.get("connections", [])
+
+ v2_project_connections = []
+ for conn in v1_connections:
+ conn_id = conn.get("connection_id", "")
+ # connection_id in v1 Fabric is already a full ARM path — use as-is
+ entry: Dict[str, Any] = {"project_connection_id": conn_id}
+ if conn.get("instructions"):
+ entry["instructions"] = conn["instructions"]
+ v2_project_connections.append(entry)
+ print(f" fabric_dataagent connection: '{conn_id}'")
+
+ transformed_tool["fabric_dataagent_preview"] = {
+ "project_connections": v2_project_connections
+ }
+
+ # Copy any top-level instructions from the fabric config if present
+ if v1_fabric.get("instructions"):
+ transformed_tool["fabric_dataagent_preview"]["instructions"] = v1_fabric["instructions"]
+
+ print(f" Transformed fabric_dataagent: {len(v2_project_connections)} connection(s) -> fabric_dataagent_preview")
+
+ # Handle bing_grounding — v2 requires search_configurations array
+ elif tool_type == "bing_grounding":
+ v1_bing = tool.get("bing_grounding", {})
+ conn_id = v1_bing.get("connection_id", "")
+ resolved_conn = resolve_connection_id(conn_id) if conn_id else conn_id
+ search_config = {
+ "project_connection_id": resolved_conn,
+ "market": "en-us",
+ "set_lang": "en",
+ "count": 5,
+ }
+ transformed_tool["bing_grounding"] = {
+ "search_configurations": [search_config]
+ }
+ print(f" bing_grounding: wrapped connection '{resolved_conn}' in search_configurations")
+
+ # Handle bing_custom_search — v2 requires search_configurations array
+ elif tool_type == "bing_custom_search":
+ v1_bcs = tool.get("bing_custom_search", {})
+ conn_id = v1_bcs.get("connection_id", "")
+ resolved_conn = resolve_connection_id(conn_id) if conn_id else conn_id
+ search_config = {
+ "project_connection_id": resolved_conn,
+ }
+ # Carry over custom_config_id if present
+ if v1_bcs.get("custom_config_id"):
+ search_config["custom_config_id"] = v1_bcs["custom_config_id"]
+ transformed_tool["bing_custom_search"] = {
+ "search_configurations": [search_config]
+ }
+ print(f" bing_custom_search: wrapped connection '{resolved_conn}' in search_configurations")
+
+ # Handle sharepoint_grounding — remap connection_id to project_connection_id
+ elif tool_type == "sharepoint_grounding":
+ v1_sp = tool.get("sharepoint_grounding", {})
+ conn_id = v1_sp.get("connection_id", "")
+ resolved_conn = resolve_connection_id(conn_id) if conn_id else conn_id
+ transformed_tool["sharepoint_grounding"] = {
+ "project_connection_id": resolved_conn
+ }
+ print(f" sharepoint_grounding: remapped connection '{resolved_conn}'")
+
+ # Handle other platform tools that need connection_id -> project_connection_id remapping
+ elif tool_type in PLATFORM_TOOL_TYPES:
+ # Copy all properties except 'type', but remap connection_id -> project_connection_id
+ for key, value in tool.items():
+ if key == "type":
+ continue
+ # Recursively remap connection_ids in nested structures
+ transformed_tool[key] = remap_connection_ids_in_tool(value)
+ print(f" Added platform tool '{tool_type}' with connection_id -> project_connection_id remapping")
# Handle any other tool types by copying all properties except 'type'
else:
@@ -1305,7 +2227,7 @@ def save_v2_agent_to_cosmos(v2_agent_data: Dict[str, Any], connection_string: st
print(f" Migration Doc: {migration_doc}")
raise
-def process_v1_assistants_to_v2_agents(args=None, assistant_id: Optional[str] = None, cosmos_connection_string: Optional[str] = None, use_api: bool = False, project_endpoint: Optional[str] = None, project_connection_string: Optional[str] = None, project_subscription: Optional[str] = None, project_resource_group: Optional[str] = None, project_name: Optional[str] = None, production_resource: Optional[str] = None, production_subscription: Optional[str] = None, production_tenant: Optional[str] = None, source_tenant: Optional[str] = None):
+def process_v1_assistants_to_v2_agents(args=None, assistant_id: Optional[str] = None, cosmos_connection_string: Optional[str] = None, use_api: bool = False, project_endpoint: Optional[str] = None, project_connection_string: Optional[str] = None, project_subscription: Optional[str] = None, project_resource_group: Optional[str] = None, project_name: Optional[str] = None, production_resource: Optional[str] = None, production_subscription: Optional[str] = None, production_tenant: Optional[str] = None, source_tenant: Optional[str] = None, only_with_tools: bool = False, only_without_tools: bool = False, migrate_connections: bool = False, production_endpoint: Optional[str] = None):
"""
Main processing function that reads v1 assistants from Cosmos DB, API, Project endpoint, or Project connection string,
converts them to v2 agents, and saves via v2 API.
@@ -1317,6 +2239,10 @@ def process_v1_assistants_to_v2_agents(args=None, assistant_id: Optional[str] =
project_endpoint: Optional project endpoint for AIProjectClient (e.g., "https://...api/projects/p-3")
project_connection_string: Optional project connection string for AIProjectClient (e.g., "eastus.api.azureml.ms;...;...;...")
source_tenant: Optional source tenant ID for authentication when reading v1 assistants
+ only_with_tools: If True, only migrate assistants that have at least one tool
+ only_without_tools: If True, only migrate assistants that have no tools
+ migrate_connections: If True, attempt to discover and recreate connections in target project
+ production_endpoint: Optional full production URL (overrides production_resource URL construction)
"""
# Handle package version management based on usage
@@ -1493,6 +2419,107 @@ def process_v1_assistants_to_v2_agents(args=None, assistant_id: Optional[str] =
v1_assistant = {k: v for k, v in v1_assistant.items() if v is not None}
v1_assistants.append(v1_assistant)
+ # ── Tool-based filtering ──────────────────────────────────────────
+ if only_with_tools or only_without_tools:
+ def _has_tools(assistant: Dict[str, Any]) -> bool:
+ tools = assistant.get("tools", [])
+ if isinstance(tools, str):
+ try:
+ tools = json.loads(tools)
+ except:
+ tools = []
+ return isinstance(tools, list) and len(tools) > 0
+
+ before_count = len(v1_assistants)
+ if only_with_tools:
+ v1_assistants = [a for a in v1_assistants if _has_tools(a)]
+ print(f"š§ --only-with-tools: filtered {before_count} ā {len(v1_assistants)} assistants (keeping only those WITH tools)")
+ else:
+ v1_assistants = [a for a in v1_assistants if not _has_tools(a)]
+ print(f"š§ --only-without-tools: filtered {before_count} ā {len(v1_assistants)} assistants (keeping only those WITHOUT tools)")
+
+ if not v1_assistants:
+ print("ā No assistants remain after filtering. Nothing to migrate.")
+ return
+
+ # ── Connection discovery & migration ──────────────────────────────
+ source_connections: List[Dict[str, Any]] = []
+ if migrate_connections or only_with_tools:
+ # Try to discover connections from the source project
+ source_endpoint = project_endpoint # connections API needs a project endpoint
+ if source_endpoint:
+ print("\nš Discovering connections from source project...")
+ source_connections = list_connections_from_project(source_endpoint)
+ for conn in source_connections:
+ conn_name = conn.get("name", "N/A")
+ conn_type = conn.get("properties", {}).get("category", conn.get("type", "unknown"))
+ conn_target = conn.get("properties", {}).get("target", conn.get("target", "N/A"))
+ print(f" ⢠{conn_name} (type: {conn_type}, target: {conn_target})")
+ else:
+ print("\nā ļø Cannot discover connections: --project-endpoint is required to read source connections.")
+ print(" Connection migration requires reading from the source project's connections API.")
+
+ # Print connection report for agents with tools
+ if (migrate_connections or only_with_tools) and v1_assistants:
+ agents_with_tools = [a for a in v1_assistants if get_agent_required_connections(a)]
+ if agents_with_tools:
+ print_connection_migration_report(agents_with_tools, source_connections)
+
+ # Auto-build connection map if we have both source and target connections
+ if source_connections and not CONNECTION_MAP:
+ # Determine target endpoint for listing connections
+ if production_endpoint:
+ target_ep = production_endpoint
+ elif production_resource and production_subscription:
+ target_ep = get_production_v2_base_url(production_resource, production_subscription, production_resource)
+ else:
+ target_ep = None
+
+ if target_ep:
+ print("\nš Auto-discovering target connections for mapping...")
+ prod_token = PRODUCTION_TOKEN or TOKEN
+ target_connections = list_connections_from_project(target_ep, prod_token)
+ if target_connections:
+ # Ensure all target connections have displayName set (required for v2 runtime)
+ # The v2 agent runtime resolves project_connection_id by metadata.displayName,
+ # not by the raw connection name. We auto-patch connections that lack one.
+ _try_ensure_display_names(target_ep, target_connections, production_subscription)
+
+ auto_map = build_connection_map_from_projects(source_connections, target_connections)
+ # Merge auto-map with explicit CLI mappings (CLI takes precedence)
+ for k, v in auto_map.items():
+ if k not in CONNECTION_MAP:
+ CONNECTION_MAP[k] = v
+ print(f" š Connection map ({len(CONNECTION_MAP)} entries): {CONNECTION_MAP}")
+
+ # Attempt to create connections in target if requested
+ if migrate_connections and source_connections:
+ # Determine target endpoint
+ if production_endpoint:
+ target_ep = production_endpoint
+ elif production_resource and production_subscription:
+ target_ep = get_production_v2_base_url(production_resource, production_subscription, production_resource)
+ else:
+ target_ep = None
+
+ if target_ep:
+ prod_token = PRODUCTION_TOKEN or TOKEN
+ print(f"\nš Attempting to create connections in target project...")
+ print(f" Target: {target_ep}")
+ created_count = 0
+ failed_count = 0
+ for conn in source_connections:
+ result = create_connection_in_target(target_ep, conn, prod_token)
+ if result:
+ created_count += 1
+ else:
+ failed_count += 1
+ print(f"\n š Connection migration: {created_count} created, {failed_count} failed")
+ if failed_count > 0:
+ print(f" š” Failed connections may need secrets (API keys) added manually in the target project portal")
+ else:
+ print("\nā ļø Cannot create connections: need --production-endpoint or --production-resource to determine target")
+
# Ensure we have API authentication for v2 API saving
# Use source tenant for authentication (for reading v1 assistants)
# Force refresh if we have production resource (might have switched tenants)
@@ -1730,6 +2757,28 @@ def main():
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
+ # Migrate ONLY agents with tools from source project to target (with direct endpoint)
+ python v1_to_v2_migration.py --only-with-tools \\
+ --project-endpoint "https://yinchu-eastus2-resource.services.ai.azure.com/api/projects/yinchu-eastus2" \\
+ --production-endpoint "https://nikhowlett-1194-resource.services.ai.azure.com/api/projects/nikhowlett-1194" \\
+ --production-resource nikhowlett-1194-resource \\
+ --production-subscription 2d385bf4-0756-4a76-aa95-28bf9ed3b625 \\
+ --production-tenant 72f988bf-86f1-41af-91ab-2d7cd011db47
+
+ # Migrate agents with tools AND attempt to copy connections
+ python v1_to_v2_migration.py --only-with-tools --migrate-connections \\
+ --project-endpoint "https://source-project.services.ai.azure.com/api/projects/source" \\
+ --production-endpoint "https://target-resource.services.ai.azure.com/api/projects/target" \\
+ --production-resource target-resource \\
+ --production-subscription SUB_ID \\
+ --production-tenant TENANT_ID
+
+ # Migrate ONLY plain agents (no tools)
+ python v1_to_v2_migration.py --only-without-tools \\
+ --production-resource nextgen-eastus \\
+ --production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\
+ --production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624
+
# Migrate from v1 API to production v2 API (REQUIRED production parameters)
python v1_to_v2_migration.py --use-api \\
--source-tenant 72f988bf-86f1-41af-91ab-2d7cd011db47 \\
@@ -1744,10 +2793,11 @@ def main():
--production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\
--production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624
- # Migrate from project endpoint to production v2 API
+ # Migrate from project endpoint using direct production endpoint (no URL guessing)
python v1_to_v2_migration.py \\
--project-endpoint "https://your-project.api.azure.com/api/projects/p-3" \\
- --production-resource nextgen-eastus \\
+ --production-endpoint "https://target-resource.services.ai.azure.com/api/projects/target" \\
+ --production-resource target-resource \\
--production-subscription b1615458-c1ea-49bc-8526-cafc948d3c25 \\
--production-tenant 33e577a9-b1b8-4126-87c0-673f197bf624 \\
asst_abc123
@@ -1757,9 +2807,6 @@ def main():
# Read from project connection string (requires azure-ai-projects==1.0.0b10)
python v1_to_v2_migration.py --project-connection-string "eastus.api.azureml.ms;subscription-id;resource-group;project-name"
python v1_to_v2_migration.py asst_abc123 --project-connection-string "eastus.api.azureml.ms;subscription-id;resource-group;project-name"
-
- # Production deployment: migrate to production Azure AI resource
- python v1_to_v2_migration.py --project-endpoint "https://source-project.api.azure.com/api/projects/p-3" --production-resource "nextgen-eastus" --production-subscription "b1615458-c1ea-49bc-8526-cafc948d3c25" --production-tenant "33e577a9-b1b8-4126-87c0-673f197bf624" asst_abc123
"""
)
@@ -1813,6 +2860,38 @@ def main():
help='Project connection string for AIProjectClient (e.g., "eastus.api.azureml.ms;...;...;..."). Requires azure-ai-projects==1.0.0b10. If provided, reads assistants from project connection instead of other methods.'
)
+ # Tool filtering arguments (mutually exclusive)
+ tools_filter_group = parser.add_mutually_exclusive_group()
+ tools_filter_group.add_argument(
+ '--only-with-tools',
+ action='store_true',
+ help='Only migrate agents that have at least one tool configured (e.g., bing_grounding, file_search, etc.).'
+ )
+ tools_filter_group.add_argument(
+ '--only-without-tools',
+ action='store_true',
+ help='Only migrate agents that have NO tools configured (plain conversational agents).'
+ )
+
+ parser.add_argument(
+ '--migrate-connections',
+ action='store_true',
+ help='Attempt to discover connections from the source project and recreate them in the target project. '
+ 'Requires --project-endpoint for the source. Secrets (API keys) may not transfer and will need manual entry.'
+ )
+
+ parser.add_argument(
+ '--connection-map',
+ type=str,
+ action='append',
+ metavar='SOURCE=TARGET',
+ help='Map a source connection name to a target connection identifier. '
+ 'The TARGET value should be either the connection name (short name) or a full ARM resource ID for the '
+ 'connection. When using a target project ARM prefix, a short TARGET value will be appended as '
+ '"/connections/{TARGET}" to the prefix. Can be specified multiple times. '
+ 'Example: --connection-map hengylbinggrounding=hengyl-binggrounding'
+ )
+
parser.add_argument(
'--add-test-function',
action='store_true',
@@ -1848,7 +2927,8 @@ def main():
'--production-resource',
type=str,
required=True,
- help='Production Azure AI resource name (REQUIRED). Example: "nextgen-eastus"'
+ help='Production Azure AI resource name (REQUIRED). Example: "nextgen-eastus". '
+ 'If the name already ends with "-resource", it will NOT be doubled.'
)
parser.add_argument(
@@ -1865,6 +2945,13 @@ def main():
help='Production tenant ID for Azure authentication (REQUIRED). Example: "33e577a9-b1b8-4126-87c0-673f197bf624"'
)
+ parser.add_argument(
+ '--production-endpoint',
+ type=str,
+ help='Full production v2 API base URL (overrides --production-resource URL construction). '
+ 'Example: "https://nikhowlett-1194-resource.services.ai.azure.com/api/projects/nikhowlett-1194"'
+ )
+
parser.add_argument(
'--source-tenant',
type=str,
@@ -1879,6 +2966,21 @@ def main():
# Production arguments are now required, so no additional validation needed
+ # Set global production endpoint override if provided
+ global PRODUCTION_ENDPOINT_OVERRIDE, CONNECTION_MAP
+ if args.production_endpoint:
+ PRODUCTION_ENDPOINT_OVERRIDE = args.production_endpoint
+
+ # Parse connection map from CLI args
+ if args.connection_map:
+ for mapping in args.connection_map:
+ if '=' in mapping:
+ src, tgt = mapping.split('=', 1)
+ CONNECTION_MAP[src.strip()] = tgt.strip()
+ print(f"š Connection mapping: '{src.strip()}' -> '{tgt.strip()}'")
+ else:
+ print(f"ā ļø Invalid connection mapping (expected SOURCE=TARGET): {mapping}")
+
print("š Starting v1 to v2 Agent Migration")
print("=" * 50)
@@ -1887,6 +2989,12 @@ def main():
print(f" šÆ Resource: {args.production_resource}")
print(f" š Subscription: {args.production_subscription}")
print(f" š Tenant: {args.production_tenant}")
+ if args.production_endpoint:
+ print(f" š Endpoint Override: {args.production_endpoint}")
+ else:
+ # Show the URL that will be constructed
+ computed_url = get_production_v2_base_url(args.production_resource, args.production_subscription, args.production_resource)
+ print(f" š Computed URL: {computed_url}")
if PRODUCTION_TOKEN:
print(f" ā
Production token available (length: {len(PRODUCTION_TOKEN)})")
@@ -1894,6 +3002,14 @@ def main():
print(" ā ļø No PRODUCTION_TOKEN environment variable found")
print(" š” Use run-migration-docker-auth.ps1 for automatic dual-token authentication")
+ if args.only_with_tools:
+ print("š§ Filter: --only-with-tools (agents WITH tools only)")
+ elif args.only_without_tools:
+ print("š§ Filter: --only-without-tools (agents WITHOUT tools only)")
+
+ if args.migrate_connections:
+ print("š Connection migration: ENABLED")
+
if assistant_id:
print(f"šÆ Target Assistant ID: {assistant_id}")
else:
@@ -1914,7 +3030,9 @@ def main():
print("š¾ Reading assistants from Cosmos DB")
# Always using v2 API (required)
- if args.production_resource:
+ if args.production_endpoint:
+ print(f"š Saving agents via PRODUCTION v2 API (endpoint: {args.production_endpoint})")
+ elif args.production_resource:
print(f"š Saving agents via PRODUCTION v2 API (resource: {args.production_resource})")
print(f" š Production subscription: {args.production_subscription}")
else:
@@ -1926,7 +3044,11 @@ def main():
args, assistant_id, cosmos_connection_string, args.use_api,
args.project_endpoint, args.project_connection_string, args.project_subscription,
args.project_resource_group, args.project_name, args.production_resource,
- args.production_subscription, args.production_tenant, args.source_tenant
+ args.production_subscription, args.production_tenant, args.source_tenant,
+ only_with_tools=args.only_with_tools,
+ only_without_tools=args.only_without_tools,
+ migrate_connections=args.migrate_connections,
+ production_endpoint=args.production_endpoint
)
if __name__ == "__main__":
From 968cc5f1cde41094776e6f27c1c77c89eb325730 Mon Sep 17 00:00:00 2001
From: Dchillakuru <167816084+Dchillakuru@users.noreply.github.com>
Date: Mon, 30 Mar 2026 11:29:53 -0700
Subject: [PATCH 2/4] add provision network now and chain dependency in tf for
connections (#133)
---
.../18-managed-virtual-network-preview/azuredeploy.json | 3 ++-
.../modules-network-secured/managed-network.bicep | 1 +
.../18-managed-virtual-network-preview/ai-foundry.tf | 9 +++++++++
3 files changed, 12 insertions(+), 1 deletion(-)
diff --git a/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/azuredeploy.json b/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/azuredeploy.json
index 57c847169..56e7b43fd 100644
--- a/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/azuredeploy.json
+++ b/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/azuredeploy.json
@@ -1274,7 +1274,8 @@
"properties": {
"managedNetwork": {
"IsolationMode": "[parameters('isolationMode')]",
- "managedNetworkKind": "V2"
+ "managedNetworkKind": "V2",
+ "provisionNetworkNow": true
}
}
},
diff --git a/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/modules-network-secured/managed-network.bicep b/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/modules-network-secured/managed-network.bicep
index bbd387c58..1edeaf49b 100644
--- a/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/modules-network-secured/managed-network.bicep
+++ b/infrastructure/infrastructure-setup-bicep/18-managed-virtual-network-preview/modules-network-secured/managed-network.bicep
@@ -49,6 +49,7 @@ resource managedNetwork 'Microsoft.CognitiveServices/accounts/managednetworks@20
managedNetwork: {
IsolationMode: isolationMode
managedNetworkKind: 'V2'
+ provisionNetworkNow: true
//firewallSku: 'Standard' // Uncomment to enable firewall only when in AllowOnlyApprovedOutbound mode
}
}
diff --git a/infrastructure/infrastructure-setup-terraform/18-managed-virtual-network-preview/ai-foundry.tf b/infrastructure/infrastructure-setup-terraform/18-managed-virtual-network-preview/ai-foundry.tf
index a7704b9bc..52e02d972 100644
--- a/infrastructure/infrastructure-setup-terraform/18-managed-virtual-network-preview/ai-foundry.tf
+++ b/infrastructure/infrastructure-setup-terraform/18-managed-virtual-network-preview/ai-foundry.tf
@@ -148,6 +148,7 @@ resource "azapi_resource" "managed_network" {
managedNetwork = {
isolationMode = "AllowInternetOutbound"
managedNetworkKind = "V2"
+ provisionNetworkNow = true
}
}
}
@@ -380,6 +381,10 @@ resource "azapi_resource" "conn_cosmosdb" {
}
}
}
+
+ depends_on = [
+ azapi_resource.conn_aisearch
+ ]
}
# Connection: Storage Account
@@ -403,6 +408,10 @@ resource "azapi_resource" "conn_storage" {
}
}
}
+
+ depends_on = [
+ azapi_resource.conn_cosmosdb
+ ]
}
# Local variable to format project workspace ID as GUID
From b11d301004294a9bda481f36dbe33a819daef131 Mon Sep 17 00:00:00 2001
From: Alvin Ashcraft <73072+alvinashcraft@users.noreply.github.com>
Date: Mon, 30 Mar 2026 17:44:18 -0400
Subject: [PATCH 3/4] Updates for Java SDK GA (#135)
---
samples/java/quickstart/chat-with-agent/pom.xml | 2 +-
samples/java/quickstart/create-agent/pom.xml | 2 +-
samples/java/quickstart/responses/pom.xml | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/samples/java/quickstart/chat-with-agent/pom.xml b/samples/java/quickstart/chat-with-agent/pom.xml
index e8bdf65bf..fb6d7ce2d 100644
--- a/samples/java/quickstart/chat-with-agent/pom.xml
+++ b/samples/java/quickstart/chat-with-agent/pom.xml
@@ -27,7 +27,7 @@
com.azure
azure-ai-agents
- 1.0.0-beta.1
+ 2.0.0
com.azure
diff --git a/samples/java/quickstart/create-agent/pom.xml b/samples/java/quickstart/create-agent/pom.xml
index f8bcb3430..4934d1d33 100644
--- a/samples/java/quickstart/create-agent/pom.xml
+++ b/samples/java/quickstart/create-agent/pom.xml
@@ -27,7 +27,7 @@
com.azure
azure-ai-agents
- 1.0.0-beta.1
+ 2.0.0
com.azure
diff --git a/samples/java/quickstart/responses/pom.xml b/samples/java/quickstart/responses/pom.xml
index 2caf1119c..4eafc093a 100644
--- a/samples/java/quickstart/responses/pom.xml
+++ b/samples/java/quickstart/responses/pom.xml
@@ -27,7 +27,7 @@
com.azure
azure-ai-agents
- 1.0.0-beta.1
+ 2.0.0
com.azure
From a671a432bd72e6c5dc6c956fbb972cbd9f90e5cc Mon Sep 17 00:00:00 2001
From: "foundry-samples-repo-sync[bot]"
Date: Mon, 30 Mar 2026 22:38:16 +0000
Subject: [PATCH 4/4] Update sync state to 8dc749def5ba
---
.github/.sync-sha | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/.sync-sha b/.github/.sync-sha
index 3bfe06869..542225cef 100644
--- a/.github/.sync-sha
+++ b/.github/.sync-sha
@@ -1 +1 @@
-12648bcde39fef0e73e7f53d55691ebeb8763379
+8dc749def5ba8aaf43b57bab480fac44cfb95dcc