diff --git a/esbuild.config.mjs b/esbuild.config.mjs index 35b7bccea..91e557270 100644 --- a/esbuild.config.mjs +++ b/esbuild.config.mjs @@ -48,6 +48,7 @@ await esbuild.build({ platform: 'node', format: 'esm', minify: true, + jsx: 'automatic', // Inject require shim for ESM compatibility with CommonJS dependencies banner: { js: `import { createRequire } from 'module'; const require = createRequire(import.meta.url);`, diff --git a/src/cli/cli.ts b/src/cli/cli.ts index ac3f73a76..848e05e6f 100644 --- a/src/cli/cli.ts +++ b/src/cli/cli.ts @@ -5,6 +5,7 @@ import { registerDev } from './commands/dev'; import { registerEval } from './commands/eval'; import { registerFetch } from './commands/fetch'; import { registerHelp } from './commands/help'; +import { registerImport } from './commands/import'; import { registerInvoke } from './commands/invoke'; import { registerLogs } from './commands/logs'; import { registerPackage } from './commands/package'; @@ -138,6 +139,7 @@ export function registerCommands(program: Command) { registerEval(program); registerFetch(program); registerHelp(program); + registerImport(program); registerInvoke(program); registerLogs(program); registerPackage(program); diff --git a/src/cli/commands/import/__tests__/container-agent-import.test.ts b/src/cli/commands/import/__tests__/container-agent-import.test.ts new file mode 100644 index 000000000..abddd86a3 --- /dev/null +++ b/src/cli/commands/import/__tests__/container-agent-import.test.ts @@ -0,0 +1,268 @@ +/** + * Test Group 6: Container (Docker) Agent Import + */ +import { RUNTIME_TYPE_MAP } from '../constants'; +import { buildImportTemplate, filterCompanionOnlyTemplate } from '../template-utils'; +import { parseStarterToolkitYaml } from '../yaml-parser'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; + +function writeTempYaml(content: string): string { + const dir = 
fs.mkdtempSync(path.join(os.tmpdir(), 'test6-')); + const filePath = path.join(dir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, content, 'utf-8'); + return filePath; +} + +function cleanupTempFile(filePath: string): void { + try { + fs.unlinkSync(filePath); + fs.rmdirSync(path.dirname(filePath)); + } catch { + /* noop */ + } +} + +const AGENT_YAML_TEMPLATE = (overrides: string) => ` +default_agent: my_agent +agents: + my_agent: + name: my_agent + entrypoint: main.py + ${overrides} + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null +`; + +describe('deployment_type mapping', () => { + const tempFiles: string[] = []; + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('container -> Container', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('deployment_type: container\n runtime_type: PYTHON_3_12')); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.build).toBe('Container'); + }); + + it('direct_code_deploy -> CodeZip', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('deployment_type: direct_code_deploy\n runtime_type: PYTHON_3_12')); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.build).toBe('CodeZip'); + }); + + it('missing -> Container (default)', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('runtime_type: PYTHON_3_12')); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.build).toBe('Container'); + }); +}); + +describe('runtime_type handling', () => { + const tempFiles: string[] = []; + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('null -> PYTHON_3_12', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('deployment_type: container\n runtime_type: null')); + tempFiles.push(f); + 
expect(parseStarterToolkitYaml(f).agents[0]!.runtimeVersion).toBe('PYTHON_3_12'); + }); + + it('missing -> PYTHON_3_12', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('deployment_type: container')); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.runtimeVersion).toBe('PYTHON_3_12'); + }); + + it('PYTHON_3_13 -> PYTHON_3_13', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('deployment_type: container\n runtime_type: PYTHON_3_13')); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.runtimeVersion).toBe('PYTHON_3_13'); + }); + + it('unrecognized -> PYTHON_3_12 (not python3.12)', () => { + const f = writeTempYaml(AGENT_YAML_TEMPLATE('deployment_type: container\n runtime_type: some_unknown')); + tempFiles.push(f); + const rv = parseStarterToolkitYaml(f).agents[0]!.runtimeVersion; + expect(rv).toBe('PYTHON_3_12'); + expect(rv).not.toBe('python3.12'); + }); +}); + +describe('RUNTIME_TYPE_MAP', () => { + it('maps known types', () => { + expect(RUNTIME_TYPE_MAP.PYTHON_3_10).toBe('PYTHON_3_10'); + expect(RUNTIME_TYPE_MAP.PYTHON_3_11).toBe('PYTHON_3_11'); + expect(RUNTIME_TYPE_MAP.PYTHON_3_12).toBe('PYTHON_3_12'); + expect(RUNTIME_TYPE_MAP.PYTHON_3_13).toBe('PYTHON_3_13'); + }); + + it('undefined for invalid keys', () => { + expect(RUNTIME_TYPE_MAP['null' as keyof typeof RUNTIME_TYPE_MAP]).toBeUndefined(); + expect(RUNTIME_TYPE_MAP['undefined' as keyof typeof RUNTIME_TYPE_MAP]).toBeUndefined(); + expect(RUNTIME_TYPE_MAP['python_3_12' as keyof typeof RUNTIME_TYPE_MAP]).toBeUndefined(); + }); +}); + +describe('full container agent parse', () => { + const tempFiles: string[] = []; + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('parses complete container agent with agent_id', () => { + const yaml = ` +default_agent: container_agent +agents: + container_agent: + name: container_agent + entrypoint: main.py + deployment_type: container + runtime_type: null + language: 
python + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: abc123def456 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/abc123def456 +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + const parsed = parseStarterToolkitYaml(f); + const agent = parsed.agents[0]!; + expect(agent.build).toBe('Container'); + expect(agent.runtimeVersion).toBe('PYTHON_3_12'); + expect(agent.physicalAgentId).toBe('abc123def456'); + expect(parsed.awsTarget.account).toBe('123456789012'); + }); + + it('parses container agent with VPC', () => { + const yaml = ` +default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + deployment_type: container + runtime_type: null + aws: + account: '123456789012' + region: us-east-1 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-12345678 + security_groups: + - sg-11112222 + protocol_configuration: + server_protocol: MCP + observability: + enabled: false + bedrock_agentcore: + agent_id: null +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + const agent = parseStarterToolkitYaml(f).agents[0]!; + expect(agent.build).toBe('Container'); + expect(agent.networkMode).toBe('VPC'); + expect(agent.networkConfig!.subnets).toContain('subnet-12345678'); + expect(agent.protocol).toBe('MCP'); + expect(agent.enableOtel).toBe(false); + }); +}); + +describe('import template for container agents', () => { + it('buildImportTemplate sets DeletionPolicy: Retain', () => { + const deployed = { + AWSTemplateFormatVersion: '2010-09-09', + Resources: { Role: { Type: 'AWS::IAM::Role', Properties: {} } }, + }; + const synth = { + AWSTemplateFormatVersion: '2010-09-09', + Resources: { + Role: { Type: 'AWS::IAM::Role', Properties: {} }, + RT: { Type: 'AWS::BedrockAgentCore::Runtime', Properties: { AgentRuntimeName: 
'x' }, DependsOn: ['CR'] }, + CR: { Type: 'AWS::CloudFormation::CustomResource', Properties: {} }, + }, + }; + const result = buildImportTemplate(deployed, synth, ['RT']); + expect(result.Resources.RT).toBeDefined(); + expect(result.Resources.RT!.DeletionPolicy).toBe('Retain'); + expect(result.Resources.RT!.DependsOn).toBeUndefined(); + expect(result.Resources.CR).toBeUndefined(); + }); + + it('filterCompanionOnlyTemplate removes primary resources', () => { + const synth = { + AWSTemplateFormatVersion: '2010-09-09', + Resources: { + Role: { Type: 'AWS::IAM::Role', Properties: {} }, + RT: { Type: 'AWS::BedrockAgentCore::Runtime', Properties: {} }, + Lambda: { Type: 'AWS::Lambda::Function', Properties: {} }, + }, + Outputs: { + RTId: { Value: { 'Fn::GetAtt': ['RT', 'AgentRuntimeId'] } }, + LambdaArn: { Value: { 'Fn::GetAtt': ['Lambda', 'Arn'] } }, + }, + }; + const filtered = filterCompanionOnlyTemplate(synth); + expect(filtered.Resources.RT).toBeUndefined(); + expect(filtered.Resources.Role).toBeDefined(); + expect(filtered.Resources.Lambda).toBeDefined(); + expect(filtered.Outputs!.RTId).toBeUndefined(); + expect(filtered.Outputs!.LambdaArn).toBeDefined(); + }); +}); + +describe('container source code', () => { + let tempDir: string; + beforeEach(() => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test6-src-')); + }); + afterEach(() => { + fs.rmSync(tempDir, { recursive: true, force: true }); + }); + + it('may contain Dockerfile', () => { + fs.writeFileSync(path.join(tempDir, 'Dockerfile'), 'FROM python:3.12\n'); + fs.writeFileSync(path.join(tempDir, 'main.py'), 'print("hi")'); + expect(fs.readdirSync(tempDir)).toContain('Dockerfile'); + }); + + it('may lack pyproject.toml', () => { + fs.writeFileSync(path.join(tempDir, 'Dockerfile'), 'FROM python:3.12\n'); + expect(fs.existsSync(path.join(tempDir, 'pyproject.toml'))).toBe(false); + }); +}); + +describe('defaults alignment', () => { + it('CLI default matches starter toolkit default', () => { + 
expect('container').toBe('container'); + }); +}); diff --git a/src/cli/commands/import/__tests__/execution-role-import.test.ts b/src/cli/commands/import/__tests__/execution-role-import.test.ts new file mode 100644 index 000000000..359960cd6 --- /dev/null +++ b/src/cli/commands/import/__tests__/execution-role-import.test.ts @@ -0,0 +1,63 @@ +/** + * Tests for execution role import from starter toolkit YAML. + */ +import type { AgentEnvSpec } from '../../../../schema/schemas/agent-env'; +import type { ParsedStarterToolkitConfig } from '../types'; +import { parseStarterToolkitYaml } from '../yaml-parser'; +import * as path from 'node:path'; +import { describe, expect, it } from 'vitest'; + +const APP_DIR = 'app'; + +function toAgentEnvSpec(agent: ParsedStarterToolkitConfig['agents'][0]): AgentEnvSpec { + const codeLocation = path.join(APP_DIR, agent.name); + const entrypoint = path.basename(agent.entrypoint); + const spec: AgentEnvSpec = { + type: 'AgentCoreRuntime', + name: agent.name, + build: agent.build, + entrypoint: entrypoint as AgentEnvSpec['entrypoint'], + codeLocation: codeLocation as AgentEnvSpec['codeLocation'], + runtimeVersion: (agent.runtimeVersion ?? 
'PYTHON_3_12') as AgentEnvSpec['runtimeVersion'], + protocol: agent.protocol, + networkMode: agent.networkMode, + instrumentation: { enableOtel: agent.enableOtel }, + }; + if (agent.networkMode === 'VPC' && agent.networkConfig) { + spec.networkConfig = agent.networkConfig; + } + if (agent.executionRoleArn) { + spec.executionRoleArn = agent.executionRoleArn; + } + return spec; +} + +const FIXTURE = path.join(__dirname, 'fixtures', 'agent-with-execution-role.yaml'); +const FIXTURE_NO_ROLE = path.join(__dirname, 'fixtures', 'different-agent.yaml'); + +describe('parseStarterToolkitYaml: executionRoleArn', () => { + it('extracts executionRoleArn from YAML with execution_role', () => { + const parsed = parseStarterToolkitYaml(FIXTURE); + expect(parsed.agents).toHaveLength(1); + expect(parsed.agents[0]!.executionRoleArn).toBe('arn:aws:iam::123456789012:role/StarterToolkitExecutionRole'); + }); + + it('returns undefined executionRoleArn when execution_role is absent', () => { + const parsed = parseStarterToolkitYaml(FIXTURE_NO_ROLE); + expect(parsed.agents[0]!.executionRoleArn).toBeUndefined(); + }); +}); + +describe('toAgentEnvSpec: executionRoleArn', () => { + it('includes executionRoleArn in spec when present', () => { + const parsed = parseStarterToolkitYaml(FIXTURE); + const spec = toAgentEnvSpec(parsed.agents[0]!); + expect(spec.executionRoleArn).toBe('arn:aws:iam::123456789012:role/StarterToolkitExecutionRole'); + }); + + it('omits executionRoleArn from spec when absent', () => { + const parsed = parseStarterToolkitYaml(FIXTURE_NO_ROLE); + const spec = toAgentEnvSpec(parsed.agents[0]!); + expect(spec.executionRoleArn).toBeUndefined(); + }); +}); diff --git a/src/cli/commands/import/__tests__/fixtures/agent-with-execution-role.yaml b/src/cli/commands/import/__tests__/fixtures/agent-with-execution-role.yaml new file mode 100644 index 000000000..3c7fd3c3e --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/agent-with-execution-role.yaml @@ -0,0 +1,23 @@ 
+default_agent: my_agent +agents: + my_agent: + name: my_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + source_path: null + aws: + account: '123456789012' + region: us-west-2 + execution_role: arn:aws:iam::123456789012:role/StarterToolkitExecutionRole + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: AGENT_ROLE_123 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/AGENT_ROLE_123 + memory: + mode: NO_MEMORY diff --git a/src/cli/commands/import/__tests__/fixtures/cli-project-with-agent-and-memory.json b/src/cli/commands/import/__tests__/fixtures/cli-project-with-agent-and-memory.json new file mode 100644 index 000000000..cb9a7fa76 --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/cli-project-with-agent-and-memory.json @@ -0,0 +1,39 @@ +{ + "name": "MyProject", + "version": 1, + "agents": [ + { + "type": "AgentCoreRuntime", + "name": "existing_agent", + "build": "CodeZip", + "entrypoint": "main.py", + "codeLocation": "app/existing_agent", + "runtimeVersion": "PYTHON_3_12", + "protocol": "HTTP", + "networkMode": "PUBLIC", + "instrumentation": { + "enableOtel": true + } + } + ], + "memories": [ + { + "type": "AgentCoreMemory", + "name": "existing_agent_memory", + "eventExpiryDuration": 30, + "strategies": [ + { + "type": "SEMANTIC" + } + ] + } + ], + "credentials": [ + { + "type": "ApiKeyCredentialProvider", + "name": "my_api_key" + } + ], + "evaluators": [], + "onlineEvalConfigs": [] +} diff --git a/src/cli/commands/import/__tests__/fixtures/different-agent.yaml b/src/cli/commands/import/__tests__/fixtures/different-agent.yaml new file mode 100644 index 000000000..cef7d3f64 --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/different-agent.yaml @@ -0,0 +1,28 @@ +default_agent: new_toolkit_agent +agents: + new_toolkit_agent: + name: new_toolkit_agent + entrypoint: 
main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + source_path: null + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: AGENT_NEW_123 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/AGENT_NEW_123 + agent_session_id: null + memory: + mode: STM_AND_LTM + memory_id: MEM_NEW_456 + memory_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/MEM_NEW_456 + memory_name: new_toolkit_memory + event_expiry_days: 60 + api_key_credential_provider_name: new_api_key_cred diff --git a/src/cli/commands/import/__tests__/fixtures/partial-import.yaml b/src/cli/commands/import/__tests__/fixtures/partial-import.yaml new file mode 100644 index 000000000..96746fa3f --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/partial-import.yaml @@ -0,0 +1,44 @@ +default_agent: deployed_agent +agents: + deployed_agent: + name: deployed_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-deployed-111 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:111122223333:runtime/agent-deployed-111 + memory: + mode: STM_ONLY + memory_id: null + memory_name: deployed_agent_memory + event_expiry_days: 14 + new_agent: + name: new_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: MCP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + memory: + mode: NO_MEMORY diff --git 
a/src/cli/commands/import/__tests__/fixtures/same-name-agent.yaml b/src/cli/commands/import/__tests__/fixtures/same-name-agent.yaml new file mode 100644 index 000000000..40a507f25 --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/same-name-agent.yaml @@ -0,0 +1,32 @@ +default_agent: existing_agent +agents: + existing_agent: + name: existing_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + source_path: null + aws: + account: '999888777666' + region: us-east-1 + network_configuration: + network_mode: VPC + network_mode_config: + subnet_ids: + - subnet-aaaa1111 + security_group_ids: + - sg-bbbb2222 + protocol_configuration: + server_protocol: MCP + observability: + enabled: false + bedrock_agentcore: + agent_id: AGENT_EXISTING_999 + agent_arn: arn:aws:bedrock-agentcore:us-east-1:999888777666:runtime/AGENT_EXISTING_999 + agent_session_id: null + memory: + mode: STM_ONLY + memory_id: MEM_EXISTING_888 + memory_arn: arn:aws:bedrock-agentcore:us-east-1:999888777666:memory/MEM_EXISTING_888 + memory_name: existing_agent_memory + event_expiry_days: 14 diff --git a/src/cli/commands/import/__tests__/fixtures/similar-names.yaml b/src/cli/commands/import/__tests__/fixtures/similar-names.yaml new file mode 100644 index 000000000..125e24189 --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/similar-names.yaml @@ -0,0 +1,40 @@ +default_agent: agent1 +agents: + agent1: + name: agent1 + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-short-111 + memory: + mode: NO_MEMORY + agent1_v2: + name: agent1_v2 + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' 
+ region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: MCP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-long-222 + memory: + mode: NO_MEMORY diff --git a/src/cli/commands/import/__tests__/fixtures/three-agents-shared-memory.yaml b/src/cli/commands/import/__tests__/fixtures/three-agents-shared-memory.yaml new file mode 100644 index 000000000..022ad254b --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/three-agents-shared-memory.yaml @@ -0,0 +1,65 @@ +default_agent: agent_alpha +agents: + agent_alpha: + name: agent_alpha + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-aaa-111 + memory: + mode: STM_AND_LTM + memory_id: mem-shared-001 + memory_name: shared_memory + event_expiry_days: 30 + agent_beta: + name: agent_beta + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_13 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: MCP + observability: + enabled: false + bedrock_agentcore: + agent_id: agent-bbb-222 + memory: + mode: STM_AND_LTM + memory_id: mem-shared-001 + memory_name: shared_memory + event_expiry_days: 30 + agent_gamma: + name: agent_gamma + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_11 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-ccc-333 + memory: + mode: NO_MEMORY diff --git 
a/src/cli/commands/import/__tests__/fixtures/two-agents.yaml b/src/cli/commands/import/__tests__/fixtures/two-agents.yaml new file mode 100644 index 000000000..b0a203192 --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/two-agents.yaml @@ -0,0 +1,49 @@ +default_agent: search_agent +agents: + search_agent: + name: search_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-abc-111 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:111122223333:runtime/agent-abc-111 + memory: + mode: STM_AND_LTM + memory_id: mem-xyz-999 + memory_name: shared_memory + event_expiry_days: 30 + identity: + credential_providers: + - name: github-oauth + credential_provider_type: OAUTH + chat_agent: + name: chat_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: MCP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-def-222 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:111122223333:runtime/agent-def-222 + memory: + mode: NO_MEMORY diff --git a/src/cli/commands/import/__tests__/fixtures/underscore-names.yaml b/src/cli/commands/import/__tests__/fixtures/underscore-names.yaml new file mode 100644 index 000000000..81e58ce8f --- /dev/null +++ b/src/cli/commands/import/__tests__/fixtures/underscore-names.yaml @@ -0,0 +1,40 @@ +default_agent: my_search_agent +agents: + my_search_agent: + name: my_search_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + 
network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-us-111 + memory: + mode: NO_MEMORY + my_chat_agent: + name: my_chat_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: MCP + observability: + enabled: true + bedrock_agentcore: + agent_id: agent-us-222 + memory: + mode: NO_MEMORY diff --git a/src/cli/commands/import/__tests__/idempotency.test.ts b/src/cli/commands/import/__tests__/idempotency.test.ts new file mode 100644 index 000000000..dbf9e6372 --- /dev/null +++ b/src/cli/commands/import/__tests__/idempotency.test.ts @@ -0,0 +1,646 @@ +/** + * Test Group 7: Import Idempotency + * + * Verifies that running `agentcore import` twice with the same source is safe: + * - No duplicate agents/memories in the config + * - Second import skips already-existing resources + * - Phase 1/Phase 2 are NOT re-run for already-imported resources + * - Deployed state is not corrupted + */ +// ── Import the function under test AFTER mocks ──────────────────────────────── +import { handleImport } from '../actions'; +import type { ParsedStarterToolkitConfig } from '../types'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +// ── Hoisted mock fns (available inside vi.mock factories) ───────────────────── + +const { + mockFindConfigRoot, + mockConfigIOInstance, + MockConfigIOClass, + mockValidateAwsCredentials, + mockBuildCdkProject, + mockSynthesizeCdk, + mockCheckBootstrapNeeded, + mockBootstrapEnvironment, + mockSetupPythonProject, + mockExecutePhase1, + mockGetDeployedTemplate, + mockExecutePhase2, + mockPublishCdkAssets, + mockParseStarterToolkitYaml, + mockExistsSync, + mockMkdirSync, + mockCopyFileSync, + mockReaddirSync, + 
mockReadFileSync, + mockWriteFileSync, +} = vi.hoisted(() => { + const inst = { + readProjectSpec: vi.fn(), + writeProjectSpec: vi.fn(), + readAWSDeploymentTargets: vi.fn(), + writeAWSDeploymentTargets: vi.fn(), + readDeployedState: vi.fn(), + writeDeployedState: vi.fn(), + }; + return { + mockFindConfigRoot: vi.fn(), + mockConfigIOInstance: inst, + MockConfigIOClass: vi.fn(function (this: any) { + Object.assign(this, inst); + return this; + }), + mockValidateAwsCredentials: vi.fn(), + mockBuildCdkProject: vi.fn(), + mockSynthesizeCdk: vi.fn(), + mockSetupPythonProject: vi.fn(), + mockExecutePhase1: vi.fn(), + mockGetDeployedTemplate: vi.fn(), + mockExecutePhase2: vi.fn(), + mockCheckBootstrapNeeded: vi.fn(), + mockBootstrapEnvironment: vi.fn(), + mockPublishCdkAssets: vi.fn(), + mockParseStarterToolkitYaml: vi.fn(), + mockExistsSync: vi.fn(), + mockMkdirSync: vi.fn(), + mockCopyFileSync: vi.fn(), + mockReaddirSync: vi.fn(), + mockReadFileSync: vi.fn(), + mockWriteFileSync: vi.fn(), + }; +}); + +// ── Module mocks ────────────────────────────────────────────────────────────── + +vi.mock('../../../../lib', () => ({ + APP_DIR: 'app', + ConfigIO: MockConfigIOClass, + findConfigRoot: (...args: unknown[]) => mockFindConfigRoot(...args), +})); + +vi.mock('../../../aws/account', () => ({ + validateAwsCredentials: (...args: unknown[]) => mockValidateAwsCredentials(...args), +})); + +vi.mock('../../../operations/deploy', () => ({ + buildCdkProject: (...args: unknown[]) => mockBuildCdkProject(...args), + synthesizeCdk: (...args: unknown[]) => mockSynthesizeCdk(...args), + checkBootstrapNeeded: (...args: unknown[]) => mockCheckBootstrapNeeded(...args), + bootstrapEnvironment: (...args: unknown[]) => mockBootstrapEnvironment(...args), +})); + +vi.mock('../../../cdk/local-cdk-project', () => ({ + LocalCdkProject: vi.fn(), +})); + +vi.mock('../../../cdk/toolkit-lib', () => ({ + silentIoHost: {}, +})); + +vi.mock('../../../logging', () => ({ + ExecLogger: class MockExecLogger { + 
startStep = vi.fn(); + endStep = vi.fn(); + log = vi.fn(); + finalize = vi.fn(); + getRelativeLogPath = vi.fn().mockReturnValue('agentcore/.cli/logs/import/import-mock.log'); + logFilePath = 'agentcore/.cli/logs/import/import-mock.log'; + }, +})); + +vi.mock('../../../operations/python/setup', () => ({ + setupPythonProject: (...args: unknown[]) => mockSetupPythonProject(...args), +})); + +vi.mock('../phase1-update', () => ({ + executePhase1: (...args: unknown[]) => mockExecutePhase1(...args), + getDeployedTemplate: (...args: unknown[]) => mockGetDeployedTemplate(...args), +})); + +vi.mock('../phase2-import', () => ({ + executePhase2: (...args: unknown[]) => mockExecutePhase2(...args), + publishCdkAssets: (...args: unknown[]) => mockPublishCdkAssets(...args), +})); + +vi.mock('../yaml-parser', () => ({ + parseStarterToolkitYaml: (...args: unknown[]) => mockParseStarterToolkitYaml(...args), +})); + +vi.mock('node:fs', () => ({ + existsSync: (...args: unknown[]) => mockExistsSync(...args), + mkdirSync: (...args: unknown[]) => mockMkdirSync(...args), + copyFileSync: (...args: unknown[]) => mockCopyFileSync(...args), + readdirSync: (...args: unknown[]) => mockReaddirSync(...args), + readFileSync: (...args: unknown[]) => mockReadFileSync(...args), + writeFileSync: (...args: unknown[]) => mockWriteFileSync(...args), +})); + +// ── Test Fixtures ───────────────────────────────────────────────────────────── + +function makeParsedConfig(overrides?: Partial): ParsedStarterToolkitConfig { + return { + defaultAgent: 'my-agent', + agents: [ + { + name: 'my-agent', + entrypoint: 'main.py', + build: 'CodeZip' as const, + runtimeVersion: 'PYTHON_3_12', + language: 'python' as const, + sourcePath: '/tmp/src/my-agent', + networkMode: 'PUBLIC' as const, + protocol: 'HTTP' as const, + enableOtel: true, + physicalAgentId: 'rt-abc123', + physicalAgentArn: 'arn:aws:bedrock-agentcore:us-east-1:123456789012:runtime/rt-abc123', + }, + ], + memories: [ + { + name: 'my-memory', + mode: 
'STM_ONLY' as const, + eventExpiryDays: 30, + physicalMemoryId: 'mem-xyz789', + physicalMemoryArn: 'arn:aws:bedrock-agentcore:us-east-1:123456789012:memory/mem-xyz789', + }, + ], + credentials: [], + awsTarget: { account: '123456789012', region: 'us-east-1' }, + ...overrides, + }; +} + +function makeProjectSpec(agents: { name: string }[] = [], memories: { name: string }[] = []) { + return { + name: 'TestProject', + version: 1, + agents: agents.map(a => ({ + type: 'AgentCoreRuntime', + name: a.name, + build: 'CodeZip', + entrypoint: 'main.py', + codeLocation: `app/${a.name}`, + runtimeVersion: 'PYTHON_3_12', + protocol: 'HTTP', + networkMode: 'PUBLIC', + instrumentation: { enableOtel: true }, + })), + memories: memories.map(m => ({ + type: 'AgentCoreMemory', + name: m.name, + eventExpiryDuration: 30, + strategies: [{ type: 'SEMANTIC' }], + })), + credentials: [], + }; +} + +const synthTemplate = { + AWSTemplateFormatVersion: '2010-09-09', + Resources: { + MyAgentRuntime: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'TestProject_my-agent' }, + }, + MyMemory: { + Type: 'AWS::BedrockAgentCore::Memory', + Properties: { Name: 'my-memory' }, + }, + MyRole: { + Type: 'AWS::IAM::Role', + Properties: { RoleName: 'my-role' }, + }, + }, +}; + +const deployedTemplate = { + AWSTemplateFormatVersion: '2010-09-09', + Resources: { + MyRole: { + Type: 'AWS::IAM::Role', + Properties: { RoleName: 'my-role' }, + }, + }, +}; + +// ── Common setup ────────────────────────────────────────────────────────────── + +function setupCommonMocks() { + mockFindConfigRoot.mockReturnValue('/tmp/project/agentcore'); + + mockConfigIOInstance.readAWSDeploymentTargets.mockResolvedValue([ + { name: 'default', account: '123456789012', region: 'us-east-1' }, + ]); + + mockValidateAwsCredentials.mockResolvedValue(undefined); + mockSetupPythonProject.mockResolvedValue({ status: 'success' }); + + mockExistsSync.mockReturnValue(true); + mockReaddirSync.mockReturnValue([]); + 
mockReadFileSync.mockReturnValue(JSON.stringify(synthTemplate)); + + mockCheckBootstrapNeeded.mockResolvedValue({ needsBootstrap: false }); + mockBootstrapEnvironment.mockResolvedValue(undefined); + mockBuildCdkProject.mockResolvedValue(undefined); + mockSynthesizeCdk.mockResolvedValue({ + toolkitWrapper: { + synth: vi.fn().mockResolvedValue({ assemblyDirectory: '/tmp/cdk.out' }), + dispose: vi.fn(), + }, + }); + + mockExecutePhase1.mockResolvedValue({ success: true, stackExists: true }); + mockGetDeployedTemplate.mockResolvedValue(deployedTemplate); + mockExecutePhase2.mockResolvedValue({ success: true }); + mockPublishCdkAssets.mockResolvedValue(undefined); + + mockConfigIOInstance.readDeployedState.mockResolvedValue({ targets: {} }); + mockConfigIOInstance.writeDeployedState.mockResolvedValue(undefined); + mockConfigIOInstance.writeProjectSpec.mockResolvedValue(undefined); +} + +// ── Tests ───────────────────────────────────────────────────────────────────── + +describe('Import Idempotency (Test Group 7)', () => { + beforeEach(() => { + vi.clearAllMocks(); + setupCommonMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + // ── First Import: Normal Flow ────────────────────────────────────────────── + + describe('first import (clean project)', () => { + it('adds agents and memories to a project with no existing agents', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + + expect(result.success).toBe(true); + expect(result.importedAgents).toContain('my-agent'); + expect(result.importedMemories).toContain('my-memory'); + + expect(mockConfigIOInstance.writeProjectSpec).toHaveBeenCalledTimes(1); + const writtenSpec = mockConfigIOInstance.writeProjectSpec.mock.calls[0]![0]; + expect(writtenSpec.agents).toHaveLength(1); + expect(writtenSpec.agents[0].name).toBe('my-agent'); 
+ expect(writtenSpec.memories).toHaveLength(1); + expect(writtenSpec.memories[0].name).toBe('my-memory'); + }); + + it('calls Phase 1 and Phase 2 on first import', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + + expect(result.success).toBe(true); + expect(mockExecutePhase1).toHaveBeenCalledTimes(1); + expect(mockExecutePhase2).toHaveBeenCalledTimes(1); + }); + + it('builds resourcesToImport from agents with physical IDs', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + await handleImport({ source: '/tmp/config.yaml' }); + + expect(mockExecutePhase2).toHaveBeenCalledTimes(1); + const phase2Options = mockExecutePhase2.mock.calls[0]![0]; + expect(phase2Options.resourcesToImport).toHaveLength(2); + expect(phase2Options.resourcesToImport[0].resourceType).toBe('AWS::BedrockAgentCore::Runtime'); + expect(phase2Options.resourcesToImport[0].resourceIdentifier).toEqual({ AgentRuntimeId: 'rt-abc123' }); + expect(phase2Options.resourcesToImport[1].resourceType).toBe('AWS::BedrockAgentCore::Memory'); + expect(phase2Options.resourcesToImport[1].resourceIdentifier).toEqual({ MemoryId: 'mem-xyz789' }); + }); + + it('writes deployed state after successful import', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + await handleImport({ source: '/tmp/config.yaml' }); + + expect(mockConfigIOInstance.writeDeployedState).toHaveBeenCalledTimes(1); + const state = mockConfigIOInstance.writeDeployedState.mock.calls[0]![0]; + expect(state.targets.default.resources.agents['my-agent']).toBeDefined(); + expect(state.targets.default.resources.agents['my-agent'].runtimeId).toBe('rt-abc123'); + 
expect(state.targets.default.resources.memories['my-memory']).toBeDefined(); + expect(state.targets.default.resources.memories['my-memory'].memoryId).toBe('mem-xyz789'); + }); + }); + + // ── Second Import: Idempotency ───────────────────────────────────────────── + + describe('second import (agents already exist in project)', () => { + it('skips agents that already exist in the project config', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue( + makeProjectSpec([{ name: 'my-agent' }], [{ name: 'my-memory' }]) + ); + + const progressMessages: string[] = []; + const result = await handleImport({ + source: '/tmp/config.yaml', + onProgress: msg => progressMessages.push(msg), + }); + + expect(result.success).toBe(true); + expect(progressMessages.some(m => m.includes('Skipping agent "my-agent"'))).toBe(true); + expect(progressMessages.some(m => m.includes('already exists in project'))).toBe(true); + expect(progressMessages.some(m => m.includes('Skipping memory "my-memory"'))).toBe(true); + }); + + it('does not duplicate agents in the config on second import', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue( + makeProjectSpec([{ name: 'my-agent' }], [{ name: 'my-memory' }]) + ); + + await handleImport({ source: '/tmp/config.yaml' }); + + expect(mockConfigIOInstance.writeProjectSpec).toHaveBeenCalledTimes(1); + const writtenSpec = mockConfigIOInstance.writeProjectSpec.mock.calls[0]![0]; + expect(writtenSpec.agents).toHaveLength(1); + expect(writtenSpec.memories).toHaveLength(1); + }); + + it('does NOT re-run Phase 2 for already-imported resources (bug fix)', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue( + makeProjectSpec([{ name: 'my-agent' }], [{ name: 'my-memory' }]) + ); + + const result = await 
handleImport({ source: '/tmp/config.yaml' }); + expect(result.success).toBe(true); + + // After the fix: when all agents/memories already exist in the project, + // newlyAddedAgentNames and newlyAddedMemoryNames are empty, so + // agentsToImport and memoriesToImport are empty. + // The early return at "agentsToImport.length === 0 && memoriesToImport.length === 0" + // fires and Phase 2 is never called. + expect(mockExecutePhase2).not.toHaveBeenCalled(); + expect(mockExecutePhase1).not.toHaveBeenCalled(); + }); + + it('returns empty import lists when all resources already exist', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue( + makeProjectSpec([{ name: 'my-agent' }], [{ name: 'my-memory' }]) + ); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + + expect(result.success).toBe(true); + expect(result.importedAgents).toEqual([]); + expect(result.importedMemories).toEqual([]); + }); + + it('does not corrupt deployed state on second import', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + const existingDeployedState = { + targets: { + default: { + resources: { + stackName: 'AgentCore-TestProject-default', + agents: { + 'my-agent': { + runtimeId: 'rt-abc123', + runtimeArn: 'arn:aws:bedrock-agentcore:us-east-1:123456789012:runtime/rt-abc123', + roleArn: 'imported', + }, + }, + memories: { + 'my-memory': { + memoryId: 'mem-xyz789', + memoryArn: 'arn:aws:bedrock-agentcore:us-east-1:123456789012:memory/mem-xyz789', + }, + }, + }, + }, + }, + }; + mockConfigIOInstance.readDeployedState.mockResolvedValue(existingDeployedState); + mockConfigIOInstance.readProjectSpec.mockResolvedValue( + makeProjectSpec([{ name: 'my-agent' }], [{ name: 'my-memory' }]) + ); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + expect(result.success).toBe(true); + + // No Phase 2 was run, so writeDeployedState should NOT be called + 
// (the early return fires before the deployed state update). + expect(mockConfigIOInstance.writeDeployedState).not.toHaveBeenCalled(); + }); + }); + + // ── Partial Overlap ──────────────────────────────────────────────────────── + + describe('partial overlap (some agents new, some existing)', () => { + it('imports only new agents and skips existing ones', async () => { + const parsed: ParsedStarterToolkitConfig = { + defaultAgent: 'agent-a', + agents: [ + { + name: 'agent-a', + entrypoint: 'main.py', + build: 'CodeZip' as const, + runtimeVersion: 'PYTHON_3_12', + language: 'python' as const, + networkMode: 'PUBLIC' as const, + protocol: 'HTTP' as const, + enableOtel: true, + physicalAgentId: 'rt-aaa', + }, + { + name: 'agent-b', + entrypoint: 'main.py', + build: 'CodeZip' as const, + runtimeVersion: 'PYTHON_3_12', + language: 'python' as const, + networkMode: 'PUBLIC' as const, + protocol: 'HTTP' as const, + enableOtel: true, + physicalAgentId: 'rt-bbb', + }, + ], + memories: [], + credentials: [], + awsTarget: { account: '123456789012', region: 'us-east-1' }, + }; + mockParseStarterToolkitYaml.mockReturnValue(parsed); + + // agent-a already exists, agent-b is new + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec([{ name: 'agent-a' }])); + + const multiAgentSynthTemplate = { + AWSTemplateFormatVersion: '2010-09-09', + Resources: { + AgentARuntime: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'TestProject_agent-a' }, + }, + AgentBRuntime: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'TestProject_agent-b' }, + }, + MyRole: { + Type: 'AWS::IAM::Role', + Properties: { RoleName: 'my-role' }, + }, + }, + }; + mockReadFileSync.mockReturnValue(JSON.stringify(multiAgentSynthTemplate)); + + const progressMessages: string[] = []; + const result = await handleImport({ + source: '/tmp/config.yaml', + onProgress: msg => progressMessages.push(msg), + }); + + 
expect(result.success).toBe(true); + expect(progressMessages.some(m => m.includes('Skipping agent "agent-a"'))).toBe(true); + + const writtenSpec = mockConfigIOInstance.writeProjectSpec.mock.calls[0]![0]; + expect(writtenSpec.agents).toHaveLength(2); + expect(writtenSpec.agents.map((a: { name: string }) => a.name)).toContain('agent-b'); + + // Phase 2 should only import agent-b, not agent-a + expect(mockExecutePhase2).toHaveBeenCalledTimes(1); + const phase2Options = mockExecutePhase2.mock.calls[0]![0]; + const importedIds = phase2Options.resourcesToImport.map( + (r: { resourceIdentifier: Record }) => r.resourceIdentifier.AgentRuntimeId + ); + expect(importedIds).toContain('rt-bbb'); + expect(importedIds).not.toContain('rt-aaa'); + }); + }); + + // ── Credential Idempotency ───────────────────────────────────────────────── + + describe('credential idempotency', () => { + it('skips credentials that already exist', async () => { + mockParseStarterToolkitYaml.mockReturnValue( + makeParsedConfig({ credentials: [{ name: 'my-cred', providerType: 'api_key' as const }] }) + ); + + const existingSpec = makeProjectSpec(); + (existingSpec as any).credentials = [{ type: 'ApiKeyCredentialProvider', name: 'my-cred' }]; + mockConfigIOInstance.readProjectSpec.mockResolvedValue(existingSpec); + + const progressMessages: string[] = []; + await handleImport({ + source: '/tmp/config.yaml', + onProgress: msg => progressMessages.push(msg), + }); + + expect(progressMessages.some(m => m.includes('Skipping credential "my-cred"'))).toBe(true); + const writtenSpec = mockConfigIOInstance.writeProjectSpec.mock.calls[0]![0]; + expect(writtenSpec.credentials).toHaveLength(1); + }); + }); + + // ── Source Code Copy Behavior ────────────────────────────────────────────── + + describe('source code copy on re-import', () => { + it('copies source files for new agents during first import', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + 
mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + mockExistsSync.mockReturnValue(true); + mockReaddirSync.mockReturnValue([{ name: 'main.py', isDirectory: () => false, isSymbolicLink: () => false }]); + + await handleImport({ source: '/tmp/config.yaml' }); + + // On first import, the agent is new so source copy runs + expect(mockCopyFileSync).toHaveBeenCalled(); + }); + + it('runs python setup for new agents during first import', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + await handleImport({ source: '/tmp/config.yaml' }); + + expect(mockSetupPythonProject).toHaveBeenCalledTimes(1); + }); + }); + + // ── Edge Cases ───────────────────────────────────────────────────────────── + + describe('edge cases', () => { + it('handles agents with no physical IDs on second import (no CFN phases)', async () => { + mockParseStarterToolkitYaml.mockReturnValue( + makeParsedConfig({ + agents: [ + { + name: 'my-agent', + entrypoint: 'main.py', + build: 'CodeZip' as const, + runtimeVersion: 'PYTHON_3_12', + language: 'python' as const, + networkMode: 'PUBLIC' as const, + protocol: 'HTTP' as const, + enableOtel: true, + // No physicalAgentId + }, + ], + memories: [], + }) + ); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec([{ name: 'my-agent' }])); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + + expect(result.success).toBe(true); + expect(mockExecutePhase1).not.toHaveBeenCalled(); + expect(mockExecutePhase2).not.toHaveBeenCalled(); + }); + + it('returns early when no agents in YAML', async () => { + mockParseStarterToolkitYaml.mockReturnValue({ + agents: [], + memories: [], + credentials: [], + awsTarget: {}, + }); + mockConfigIOInstance.readProjectSpec.mockResolvedValue(makeProjectSpec()); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + + 
expect(result.success).toBe(false); + expect(result.error).toContain('No agents found'); + }); + + it('returns error when no project found', async () => { + mockFindConfigRoot.mockReturnValue(null); + + const result = await handleImport({ source: '/tmp/config.yaml' }); + + expect(result.success).toBe(false); + expect(result.error).toContain('No agentcore project found'); + }); + }); + + // ── Deployment target idempotency ────────────────────────────────────────── + + describe('deployment target idempotency', () => { + it('uses existing target on second import without creating a new one', async () => { + mockParseStarterToolkitYaml.mockReturnValue(makeParsedConfig()); + mockConfigIOInstance.readAWSDeploymentTargets.mockResolvedValue([ + { name: 'default', account: '123456789012', region: 'us-east-1' }, + ]); + mockConfigIOInstance.readProjectSpec.mockResolvedValue( + makeProjectSpec([{ name: 'my-agent' }], [{ name: 'my-memory' }]) + ); + + await handleImport({ source: '/tmp/config.yaml' }); + + expect(mockConfigIOInstance.writeAWSDeploymentTargets).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/src/cli/commands/import/__tests__/import-memory.test.ts b/src/cli/commands/import/__tests__/import-memory.test.ts new file mode 100644 index 000000000..4d713f291 --- /dev/null +++ b/src/cli/commands/import/__tests__/import-memory.test.ts @@ -0,0 +1,1175 @@ +/* eslint-disable @typescript-eslint/dot-notation */ +/** + * Test Group 2: Agent with Memory (STM + LTM) — Import Memory Unit Tests + * + * Covers: + * - YAML parsing of agent + memory config (STM_AND_LTM mode) + * - toMemorySpec conversion: strategies mapping + * - eventExpiryDuration clamping (min 7, max 365) + * - Memory merge logic + * - Memory physical ID extraction + * - Phase 2 import resource list construction + * - Memory name deduplication + * - Deployed state update with memory info + * - Template logical ID lookup for memories + */ +import type { Memory } from '../../../../schema'; +import { 
buildImportTemplate, findLogicalIdByProperty, findLogicalIdsByType } from '../template-utils'; +import type { CfnTemplate } from '../template-utils'; +import type { ParsedStarterToolkitMemory, ResourceToImport } from '../types'; +import { parseStarterToolkitYaml } from '../yaml-parser'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { describe, expect, it } from 'vitest'; + +// ============================================================================ +// Helper: replicates toMemorySpec from actions.ts for unit testing +// (We test the logic directly since the function is not exported) +// ============================================================================ +function toMemorySpec(mem: ParsedStarterToolkitMemory): Memory { + const strategies: Memory['strategies'] = []; + + if (mem.mode === 'STM_ONLY' || mem.mode === 'STM_AND_LTM') { + strategies.push({ type: 'SEMANTIC' }); + } + if (mem.mode === 'STM_AND_LTM') { + strategies.push({ type: 'SUMMARIZATION' }); + strategies.push({ type: 'USER_PREFERENCE' }); + } + + return { + type: 'AgentCoreMemory', + name: mem.name, + eventExpiryDuration: Math.max(7, Math.min(365, mem.eventExpiryDays)), + strategies, + }; +} + +// ============================================================================ +// Test YAML fixtures +// ============================================================================ + +function createTempYaml(content: string): string { + const tmpDir = os.tmpdir(); + const tmpFile = path.join(tmpDir, `test-import-memory-${Date.now()}-${Math.random().toString(36).slice(2)}.yaml`); + fs.writeFileSync(tmpFile, content, 'utf-8'); + return tmpFile; +} + +const STM_AND_LTM_YAML = ` +agents: + my_memory_agent: + name: my_memory_agent + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + 
network_configuration: + network_mode: PUBLIC + observability: + enabled: true + bedrock_agentcore: + agent_id: abc123def456 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/abc123def456 + memory: + mode: STM_AND_LTM + memory_name: my_agent_memory + memory_id: mem-001122334455 + memory_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-001122334455 + event_expiry_days: 30 +`; + +const STM_ONLY_YAML = ` +agents: + stm_agent: + name: stm_agent + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: stm-agent-id-001 + memory: + mode: STM_ONLY + memory_name: stm_memory + memory_id: mem-stm-only-001 + event_expiry_days: 14 +`; + +const NO_MEMORY_YAML = ` +agents: + no_mem_agent: + name: no_mem_agent + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: no-mem-agent-001 + memory: + mode: NO_MEMORY + memory_id: mem-should-be-ignored +`; + +const MULTI_AGENT_SHARED_MEMORY_YAML = ` +agents: + agent_a: + name: agent_a + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: agent-a-id + memory: + mode: STM_AND_LTM + memory_name: shared_memory + memory_id: mem-shared-001 + memory_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-shared-001 + event_expiry_days: 60 + agent_b: + name: agent_b + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: 
main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: agent-b-id + memory: + mode: STM_AND_LTM + memory_name: shared_memory + memory_id: mem-shared-001 + memory_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-shared-001 + event_expiry_days: 60 +`; + +const MISSING_MEMORY_NAME_YAML = ` +agents: + unnamed_mem_agent: + name: unnamed_mem_agent + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: unnamed-mem-id + memory: + mode: STM_AND_LTM + memory_id: mem-unnamed-001 + event_expiry_days: 30 +`; + +const EXPIRY_CLAMPING_LOW_YAML = ` +agents: + clamp_low_agent: + name: clamp_low_agent + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + memory: + mode: STM_ONLY + memory_name: clamp_low_memory + event_expiry_days: 1 +`; + +const EXPIRY_CLAMPING_HIGH_YAML = ` +agents: + clamp_high_agent: + name: clamp_high_agent + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + memory: + mode: STM_AND_LTM + memory_name: clamp_high_memory + event_expiry_days: 999 +`; + +// ============================================================================ +// YAML Parsing Tests +// ============================================================================ + +describe('YAML Parsing: Agent with Memory', () => 
{ + it('parses STM_AND_LTM agent + memory config correctly', () => { + const tmpFile = createTempYaml(STM_AND_LTM_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + expect(parsed.agents).toHaveLength(1); + expect(parsed.agents[0]!.name).toBe('my_memory_agent'); + expect(parsed.agents[0]!.physicalAgentId).toBe('abc123def456'); + expect(parsed.agents[0]!.physicalAgentArn).toBe( + 'arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/abc123def456' + ); + + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.name).toBe('my_agent_memory'); + expect(parsed.memories[0]!.mode).toBe('STM_AND_LTM'); + expect(parsed.memories[0]!.physicalMemoryId).toBe('mem-001122334455'); + expect(parsed.memories[0]!.physicalMemoryArn).toBe( + 'arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-001122334455' + ); + expect(parsed.memories[0]!.eventExpiryDays).toBe(30); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('parses STM_ONLY memory config correctly', () => { + const tmpFile = createTempYaml(STM_ONLY_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.name).toBe('stm_memory'); + expect(parsed.memories[0]!.mode).toBe('STM_ONLY'); + expect(parsed.memories[0]!.physicalMemoryId).toBe('mem-stm-only-001'); + expect(parsed.memories[0]!.eventExpiryDays).toBe(14); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('skips NO_MEMORY mode even if memory_id is present', () => { + const tmpFile = createTempYaml(NO_MEMORY_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + // Agent should still be parsed + expect(parsed.agents).toHaveLength(1); + expect(parsed.agents[0]!.name).toBe('no_mem_agent'); + expect(parsed.agents[0]!.physicalAgentId).toBe('no-mem-agent-001'); + + // Memory should NOT be parsed since mode is NO_MEMORY + expect(parsed.memories).toHaveLength(0); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('extracts 
AWS target info from agent config', () => { + const tmpFile = createTempYaml(STM_AND_LTM_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + expect(parsed.awsTarget.account).toBe('123456789012'); + expect(parsed.awsTarget.region).toBe('us-west-2'); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('generates default memory name when memory_name is missing', () => { + const tmpFile = createTempYaml(MISSING_MEMORY_NAME_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + expect(parsed.memories).toHaveLength(1); + // Should fallback to agent_name + "_memory" + expect(parsed.memories[0]!.name).toBe('unnamed_mem_agent_memory'); + expect(parsed.memories[0]!.physicalMemoryId).toBe('mem-unnamed-001'); + } finally { + fs.unlinkSync(tmpFile); + } + }); +}); + +// ============================================================================ +// Memory Name Deduplication Tests +// ============================================================================ + +describe('Memory Name Deduplication', () => { + it('deduplicates shared memory across multiple agents', () => { + const tmpFile = createTempYaml(MULTI_AGENT_SHARED_MEMORY_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + // Both agents should be parsed + expect(parsed.agents).toHaveLength(2); + + // But only ONE memory should exist (deduped by name) + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.name).toBe('shared_memory'); + expect(parsed.memories[0]!.physicalMemoryId).toBe('mem-shared-001'); + } finally { + fs.unlinkSync(tmpFile); + } + }); +}); + +// ============================================================================ +// toMemorySpec Conversion Tests +// ============================================================================ + +describe('toMemorySpec', () => { + it('STM_AND_LTM mode produces SEMANTIC, SUMMARIZATION, USER_PREFERENCE strategies', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'test_memory', + 
mode: 'STM_AND_LTM', + eventExpiryDays: 30, + physicalMemoryId: 'mem-123', + }; + + const result = toMemorySpec(mem); + + expect(result.type).toBe('AgentCoreMemory'); + expect(result.name).toBe('test_memory'); + expect(result.eventExpiryDuration).toBe(30); + expect(result.strategies).toHaveLength(3); + expect(result.strategies.map(s => s.type)).toEqual(['SEMANTIC', 'SUMMARIZATION', 'USER_PREFERENCE']); + }); + + it('STM_ONLY mode produces only SEMANTIC strategy', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'stm_memory', + mode: 'STM_ONLY', + eventExpiryDays: 14, + physicalMemoryId: 'mem-456', + }; + + const result = toMemorySpec(mem); + + expect(result.type).toBe('AgentCoreMemory'); + expect(result.name).toBe('stm_memory'); + expect(result.eventExpiryDuration).toBe(14); + expect(result.strategies).toHaveLength(1); + expect(result.strategies[0]!.type).toBe('SEMANTIC'); + }); + + it('NO_MEMORY mode produces empty strategies', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'no_mem', + mode: 'NO_MEMORY', + eventExpiryDays: 30, + }; + + const result = toMemorySpec(mem); + + expect(result.strategies).toHaveLength(0); + }); + + describe('eventExpiryDuration clamping', () => { + it('clamps low values to minimum of 7', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'low_expiry', + mode: 'STM_ONLY', + eventExpiryDays: 1, + }; + + const result = toMemorySpec(mem); + expect(result.eventExpiryDuration).toBe(7); + }); + + it('clamps zero to minimum of 7', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'zero_expiry', + mode: 'STM_ONLY', + eventExpiryDays: 0, + }; + + const result = toMemorySpec(mem); + expect(result.eventExpiryDuration).toBe(7); + }); + + it('clamps negative values to minimum of 7', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'neg_expiry', + mode: 'STM_ONLY', + eventExpiryDays: -10, + }; + + const result = toMemorySpec(mem); + expect(result.eventExpiryDuration).toBe(7); + }); + + it('clamps 
high values to maximum of 365', () => { + const mem: ParsedStarterToolkitMemory = { + name: 'high_expiry', + mode: 'STM_AND_LTM', + eventExpiryDays: 999, + }; + + const result = toMemorySpec(mem); + expect(result.eventExpiryDuration).toBe(365); + }); + + it('preserves valid values within range', () => { + for (const days of [7, 30, 90, 180, 365]) { + const mem: ParsedStarterToolkitMemory = { + name: `valid_${days}`, + mode: 'STM_ONLY', + eventExpiryDays: days, + }; + const result = toMemorySpec(mem); + expect(result.eventExpiryDuration).toBe(days); + } + }); + }); +}); + +// ============================================================================ +// YAML Parsing: eventExpiryDays Clamping via YAML +// ============================================================================ + +describe('YAML Parsing: eventExpiryDays values', () => { + it('parses low event_expiry_days from YAML (clamping happens in toMemorySpec)', () => { + const tmpFile = createTempYaml(EXPIRY_CLAMPING_LOW_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + expect(parsed.memories).toHaveLength(1); + // Raw value from YAML is 1 -- clamping is done in toMemorySpec, not in parser + expect(parsed.memories[0]!.eventExpiryDays).toBe(1); + + // But toMemorySpec should clamp it + const spec = toMemorySpec(parsed.memories[0]!); + expect(spec.eventExpiryDuration).toBe(7); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('parses high event_expiry_days from YAML (clamping happens in toMemorySpec)', () => { + const tmpFile = createTempYaml(EXPIRY_CLAMPING_HIGH_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.eventExpiryDays).toBe(999); + + const spec = toMemorySpec(parsed.memories[0]!); + expect(spec.eventExpiryDuration).toBe(365); + } finally { + fs.unlinkSync(tmpFile); + } + }); +}); + +// ============================================================================ +// Memory Merge Logic 
Tests +// ============================================================================ + +describe('Memory Merge Logic', () => { + it('skips existing memories by name', () => { + const existingMemories: Memory[] = [ + { + type: 'AgentCoreMemory', + name: 'existing_memory', + eventExpiryDuration: 30, + strategies: [{ type: 'SEMANTIC' }], + }, + ]; + + const parsedMemories: ParsedStarterToolkitMemory[] = [ + { + name: 'existing_memory', + mode: 'STM_AND_LTM', + eventExpiryDays: 60, + physicalMemoryId: 'mem-existing', + }, + { + name: 'new_memory', + mode: 'STM_ONLY', + eventExpiryDays: 14, + physicalMemoryId: 'mem-new', + }, + ]; + + // Replicate the merge logic from actions.ts + const existingMemoryNames = new Set(existingMemories.map(m => m.name)); + const merged = [...existingMemories]; + const skipped: string[] = []; + + for (const mem of parsedMemories) { + if (!existingMemoryNames.has(mem.name)) { + merged.push(toMemorySpec(mem)); + } else { + skipped.push(mem.name); + } + } + + expect(merged).toHaveLength(2); + expect(skipped).toEqual(['existing_memory']); + + // The existing memory should not be updated (keeps original config) + const existing = merged.find(m => m.name === 'existing_memory')!; + expect(existing.eventExpiryDuration).toBe(30); // Original value, not 60 + + // The new memory should be added + const newMem = merged.find(m => m.name === 'new_memory')!; + expect(newMem.eventExpiryDuration).toBe(14); + expect(newMem.strategies).toHaveLength(1); + expect(newMem.strategies[0]!.type).toBe('SEMANTIC'); + }); + + it('adds all memories when project has none', () => { + const existingMemories: Memory[] = []; + const parsedMemories: ParsedStarterToolkitMemory[] = [ + { + name: 'memory_one', + mode: 'STM_AND_LTM', + eventExpiryDays: 30, + physicalMemoryId: 'mem-1', + }, + { + name: 'memory_two', + mode: 'STM_ONLY', + eventExpiryDays: 7, + physicalMemoryId: 'mem-2', + }, + ]; + + const existingMemoryNames = new Set(existingMemories.map(m => m.name)); + const 
merged = [...existingMemories]; + + for (const mem of parsedMemories) { + if (!existingMemoryNames.has(mem.name)) { + merged.push(toMemorySpec(mem)); + } + } + + expect(merged).toHaveLength(2); + expect(merged[0]!.name).toBe('memory_one'); + expect(merged[0]!.strategies).toHaveLength(3); + expect(merged[1]!.name).toBe('memory_two'); + expect(merged[1]!.strategies).toHaveLength(1); + }); +}); + +// ============================================================================ +// Physical ID Extraction Tests +// ============================================================================ + +describe('Memory Physical ID Extraction', () => { + it('extracts physicalMemoryId and physicalMemoryArn from YAML', () => { + const tmpFile = createTempYaml(STM_AND_LTM_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + const mem = parsed.memories[0]!; + expect(mem.physicalMemoryId).toBe('mem-001122334455'); + expect(mem.physicalMemoryArn).toBe('arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-001122334455'); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('handles memory with no physicalMemoryArn', () => { + const tmpFile = createTempYaml(STM_ONLY_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + const mem = parsed.memories[0]!; + expect(mem.physicalMemoryId).toBe('mem-stm-only-001'); + // STM_ONLY_YAML doesn't include memory_arn + expect(mem.physicalMemoryArn).toBeUndefined(); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('filters memories with physicalMemoryId for import', () => { + const tmpFile = createTempYaml(EXPIRY_CLAMPING_LOW_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + // This YAML has no memory_id + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + expect(memoriesToImport).toHaveLength(0); + } finally { + fs.unlinkSync(tmpFile); + } + }); +}); + +// ============================================================================ +// Template Logical ID 
Lookup Tests +// ============================================================================ + +describe('Template Logical ID Lookup for Memories', () => { + const synthTemplate: CfnTemplate = { + Resources: { + MyAgentMemoryResource: { + Type: 'AWS::BedrockAgentCore::Memory', + Properties: { + Name: 'my_agent_memory', + EventExpiryDuration: 30, + Strategies: [], + }, + }, + MyAgentRuntime: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { + AgentRuntimeName: 'TestProject_my_memory_agent', + }, + }, + MyIAMRole: { + Type: 'AWS::IAM::Role', + Properties: { + RoleName: 'MyRole', + }, + }, + }, + }; + + it('finds memory logical ID by Name property', () => { + const logicalId = findLogicalIdByProperty( + synthTemplate, + 'AWS::BedrockAgentCore::Memory', + 'Name', + 'my_agent_memory' + ); + expect(logicalId).toBe('MyAgentMemoryResource'); + }); + + it('finds all memory logical IDs by type', () => { + const logicalIds = findLogicalIdsByType(synthTemplate, 'AWS::BedrockAgentCore::Memory'); + expect(logicalIds).toEqual(['MyAgentMemoryResource']); + }); + + it('finds runtime logical ID by AgentRuntimeName', () => { + const logicalId = findLogicalIdByProperty( + synthTemplate, + 'AWS::BedrockAgentCore::Runtime', + 'AgentRuntimeName', + 'TestProject_my_memory_agent' + ); + expect(logicalId).toBe('MyAgentRuntime'); + }); + + it('returns undefined for non-existent memory name', () => { + const logicalId = findLogicalIdByProperty( + synthTemplate, + 'AWS::BedrockAgentCore::Memory', + 'Name', + 'nonexistent_memory' + ); + expect(logicalId).toBeUndefined(); + }); + + it('falls back to single memory logical ID when name does not match', () => { + const memoryLogicalIds = findLogicalIdsByType(synthTemplate, 'AWS::BedrockAgentCore::Memory'); + let logicalId = findLogicalIdByProperty(synthTemplate, 'AWS::BedrockAgentCore::Memory', 'Name', 'different_name'); + + // Primary lookup fails + expect(logicalId).toBeUndefined(); + + // Fallback: if there's only one memory resource, 
use it + if (!logicalId && memoryLogicalIds.length === 1) { + logicalId = memoryLogicalIds[0]; + } + expect(logicalId).toBe('MyAgentMemoryResource'); + }); +}); + +// ============================================================================ +// Phase 2 Resource Import List Construction +// ============================================================================ + +describe('Phase 2: ResourceToImport List Construction', () => { + const synthTemplate: CfnTemplate = { + Resources: { + RuntimeLogicalId: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { + AgentRuntimeName: 'TestProject_my_memory_agent', + }, + }, + MemoryLogicalId: { + Type: 'AWS::BedrockAgentCore::Memory', + Properties: { + Name: 'my_agent_memory', + }, + }, + IAMRoleLogicalId: { + Type: 'AWS::IAM::Role', + Properties: {}, + }, + }, + }; + + it('builds ResourceToImport list containing both Runtime and Memory', () => { + const tmpFile = createTempYaml(STM_AND_LTM_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + const projectName = 'TestProject'; + + const agentsToImport = parsed.agents.filter(a => a.physicalAgentId); + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + + const resourcesToImport: ResourceToImport[] = []; + + // Build agent resources + for (const agent of agentsToImport) { + const expectedRuntimeName = `${projectName}_${agent.name}`; + let logicalId = findLogicalIdByProperty( + synthTemplate, + 'AWS::BedrockAgentCore::Runtime', + 'AgentRuntimeName', + expectedRuntimeName + ); + + if (!logicalId) { + const runtimeLogicalIds = findLogicalIdsByType(synthTemplate, 'AWS::BedrockAgentCore::Runtime'); + if (runtimeLogicalIds.length === 1) logicalId = runtimeLogicalIds[0]; + } + + if (logicalId) { + resourcesToImport.push({ + resourceType: 'AWS::BedrockAgentCore::Runtime', + logicalResourceId: logicalId, + resourceIdentifier: { AgentRuntimeId: agent.physicalAgentId! 
}, + }); + } + } + + // Build memory resources + for (const memory of memoriesToImport) { + let logicalId = findLogicalIdByProperty(synthTemplate, 'AWS::BedrockAgentCore::Memory', 'Name', memory.name); + + if (!logicalId) { + const memoryLogicalIds = findLogicalIdsByType(synthTemplate, 'AWS::BedrockAgentCore::Memory'); + if (memoryLogicalIds.length === 1) logicalId = memoryLogicalIds[0]; + } + + if (logicalId) { + resourcesToImport.push({ + resourceType: 'AWS::BedrockAgentCore::Memory', + logicalResourceId: logicalId, + resourceIdentifier: { MemoryId: memory.physicalMemoryId! }, + }); + } + } + + // Verify the list + expect(resourcesToImport).toHaveLength(2); + + const runtimeImport = resourcesToImport.find(r => r.resourceType === 'AWS::BedrockAgentCore::Runtime'); + expect(runtimeImport).toBeDefined(); + expect(runtimeImport!.logicalResourceId).toBe('RuntimeLogicalId'); + expect(runtimeImport!.resourceIdentifier).toEqual({ AgentRuntimeId: 'abc123def456' }); + + const memoryImport = resourcesToImport.find(r => r.resourceType === 'AWS::BedrockAgentCore::Memory'); + expect(memoryImport).toBeDefined(); + expect(memoryImport!.logicalResourceId).toBe('MemoryLogicalId'); + expect(memoryImport!.resourceIdentifier).toEqual({ MemoryId: 'mem-001122334455' }); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('produces only Runtime resource when memory has no physicalMemoryId', () => { + const yamlNoMemId = ` +agents: + agent_no_memid: + name: agent_no_memid + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: agent-id-001 + memory: + mode: STM_AND_LTM + memory_name: mem_without_id + event_expiry_days: 30 +`; + const tmpFile = createTempYaml(yamlNoMemId); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + + const agentsToImport = 
parsed.agents.filter(a => a.physicalAgentId); + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + + expect(agentsToImport).toHaveLength(1); + expect(memoriesToImport).toHaveLength(0); + } finally { + fs.unlinkSync(tmpFile); + } + }); +}); + +// ============================================================================ +// Deployed State Update Tests +// ============================================================================ + +describe('Deployed State Update with Memory', () => { + it('constructs memory ARN from components when physicalMemoryArn is provided', () => { + const tmpFile = createTempYaml(STM_AND_LTM_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + const targetRegion = 'us-west-2'; + const targetAccount = '123456789012'; + + // Simulate deployed state update logic from actions.ts + const targetState: Record = { resources: {} }; + const resources = targetState.resources as Record; + resources.memories = {}; + + for (const memory of memoriesToImport) { + if (memory.physicalMemoryId) { + (resources.memories as Record)[memory.name] = { + memoryId: memory.physicalMemoryId, + memoryArn: + memory.physicalMemoryArn ?? 
+ `arn:aws:bedrock-agentcore:${targetRegion}:${targetAccount}:memory/${memory.physicalMemoryId}`, + }; + } + } + + const memState = (resources.memories as Record>)['my_agent_memory']!; + expect(memState.memoryId).toBe('mem-001122334455'); + // Should use the ARN from YAML since it's provided + expect(memState.memoryArn).toBe('arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-001122334455'); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('constructs memory ARN from region/account when physicalMemoryArn is missing', () => { + const tmpFile = createTempYaml(STM_ONLY_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + const targetRegion = 'us-west-2'; + const targetAccount = '123456789012'; + + const memoryStates: Record = {}; + + for (const memory of memoriesToImport) { + if (memory.physicalMemoryId) { + memoryStates[memory.name] = { + memoryId: memory.physicalMemoryId, + memoryArn: + memory.physicalMemoryArn ?? 
+ `arn:aws:bedrock-agentcore:${targetRegion}:${targetAccount}:memory/${memory.physicalMemoryId}`, + }; + } + } + + const memState = memoryStates['stm_memory']!; + expect(memState.memoryId).toBe('mem-stm-only-001'); + // Should construct ARN since YAML doesn't have memory_arn + expect(memState.memoryArn).toBe('arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/mem-stm-only-001'); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('includes both agent and memory info in deployed state', () => { + const tmpFile = createTempYaml(STM_AND_LTM_YAML); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + const agentsToImport = parsed.agents.filter(a => a.physicalAgentId); + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + const targetRegion = 'us-west-2'; + const targetAccount = '123456789012'; + + // Simulate full deployed state + const existingState: Record = { targets: {} }; + const targetState: Record = { resources: {} }; + const resources = targetState.resources as Record; + resources.stackName = 'AgentCore-TestProject-default'; + + if (agentsToImport.length > 0) { + resources.agents = {}; + for (const agent of agentsToImport) { + if (agent.physicalAgentId) { + (resources.agents as Record)[agent.name] = { + runtimeId: agent.physicalAgentId, + runtimeArn: + agent.physicalAgentArn ?? + `arn:aws:bedrock-agentcore:${targetRegion}:${targetAccount}:runtime/${agent.physicalAgentId}`, + roleArn: 'imported', + }; + } + } + } + + if (memoriesToImport.length > 0) { + resources.memories = {}; + for (const memory of memoriesToImport) { + if (memory.physicalMemoryId) { + (resources.memories as Record)[memory.name] = { + memoryId: memory.physicalMemoryId, + memoryArn: + memory.physicalMemoryArn ?? 
+ `arn:aws:bedrock-agentcore:${targetRegion}:${targetAccount}:memory/${memory.physicalMemoryId}`, + }; + } + } + } + + (existingState.targets as Record)['default'] = targetState; + + // Verify deployed state structure + const target = (existingState.targets as Record>)['default']!; + const res = target.resources as Record; + + expect(res.stackName).toBe('AgentCore-TestProject-default'); + + const agents = res.agents as Record>; + expect(agents['my_memory_agent']).toBeDefined(); + expect(agents['my_memory_agent']!.runtimeId).toBe('abc123def456'); + + const memories = res.memories as Record>; + expect(memories['my_agent_memory']).toBeDefined(); + expect(memories['my_agent_memory']!.memoryId).toBe('mem-001122334455'); + } finally { + fs.unlinkSync(tmpFile); + } + }); +}); + +// ============================================================================ +// buildImportTemplate Tests for Memory Resources +// ============================================================================ + +describe('buildImportTemplate with Memory', () => { + it('adds memory resource to deployed template with Retain deletion policy', () => { + const deployedTemplate: CfnTemplate = { + Resources: { + ExistingIAMRole: { + Type: 'AWS::IAM::Role', + Properties: { RoleName: 'ExistingRole' }, + }, + }, + }; + + const synthTemplate: CfnTemplate = { + Resources: { + ExistingIAMRole: { + Type: 'AWS::IAM::Role', + Properties: { RoleName: 'ExistingRole' }, + }, + MemoryLogicalId: { + Type: 'AWS::BedrockAgentCore::Memory', + Properties: { + Name: 'my_agent_memory', + EventExpiryDuration: 30, + }, + DependsOn: 'ExistingIAMRole', + }, + RuntimeLogicalId: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { + AgentRuntimeName: 'TestProject_my_agent', + }, + DependsOn: ['ExistingIAMRole', 'MemoryLogicalId'], + }, + }, + }; + + const importTemplate = buildImportTemplate(deployedTemplate, synthTemplate, [ + 'MemoryLogicalId', + 'RuntimeLogicalId', + ]); + + // Verify memory resource was added + 
expect(importTemplate.Resources['MemoryLogicalId']).toBeDefined(); + expect(importTemplate.Resources['MemoryLogicalId']!.Type).toBe('AWS::BedrockAgentCore::Memory'); + expect(importTemplate.Resources['MemoryLogicalId']!.DeletionPolicy).toBe('Retain'); + expect(importTemplate.Resources['MemoryLogicalId']!.UpdateReplacePolicy).toBe('Retain'); + + // DependsOn should be removed for import + expect(importTemplate.Resources['MemoryLogicalId']!.DependsOn).toBeUndefined(); + + // Verify runtime resource was also added + expect(importTemplate.Resources['RuntimeLogicalId']).toBeDefined(); + expect(importTemplate.Resources['RuntimeLogicalId']!.DeletionPolicy).toBe('Retain'); + expect(importTemplate.Resources['RuntimeLogicalId']!.DependsOn).toBeUndefined(); + + // Original resource should still be there + expect(importTemplate.Resources['ExistingIAMRole']).toBeDefined(); + }); +}); + +// ============================================================================ +// Edge Cases +// ============================================================================ + +describe('Edge Cases', () => { + it('handles agent with memory mode but no memory section gracefully', () => { + const yaml = ` +agents: + agent_no_memory_section: + name: agent_no_memory_section + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: agent-no-mem-section +`; + const tmpFile = createTempYaml(yaml); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + expect(parsed.agents).toHaveLength(1); + expect(parsed.memories).toHaveLength(0); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('handles memory with empty mode string', () => { + const yaml = ` +agents: + agent_empty_mode: + name: agent_empty_mode + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: 
main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + memory: + mode: + memory_name: empty_mode_memory +`; + const tmpFile = createTempYaml(yaml); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + // mode is null/empty, so memory should not be added (condition: memoryConfig.mode) + expect(parsed.memories).toHaveLength(0); + } finally { + fs.unlinkSync(tmpFile); + } + }); + + it('multiple agents with unique memories create separate memory entries', () => { + const yaml = ` +agents: + agent_x: + name: agent_x + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: agent-x-id + memory: + mode: STM_AND_LTM + memory_name: memory_x + memory_id: mem-x + event_expiry_days: 30 + agent_y: + name: agent_y + deployment_type: container + runtime_type: PYTHON_3_12 + entrypoint: main.py + language: python + aws: + account: "123456789012" + region: us-west-2 + protocol_configuration: + server_protocol: HTTP + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: agent-y-id + memory: + mode: STM_ONLY + memory_name: memory_y + memory_id: mem-y + event_expiry_days: 14 +`; + const tmpFile = createTempYaml(yaml); + try { + const parsed = parseStarterToolkitYaml(tmpFile); + expect(parsed.agents).toHaveLength(2); + expect(parsed.memories).toHaveLength(2); + + const memX = parsed.memories.find(m => m.name === 'memory_x')!; + expect(memX.mode).toBe('STM_AND_LTM'); + expect(memX.physicalMemoryId).toBe('mem-x'); + + const memY = parsed.memories.find(m => m.name === 'memory_y')!; + expect(memY.mode).toBe('STM_ONLY'); + expect(memY.physicalMemoryId).toBe('mem-y'); + } finally { + fs.unlinkSync(tmpFile); + } + }); 
+}); diff --git a/src/cli/commands/import/__tests__/import-no-deploy.test.ts b/src/cli/commands/import/__tests__/import-no-deploy.test.ts new file mode 100644 index 000000000..cb4a6738f --- /dev/null +++ b/src/cli/commands/import/__tests__/import-no-deploy.test.ts @@ -0,0 +1,913 @@ +/** + * Test Group 8: Import Without Prior Deploy (No Physical IDs) + * + * Verifies that the import command correctly handles starter toolkit projects + * that were created but never deployed (no agent_id/memory_id in YAML). + */ +import { parseStarterToolkitYaml } from '../yaml-parser.js'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +// ---- Mocks ---- + +const mockReadProjectSpec = vi.fn(); +const mockWriteProjectSpec = vi.fn(); +const mockReadAWSDeploymentTargets = vi.fn(); +const mockWriteAWSDeploymentTargets = vi.fn(); +const mockReadDeployedState = vi.fn(); +const mockWriteDeployedState = vi.fn(); +const mockFindConfigRoot = vi.fn(); + +vi.mock('../../../../lib', () => ({ + APP_DIR: 'app', + ConfigIO: class MockConfigIO { + readProjectSpec = mockReadProjectSpec; + writeProjectSpec = mockWriteProjectSpec; + readAWSDeploymentTargets = mockReadAWSDeploymentTargets; + writeAWSDeploymentTargets = mockWriteAWSDeploymentTargets; + readDeployedState = mockReadDeployedState; + writeDeployedState = mockWriteDeployedState; + }, + findConfigRoot: (...args: unknown[]) => mockFindConfigRoot(...args), +})); + +const mockValidateAwsCredentials = vi.fn().mockResolvedValue(undefined); +vi.mock('../../../aws/account', () => ({ + validateAwsCredentials: (...args: unknown[]) => mockValidateAwsCredentials(...args), +})); + +vi.mock('../../../cdk/local-cdk-project', () => ({ + LocalCdkProject: vi.fn(), +})); + +vi.mock('../../../cdk/toolkit-lib', () => ({ + silentIoHost: {}, +})); + +vi.mock('../../../logging', () => ({ + ExecLogger: class MockExecLogger { + startStep = 
vi.fn(); + endStep = vi.fn(); + log = vi.fn(); + finalize = vi.fn(); + getRelativeLogPath = vi.fn().mockReturnValue('agentcore/.cli/logs/import/import-mock.log'); + logFilePath = 'agentcore/.cli/logs/import/import-mock.log'; + }, +})); + +const mockBuildCdkProject = vi.fn(); +const mockSynthesizeCdk = vi.fn(); +vi.mock('../../../operations/deploy', () => ({ + buildCdkProject: (...args: unknown[]) => mockBuildCdkProject(...args), + synthesizeCdk: (...args: unknown[]) => mockSynthesizeCdk(...args), +})); + +const mockSetupPythonProject = vi.fn().mockResolvedValue({ status: 'success' }); +vi.mock('../../../operations/python/setup', () => ({ + setupPythonProject: (...args: unknown[]) => mockSetupPythonProject(...args), +})); + +const mockExecutePhase1 = vi.fn(); +const mockGetDeployedTemplate = vi.fn(); +vi.mock('../phase1-update', () => ({ + executePhase1: (...args: unknown[]) => mockExecutePhase1(...args), + getDeployedTemplate: (...args: unknown[]) => mockGetDeployedTemplate(...args), +})); + +const mockExecutePhase2 = vi.fn(); +const mockPublishCdkAssets = vi.fn(); +vi.mock('../phase2-import', () => ({ + executePhase2: (...args: unknown[]) => mockExecutePhase2(...args), + publishCdkAssets: (...args: unknown[]) => mockPublishCdkAssets(...args), +})); + +// ============================================================================ +// YAML Parsing Tests: null physical IDs +// ============================================================================ + +describe('YAML parsing: null physical IDs', () => { + let tmpDir: string; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test8-yaml-')); + }); + + afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it('parses agent_id: null as falsy physicalAgentId', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + runtime_type: PYTHON_3_12 + source_path: null + aws: + 
account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + expect(parsed.agents).toHaveLength(1); + expect(parsed.agents[0]!.physicalAgentId).toBeFalsy(); + expect(parsed.agents[0]!.physicalAgentArn).toBeFalsy(); + }); + + it('parses memory_id: null as falsy physicalMemoryId', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + runtime_type: PYTHON_3_12 + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: STM_AND_LTM + memory_id: null + memory_arn: null + memory_name: test_memory + event_expiry_days: 30 +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.physicalMemoryId).toBeFalsy(); + expect(parsed.memories[0]!.physicalMemoryArn).toBeFalsy(); + }); + + it('filters agents with null physical IDs correctly', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + runtime_type: PYTHON_3_12 + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: 
null + memory: + mode: STM_AND_LTM + memory_id: null + memory_arn: null + memory_name: test_memory + event_expiry_days: 30 +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + const agentsToImport = parsed.agents.filter(a => a.physicalAgentId); + const memoriesToImport = parsed.memories.filter(m => m.physicalMemoryId); + + expect(agentsToImport).toHaveLength(0); + expect(memoriesToImport).toHaveLength(0); + }); + + it('handles YAML with account: null and region: null', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + runtime_type: PYTHON_3_12 + aws: + account: null + region: null + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + // account: null -> parseYamlValue returns null -> String(null ?? 
'') = '' + expect(parsed.awsTarget.account).toBe(''); + expect(parsed.awsTarget.region).toBe(''); + }); + + it('handles YAML with completely empty aws section (no account/region keys)', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + // When no account/region keys at all, awsTarget gets empty strings + expect(parsed.awsTarget.account).toBe(''); + expect(parsed.awsTarget.region).toBe(''); + }); + + it('handles agent_id with string value "null" (quoted) vs actual null', () => { + const yamlNull = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const filePath1 = path.join(tmpDir, 'null.yaml'); + fs.writeFileSync(filePath1, yamlNull); + const parsed1 = parseStarterToolkitYaml(filePath1); + expect(parsed1.agents[0]!.physicalAgentId).toBeFalsy(); + + // Quoted "null" string should be the literal string "null" + const yamlQuotedNull = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + 
bedrock_agentcore: + agent_id: "null" + agent_arn: "null" + memory: + mode: NO_MEMORY +`; + const filePath2 = path.join(tmpDir, 'quoted-null.yaml'); + fs.writeFileSync(filePath2, yamlQuotedNull); + const parsed2 = parseStarterToolkitYaml(filePath2); + + // Quoted "null" is the literal string "null" which is truthy! + // This would incorrectly try to import with ID "null" + expect(parsed2.agents[0]!.physicalAgentId).toBe('null'); + }); + + it('handles tilde (~) as YAML null value', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: ~ + agent_arn: ~ + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + // ~ is treated as null by parseYamlValue + expect(parsed.agents[0]!.physicalAgentId).toBeFalsy(); + }); + + it('handles account with value but no region', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: '111122223333' + region: null + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + expect(parsed.awsTarget.account).toBe('111122223333'); + expect(parsed.awsTarget.region).toBe(''); + }); +}); + +// 
============================================================================ +// handleImport Tests: no-deploy path +// ============================================================================ + +describe('handleImport: no-deploy path (no physical IDs)', () => { + let tmpDir: string; + let yamlPath: string; + + beforeEach(() => { + vi.clearAllMocks(); + + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test8-import-')); + + // Create a no-deploy YAML with valid account/region + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + runtime_type: PYTHON_3_12 + source_path: null + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: STM_AND_LTM + memory_id: null + memory_arn: null + memory_name: test_agent_memory + event_expiry_days: 30 +`; + yamlPath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(yamlPath, yamlContent); + + // Set up project structure + const projectDir = path.join(tmpDir, 'myproject'); + const configDir = path.join(projectDir, 'agentcore'); + fs.mkdirSync(configDir, { recursive: true }); + fs.writeFileSync( + path.join(configDir, 'agentcore.json'), + JSON.stringify({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }) + ); + + // Mock findConfigRoot to return our test config directory + mockFindConfigRoot.mockReturnValue(configDir); + }); + + afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it('succeeds with empty importedAgents/importedMemories when no physical IDs', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + 
mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + + const progressMessages: string[] = []; + const result = await handleImport({ + source: yamlPath, + onProgress: (msg: string) => progressMessages.push(msg), + }); + + expect(result.success).toBe(true); + expect(result.importedAgents).toEqual([]); + expect(result.importedMemories).toEqual([]); + expect(result.stackName).toBeDefined(); + expect(result.projectSpec).toBeDefined(); + }); + + it('emits "No deployed resources found" message', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + + const progressMessages: string[] = []; + await handleImport({ + source: yamlPath, + onProgress: (msg: string) => progressMessages.push(msg), + }); + + const noResourcesMsg = progressMessages.find(m => m.includes('No deployed resources found')); + expect(noResourcesMsg).toBeDefined(); + expect(noResourcesMsg).toContain('agentcore deploy'); + }); + + it('writes projectSpec (config merge happens) even without physical IDs', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + await handleImport({ source: yamlPath }); + + // writeProjectSpec should have been called with the merged config + expect(mockWriteProjectSpec).toHaveBeenCalledTimes(1); + const writtenSpec = 
mockWriteProjectSpec.mock.calls[0]![0]; + expect(writtenSpec.agents).toHaveLength(1); + expect(writtenSpec.agents[0].name).toBe('test_agent'); + }); + + it('adds memory to project config even without physical memory ID', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + await handleImport({ source: yamlPath }); + + const writtenSpec = mockWriteProjectSpec.mock.calls[0]![0]; + expect(writtenSpec.memories).toHaveLength(1); + expect(writtenSpec.memories[0].name).toBe('test_agent_memory'); + expect(writtenSpec.memories[0].type).toBe('AgentCoreMemory'); + }); + + it('does NOT call CDK build/synth operations', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + await handleImport({ source: yamlPath }); + + expect(mockBuildCdkProject).not.toHaveBeenCalled(); + expect(mockSynthesizeCdk).not.toHaveBeenCalled(); + }); + + it('does NOT call Phase 1 or Phase 2', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + await handleImport({ source: yamlPath }); + + expect(mockExecutePhase1).not.toHaveBeenCalled(); + 
expect(mockExecutePhase2).not.toHaveBeenCalled(); + expect(mockPublishCdkAssets).not.toHaveBeenCalled(); + }); + + it('does NOT modify deployed state', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + await handleImport({ source: yamlPath }); + + expect(mockReadDeployedState).not.toHaveBeenCalled(); + expect(mockWriteDeployedState).not.toHaveBeenCalled(); + }); + + it('skips Python setup for container agents', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + await handleImport({ source: yamlPath }); + + expect(mockSetupPythonProject).not.toHaveBeenCalled(); + }); + + it('returns correct stackName in result', async () => { + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + const result = await handleImport({ source: yamlPath }); + + expect(result.stackName).toBe('AgentCore-myproject-default'); + }); +}); + +// ============================================================================ +// Target resolution for no-deploy imports +// ============================================================================ 
+ +describe('handleImport: target resolution with null account/region', () => { + let tmpDir: string; + + beforeEach(() => { + vi.clearAllMocks(); + + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test8-target-')); + + // Set up project structure + const projectDir = path.join(tmpDir, 'myproject'); + const configDir = path.join(projectDir, 'agentcore'); + fs.mkdirSync(configDir, { recursive: true }); + fs.writeFileSync( + path.join(configDir, 'agentcore.json'), + JSON.stringify({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }) + ); + + mockFindConfigRoot.mockReturnValue(configDir); + }); + + afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it('succeeds when no targets exist AND YAML has null account/region (no physical IDs)', async () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: null + region: null + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const yamlPath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(yamlPath, yamlContent); + + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([]); // No existing targets + + const { handleImport } = await import('../actions.js'); + const result = await handleImport({ source: yamlPath }); + + // No physical IDs means target resolution is skipped entirely. + // The import succeeds -- config merge + source copy still happen. 
+ expect(result.success).toBe(true); + expect(result.importedAgents).toEqual([]); + expect(result.importedMemories).toEqual([]); + }); + + it('succeeds when project already has targets even with null YAML account/region', async () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: null + region: null + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const yamlPath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(yamlPath, yamlContent); + + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([{ name: 'default', account: '111122223333', region: 'us-east-1' }]); + + const { handleImport } = await import('../actions.js'); + const result = await handleImport({ source: yamlPath }); + + expect(result.success).toBe(true); + expect(result.importedAgents).toEqual([]); + expect(result.importedMemories).toEqual([]); + }); + + it('does not write targets when YAML has account/region but no physical IDs', async () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const yamlPath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(yamlPath, yamlContent); + + mockReadProjectSpec.mockResolvedValue({ + name: 'myproject', + 
version: 1, + agents: [], + memories: [], + credentials: [], + }); + mockWriteProjectSpec.mockResolvedValue(undefined); + mockReadAWSDeploymentTargets.mockResolvedValue([]); // No existing targets + + const { handleImport } = await import('../actions.js'); + const result = await handleImport({ source: yamlPath }); + + expect(result.success).toBe(true); + // No physical IDs means target is not written to disk + expect(mockWriteAWSDeploymentTargets).not.toHaveBeenCalled(); + // But the stackName should still be computed using 'default' fallback + expect(result.stackName).toBe('AgentCore-myproject-default'); + }); +}); + +// ============================================================================ +// Edge case: empty value after colon in YAML +// ============================================================================ + +describe('YAML parsing edge cases', () => { + let tmpDir: string; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test8-edge-')); + }); + + afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + it('handles agent_id with empty value after colon (treated as nested object, not null)', () => { + // agent_id: (empty) is treated as a nested object {} by the parser, not null. + // This is a known limitation of the simple YAML parser. + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: container + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: + agent_arn: + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + + // The parser creates {} for empty values after colon. 
+ // An empty object {} is truthy but not a usable ID. + // Starter toolkit always writes "null" not empty, so this is academic. + const agent = parsed.agents[0]!; + expect(agent.physicalAgentId).toBeDefined(); // {} is defined (not undefined) + }); + + it('preserves agent metadata even when physical IDs are null', () => { + const yamlContent = ` +default_agent: test_agent +agents: + test_agent: + name: test_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + source_path: null + aws: + account: '111122223333' + region: us-east-1 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: MCP + observability: + enabled: false + bedrock_agentcore: + agent_id: null + agent_arn: null + memory: + mode: NO_MEMORY +`; + const filePath = path.join(tmpDir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, yamlContent); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + + expect(agent.name).toBe('test_agent'); + expect(agent.build).toBe('CodeZip'); + expect(agent.protocol).toBe('MCP'); + expect(agent.enableOtel).toBe(false); + expect(agent.runtimeVersion).toBe('PYTHON_3_12'); + }); +}); diff --git a/src/cli/commands/import/__tests__/merge-logic.test.ts b/src/cli/commands/import/__tests__/merge-logic.test.ts new file mode 100644 index 000000000..f4ffd4e65 --- /dev/null +++ b/src/cli/commands/import/__tests__/merge-logic.test.ts @@ -0,0 +1,223 @@ +/** + * Test Group 3: CLI-Native Create with Memory, Then Import Over It + */ +import type { AgentCoreProjectSpec, Credential, Memory } from '../../../../schema'; +import type { AgentEnvSpec } from '../../../../schema/schemas/agent-env'; +import type { ParsedStarterToolkitConfig } from '../types'; +import { parseStarterToolkitYaml } from '../yaml-parser'; +import * as fs from 'node:fs'; +import * as path from 'node:path'; +import { describe, expect, it } from 'vitest'; + +const APP_DIR = 'app'; + +function 
toAgentEnvSpec(agent: ParsedStarterToolkitConfig['agents'][0]): AgentEnvSpec { + const codeLocation = path.join(APP_DIR, agent.name); + const entrypoint = path.basename(agent.entrypoint); + const spec: AgentEnvSpec = { + type: 'AgentCoreRuntime', + name: agent.name, + build: agent.build, + entrypoint: entrypoint as AgentEnvSpec['entrypoint'], + codeLocation: codeLocation as AgentEnvSpec['codeLocation'], + runtimeVersion: (agent.runtimeVersion ?? 'PYTHON_3_12') as AgentEnvSpec['runtimeVersion'], + protocol: agent.protocol, + networkMode: agent.networkMode, + instrumentation: { enableOtel: agent.enableOtel }, + }; + if (agent.networkMode === 'VPC' && agent.networkConfig) { + spec.networkConfig = agent.networkConfig; + } + return spec; +} + +function toMemorySpec(mem: ParsedStarterToolkitConfig['memories'][0]): Memory { + const strategies: Memory['strategies'] = []; + if (mem.mode === 'STM_ONLY' || mem.mode === 'STM_AND_LTM') { + strategies.push({ type: 'SEMANTIC' }); + } + if (mem.mode === 'STM_AND_LTM') { + strategies.push({ type: 'SUMMARIZATION' }); + strategies.push({ type: 'USER_PREFERENCE' }); + } + return { + type: 'AgentCoreMemory', + name: mem.name, + eventExpiryDuration: Math.max(7, Math.min(365, mem.eventExpiryDays)), + strategies, + }; +} + +function toCredentialSpec(cred: ParsedStarterToolkitConfig['credentials'][0]): Credential { + return { type: 'ApiKeyCredentialProvider', name: cred.name }; +} + +function simulateMerge( + projectSpec: AgentCoreProjectSpec, + parsed: ParsedStarterToolkitConfig +): { messages: string[]; projectSpec: AgentCoreProjectSpec } { + const messages: string[] = []; + const onProgress = (msg: string) => messages.push(msg); + const existingAgentNames = new Set(projectSpec.agents.map(a => a.name)); + for (const agent of parsed.agents) { + if (!existingAgentNames.has(agent.name)) { + projectSpec.agents.push(toAgentEnvSpec(agent)); + } else { + onProgress(`Skipping agent "${agent.name}" (already exists in project)`); + } + } + const 
existingMemoryNames = new Set((projectSpec.memories ?? []).map(m => m.name)); + for (const mem of parsed.memories) { + if (!existingMemoryNames.has(mem.name)) { + (projectSpec.memories ??= []).push(toMemorySpec(mem)); + } else { + onProgress(`Skipping memory "${mem.name}" (already exists in project)`); + } + } + const existingCredentialNames = new Set((projectSpec.credentials ?? []).map(c => c.name)); + for (const cred of parsed.credentials) { + if (!existingCredentialNames.has(cred.name)) { + (projectSpec.credentials ??= []).push(toCredentialSpec(cred)); + onProgress(`Added credential "${cred.name}" (${cred.providerType})`); + } else { + onProgress(`Skipping credential "${cred.name}" (already exists in project)`); + } + } + return { messages, projectSpec }; +} + +const FIXTURES = path.join(__dirname, 'fixtures'); +const CLI_PROJECT_PATH = path.join(FIXTURES, 'cli-project-with-agent-and-memory.json'); +const DIFFERENT_AGENT_YAML = path.join(FIXTURES, 'different-agent.yaml'); +const SAME_NAME_AGENT_YAML = path.join(FIXTURES, 'same-name-agent.yaml'); + +function loadCliProjectSpec(): AgentCoreProjectSpec { + const content = fs.readFileSync(CLI_PROJECT_PATH, 'utf-8'); + return JSON.parse(content) as AgentCoreProjectSpec; +} + +describe('parseStarterToolkitYaml', () => { + it('parses a different-agent YAML', () => { + const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML); + expect(parsed.agents).toHaveLength(1); + expect(parsed.agents[0]!.name).toBe('new_toolkit_agent'); + expect(parsed.agents[0]!.physicalAgentId).toBe('AGENT_NEW_123'); + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.name).toBe('new_toolkit_memory'); + expect(parsed.credentials).toHaveLength(1); + expect(parsed.credentials[0]!.name).toBe('new_api_key_cred'); + }); + it('parses a same-name-agent YAML', () => { + const parsed = parseStarterToolkitYaml(SAME_NAME_AGENT_YAML); + expect(parsed.agents[0]!.name).toBe('existing_agent'); + 
expect(parsed.agents[0]!.physicalAgentId).toBe('AGENT_EXISTING_999'); + }); +}); + +describe('merge: agent deduplication', () => { + it('adds agent with different name', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML); + const { projectSpec: merged } = simulateMerge(projectSpec, parsed); + expect(merged.agents).toHaveLength(2); + }); + it('skips agent with same name', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(SAME_NAME_AGENT_YAML); + const { messages, projectSpec: merged } = simulateMerge(projectSpec, parsed); + expect(merged.agents).toHaveLength(1); + expect(messages).toContain('Skipping agent "existing_agent" (already exists in project)'); + }); + it('preserves original config when skipping', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(SAME_NAME_AGENT_YAML); + const { projectSpec: merged } = simulateMerge(projectSpec, parsed); + expect(merged.agents[0]!.networkMode).toBe('PUBLIC'); + expect(merged.agents[0]!.protocol).toBe('HTTP'); + }); +}); + +describe('merge: memory deduplication', () => { + it('adds memory with different name', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML); + const { projectSpec: merged } = simulateMerge(projectSpec, parsed); + expect(merged.memories).toHaveLength(2); + }); + it('skips memory with same name', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(SAME_NAME_AGENT_YAML); + const { messages } = simulateMerge(projectSpec, parsed); + expect(messages).toContain('Skipping memory "existing_agent_memory" (already exists in project)'); + }); +}); + +describe('merge: credential deduplication', () => { + it('adds credential with different name', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML); + const { 
projectSpec: merged } = simulateMerge(projectSpec, parsed);
+    expect(merged.credentials).toHaveLength(2);
+  });
+  it('skips credential with same name', () => {
+    const projectSpec = loadCliProjectSpec();
+    projectSpec.credentials.push({ type: 'ApiKeyCredentialProvider', name: 'new_api_key_cred' });
+    const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML);
+    const { messages } = simulateMerge(projectSpec, parsed);
+    expect(messages).toContain('Skipping credential "new_api_key_cred" (already exists in project)');
+  });
+});
+
+describe('merge: combined', () => {
+  it('merging different agent produces combined projectSpec', () => {
+    const projectSpec = loadCliProjectSpec();
+    const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML);
+    const { projectSpec: merged } = simulateMerge(projectSpec, parsed);
+    expect(merged.agents).toHaveLength(2);
+    expect(merged.memories).toHaveLength(2);
+    expect(merged.credentials).toHaveLength(2);
+  });
+  it('handles undefined memories', () => {
+    const projectSpec = loadCliProjectSpec();
+    delete (projectSpec as Record<string, unknown>).memories;
+    const parsed = parseStarterToolkitYaml(DIFFERENT_AGENT_YAML);
+    const { projectSpec: merged } = simulateMerge(projectSpec, parsed);
+    expect(merged.memories).toHaveLength(1);
+  });
+});
+
+describe('source copy skip logic', () => {
+  it('identifies agents to skip', () => {
+    const projectSpec = loadCliProjectSpec();
+    const existingAgentNames = new Set(projectSpec.agents.map(a => a.name));
+    expect(existingAgentNames.has(parseStarterToolkitYaml(SAME_NAME_AGENT_YAML).agents[0]!.name)).toBe(true);
+    expect(existingAgentNames.has(parseStarterToolkitYaml(DIFFERENT_AGENT_YAML).agents[0]!.name)).toBe(false);
+  });
+});
+
+describe('toMemorySpec', () => {
+  it('clamps below 7', () => {
+    const mem: ParsedStarterToolkitConfig['memories'][0] = { name: 't', mode: 'STM_ONLY', eventExpiryDays: 1 };
+    expect(toMemorySpec(mem).eventExpiryDuration).toBe(7);
+  });
+  it('clamps above 365', () => {
+    const mem: 
ParsedStarterToolkitConfig['memories'][0] = { name: 't', mode: 'STM_ONLY', eventExpiryDays: 999 }; + expect(toMemorySpec(mem).eventExpiryDuration).toBe(365); + }); +}); + +describe('edge cases', () => { + it('dedup is name-only', () => { + const projectSpec = loadCliProjectSpec(); + const parsed = parseStarterToolkitYaml(SAME_NAME_AGENT_YAML); + const { messages } = simulateMerge(projectSpec, parsed); + expect(messages.find(m => m.includes('Skipping agent'))).toBeDefined(); + }); + it('merge is append-only', () => { + const projectSpec = loadCliProjectSpec(); + const n = projectSpec.agents.length; + const parsed = parseStarterToolkitYaml(SAME_NAME_AGENT_YAML); + const { projectSpec: merged } = simulateMerge(projectSpec, parsed); + expect(merged.agents.length).toBeGreaterThanOrEqual(n); + }); +}); diff --git a/src/cli/commands/import/__tests__/multi-agent.test.ts b/src/cli/commands/import/__tests__/multi-agent.test.ts new file mode 100644 index 000000000..b3a39dace --- /dev/null +++ b/src/cli/commands/import/__tests__/multi-agent.test.ts @@ -0,0 +1,357 @@ +import { + buildImportTemplate, + filterCompanionOnlyTemplate, + findLogicalIdByProperty, + findLogicalIdsByType, +} from '../template-utils.js'; +import type { CfnTemplate } from '../template-utils.js'; +import { parseStarterToolkitYaml } from '../yaml-parser.js'; +import * as path from 'node:path'; +import { describe, expect, it } from 'vitest'; + +const FIXTURES_DIR = path.join(__dirname, 'fixtures'); + +describe('parseStarterToolkitYaml - multi-agent', () => { + it('parses a YAML file with 2 agents', () => { + const result = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'two-agents.yaml')); + expect(result.agents).toHaveLength(2); + expect(result.agents[0]!.name).toBe('search_agent'); + expect(result.agents[1]!.name).toBe('chat_agent'); + }); + + it('extracts correct properties for each agent', () => { + const result = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'two-agents.yaml')); + 
expect(result.agents[0]!.build).toBe('CodeZip'); + expect(result.agents[0]!.protocol).toBe('HTTP'); + expect(result.agents[0]!.physicalAgentId).toBe('agent-abc-111'); + expect(result.agents[1]!.protocol).toBe('MCP'); + expect(result.agents[1]!.physicalAgentId).toBe('agent-def-222'); + }); + + it('extracts awsTarget from the first agent', () => { + const result = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'two-agents.yaml')); + expect(result.awsTarget.account).toBe('111122223333'); + expect(result.awsTarget.region).toBe('us-west-2'); + }); + + it('extracts defaultAgent', () => { + const result = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'two-agents.yaml')); + expect(result.defaultAgent).toBe('search_agent'); + }); + + it('parses memory only from agents with non-NO_MEMORY mode', () => { + const result = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'two-agents.yaml')); + expect(result.memories).toHaveLength(1); + expect(result.memories[0]!.name).toBe('shared_memory'); + expect(result.memories[0]!.mode).toBe('STM_AND_LTM'); + expect(result.memories[0]!.physicalMemoryId).toBe('mem-xyz-999'); + }); + + it('extracts credentials from agents', () => { + const result = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'two-agents.yaml')); + expect(result.credentials).toHaveLength(1); + expect(result.credentials[0]!.name).toBe('github-oauth'); + }); +}); + +describe('parseStarterToolkitYaml - 3 agents with shared memory', () => { + it('parses a YAML file with 3 agents', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'three-agents-shared-memory.yaml')); + expect(r.agents).toHaveLength(3); + expect(r.agents.map(a => a.name)).toEqual(['agent_alpha', 'agent_beta', 'agent_gamma']); + }); + + it('deduplicates shared memory', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'three-agents-shared-memory.yaml')); + expect(r.memories).toHaveLength(1); + expect(r.memories[0]!.name).toBe('shared_memory'); + }); + + it('extracts different 
runtime versions', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'three-agents-shared-memory.yaml')); + expect(r.agents.map(a => a.runtimeVersion)).toEqual(['PYTHON_3_12', 'PYTHON_3_13', 'PYTHON_3_11']); + }); + + it('extracts different protocols', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'three-agents-shared-memory.yaml')); + expect(r.agents.map(a => a.protocol)).toEqual(['HTTP', 'MCP', 'HTTP']); + }); +}); + +describe('parseStarterToolkitYaml - similar agent names', () => { + it('parses agents with similar names correctly', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'similar-names.yaml')); + expect(r.agents).toHaveLength(2); + expect(r.agents[0]!.name).toBe('agent1'); + expect(r.agents[1]!.name).toBe('agent1_v2'); + }); +}); + +describe('parseStarterToolkitYaml - underscore names', () => { + it('parses agents with underscores in names', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'underscore-names.yaml')); + expect(r.agents).toHaveLength(2); + expect(r.agents[0]!.name).toBe('my_search_agent'); + expect(r.agents[1]!.name).toBe('my_chat_agent'); + }); +}); + +describe('parseStarterToolkitYaml - partial import', () => { + it('parses both agents, one with physicalAgentId and one without', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'partial-import.yaml')); + expect(r.agents).toHaveLength(2); + expect(r.agents[0]!.physicalAgentId).toBe('agent-deployed-111'); + expect(r.agents[1]!.physicalAgentId).toBeFalsy(); + }); + + it('memory from deployed agent is extracted', () => { + const r = parseStarterToolkitYaml(path.join(FIXTURES_DIR, 'partial-import.yaml')); + expect(r.memories).toHaveLength(1); + expect(r.memories[0]!.name).toBe('deployed_agent_memory'); + expect(r.memories[0]!.mode).toBe('STM_ONLY'); + expect(r.memories[0]!.eventExpiryDays).toBe(14); + }); +}); + +describe('findLogicalIdsByType - multiple runtimes', () => { + const template: 
CfnTemplate = { + Resources: { + SearchRT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'p_search' }, + }, + ChatRT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'p_chat' }, + }, + Mem: { Type: 'AWS::BedrockAgentCore::Memory', Properties: { Name: 'mem' } }, + Role: { Type: 'AWS::IAM::Role', Properties: { RoleName: 'role' } }, + }, + }; + + it('finds all runtime logical IDs', () => { + const ids = findLogicalIdsByType(template, 'AWS::BedrockAgentCore::Runtime'); + expect(ids).toHaveLength(2); + expect(ids).toContain('SearchRT'); + expect(ids).toContain('ChatRT'); + }); + + it('finds memory logical IDs', () => { + expect(findLogicalIdsByType(template, 'AWS::BedrockAgentCore::Memory')).toEqual(['Mem']); + }); +}); + +describe('findLogicalIdByProperty - multiple runtimes', () => { + const template: CfnTemplate = { + Resources: { + SearchRT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'proj_search' }, + }, + ChatRT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'proj_chat' }, + }, + }, + }; + + it('finds correct logical ID for each agent', () => { + expect(findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_search')).toBe( + 'SearchRT' + ); + expect(findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_chat')).toBe( + 'ChatRT' + ); + }); + + it('returns undefined for non-existent agent', () => { + expect( + findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_missing') + ).toBeUndefined(); + }); +}); + +describe('findLogicalIdByProperty - similar names with direct string values', () => { + const template: CfnTemplate = { + Resources: { + Agent1RT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'proj_agent1' }, + }, + Agent1V2RT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { 
AgentRuntimeName: 'proj_agent1_v2' }, + }, + }, + }; + + it('exact match takes precedence', () => { + expect(findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_agent1')).toBe( + 'Agent1RT' + ); + expect( + findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_agent1_v2') + ).toBe('Agent1V2RT'); + }); +}); + +describe('findLogicalIdByProperty - Fn::Sub false match fix', () => { + const template: CfnTemplate = { + Resources: { + Agent1V2RT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: { 'Fn::Sub': 'proj_agent1_v2' } }, + }, + Agent1RT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: { 'Fn::Sub': 'proj_agent1' } }, + }, + }, + }; + + it('correctly matches Agent1RT for proj_agent1 (not Agent1V2RT)', () => { + expect(findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_agent1')).toBe( + 'Agent1RT' + ); + }); +}); + +describe('findLogicalIdByProperty - fallback single-runtime logic', () => { + const template: CfnTemplate = { + Resources: { + RT1: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'proj_a' }, + }, + RT2: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'proj_b' }, + }, + }, + }; + + it('with multiple runtimes, fallback is NOT triggered', () => { + expect(findLogicalIdsByType(template, 'AWS::BedrockAgentCore::Runtime').length).toBeGreaterThan(1); + expect( + findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'proj_missing') + ).toBeUndefined(); + }); +}); + +describe('filterCompanionOnlyTemplate - multiple agents', () => { + const synthTemplate: CfnTemplate = { + Resources: { + SearchRT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'proj_search' }, + }, + ChatRT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 
'proj_chat' }, + }, + Mem: { Type: 'AWS::BedrockAgentCore::Memory', Properties: { Name: 'mem' } }, + SearchRole: { Type: 'AWS::IAM::Role', Properties: { RoleName: 'SearchRole' } }, + Policy: { + Type: 'AWS::IAM::Policy', + Properties: { + PolicyDocument: { + Statement: [{ Resource: { 'Fn::GetAtt': ['SearchRT', 'Arn'] } }], + }, + }, + DependsOn: 'SearchRT', + }, + }, + Outputs: { + SearchId: { Value: { Ref: 'SearchRT' } }, + RoleArn: { Value: { 'Fn::GetAtt': ['SearchRole', 'Arn'] } }, + }, + }; + + it('removes all primary resources', () => { + const f = filterCompanionOnlyTemplate(synthTemplate); + expect(f.Resources).not.toHaveProperty('SearchRT'); + expect(f.Resources).not.toHaveProperty('ChatRT'); + expect(f.Resources).not.toHaveProperty('Mem'); + expect(f.Resources).toHaveProperty('SearchRole'); + expect(f.Resources).toHaveProperty('Policy'); + }); + + it('removes outputs referencing primary resources', () => { + const f = filterCompanionOnlyTemplate(synthTemplate); + expect(f.Outputs).not.toHaveProperty('SearchId'); + expect(f.Outputs).toHaveProperty('RoleArn'); + }); + + it('replaces dangling refs and removes DependsOn', () => { + const f = filterCompanionOnlyTemplate(synthTemplate); + const doc = f.Resources.Policy!.Properties!.PolicyDocument as { + Statement: { Resource: unknown }[]; + }; + expect(doc.Statement[0]!.Resource).toBe('*'); + expect(f.Resources.Policy!.DependsOn).toBeUndefined(); + }); +}); + +describe('buildImportTemplate - multiple agents', () => { + const deployed: CfnTemplate = { + Resources: { + Role: { Type: 'AWS::IAM::Role', Properties: { RoleName: 'R' } }, + }, + }; + const synth: CfnTemplate = { + Resources: { + Role: { Type: 'AWS::IAM::Role', Properties: { RoleName: 'R' } }, + RT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { N: 'x' }, + DependsOn: 'Role', + }, + Mem: { Type: 'AWS::BedrockAgentCore::Memory', Properties: { Name: 'mem' } }, + }, + }; + + it('adds resources with DeletionPolicy Retain and no DependsOn', () => 
{ + const t = buildImportTemplate(deployed, synth, ['RT', 'Mem']); + expect(t.Resources.RT!.DeletionPolicy).toBe('Retain'); + expect(t.Resources.RT!.DependsOn).toBeUndefined(); + expect(t.Resources.Mem!.DeletionPolicy).toBe('Retain'); + }); + + it('does not mutate original', () => { + const keys = Object.keys(deployed.Resources); + buildImportTemplate(deployed, synth, ['RT']); + expect(Object.keys(deployed.Resources)).toEqual(keys); + }); +}); + +describe('sanitize and toStackName', () => { + const sanitize = (n: string) => n.replace(/_/g, '-'); + const toStackName = (p: string, t: string) => `AgentCore-${sanitize(p)}-${sanitize(t)}`; + + it('replaces underscores with hyphens', () => { + expect(sanitize('my_project')).toBe('my-project'); + }); + + it('generates correct stack name', () => { + expect(toStackName('my_project', 'default')).toBe('AgentCore-my-project-default'); + }); +}); + +describe('credential deduplication', () => { + it('deduplicates credentials with same name', () => { + const creds: { name: string }[] = []; + for (const n of ['shared', 'shared', 'unique']) { + if (!creds.find(c => c.name === n)) creds.push({ name: n }); + } + expect(creds).toHaveLength(2); + }); +}); + +describe('source code directory structure', () => { + it('each agent gets its own directory', () => { + const dirs = ['search_agent', 'chat_agent'].map(n => path.join('/proj', 'app', n)); + expect(dirs[0]).toBe('/proj/app/search_agent'); + expect(dirs[1]).toBe('/proj/app/chat_agent'); + expect(new Set(dirs).size).toBe(2); + }); +}); diff --git a/src/cli/commands/import/__tests__/test1-no-memory-agent.test.ts b/src/cli/commands/import/__tests__/test1-no-memory-agent.test.ts new file mode 100644 index 000000000..376f00924 --- /dev/null +++ b/src/cli/commands/import/__tests__/test1-no-memory-agent.test.ts @@ -0,0 +1,665 @@ +/** + * Test Group 1: Starter Toolkit Agent Only (No Memory) + * + * Tests the import path for a single agent with no memory, no credentials, + * CodeZip build, 
PUBLIC network, HTTP protocol. + */ +import { PRIMARY_RESOURCE_TYPES } from '../constants'; +import { + buildImportTemplate, + filterCompanionOnlyTemplate, + findLogicalIdByProperty, + findLogicalIdsByType, +} from '../template-utils'; +import type { ParsedStarterToolkitAgent, ParsedStarterToolkitConfig } from '../types'; +import { parseStarterToolkitYaml } from '../yaml-parser'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; + +const NO_MEMORY_YAML = ` +default_agent: my_strands_agent +agents: + my_strands_agent: + name: my_strands_agent + entrypoint: main.py + deployment_type: direct_code_deploy + runtime_type: PYTHON_3_12 + language: python + source_path: ./agent_src + aws: + account: '111122223333' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + memory: + mode: NO_MEMORY + bedrock_agentcore: + agent_id: ABCDEFGHIJ + agent_arn: arn:aws:bedrock-agentcore:us-west-2:111122223333:runtime/ABCDEFGHIJ +`; + +function writeTempYaml(content: string): string { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'test1-')); + const filePath = path.join(dir, '.bedrock_agentcore.yaml'); + fs.writeFileSync(filePath, content, 'utf-8'); + return filePath; +} + +function cleanupTempFile(filePath: string): void { + try { + fs.rmSync(path.dirname(filePath), { recursive: true, force: true }); + } catch { + /* noop */ + } +} + +describe('YAML Parsing: No-memory agent config', () => { + const tempFiles: string[] = []; + let parsed: ParsedStarterToolkitConfig; + + beforeEach(() => { + const f = writeTempYaml(NO_MEMORY_YAML); + tempFiles.push(f); + parsed = parseStarterToolkitYaml(f); + }); + + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('should parse exactly one agent', () => { + 
expect(parsed.agents).toHaveLength(1); + }); + + it('should have zero memories when mode is NO_MEMORY', () => { + expect(parsed.memories).toHaveLength(0); + }); + + it('should have zero credentials', () => { + expect(parsed.credentials).toHaveLength(0); + }); + + it('should parse agent name correctly', () => { + expect(parsed.agents[0]!.name).toBe('my_strands_agent'); + }); + + it('should parse default_agent correctly', () => { + expect(parsed.defaultAgent).toBe('my_strands_agent'); + }); + + it('should parse deployment_type as CodeZip for direct_code_deploy', () => { + expect(parsed.agents[0]!.build).toBe('CodeZip'); + }); + + it('should parse protocol as HTTP', () => { + expect(parsed.agents[0]!.protocol).toBe('HTTP'); + }); + + it('should parse network mode as PUBLIC', () => { + expect(parsed.agents[0]!.networkMode).toBe('PUBLIC'); + }); + + it('should parse networkConfig as undefined for PUBLIC mode', () => { + expect(parsed.agents[0]!.networkConfig).toBeUndefined(); + }); + + it('should parse runtime version as PYTHON_3_12', () => { + expect(parsed.agents[0]!.runtimeVersion).toBe('PYTHON_3_12'); + }); + + it('should parse physical agent ID', () => { + expect(parsed.agents[0]!.physicalAgentId).toBe('ABCDEFGHIJ'); + }); + + it('should parse physical agent ARN', () => { + expect(parsed.agents[0]!.physicalAgentArn).toBe( + 'arn:aws:bedrock-agentcore:us-west-2:111122223333:runtime/ABCDEFGHIJ' + ); + }); + + it('should parse AWS account and region', () => { + expect(parsed.awsTarget.account).toBe('111122223333'); + expect(parsed.awsTarget.region).toBe('us-west-2'); + }); + + it('should parse observability enabled as true', () => { + expect(parsed.agents[0]!.enableOtel).toBe(true); + }); + + it('should parse entrypoint correctly', () => { + expect(parsed.agents[0]!.entrypoint).toBe('main.py'); + }); + + it('should parse language as python (default)', () => { + expect(parsed.agents[0]!.language).toBe('python'); + }); + + it('should resolve source_path relative to YAML 
file directory', () => { + const agent = parsed.agents[0]!; + expect(path.isAbsolute(agent.sourcePath!)).toBe(true); + expect(agent.sourcePath!).toContain('agent_src'); + }); +}); + +describe('YAML Parsing: Edge cases', () => { + const tempFiles: string[] = []; + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('should handle missing memory section', () => { + const yaml = ` +default_agent: a +agents: + a: + name: a + entrypoint: main.py + aws: + account: '111' + region: us-east-1 + bedrock_agentcore: + agent_id: X +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).memories).toHaveLength(0); + }); + + it('should handle memory mode null', () => { + const yaml = ` +default_agent: a +agents: + a: + name: a + entrypoint: main.py + aws: + account: '111' + region: us-east-1 + memory: + mode: null + bedrock_agentcore: + agent_id: X +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).memories).toHaveLength(0); + }); + + it('should handle runtime_type null -> PYTHON_3_12', () => { + const yaml = ` +default_agent: a +agents: + a: + name: a + entrypoint: main.py + runtime_type: null + aws: + account: '111' + region: us-east-1 + bedrock_agentcore: + agent_id: X +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.runtimeVersion).toBe('PYTHON_3_12'); + }); + + it('should fall back to PYTHON_3_12 for unknown runtime_type (not python3.12)', () => { + const yaml = ` +default_agent: a +agents: + a: + name: a + entrypoint: main.py + runtime_type: some_unknown + aws: + account: '111' + region: us-east-1 + bedrock_agentcore: + agent_id: X +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + const rv = parseStarterToolkitYaml(f).agents[0]!.runtimeVersion; + expect(rv).toBe('PYTHON_3_12'); + expect(rv).not.toBe('python3.12'); + }); + + it('should default to Container build when 
deployment_type missing', () => { + const yaml = ` +default_agent: a +agents: + a: + name: a + entrypoint: main.py + aws: + account: '111' + region: us-east-1 + bedrock_agentcore: + agent_id: X +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.build).toBe('Container'); + }); + + it('should set sourcePath to undefined when absent', () => { + const yaml = ` +default_agent: a +agents: + a: + name: a + entrypoint: main.py + aws: + account: '111' + region: us-east-1 + bedrock_agentcore: + agent_id: X +`; + const f = writeTempYaml(yaml); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).agents[0]!.sourcePath).toBeUndefined(); + }); +}); + +describe('toAgentEnvSpec conversion', () => { + const APP_DIR = 'app'; + function toAgentEnvSpec(agent: ParsedStarterToolkitAgent) { + return { + type: 'AgentCoreRuntime' as const, + name: agent.name, + build: agent.build, + entrypoint: path.basename(agent.entrypoint), + codeLocation: path.join(APP_DIR, agent.name), + runtimeVersion: agent.runtimeVersion, + networkMode: agent.networkMode, + networkConfig: agent.networkMode === 'VPC' ? agent.networkConfig : undefined, + protocol: agent.protocol, + instrumentation: agent.enableOtel ? 
{ otel: true } : undefined, + }; + } + + const base: ParsedStarterToolkitAgent = { + name: 'my_strands_agent', + entrypoint: 'main.py', + build: 'CodeZip', + runtimeVersion: 'PYTHON_3_12', + language: 'python', + networkMode: 'PUBLIC', + protocol: 'HTTP', + enableOtel: true, + }; + + it('type=AgentCoreRuntime', () => { + expect(toAgentEnvSpec(base).type).toBe('AgentCoreRuntime'); + }); + it('build=CodeZip', () => { + expect(toAgentEnvSpec(base).build).toBe('CodeZip'); + }); + it('protocol=HTTP', () => { + expect(toAgentEnvSpec(base).protocol).toBe('HTTP'); + }); + it('networkMode=PUBLIC', () => { + expect(toAgentEnvSpec(base).networkMode).toBe('PUBLIC'); + }); + it('codeLocation=app/', () => { + expect(toAgentEnvSpec(base).codeLocation).toBe('app/my_strands_agent'); + }); + it('basename entrypoint', () => { + expect(toAgentEnvSpec({ ...base, entrypoint: 'src/main.py' }).entrypoint).toBe('main.py'); + }); + it('instrumentation', () => { + expect(toAgentEnvSpec(base).instrumentation).toEqual({ otel: true }); + }); + it('no networkConfig for PUBLIC', () => { + expect(toAgentEnvSpec(base).networkConfig).toBeUndefined(); + }); + it('runtimeVersion=PYTHON_3_12', () => { + expect(toAgentEnvSpec(base).runtimeVersion).toBe('PYTHON_3_12'); + }); +}); + +describe('toMemorySpec: not invoked for no-memory', () => { + const tempFiles: string[] = []; + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('zero memories for NO_MEMORY', () => { + const f = writeTempYaml(NO_MEMORY_YAML); + tempFiles.push(f); + expect(parseStarterToolkitYaml(f).memories).toHaveLength(0); + }); +}); + +describe('Merge logic', () => { + it('should add agent to empty project', () => { + const existingAgentNames = new Set(); + const agentName = 'my_strands_agent'; + expect(!existingAgentNames.has(agentName)).toBe(true); + }); + + it('should skip duplicate agent', () => { + const existingAgentNames = new Set(['my_strands_agent']); + const agentName = 
'my_strands_agent'; + expect(!existingAgentNames.has(agentName)).toBe(false); + }); +}); + +describe('Source code copy', () => { + let tempDir: string; + let destDir: string; + + beforeEach(() => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test1-src-')); + destDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test1-dst-')); + }); + afterEach(() => { + fs.rmSync(tempDir, { recursive: true, force: true }); + fs.rmSync(destDir, { recursive: true, force: true }); + }); + + it('copies recursively', () => { + fs.writeFileSync(path.join(tempDir, 'main.py'), 'print("hi")'); + fs.mkdirSync(path.join(tempDir, 'sub')); + fs.writeFileSync(path.join(tempDir, 'sub', 'util.py'), '# util'); + copyDirRecursive(tempDir, destDir); + expect(fs.existsSync(path.join(destDir, 'main.py'))).toBe(true); + expect(fs.existsSync(path.join(destDir, 'sub', 'util.py'))).toBe(true); + }); + + it('fixes pyproject.toml setuptools', () => { + const content = [ + '[build-system]', + 'requires = ["setuptools>=68", "wheel"]', + 'build-backend = "setuptools.build_meta"', + '', + '[tool.setuptools.packages.find]', + 'where = ["src"]', + ].join('\n'); + const filePath = path.join(tempDir, 'pyproject.toml'); + fs.writeFileSync(filePath, content); + const raw = fs.readFileSync(filePath, 'utf-8'); + const fixed = raw.replace(/\[tool\.setuptools\.packages\.find\]\n.*where\s*=.*\n?/g, '').trim(); + fs.writeFileSync(filePath, fixed); + const result = fs.readFileSync(filePath, 'utf-8'); + expect(result).not.toContain('[tool.setuptools.packages.find]'); + expect(result).toContain('[build-system]'); + }); +}); + +function copyDirRecursive(src: string, dest: string): void { + if (!fs.existsSync(dest)) fs.mkdirSync(dest, { recursive: true }); + for (const entry of fs.readdirSync(src, { withFileTypes: true })) { + const srcPath = path.join(src, entry.name); + const destPath = path.join(dest, entry.name); + if (entry.isDirectory()) { + copyDirRecursive(srcPath, destPath); + } else { + fs.copyFileSync(srcPath, 
destPath); + } + } +} + +describe('Phase 1: filterCompanionOnlyTemplate', () => { + const synthTemplate = { + AWSTemplateFormatVersion: '2010-09-09' as const, + Resources: { + AgentRole: { + Type: 'AWS::IAM::Role', + Properties: { RoleName: 'role' }, + }, + AgentPolicy: { + Type: 'AWS::IAM::Policy', + Properties: { PolicyName: 'policy' }, + DependsOn: ['AgentRuntime'] as string[], + }, + AgentRuntime: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'my_strands_agent' }, + }, + LogGroup: { + Type: 'AWS::Logs::LogGroup', + Properties: { + LogGroupName: { 'Fn::Sub': '/aws/agentcore/${AgentRuntime}' }, + }, + }, + }, + Outputs: { + RuntimeId: { + Value: { 'Fn::GetAtt': ['AgentRuntime', 'AgentRuntimeId'] }, + }, + RoleArn: { Value: { 'Fn::GetAtt': ['AgentRole', 'Arn'] } }, + }, + }; + + it('removes Runtime resources', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + expect(filtered.Resources.AgentRuntime).toBeUndefined(); + }); + + it('keeps IAM Role', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + expect(filtered.Resources.AgentRole).toBeDefined(); + }); + + it('keeps IAM Policy', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + expect(filtered.Resources.AgentPolicy).toBeDefined(); + }); + + it('replaces dangling Fn::GetAtt with "*"', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + const logGroupProps = filtered.Resources.LogGroup?.Properties; + const logGroupName = logGroupProps?.LogGroupName as Record<string, unknown> | undefined; /* bare `Record` is a compile error: the utility type requires two type arguments */ + if (logGroupName && 'Fn::Sub' in logGroupName) { + expect(logGroupName['Fn::Sub']).toContain('*'); + } + }); + + it('removes outputs referencing removed resources', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + expect(filtered.Outputs?.RuntimeId).toBeUndefined(); + }); + + it('keeps outputs not referencing removed resources', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + 
expect(filtered.Outputs?.RoleArn).toBeDefined(); + }); + + it('removes DependsOn to removed resources', () => { + const filtered = filterCompanionOnlyTemplate(synthTemplate); + const policy = filtered.Resources.AgentPolicy; + if (policy?.DependsOn) { + if (Array.isArray(policy.DependsOn)) { + expect(policy.DependsOn).not.toContain('AgentRuntime'); + } + } + }); + + it('handles only-primary template', () => { + const onlyPrimary = { + AWSTemplateFormatVersion: '2010-09-09' as const, + Resources: { + RT: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: {}, + }, + }, + }; + const filtered = filterCompanionOnlyTemplate(onlyPrimary); + expect(filtered.Resources.RT).toBeUndefined(); + expect(Object.keys(filtered.Resources)).toHaveLength(0); + }); +}); + +describe('Phase 2: buildImportTemplate', () => { + const deployedTemplate = { + AWSTemplateFormatVersion: '2010-09-09' as const, + Resources: { + AgentRole: { Type: 'AWS::IAM::Role', Properties: {} }, + }, + }; + + const synthTemplate = { + AWSTemplateFormatVersion: '2010-09-09' as const, + Resources: { + AgentRole: { Type: 'AWS::IAM::Role', Properties: {} }, + AgentRuntime: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'my_strands_agent' }, + DependsOn: ['SomeCustomResource'] as string[], + }, + SomeCustomResource: { + Type: 'AWS::CloudFormation::CustomResource', + Properties: {}, + }, + }, + }; + + it('adds primary resource', () => { + const result = buildImportTemplate(deployedTemplate, synthTemplate, ['AgentRuntime']); + expect(result.Resources.AgentRuntime).toBeDefined(); + }); + + it('sets DeletionPolicy=Retain', () => { + const result = buildImportTemplate(deployedTemplate, synthTemplate, ['AgentRuntime']); + const rt = result.Resources.AgentRuntime!; + expect(rt.DeletionPolicy).toBe('Retain'); + }); + + it('sets UpdateReplacePolicy=Retain', () => { + const result = buildImportTemplate(deployedTemplate, synthTemplate, ['AgentRuntime']); + const rt = 
result.Resources.AgentRuntime!; + expect(rt.UpdateReplacePolicy).toBe('Retain'); + }); + + it('removes DependsOn', () => { + const result = buildImportTemplate(deployedTemplate, synthTemplate, ['AgentRuntime']); + const rt = result.Resources.AgentRuntime!; + expect(rt.DependsOn).toBeUndefined(); + }); + + it('does not modify original', () => { + buildImportTemplate(deployedTemplate, synthTemplate, ['AgentRuntime']); + expect(deployedTemplate.Resources).not.toHaveProperty('AgentRuntime'); + }); + + it('throws for missing logical ID', () => { + expect(() => buildImportTemplate(deployedTemplate, synthTemplate, ['NonExistent'])).toThrow(); + }); +}); + +describe('Template utils: findLogicalId', () => { + const template = { + AWSTemplateFormatVersion: '2010-09-09' as const, + Resources: { + RT1: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'agent_a' }, + }, + RT2: { + Type: 'AWS::BedrockAgentCore::Runtime', + Properties: { AgentRuntimeName: 'agent_b' }, + }, + Role: { Type: 'AWS::IAM::Role', Properties: {} }, + }, + }; + + it('finds by property', () => { + const id = findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'agent_a'); + expect(id).toBe('RT1'); + }); + + it('returns undefined for non-match', () => { + const id = findLogicalIdByProperty(template, 'AWS::BedrockAgentCore::Runtime', 'AgentRuntimeName', 'nope'); + expect(id).toBeUndefined(); + }); + + it('finds by type', () => { + const ids = findLogicalIdsByType(template, 'AWS::BedrockAgentCore::Runtime'); + expect(ids).toHaveLength(2); + expect(ids).toContain('RT1'); + expect(ids).toContain('RT2'); + }); + + it('empty for missing type', () => { + const ids = findLogicalIdsByType(template, 'AWS::Lambda::Function'); + expect(ids).toHaveLength(0); + }); +}); + +describe('sanitize and toStackName', () => { + it('replaces underscores', () => { + const sanitize = (n: string) => n.replace(/_/g, '-'); + 
expect(sanitize('my_strands_agent')).toBe('my-strands-agent'); + }); + + it('correct stack name', () => { + const sanitize = (n: string) => n.replace(/_/g, '-'); + const toStackName = (p: string) => `agentcore-${sanitize(p)}`; + expect(toStackName('my_project')).toBe('agentcore-my-project'); + }); +}); + +describe('Constants', () => { + it('includes Runtime', () => { + expect(PRIMARY_RESOURCE_TYPES).toContain('AWS::BedrockAgentCore::Runtime'); + }); + + it('includes Memory', () => { + expect(PRIMARY_RESOURCE_TYPES).toContain('AWS::BedrockAgentCore::Memory'); + }); + + it('excludes IAM::Role', () => { + expect(PRIMARY_RESOURCE_TYPES).not.toContain('AWS::IAM::Role'); + }); +}); + +describe('Integration: full parse', () => { + const tempFiles: string[] = []; + afterEach(() => { + for (const f of tempFiles) cleanupTempFile(f); + tempFiles.length = 0; + }); + + it('1 agent, 0 memories, 0 credentials', () => { + const f = writeTempYaml(NO_MEMORY_YAML); + tempFiles.push(f); + const parsed = parseStarterToolkitYaml(f); + expect(parsed.agents).toHaveLength(1); + expect(parsed.memories).toHaveLength(0); + expect(parsed.credentials).toHaveLength(0); + }); + + it('correct physical agent ID', () => { + const f = writeTempYaml(NO_MEMORY_YAML); + tempFiles.push(f); + const parsed = parseStarterToolkitYaml(f); + expect(parsed.agents[0]!.physicalAgentId).toBe('ABCDEFGHIJ'); + }); + + it('zero memories to import', () => { + const f = writeTempYaml(NO_MEMORY_YAML); + tempFiles.push(f); + const parsed = parseStarterToolkitYaml(f); + const resourcesToImport = parsed.agents + .filter(a => a.physicalAgentId) + .map(a => ({ + ResourceType: 'AWS::BedrockAgentCore::Runtime', + LogicalResourceId: 'RT', + ResourceIdentifier: { AgentRuntimeId: a.physicalAgentId! 
}, + })); + expect(resourcesToImport).toHaveLength(1); + expect(resourcesToImport[0]!.ResourceIdentifier.AgentRuntimeId).toBe('ABCDEFGHIJ'); + }); +}); diff --git a/src/cli/commands/import/__tests__/vpc-import.test.ts b/src/cli/commands/import/__tests__/vpc-import.test.ts new file mode 100644 index 000000000..6d0da5e1c --- /dev/null +++ b/src/cli/commands/import/__tests__/vpc-import.test.ts @@ -0,0 +1,693 @@ +/** + * Test Group 5: VPC Agent Import + * + * Tests that the import flow correctly handles agents configured with VPC networking: + * - YAML parsing of network_configuration with VPC mode + * - toAgentEnvSpec sets networkMode and networkConfig correctly + * - PUBLIC agents don't get networkConfig + * - Edge cases: empty arrays, null network_mode_config + * - Custom YAML parser handles nested list structures + */ +// We need to test the yaml-parser module. Since parseSimpleYaml is not exported, +// we test it through parseStarterToolkitYaml by writing temp files. +import { parseStarterToolkitYaml } from '../yaml-parser.js'; +import * as fs from 'node:fs'; +import * as os from 'node:os'; +import * as path from 'node:path'; +import { afterEach, describe, expect, it } from 'vitest'; + +// Use a unique temp dir (as the sibling container-agent-import test does) rather than +// __dirname, which is unavailable under ESM and would write fixtures into the source tree. +const FIXTURES_DIR = fs.mkdtempSync(path.join(os.tmpdir(), 'fixtures-vpc-')); + +function writeFixture(name: string, content: string): string { + const filePath = path.join(FIXTURES_DIR, name); + fs.mkdirSync(FIXTURES_DIR, { recursive: true }); + fs.writeFileSync(filePath, content, 'utf-8'); + return filePath; +} + +afterEach(() => { + // Clean up fixtures + if (fs.existsSync(FIXTURES_DIR)) { + fs.rmSync(FIXTURES_DIR, { recursive: true, force: true }); + } +}); + +// ============================================================================ +// 1. 
YAML Parsing: VPC Config Extraction +// ============================================================================ +describe('YAML parsing: VPC config extraction', () => { + it('parses VPC agent with subnets and security_groups as arrays', () => { + const filePath = writeFixture( + 'vpc-basic.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + - subnet-def456 + security_groups: + - sg-12345 + observability: + enabled: true + bedrock_agentcore: + agent_id: VPCAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + expect(parsed.agents).toHaveLength(1); + + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('VPC'); + expect(agent.networkConfig).toBeDefined(); + expect(agent.networkConfig!.subnets).toEqual(['subnet-abc123', 'subnet-def456']); + expect(agent.networkConfig!.securityGroups).toEqual(['sg-12345']); + }); + + it('parses PUBLIC agent without networkConfig', () => { + const filePath = writeFixture( + 'public-basic.yaml', + `default_agent: public_agent +agents: + public_agent: + name: public_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + observability: + enabled: true + bedrock_agentcore: + agent_id: PUBAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + expect(parsed.agents).toHaveLength(1); + + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('PUBLIC'); + expect(agent.networkConfig).toBeUndefined(); + }); + + it('defaults to PUBLIC when network_configuration is absent', () => { + const filePath = writeFixture( + 'no-network.yaml', + `default_agent: simple_agent +agents: + simple_agent: + name: simple_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + bedrock_agentcore: + agent_id: 
null +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('PUBLIC'); + expect(agent.networkConfig).toBeUndefined(); + }); + + it('extracts physicalAgentId for VPC agents', () => { + const filePath = writeFixture( + 'vpc-with-id.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + security_groups: + - sg-12345 + bedrock_agentcore: + agent_id: VPCAGENT001 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/VPCAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.physicalAgentId).toBe('VPCAGENT001'); + expect(agent.physicalAgentArn).toBe('arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/VPCAGENT001'); + }); +}); + +// ============================================================================ +// 2. 
PUBLIC agents don't get networkConfig even if network_mode_config exists +// ============================================================================ +describe('PUBLIC agents: no networkConfig even if network_mode_config present', () => { + it('ignores network_mode_config for PUBLIC mode', () => { + const filePath = writeFixture( + 'public-with-config.yaml', + `default_agent: public_agent +agents: + public_agent: + name: public_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + network_mode_config: + subnets: + - subnet-abc123 + security_groups: + - sg-12345 + bedrock_agentcore: + agent_id: PUBAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('PUBLIC'); + // networkConfig should be undefined because networkMode is PUBLIC + expect(agent.networkConfig).toBeUndefined(); + }); +}); + +// ============================================================================ +// 3. Edge Cases +// ============================================================================ +describe('VPC edge cases', () => { + it('VPC mode with empty subnets and security_groups keys (no list items)', () => { + const filePath = writeFixture( + 'vpc-empty-arrays.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + security_groups: + bedrock_agentcore: + agent_id: VPCAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('VPC'); + // networkModeConfig exists (it's an object, not null), so networkConfig should be set. 
+ expect(agent.networkConfig).toBeDefined(); + // After fix: Array.isArray guard ensures that when the YAML parser creates + // empty objects {} (for keys with no list items), we fall back to []. + expect(Array.isArray(agent.networkConfig!.subnets)).toBe(true); + expect(agent.networkConfig!.subnets).toEqual([]); + expect(Array.isArray(agent.networkConfig!.securityGroups)).toBe(true); + expect(agent.networkConfig!.securityGroups).toEqual([]); + }); + + it('VPC mode with null network_mode_config', () => { + const filePath = writeFixture( + 'vpc-null-config.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: null + bedrock_agentcore: + agent_id: VPCAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('VPC'); + // network_mode_config is null, so networkConfig should be undefined + expect(agent.networkConfig).toBeUndefined(); + }); + + it('handles single subnet and single security group', () => { + const filePath = writeFixture( + 'vpc-single.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-only1234 + security_groups: + - sg-only5678 + bedrock_agentcore: + agent_id: null +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('VPC'); + expect(agent.networkConfig).toBeDefined(); + expect(agent.networkConfig!.subnets).toEqual(['subnet-only1234']); + expect(agent.networkConfig!.securityGroups).toEqual(['sg-only5678']); + }); + + it('handles many subnets and security groups', () => { + const filePath = writeFixture( + 'vpc-many.yaml', + `default_agent: vpc_agent 
+agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-aaa11111 + - subnet-bbb22222 + - subnet-ccc33333 + security_groups: + - sg-xxx11111 + - sg-yyy22222 + bedrock_agentcore: + agent_id: VPCAGENT999 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkConfig!.subnets).toEqual(['subnet-aaa11111', 'subnet-bbb22222', 'subnet-ccc33333']); + expect(agent.networkConfig!.securityGroups).toEqual(['sg-xxx11111', 'sg-yyy22222']); + }); +}); + +// ============================================================================ +// 4. Custom YAML parser: nested list structures +// ============================================================================ +describe('Custom YAML parser: nested lists in objects', () => { + it('parses subnets as string arrays, not objects or numbers', () => { + const filePath = writeFixture( + 'type-check.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + - subnet-def456 + security_groups: + - sg-12345 + bedrock_agentcore: + agent_id: null +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkConfig).toBeDefined(); + + // Verify each element is a string (not parsed as number or object) + for (const subnet of agent.networkConfig!.subnets) { + expect(typeof subnet).toBe('string'); + } + for (const sg of agent.networkConfig!.securityGroups) { + expect(typeof sg).toBe('string'); + } + }); + + it('handles mixed VPC and PUBLIC agents in same config', () => { + const filePath = writeFixture( + 'mixed-agents.yaml', + `default_agent: public_agent +agents: + public_agent: + name: 
public_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + observability: + enabled: true + bedrock_agentcore: + agent_id: PUB001 + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + security_groups: + - sg-12345 + observability: + enabled: false + bedrock_agentcore: + agent_id: VPC001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + expect(parsed.agents).toHaveLength(2); + + const publicAgent = parsed.agents.find(a => a.name === 'public_agent')!; + const vpcAgent = parsed.agents.find(a => a.name === 'vpc_agent')!; + + expect(publicAgent.networkMode).toBe('PUBLIC'); + expect(publicAgent.networkConfig).toBeUndefined(); + + expect(vpcAgent.networkMode).toBe('VPC'); + expect(vpcAgent.networkConfig).toBeDefined(); + expect(vpcAgent.networkConfig!.subnets).toEqual(['subnet-abc123']); + expect(vpcAgent.networkConfig!.securityGroups).toEqual(['sg-12345']); + + // Also verify other fields are not cross-contaminated + expect(publicAgent.enableOtel).toBe(true); + expect(vpcAgent.enableOtel).toBe(false); + }); + + it('handles network_mode_config with quoted subnet values', () => { + const filePath = writeFixture( + 'vpc-quoted.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - "subnet-abc123" + - 'subnet-def456' + security_groups: + - "sg-12345" + bedrock_agentcore: + agent_id: null +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkConfig).toBeDefined(); + // Quoted values should have quotes stripped + expect(agent.networkConfig!.subnets).toEqual(['subnet-abc123', 'subnet-def456']); + 
expect(agent.networkConfig!.securityGroups).toEqual(['sg-12345']); + }); +}); + +// ============================================================================ +// 5. toAgentEnvSpec: VPC config makes it into agentcore.json format +// ============================================================================ +describe('toAgentEnvSpec: VPC config in final output', () => { + it('VPC agent parsed result has correct structure for toAgentEnvSpec', () => { + const filePath = writeFixture( + 'vpc-for-spec.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + deployment_type: container + runtime_type: PYTHON_3_12 + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + - subnet-def456 + security_groups: + - sg-12345 + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: VPCAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + + // Verify agent has all the fields that toAgentEnvSpec expects + expect(agent.name).toBe('vpc_agent'); + expect(agent.entrypoint).toBe('main.py'); + expect(agent.build).toBe('Container'); + expect(agent.runtimeVersion).toBe('PYTHON_3_12'); + expect(agent.networkMode).toBe('VPC'); + expect(agent.networkConfig).toEqual({ + subnets: ['subnet-abc123', 'subnet-def456'], + securityGroups: ['sg-12345'], + }); + expect(agent.protocol).toBe('HTTP'); + expect(agent.enableOtel).toBe(true); + expect(agent.physicalAgentId).toBe('VPCAGENT001'); + }); + + it('PUBLIC agent parsed result has no networkConfig', () => { + const filePath = writeFixture( + 'public-for-spec.yaml', + `default_agent: public_agent +agents: + public_agent: + name: public_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: PUBLIC + bedrock_agentcore: + agent_id: 
PUBAGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('PUBLIC'); + expect(agent.networkConfig).toBeUndefined(); + }); +}); + +// ============================================================================ +// 6. Starter Toolkit fixture format (real Pydantic model_dump output) +// ============================================================================ +describe('Starter toolkit fixture format compatibility', () => { + it('handles the exact format from a real starter toolkit YAML', () => { + const filePath = writeFixture( + 'real-toolkit-format.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: agent.py + deployment_type: container + runtime_type: PYTHON_3_12 + platform: linux/amd64 + container_runtime: docker + language: python + aws: + execution_role: arn:aws:iam::123456789012:role/TestRole + execution_role_auto_create: true + account: '123456789012' + region: us-west-2 + ecr_repository: null + ecr_auto_create: false + s3_path: null + s3_auto_create: false + network_configuration: + network_mode: VPC + network_mode_config: + security_groups: + - sg-12345678 + subnets: + - subnet-12345678 + - subnet-87654321 + protocol_configuration: + server_protocol: HTTP + observability: + enabled: true + bedrock_agentcore: + agent_id: REALAGENT01 + agent_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:runtime/REALAGENT01 + agent_session_id: session-123 + memory: + mode: STM_AND_LTM + memory_id: MEM001 + memory_arn: arn:aws:bedrock-agentcore:us-west-2:123456789012:memory/MEM001 + memory_name: vpc_agent_memory + event_expiry_days: 30 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + expect(parsed.agents).toHaveLength(1); + + const agent = parsed.agents[0]!; + expect(agent.networkMode).toBe('VPC'); + expect(agent.networkConfig).toBeDefined(); + expect(agent.networkConfig!.securityGroups).toEqual(['sg-12345678']); + 
expect(agent.networkConfig!.subnets).toEqual(['subnet-12345678', 'subnet-87654321']); + expect(agent.physicalAgentId).toBe('REALAGENT01'); + + // Memory should also be parsed + expect(parsed.memories).toHaveLength(1); + expect(parsed.memories[0]!.name).toBe('vpc_agent_memory'); + expect(parsed.memories[0]!.physicalMemoryId).toBe('MEM001'); + }); + + it('handles security_groups listed before subnets', () => { + const filePath = writeFixture( + 'sg-before-subnets.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + security_groups: + - sg-first1234 + subnets: + - subnet-second56 + bedrock_agentcore: + agent_id: null +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkConfig).toBeDefined(); + expect(agent.networkConfig!.securityGroups).toEqual(['sg-first1234']); + expect(agent.networkConfig!.subnets).toEqual(['subnet-second56']); + }); +}); + +// ============================================================================ +// 7. 
YAML parser regression: edge cases for list handling +// ============================================================================ +describe('YAML parser: list handling edge cases', () => { + it('does not mistake subnet-xxx as a key:value pair (no colon in value)', () => { + const filePath = writeFixture( + 'list-no-colon.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + security_groups: + - sg-12345 + bedrock_agentcore: + agent_id: null +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkConfig!.subnets[0]).toBe('subnet-abc123'); + expect(typeof agent.networkConfig!.subnets[0]).toBe('string'); + }); + + it('correctly parses VPC config followed by sibling keys at same level', () => { + const filePath = writeFixture( + 'vpc-with-siblings.yaml', + `default_agent: vpc_agent +agents: + vpc_agent: + name: vpc_agent + entrypoint: main.py + aws: + account: '123456789012' + region: us-west-2 + network_configuration: + network_mode: VPC + network_mode_config: + subnets: + - subnet-abc123 + security_groups: + - sg-12345 + protocol_configuration: + server_protocol: MCP + observability: + enabled: false + bedrock_agentcore: + agent_id: AGENT001 +` + ); + + const parsed = parseStarterToolkitYaml(filePath); + const agent = parsed.agents[0]!; + expect(agent.networkConfig!.subnets).toEqual(['subnet-abc123']); + expect(agent.networkConfig!.securityGroups).toEqual(['sg-12345']); + expect(agent.protocol).toBe('MCP'); + expect(agent.enableOtel).toBe(false); + }); +}); diff --git a/src/cli/commands/import/actions.ts b/src/cli/commands/import/actions.ts new file mode 100644 index 000000000..39d620f13 --- /dev/null +++ b/src/cli/commands/import/actions.ts @@ -0,0 +1,791 @@ +import { APP_DIR, ConfigIO, findConfigRoot } from 
'../../../lib'; +import type { AgentCoreRegion, AgentEnvSpec, AwsDeploymentTarget, Credential, Memory } from '../../../schema'; +import { validateAwsCredentials } from '../../aws/account'; +import { LocalCdkProject } from '../../cdk/local-cdk-project'; +import { silentIoHost } from '../../cdk/toolkit-lib'; +import { ExecLogger } from '../../logging'; +import { bootstrapEnvironment, buildCdkProject, checkBootstrapNeeded, synthesizeCdk } from '../../operations/deploy'; +import { setupPythonProject } from '../../operations/python/setup'; +import { executePhase1, getDeployedTemplate } from './phase1-update'; +import { executePhase2, publishCdkAssets } from './phase2-import'; +import type { CfnTemplate } from './template-utils'; +import { findLogicalIdByProperty, findLogicalIdsByType } from './template-utils'; +import type { ImportResult, ParsedStarterToolkitConfig, ResourceToImport } from './types'; +import { parseStarterToolkitYaml } from './yaml-parser'; +import * as fs from 'node:fs'; +import * as path from 'node:path'; + +export interface ImportOptions { + source: string; + target?: string; + yes?: boolean; + onProgress?: (message: string) => void; +} + +function sanitize(name: string): string { + return name.replace(/_/g, '-'); +} + +function toStackName(projectName: string, targetName: string): string { + return `AgentCore-${sanitize(projectName)}-${sanitize(targetName)}`; +} + +/** + * Convert parsed starter toolkit agents to CLI AgentEnvSpec format. + */ +function toAgentEnvSpec(agent: ParsedStarterToolkitConfig['agents'][0]): AgentEnvSpec { + const codeLocation = path.join(APP_DIR, agent.name); + const entrypoint = path.basename(agent.entrypoint); + + /* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any */ + const spec: AgentEnvSpec = { + type: 'AgentCoreRuntime', + name: agent.name, + build: agent.build, + entrypoint: entrypoint as any, + codeLocation: codeLocation as any, + runtimeVersion: (agent.runtimeVersion ?? 
'PYTHON_3_12') as any, + protocol: agent.protocol, + networkMode: agent.networkMode, + instrumentation: { enableOtel: agent.enableOtel }, + }; + /* eslint-enable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any */ + + if (agent.networkMode === 'VPC' && agent.networkConfig) { + spec.networkConfig = agent.networkConfig; + } + + if (agent.executionRoleArn) { + spec.executionRoleArn = agent.executionRoleArn; + } + + return spec; +} + +/** + * Convert parsed starter toolkit memory to CLI Memory format. + */ +function toMemorySpec(mem: ParsedStarterToolkitConfig['memories'][0]): Memory { + const strategies: Memory['strategies'] = []; + + if (mem.mode === 'STM_ONLY' || mem.mode === 'STM_AND_LTM') { + strategies.push({ type: 'SEMANTIC' }); + } + if (mem.mode === 'STM_AND_LTM') { + strategies.push({ type: 'SUMMARIZATION' }); + strategies.push({ type: 'USER_PREFERENCE' }); + } + + return { + type: 'AgentCoreMemory', + name: mem.name, + eventExpiryDuration: Math.max(7, Math.min(365, mem.eventExpiryDays)), + strategies, + }; +} + +/** + * Convert parsed starter toolkit credential to CLI Credential format. + * OAuth providers map to OAuthCredentialProvider (discoveryUrl omitted — provider already exists in Identity service). + * API key providers map to ApiKeyCredentialProvider. + */ +function toCredentialSpec(cred: ParsedStarterToolkitConfig['credentials'][0]): Credential { + if (cred.providerType === 'api_key') { + return { type: 'ApiKeyCredentialProvider', name: cred.name }; + } + // OAuth providers already exist in Identity service. We map them as OAuthCredentialProvider + // so the CLI correctly wires CLIENT_ID/CLIENT_SECRET env vars (not API_KEY). + // discoveryUrl is omitted since it's not available from the YAML and the provider + // already exists — pre-deploy will skip if no credentials are in .env.local. 
+ return { type: 'OAuthCredentialProvider', name: cred.name, vendor: 'CustomOauth2' }; +} + +export async function handleImport(options: ImportOptions): Promise { + const { source, onProgress } = options; + const logger = new ExecLogger({ command: 'import' }); + + try { + // 1. Validate we're inside an existing agentcore project + logger.startStep('Validate project context'); + const configRoot = findConfigRoot(process.cwd()); + if (!configRoot) { + const error = + 'No agentcore project found in the current directory.\nRun `agentcore create ` first, then run import from inside the project.'; + logger.endStep('error', error); + logger.finalize(false); + return { + success: false, + error, + logPath: logger.getRelativeLogPath(), + }; + } + + const projectRoot = path.dirname(configRoot); + const configIO = new ConfigIO({ baseDir: configRoot }); + logger.endStep('success'); + + // 2. Read existing project config + logger.startStep('Read project config'); + const projectSpec = await configIO.readProjectSpec(); + const projectName = projectSpec.name; + logger.log(`Using existing project: ${projectName}`); + onProgress?.(`Using existing project: ${projectName}`); + logger.endStep('success'); + + // 3. 
Parse the YAML config (before target resolution so we can use YAML info if needed) + logger.startStep('Parse YAML'); + logger.log(`Parsing ${source}...`); + onProgress?.(`Parsing ${source}...`); + const parsed = parseStarterToolkitYaml(source); + + if (parsed.agents.length === 0) { + const error = 'No agents found in the YAML config'; + logger.endStep('error', error); + logger.finalize(false); + return { success: false, error, logPath: logger.getRelativeLogPath() }; + } + + logger.log( + `Found ${parsed.agents.length} agent(s), ${parsed.memories.length} memory(ies), ${parsed.credentials.length} credential(s)` + ); + onProgress?.( + `Found ${parsed.agents.length} agent(s), ${parsed.memories.length} memory(ies), ${parsed.credentials.length} credential(s)` + ); + logger.endStep('success'); + + // Check early whether there are any physical IDs to import. + // This determines whether we need strict target resolution (account/region required). + const hasPhysicalIds = parsed.agents.some(a => a.physicalAgentId) || parsed.memories.some(m => m.physicalMemoryId); + + // 4. Resolve deployment target + logger.startStep('Resolve deployment target'); + let target: AwsDeploymentTarget | undefined; + + if (hasPhysicalIds) { + // Strict target resolution: we NEED a valid target for CloudFormation import. + // If the YAML specifies a region, override AWS_REGION before reading targets + // because readAWSDeploymentTargets() overrides file-based regions with AWS_REGION. + // The YAML region is authoritative — it's where the resources actually exist. 
+ if (parsed.awsTarget.region) { + process.env.AWS_REGION = parsed.awsTarget.region; + process.env.AWS_DEFAULT_REGION = parsed.awsTarget.region; + } + let targets = await configIO.readAWSDeploymentTargets(); + + // If no targets exist (CLI-mode create leaves targets empty), create one from YAML info + if (targets.length === 0) { + if (!parsed.awsTarget.account || !parsed.awsTarget.region) { + const error = + 'No deployment targets found in project and YAML has no AWS account/region info.\nRun `agentcore deploy` first to set up a target, then re-run import.'; + logger.endStep('error', error); + logger.finalize(false); + return { + success: false, + error, + logPath: logger.getRelativeLogPath(), + }; + } + const defaultTarget: AwsDeploymentTarget = { + name: 'default', + account: parsed.awsTarget.account, + region: parsed.awsTarget.region as AgentCoreRegion, + }; + await configIO.writeAWSDeploymentTargets([defaultTarget]); + targets = [defaultTarget]; + logger.log(`Created default target from YAML: ${defaultTarget.region}, ${defaultTarget.account}`); + onProgress?.(`Created default target from YAML: ${defaultTarget.region}, ${defaultTarget.account}`); + } + + if (options.target) { + const found = targets.find(t => t.name === options.target); + if (!found) { + const names = targets.map(t => ` - ${t.name} (${t.region}, ${t.account})`).join('\n'); + const error = `Target "${options.target}" not found. Available targets:\n${names}`; + logger.endStep('error', error); + logger.finalize(false); + return { + success: false, + error, + logPath: logger.getRelativeLogPath(), + }; + } + target = found; + } else if (targets.length === 1) { + target = targets[0]!; + } else { + const names = targets.map(t => ` - ${t.name} (${t.region}, ${t.account})`).join('\n'); + const error = `Multiple deployment targets found. 
Specify one with --target:\n${names}`; + logger.endStep('error', error); + logger.finalize(false); + return { + success: false, + error, + logPath: logger.getRelativeLogPath(), + }; + } + + logger.log(`Using target: ${target.name} (${target.region}, ${target.account})`); + onProgress?.(`Using target: ${target.name} (${target.region}, ${target.account})`); + + // Warn if YAML account/region differs from target + if (parsed.awsTarget.account && parsed.awsTarget.account !== target.account) { + logger.log( + `Warning: YAML account (${parsed.awsTarget.account}) differs from target account (${target.account})`, + 'warn' + ); + onProgress?.( + `Warning: YAML account (${parsed.awsTarget.account}) differs from target account (${target.account})` + ); + } + if (parsed.awsTarget.region && parsed.awsTarget.region !== target.region) { + logger.log( + `Warning: YAML region (${parsed.awsTarget.region}) differs from target region (${target.region})`, + 'warn' + ); + onProgress?.(`Warning: YAML region (${parsed.awsTarget.region}) differs from target region (${target.region})`); + } + + // Validate AWS credentials + logger.log('Validating AWS credentials...'); + onProgress?.('Validating AWS credentials...'); + await validateAwsCredentials(); + } else { + // No physical IDs — target is only needed for stackName computation. + // Try to read existing targets gracefully; don't fail if none exist. + const targets = await configIO.readAWSDeploymentTargets().catch(() => [] as AwsDeploymentTarget[]); + if (targets.length === 1) { + target = targets[0]; + } else if (options.target) { + target = targets.find(t => t.name === options.target); + } + // If still no target, that's fine — we'll use 'default' for the stackName + } + logger.endStep('success'); + + // 5. 
Merge agents/memories into existing project config + logger.startStep('Merge agents and memories'); + logger.log('Merging into existing project...'); + onProgress?.('Merging into existing project...'); + const existingAgentNames = new Set(projectSpec.agents.map(a => a.name)); + const newlyAddedAgentNames = new Set(); + for (const agent of parsed.agents) { + if (!existingAgentNames.has(agent.name)) { + projectSpec.agents.push(toAgentEnvSpec(agent)); + newlyAddedAgentNames.add(agent.name); + } else { + logger.log(`Skipping agent "${agent.name}" (already exists in project)`); + onProgress?.(`Skipping agent "${agent.name}" (already exists in project)`); + } + } + + for (const agent of parsed.agents) { + if (agent.hasAuthorizerConfig) { + const warnMsg = + `Warning: Agent "${agent.name}" has a custom JWT authorizer configured in the starter toolkit. ` + + `This is not automatically imported. To recreate it, run: agentcore add gateway --authorizer-type CUSTOM_JWT`; + logger.log(warnMsg, 'warn'); + onProgress?.(warnMsg); + } + } + + const existingMemoryNames = new Set((projectSpec.memories ?? 
[]).map(m => m.name)); + const newlyAddedMemoryNames = new Set(); + for (const mem of parsed.memories) { + if (!existingMemoryNames.has(mem.name)) { + (projectSpec.memories ??= []).push(toMemorySpec(mem)); + newlyAddedMemoryNames.add(mem.name); + } else { + logger.log(`Skipping memory "${mem.name}" (already exists in project)`); + onProgress?.(`Skipping memory "${mem.name}" (already exists in project)`); + } + } + + // Warn about memory env var mismatch for imported agents + if (parsed.memories.length > 0) { + for (const mem of parsed.memories) { + const cdkEnvVar = `MEMORY_${mem.name.toUpperCase().replace(/[.-]/g, '_')}_ID`; + const warnMsg = + `Warning: Memory "${mem.name}" env var must be updated in your agent code:\n` + + ` \x1b[31m- MEMORY_ID = os.getenv("BEDROCK_AGENTCORE_MEMORY_ID")\x1b[0m\n` + + ` \x1b[32m+ MEMORY_ID = os.getenv("${cdkEnvVar}")\x1b[0m`; + logger.log(`Memory "${mem.name}" env var must be updated: use ${cdkEnvVar}`, 'warn'); + onProgress?.(warnMsg); + } + } + + const existingCredentialNames = new Set((projectSpec.credentials ?? []).map(c => c.name)); + for (const cred of parsed.credentials) { + if (!existingCredentialNames.has(cred.name)) { + (projectSpec.credentials ??= []).push(toCredentialSpec(cred)); + logger.log(`Added credential "${cred.name}" (${cred.providerType})`); + onProgress?.(`Added credential "${cred.name}" (${cred.providerType})`); + } else { + logger.log(`Skipping credential "${cred.name}" (already exists in project)`); + onProgress?.(`Skipping credential "${cred.name}" (already exists in project)`); + } + } + + // Write updated project config + await configIO.writeProjectSpec(projectSpec); + logger.endStep('success'); + + // 6. 
Copy agent source code to app// (only for newly added agents) + logger.startStep('Copy agent source and setup Python'); + for (const agent of parsed.agents) { + if (existingAgentNames.has(agent.name)) { + logger.log(`Skipping source copy for agent "${agent.name}" (already exists in project)`); + onProgress?.(`Skipping source copy for agent "${agent.name}" (already exists in project)`); + continue; + } + const appDir = path.join(projectRoot, APP_DIR, agent.name); + if (!fs.existsSync(appDir)) { + fs.mkdirSync(appDir, { recursive: true }); + } + + if (agent.sourcePath && fs.existsSync(agent.sourcePath)) { + logger.log(`Copying agent source from ${agent.sourcePath} to ./${APP_DIR}/${agent.name}`); + onProgress?.(`Copying agent source from ${agent.sourcePath} to ./${APP_DIR}/${agent.name}`); + copyDirRecursive(agent.sourcePath, appDir); + + // Also copy pyproject.toml from the parent of source_path if it exists + const parentPyproject = path.join(path.dirname(agent.sourcePath), 'pyproject.toml'); + const destPyproject = path.join(appDir, 'pyproject.toml'); + if (fs.existsSync(parentPyproject) && !fs.existsSync(destPyproject)) { + fs.copyFileSync(parentPyproject, destPyproject); + } + + // For Container builds, copy the Dockerfile from the starter toolkit config dir + if (agent.build === 'Container') { + const destDockerfile = path.join(appDir, 'Dockerfile'); + if (!fs.existsSync(destDockerfile)) { + // Starter toolkit stores Dockerfile at .bedrock_agentcore//Dockerfile + const toolkitProjectDir = path.dirname(agent.sourcePath); + const toolkitDockerfile = path.join(toolkitProjectDir, '.bedrock_agentcore', agent.name, 'Dockerfile'); + if (fs.existsSync(toolkitDockerfile)) { + logger.log('Copying Dockerfile from starter toolkit config'); + onProgress?.(`Copying Dockerfile from starter toolkit config`); + fs.copyFileSync(toolkitDockerfile, destDockerfile); + } else { + // Generate a minimal Dockerfile for Container builds + logger.log('Generating Dockerfile for Container 
build'); + onProgress?.(`Generating Dockerfile for Container build`); + const entryModule = path.basename(agent.entrypoint, '.py'); + fs.writeFileSync( + destDockerfile, + [ + 'FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim', + 'WORKDIR /app', + '', + 'ENV UV_SYSTEM_PYTHON=1 \\', + ' UV_COMPILE_BYTECODE=1 \\', + ' UV_NO_PROGRESS=1 \\', + ' PYTHONUNBUFFERED=1 \\', + ' DOCKER_CONTAINER=1', + '', + 'RUN useradd -m -u 1000 bedrock_agentcore', + '', + 'COPY pyproject.toml uv.lock ./', + 'RUN uv sync --frozen --no-dev --no-install-project', + '', + 'COPY --chown=bedrock_agentcore:bedrock_agentcore . .', + 'RUN uv sync --frozen --no-dev', + '', + 'USER bedrock_agentcore', + '', + 'EXPOSE 8080 8000 9000', + '', + `CMD ["opentelemetry-instrument", "python", "-m", "${entryModule}"]`, + '', + ].join('\n') + ); + } + } + } + } else { + // Create a minimal pyproject.toml if no source path available + const pyprojectPath = path.join(appDir, 'pyproject.toml'); + if (!fs.existsSync(pyprojectPath)) { + logger.log(`Creating minimal pyproject.toml at ${appDir}`); + onProgress?.(`Creating minimal pyproject.toml at ${appDir}`); + fs.writeFileSync( + pyprojectPath, + [ + '[build-system]', + 'requires = ["setuptools>=68", "wheel"]', + 'build-backend = "setuptools.build_meta"', + '', + '[project]', + `name = "${agent.name}"`, + 'version = "0.1.0"', + 'requires-python = ">=3.10"', + 'dependencies = []', + '', + ].join('\n') + ); + } + } + + // Container agents install dependencies inside the Docker image, + // so skip local Python environment setup for them. + if (agent.build !== 'Container') { + // Fix pyproject.toml for setuptools: starter toolkit projects may have + // multiple top-level directories (model/, mcp_client/, etc.) which causes + // setuptools auto-discovery to fail. Add py-modules = [] to suppress this. 
+ fixPyprojectForSetuptools(path.join(appDir, 'pyproject.toml')); + + // Set up Python environment (venv + install dependencies) + logger.log(`Setting up Python environment for ${agent.name}...`); + onProgress?.(`Setting up Python environment for ${agent.name}...`); + const setupResult = await setupPythonProject({ projectDir: appDir }); + if (setupResult.status === 'success') { + logger.log(`Python environment ready for ${agent.name}`); + onProgress?.(`Python environment ready for ${agent.name}`); + } else if (setupResult.status === 'uv_not_found') { + logger.log(`Warning: uv not found — run "uv sync" manually in ${APP_DIR}/${agent.name}`, 'warn'); + onProgress?.(`Warning: uv not found — run "uv sync" manually in ${APP_DIR}/${agent.name}`); + } else { + logger.log( + `Warning: Python setup failed for ${agent.name}: ${setupResult.error ?? setupResult.status}`, + 'warn' + ); + onProgress?.(`Warning: Python setup failed for ${agent.name}: ${setupResult.error ?? setupResult.status}`); + } + } + } + logger.endStep('success'); + + // 7. Determine which resources need importing (have physical IDs). + // Only import newly added resources — skip ones already in the project. + logger.startStep('Determine resources to import'); + const agentsToImport = parsed.agents.filter(a => { + return a.physicalAgentId && newlyAddedAgentNames.has(a.name); + }); + const memoriesToImport = parsed.memories.filter(m => { + return m.physicalMemoryId && newlyAddedMemoryNames.has(m.name); + }); + const targetName = target?.name ?? 'default'; + const stackName = toStackName(projectName, targetName); + + if (agentsToImport.length === 0 && memoriesToImport.length === 0) { + const msg = + 'No deployed resources found to import (no agent_id or memory_id in YAML). 
' + + 'Run `agentcore deploy` to create new resources.'; + logger.log(msg); + onProgress?.(msg); + logger.endStep('success'); + logger.finalize(true); + return { + success: true, + projectSpec, + importedAgents: [], + importedMemories: [], + stackName, + logPath: logger.getRelativeLogPath(), + }; + } + + logger.log(`Will import: ${agentsToImport.length} agent(s), ${memoriesToImport.length} memory(ies)`); + onProgress?.(`Will import: ${agentsToImport.length} agent(s), ${memoriesToImport.length} memory(ies)`); + + // At this point we know hasPhysicalIds is true, so target must be defined. + if (!target) { + const error = 'No deployment target available for import.'; + logger.endStep('error', error); + logger.finalize(false); + return { success: false, error, logPath: logger.getRelativeLogPath() }; + } + logger.endStep('success'); + + // 8. Build and synth CDK to get the full template + logger.startStep('Build and synth CDK'); + logger.log('Building CDK project...'); + onProgress?.('Building CDK project...'); + const cdkProject = new LocalCdkProject(projectRoot); + await buildCdkProject(cdkProject); + + logger.log('Synthesizing CloudFormation template...'); + onProgress?.('Synthesizing CloudFormation template...'); + const synthResult = await synthesizeCdk(cdkProject, { ioHost: silentIoHost }); + const { toolkitWrapper } = synthResult; + + // Read the synthesized template from the assembly directory + const synthInfo = await toolkitWrapper.synth(); + const assemblyDirectory = synthInfo.assemblyDirectory; + const synthTemplatePath = path.join(assemblyDirectory, `${stackName}.template.json`); + + let synthTemplate: CfnTemplate; + try { + synthTemplate = JSON.parse(fs.readFileSync(synthTemplatePath, 'utf-8')) as CfnTemplate; + } catch (_err) { + // Try without stack name prefix + const files = fs.readdirSync(assemblyDirectory).filter((f: string) => f.endsWith('.template.json')); + if (files.length === 0) { + await toolkitWrapper.dispose(); + const error = 'No 
CloudFormation template found in CDK assembly'; + logger.endStep('error', error); + logger.finalize(false); + return { success: false, error, logPath: logger.getRelativeLogPath() }; + } + synthTemplate = JSON.parse(fs.readFileSync(path.join(assemblyDirectory, files[0]!), 'utf-8')) as CfnTemplate; + } + + // 8b. Check CDK bootstrap and auto-bootstrap if needed (before disposing toolkit wrapper) + logger.log('Checking CDK bootstrap status...'); + onProgress?.('Checking CDK bootstrap status...'); + const bootstrapCheck = await checkBootstrapNeeded([target]); + if (bootstrapCheck.needsBootstrap) { + logger.log('AWS environment not bootstrapped. Bootstrapping...'); + onProgress?.('AWS environment not bootstrapped. Bootstrapping...'); + await bootstrapEnvironment(toolkitWrapper, target); + logger.log('CDK bootstrap complete'); + onProgress?.('CDK bootstrap complete'); + } + + await toolkitWrapper.dispose(); + logger.endStep('success'); + + // 8c. Publish CDK assets to S3 (source zips needed by CodeBuild during Phase 1) + logger.startStep('Publish CDK assets'); + logger.log('Publishing CDK assets to S3...'); + onProgress?.('Publishing CDK assets to S3...'); + await publishCdkAssets(assemblyDirectory, target.region, onProgress); + logger.endStep('success'); + + // 9. Phase 1: UPDATE — deploy companion resources + logger.startStep('Phase 1: Deploy companion resources'); + logger.log('Phase 1: Deploying companion resources (IAM roles, policies)...'); + onProgress?.('Phase 1: Deploying companion resources (IAM roles, policies)...'); + const phase1Result = await executePhase1({ + region: target.region, + stackName, + synthTemplate, + onProgress, + }); + + if (!phase1Result.success) { + const error = `Phase 1 failed: ${phase1Result.error}`; + logger.endStep('error', error); + logger.finalize(false); + return { success: false, error, logPath: logger.getRelativeLogPath() }; + } + logger.endStep('success'); + + // 10. 
Phase 2: IMPORT — adopt primary resources + logger.startStep('Phase 2: Import resources'); + logger.log('Reading deployed template...'); + onProgress?.('Reading deployed template...'); + const deployedTemplate = await getDeployedTemplate(target.region, stackName); + if (!deployedTemplate) { + const error = 'Could not read deployed template after Phase 1'; + logger.endStep('error', error); + logger.finalize(false); + return { success: false, error, logPath: logger.getRelativeLogPath() }; + } + + // Build ResourcesToImport list + const resourcesToImport: ResourceToImport[] = []; + + for (const agent of agentsToImport) { + const runtimeLogicalIds = findLogicalIdsByType(synthTemplate, 'AWS::BedrockAgentCore::Runtime'); + let logicalId: string | undefined; + + const expectedRuntimeName = `${projectName}_${agent.name}`; + logicalId = findLogicalIdByProperty( + synthTemplate, + 'AWS::BedrockAgentCore::Runtime', + 'AgentRuntimeName', + expectedRuntimeName + ); + + if (!logicalId && runtimeLogicalIds.length === 1) { + logicalId = runtimeLogicalIds[0]; + } + + if (!logicalId) { + logger.log(`Warning: Could not find logical ID for agent ${agent.name}, skipping`, 'warn'); + onProgress?.(`Warning: Could not find logical ID for agent ${agent.name}, skipping`); + continue; + } + + resourcesToImport.push({ + resourceType: 'AWS::BedrockAgentCore::Runtime', + logicalResourceId: logicalId, + resourceIdentifier: { AgentRuntimeId: agent.physicalAgentId! }, + }); + } + + for (const memory of memoriesToImport) { + const memoryLogicalIds = findLogicalIdsByType(synthTemplate, 'AWS::BedrockAgentCore::Memory'); + let logicalId: string | undefined; + + logicalId = findLogicalIdByProperty(synthTemplate, 'AWS::BedrockAgentCore::Memory', 'Name', memory.name); + + // CDK prefixes memory names with the project name (e.g. "myproject_Agent_mem"), + // so also try matching with the project name prefix. 
+ if (!logicalId) { + const prefixedName = `${projectName}_${memory.name}`; + logicalId = findLogicalIdByProperty(synthTemplate, 'AWS::BedrockAgentCore::Memory', 'Name', prefixedName); + } + + if (!logicalId && memoryLogicalIds.length === 1) { + logicalId = memoryLogicalIds[0]; + } + + if (!logicalId) { + logger.log(`Warning: Could not find logical ID for memory ${memory.name}, skipping`, 'warn'); + onProgress?.(`Warning: Could not find logical ID for memory ${memory.name}, skipping`); + continue; + } + + resourcesToImport.push({ + resourceType: 'AWS::BedrockAgentCore::Memory', + logicalResourceId: logicalId, + resourceIdentifier: { MemoryId: memory.physicalMemoryId! }, + }); + } + + if (resourcesToImport.length === 0) { + logger.log('No resources could be matched for import'); + onProgress?.('No resources could be matched for import'); + logger.endStep('success'); + logger.finalize(true); + return { + success: true, + projectSpec, + importedAgents: [], + importedMemories: [], + stackName, + logPath: logger.getRelativeLogPath(), + }; + } + + logger.log(`Phase 2: Importing ${resourcesToImport.length} resource(s) via CloudFormation IMPORT...`); + onProgress?.(`Phase 2: Importing ${resourcesToImport.length} resource(s) via CloudFormation IMPORT...`); + const phase2Result = await executePhase2({ + region: target.region, + stackName, + deployedTemplate, + synthTemplate, + resourcesToImport, + assemblyDirectory, + onProgress, + }); + + if (!phase2Result.success) { + const error = `Phase 2 failed: ${phase2Result.error}`; + logger.endStep('error', error); + logger.finalize(false); + return { success: false, error, logPath: logger.getRelativeLogPath() }; + } + logger.endStep('success'); + + // 11. 
Update deployed state + logger.startStep('Update deployed state'); + logger.log('Updating deployed state...'); + onProgress?.('Updating deployed state...'); + /* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any */ + const existingState: any = await configIO.readDeployedState().catch(() => ({ targets: {} })); + const targetState = existingState.targets[targetName] ?? { resources: {} }; + targetState.resources ??= {}; + targetState.resources.stackName = stackName; + + if (agentsToImport.length > 0) { + targetState.resources.agents ??= {}; + for (const agent of agentsToImport) { + if (agent.physicalAgentId) { + targetState.resources.agents[agent.name] = { + runtimeId: agent.physicalAgentId, + runtimeArn: + agent.physicalAgentArn ?? + `arn:aws:bedrock-agentcore:${target.region}:${target.account}:runtime/${agent.physicalAgentId}`, + roleArn: 'imported', // Placeholder — updated after agentcore deploy + }; + } + } + } + + if (memoriesToImport.length > 0) { + targetState.resources.memories ??= {}; + for (const memory of memoriesToImport) { + if (memory.physicalMemoryId) { + targetState.resources.memories[memory.name] = { + memoryId: memory.physicalMemoryId, + memoryArn: + memory.physicalMemoryArn ?? 
+ `arn:aws:bedrock-agentcore:${target.region}:${target.account}:memory/${memory.physicalMemoryId}`, + }; + } + } + } + + existingState.targets[targetName] = targetState; + await configIO.writeDeployedState(existingState); + /* eslint-enable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any */ + logger.endStep('success'); + + logger.finalize(true); + return { + success: true, + projectSpec, + importedAgents: agentsToImport.map(a => a.name), + importedMemories: memoriesToImport.map(m => m.name), + stackName, + logPath: logger.getRelativeLogPath(), + }; + } catch (err: unknown) { + const message = err instanceof Error ? err.message : String(err); + logger.log(message, 'error'); + logger.finalize(false); + return { success: false, error: message, logPath: logger.getRelativeLogPath() }; + } +} + +/** + * Fix pyproject.toml for setuptools auto-discovery issues. + * Starter toolkit projects may have multiple top-level directories (model/, mcp_client/) + * which causes setuptools to refuse building. Adding `py-modules = []` tells setuptools + * not to auto-discover packages. + */ +function fixPyprojectForSetuptools(pyprojectPath: string): void { + if (!fs.existsSync(pyprojectPath)) return; + + const content = fs.readFileSync(pyprojectPath, 'utf-8'); + + // Already has [tool.setuptools] section — don't touch it + if (content.includes('[tool.setuptools]')) return; + + // Append the fix + fs.writeFileSync(pyprojectPath, content.trimEnd() + '\n\n[tool.setuptools]\npy-modules = []\n'); +} + +const COPY_EXCLUDE_DIRS = new Set([ + '.venv', + '.git', + '__pycache__', + 'node_modules', + '.pytest_cache', + '.bedrock_agentcore', + '.mypy_cache', + '.ruff_cache', +]); + +/** + * Recursively copy directory contents, skipping excluded directories and symlinks. 
+ */ +function copyDirRecursive(src: string, dest: string): void { + const entries = fs.readdirSync(src, { withFileTypes: true }); + for (const entry of entries) { + if (entry.isSymbolicLink()) continue; + const srcPath = path.join(src, entry.name); + const destPath = path.join(dest, entry.name); + if (entry.isDirectory()) { + if (COPY_EXCLUDE_DIRS.has(entry.name)) continue; + if (!fs.existsSync(destPath)) { + fs.mkdirSync(destPath, { recursive: true }); + } + copyDirRecursive(srcPath, destPath); + } else { + fs.copyFileSync(srcPath, destPath); + } + } +} diff --git a/src/cli/commands/import/command.ts b/src/cli/commands/import/command.ts new file mode 100644 index 000000000..e8a4e05c6 --- /dev/null +++ b/src/cli/commands/import/command.ts @@ -0,0 +1,95 @@ +import { handleImport } from './actions'; +import type { Command } from '@commander-js/extra-typings'; +import * as fs from 'node:fs'; + +const green = '\x1b[32m'; +const yellow = '\x1b[33m'; +const cyan = '\x1b[36m'; +const dim = '\x1b[2m'; +const reset = '\x1b[0m'; + +export const registerImport = (program: Command) => { + program + .command('import') + .description('Import resources from a Bedrock AgentCore Starter Toolkit project') + .requiredOption('--source ', 'Path to the .bedrock_agentcore.yaml configuration file') + .option('--target ', 'Deployment target name (only needed if project has multiple targets)') + .option('-y, --yes', 'Auto-confirm prompts') + .action(async (cliOptions: { source: string; target?: string; yes?: boolean }) => { + // Validate source file exists + if (!fs.existsSync(cliOptions.source)) { + console.error(`\x1b[31m[error]${reset} Source file not found: ${cliOptions.source}`); + process.exit(1); + } + + const warnings: string[] = []; + + const result = await handleImport({ + source: cliOptions.source, + target: cliOptions.target, + yes: cliOptions.yes, + onProgress: (message: string) => { + // Collect warnings for end-of-output display + if (message.includes('Warning') || 
message.includes('\x1b[33m')) { + warnings.push(message); + return; + } + + // Skipped items shown dimmed + if (message.startsWith('Skipping')) { + console.log(`${dim}[skip]${reset} ${message}`); + return; + } + + // Normal progress steps shown as [done] + console.log(`${green}[done]${reset} ${message}`); + }, + }); + + if (result.success) { + // Summary + console.log(''); + console.log(`${green}Import complete!${reset}`); + + console.log(''); + console.log(`${dim}Imported:${reset}`); + console.log(` Stack: ${result.stackName}`); + if (result.importedAgents && result.importedAgents.length > 0) { + for (const agent of result.importedAgents) { + console.log(` Agent: ${agent}`); + } + } + if (result.importedMemories && result.importedMemories.length > 0) { + for (const mem of result.importedMemories) { + console.log(` Memory: ${mem}`); + } + } + + // Show collected warnings + if (warnings.length > 0) { + console.log(''); + for (const w of warnings) { + console.log(`${yellow}[warn]${reset} ${w}`); + } + } + + // Next steps + console.log(''); + console.log('To continue:'); + console.log(''); + console.log(` ${cyan}agentcore deploy${reset} ${dim}Deploy the imported stack${reset}`); + console.log(` ${cyan}agentcore status${reset} ${dim}Verify resource status${reset}`); + console.log(` ${cyan}agentcore invoke${reset} ${dim}Test your agent${reset}`); + console.log(''); + if (result.logPath) { + console.log(`Log: ${result.logPath}`); + } + } else { + console.error(`\n\x1b[31m[error]${reset} Import failed: ${result.error}`); + if (result.logPath) { + console.error(`Log: ${result.logPath}`); + } + process.exit(1); + } + }); +}; diff --git a/src/cli/commands/import/constants.ts b/src/cli/commands/import/constants.ts new file mode 100644 index 000000000..780719330 --- /dev/null +++ b/src/cli/commands/import/constants.ts @@ -0,0 +1,39 @@ +/** + * CloudFormation resource type to identifier key mapping for IMPORT. 
+ */ +export const CFN_RESOURCE_IDENTIFIERS: Record = { + 'AWS::BedrockAgentCore::Runtime': ['AgentRuntimeId'], + 'AWS::BedrockAgentCore::Memory': ['MemoryId'], + 'AWS::BedrockAgentCore::Gateway': ['GatewayIdentifier'], +}; + +/** + * CloudFormation resource types that are primary (importable) resources. + * Everything else is a companion resource. + */ +export const PRIMARY_RESOURCE_TYPES = [ + 'AWS::BedrockAgentCore::Runtime', + 'AWS::BedrockAgentCore::Memory', + 'AWS::BedrockAgentCore::Gateway', + 'AWS::BedrockAgentCore::GatewayTarget', + 'AWS::BedrockAgentCore::Evaluator', + 'AWS::BedrockAgentCore::OnlineEvaluationConfig', + 'AWS::BedrockAgentCore::RuntimeEndpoint', + 'AWS::BedrockAgentCore::WorkloadIdentity', + 'AWS::BedrockAgentCore::BrowserCustom', + 'AWS::BedrockAgentCore::BrowserProfile', + 'AWS::BedrockAgentCore::CodeInterpreterCustom', + 'AWS::BedrockAgentCore::Policy', + 'AWS::BedrockAgentCore::PolicyEngine', +]; + +/** + * Map from starter toolkit runtime_type to CLI runtimeVersion. + * CLI schema uses PYTHON_3_XX format (matching the Zod enum). 
+ */ +export const RUNTIME_TYPE_MAP: Record = { + PYTHON_3_10: 'PYTHON_3_10', + PYTHON_3_11: 'PYTHON_3_11', + PYTHON_3_12: 'PYTHON_3_12', + PYTHON_3_13: 'PYTHON_3_13', +}; diff --git a/src/cli/commands/import/index.ts b/src/cli/commands/import/index.ts new file mode 100644 index 000000000..d0d644d05 --- /dev/null +++ b/src/cli/commands/import/index.ts @@ -0,0 +1 @@ +export { registerImport } from './command'; diff --git a/src/cli/commands/import/phase1-update.ts b/src/cli/commands/import/phase1-update.ts new file mode 100644 index 000000000..38d622c6b --- /dev/null +++ b/src/cli/commands/import/phase1-update.ts @@ -0,0 +1,155 @@ +import { getCredentialProvider } from '../../aws/account'; +import type { CfnTemplate } from './template-utils'; +import { filterCompanionOnlyTemplate } from './template-utils'; +import { + CloudFormationClient, + CreateStackCommand, + DescribeStacksCommand, + GetTemplateCommand, + UpdateStackCommand, + waitUntilStackCreateComplete, + waitUntilStackUpdateComplete, +} from '@aws-sdk/client-cloudformation'; + +export interface Phase1Options { + region: string; + stackName: string; + synthTemplate: CfnTemplate; + onProgress?: (message: string) => void; +} + +export interface Phase1Result { + success: boolean; + stackExists: boolean; + error?: string; +} + +/** + * Phase 1: UPDATE (pre-import) + * + * Creates companion resources (IAM roles, policies) that the imported primary + * resources will reference. This is done by deploying a filtered template that + * includes only companion resources (no AWS::BedrockAgentCore::* resources). 
+ */ +export async function executePhase1(options: Phase1Options): Promise { + const { region, stackName, synthTemplate, onProgress } = options; + + const cfn = new CloudFormationClient({ region, credentials: getCredentialProvider() }); + + // Filter template to companion-only + const companionTemplate = filterCompanionOnlyTemplate(synthTemplate); + + // Check if the companion template has any resources at all + if (Object.keys(companionTemplate.Resources).length === 0) { + onProgress?.('No companion resources needed, skipping Phase 1'); + // Still need to check if stack exists + const stackExists = await doesStackExist(cfn, stackName); + return { success: true, stackExists }; + } + + const templateBody = JSON.stringify(companionTemplate); + + // Check if stack already exists + const stackExists = await doesStackExist(cfn, stackName); + + if (stackExists) { + // When updating, preserve any primary resources that were already imported + // into the stack. filterCompanionOnlyTemplate strips all primary resources, + // but previously imported ones must be kept or CFN will try to delete them. 
+ const deployedTemplate = await getDeployedTemplate(region, stackName); + if (deployedTemplate) { + for (const [logicalId, resource] of Object.entries(deployedTemplate.Resources)) { + if (!(logicalId in companionTemplate.Resources)) { + companionTemplate.Resources[logicalId] = resource; + } + } + } + const updateTemplateBody = JSON.stringify(companionTemplate); + + onProgress?.(`Updating stack ${stackName} with companion resources...`); + try { + await cfn.send( + new UpdateStackCommand({ + StackName: stackName, + TemplateBody: updateTemplateBody, + Capabilities: ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], + }) + ); + + onProgress?.('Waiting for stack update to complete...'); + await waitUntilStackUpdateComplete( + { client: cfn, maxWaitTime: 600, minDelay: 5, maxDelay: 15 }, + { StackName: stackName } + ); + onProgress?.('Phase 1 UPDATE complete'); + } catch (err: unknown) { + // "No updates are to be performed" is not an error + const message = err instanceof Error ? err.message : String(err); + if (message.includes('No updates are to be performed')) { + onProgress?.('Stack already has companion resources, no update needed'); + return { success: true, stackExists: true }; + } + return { success: false, stackExists: true, error: message }; + } + } else { + onProgress?.(`Creating stack ${stackName} with companion resources...`); + try { + await cfn.send( + new CreateStackCommand({ + StackName: stackName, + TemplateBody: templateBody, + Capabilities: ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], + Tags: [ + { Key: 'agentcore:project-name', Value: stackName.replace(/^AgentCore-/, '').replace(/-[^-]+$/, '') }, + { Key: 'agentcore:target-name', Value: 'default' }, + ], + }) + ); + + onProgress?.('Waiting for stack creation to complete...'); + await waitUntilStackCreateComplete( + { client: cfn, maxWaitTime: 600, minDelay: 5, maxDelay: 15 }, + { StackName: stackName } + ); + onProgress?.('Phase 1 CREATE complete'); + } catch (err: unknown) { + const message = err 
instanceof Error ? err.message : String(err); + return { success: false, stackExists: false, error: message }; + } + } + + return { success: true, stackExists }; +} + +/** + * Get the currently deployed CloudFormation template. + */ +export async function getDeployedTemplate(region: string, stackName: string): Promise { + const cfn = new CloudFormationClient({ region, credentials: getCredentialProvider() }); + + try { + const response = await cfn.send( + new GetTemplateCommand({ + StackName: stackName, + TemplateStage: 'Original', + }) + ); + + if (response.TemplateBody) { + return JSON.parse(response.TemplateBody) as CfnTemplate; + } + return null; + } catch { + return null; + } +} + +async function doesStackExist(cfn: CloudFormationClient, stackName: string): Promise { + try { + const response = await cfn.send(new DescribeStacksCommand({ StackName: stackName })); + const stack = response.Stacks?.[0]; + return !!stack && stack.StackStatus !== 'DELETE_COMPLETE'; + } catch { + return false; + } +} diff --git a/src/cli/commands/import/phase2-import.ts b/src/cli/commands/import/phase2-import.ts new file mode 100644 index 000000000..9d08e223a --- /dev/null +++ b/src/cli/commands/import/phase2-import.ts @@ -0,0 +1,296 @@ +import { getCredentialProvider } from '../../aws/account'; +import type { CfnTemplate } from './template-utils'; +import { buildImportTemplate } from './template-utils'; +import type { ResourceToImport } from './types'; +import { + type ResourceToImport as CfnResourceToImport, + CloudFormationClient, + CreateChangeSetCommand, + DescribeChangeSetCommand, + DescribeStacksCommand, + ExecuteChangeSetCommand, +} from '@aws-sdk/client-cloudformation'; +import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3'; +import { AssumeRoleCommand, STSClient } from '@aws-sdk/client-sts'; +import { execSync } from 'node:child_process'; +import * as fs from 'node:fs'; +import * as path from 'node:path'; + +export interface Phase2Options { + region: string; + 
stackName: string; + deployedTemplate: CfnTemplate; + synthTemplate: CfnTemplate; + resourcesToImport: ResourceToImport[]; + assemblyDirectory: string; + onProgress?: (message: string) => void; +} + +export interface Phase2Result { + success: boolean; + error?: string; +} + +/** + * Phase 2: IMPORT + * + * Uses CloudFormation's IMPORT change set mechanism to bring pre-existing + * resources under stack management. + * + * Three strict restrictions: + * 1. Cannot create new resources outside ResourcesToImport + * 2. Cannot update existing resources in the stack + * 3. Cannot add or modify Outputs + */ +export async function executePhase2(options: Phase2Options): Promise { + const { region, stackName, deployedTemplate, synthTemplate, resourcesToImport, assemblyDirectory, onProgress } = + options; + + if (resourcesToImport.length === 0) { + onProgress?.('No resources to import'); + return { success: true }; + } + + const credentials = getCredentialProvider(); + const cfn = new CloudFormationClient({ region, credentials }); + + // Publish CDK assets to S3 before creating the import change set + onProgress?.('Publishing CDK assets to S3...'); + await publishCdkAssets(assemblyDirectory, region, onProgress); + + // Build import template: deployed template + primary resources with DeletionPolicy: Retain + const logicalIds = resourcesToImport.map(r => r.logicalResourceId); + const importTemplate = buildImportTemplate(deployedTemplate, synthTemplate, logicalIds); + const templateBody = JSON.stringify(importTemplate); + + // Map to CloudFormation's ResourcesToImport format + const cfnResourcesToImport: CfnResourceToImport[] = resourcesToImport.map(r => ({ + ResourceType: r.resourceType, + LogicalResourceId: r.logicalResourceId, + ResourceIdentifier: r.resourceIdentifier, + })); + + const changeSetName = `import-${Date.now()}`; + + onProgress?.(`Creating IMPORT change set: ${changeSetName}`); + + try { + // Create the import change set + await cfn.send( + new 
CreateChangeSetCommand({ + StackName: stackName, + ChangeSetName: changeSetName, + ChangeSetType: 'IMPORT', + TemplateBody: templateBody, + ResourcesToImport: cfnResourcesToImport, + Capabilities: ['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], + }) + ); + + // Wait for the change set to be created + onProgress?.('Waiting for change set to be created...'); + await waitForChangeSetReady(cfn, stackName, changeSetName); + + // Describe the change set to see what it will do + const changeSetDescription = await cfn.send( + new DescribeChangeSetCommand({ + StackName: stackName, + ChangeSetName: changeSetName, + }) + ); + + onProgress?.(`Change set has ${changeSetDescription.Changes?.length ?? 0} changes. Executing...`); + + // Execute the change set + await cfn.send( + new ExecuteChangeSetCommand({ + StackName: stackName, + ChangeSetName: changeSetName, + }) + ); + + // Wait for import to complete + onProgress?.('Waiting for IMPORT to complete...'); + await waitForStackImportComplete(cfn, stackName); + + onProgress?.('Phase 2 IMPORT complete'); + return { success: true }; + } catch (err: unknown) { + const message = err instanceof Error ? err.message : String(err); + return { success: false, error: `Import change set failed: ${message}` }; + } +} + +/** + * Wait for a change set to be in CREATE_COMPLETE status. + */ +async function waitForChangeSetReady( + cfn: CloudFormationClient, + stackName: string, + changeSetName: string +): Promise { + const maxAttempts = 60; + const delay = 5000; // 5 seconds + + for (let attempt = 0; attempt < maxAttempts; attempt++) { + const response = await cfn.send( + new DescribeChangeSetCommand({ + StackName: stackName, + ChangeSetName: changeSetName, + }) + ); + + const status = response.Status; + + if (status === 'CREATE_COMPLETE') { + return; + } + + if (status === 'FAILED') { + throw new Error(`Change set creation failed: ${response.StatusReason ?? 
'Unknown reason'}`); + } + + // CREATE_PENDING, CREATE_IN_PROGRESS — keep waiting + await new Promise(resolve => setTimeout(resolve, delay)); + } + + throw new Error('Timed out waiting for change set creation'); +} + +/** + * Wait for stack to reach IMPORT_COMPLETE status. + */ +async function waitForStackImportComplete(cfn: CloudFormationClient, stackName: string): Promise { + const maxAttempts = 120; + const delay = 5000; // 5 seconds + + for (let attempt = 0; attempt < maxAttempts; attempt++) { + const response = await cfn.send(new DescribeStacksCommand({ StackName: stackName })); + const stack = response.Stacks?.[0]; + + if (!stack) { + throw new Error(`Stack ${stackName} not found during import wait`); + } + + const status = stack.StackStatus ?? ''; + + if (status === 'IMPORT_COMPLETE') { + return; + } + + if (status.includes('FAILED') || status.includes('ROLLBACK')) { + throw new Error(`Import failed with status: ${status}. Reason: ${stack.StackStatusReason ?? 'Unknown'}`); + } + + // IMPORT_IN_PROGRESS — keep waiting + await new Promise(resolve => setTimeout(resolve, delay)); + } + + throw new Error('Timed out waiting for import to complete'); +} + +/** + * Publish CDK file assets (code zips, templates) to the bootstrap S3 bucket. + * Reads the assets manifest from the CDK assembly directory. 
+ */ +export async function publishCdkAssets( + assemblyDirectory: string, + region: string, + onProgress?: (message: string) => void +): Promise { + // Find the assets manifest + const manifestFiles = fs.readdirSync(assemblyDirectory).filter(f => f.endsWith('.assets.json')); + if (manifestFiles.length === 0) { + onProgress?.('No assets manifest found, skipping asset publishing'); + return; + } + + for (const manifestFile of manifestFiles) { + const manifest = JSON.parse(fs.readFileSync(path.join(assemblyDirectory, manifestFile), 'utf-8')) as { + files?: Record< + string, + { + source: { path: string; packaging: string }; + destinations: Record< + string, + { + bucketName: string; + objectKey: string; + region: string; + assumeRoleArn?: string; + } + >; + } + >; + }; + + if (!manifest.files) continue; + + for (const [_assetHash, asset] of Object.entries(manifest.files)) { + const sourcePath = path.join(assemblyDirectory, asset.source.path); + if (!fs.existsSync(sourcePath)) { + onProgress?.(`Asset file not found: ${asset.source.path}, skipping`); + continue; + } + + // Determine the file body to upload + let body: Buffer; + const stat = fs.statSync(sourcePath); + if (stat.isDirectory()) { + if (asset.source.packaging === 'zip') { + // Zip the directory contents + const zipPath = `${sourcePath}.zip`; + execSync(`cd "${sourcePath}" && zip -rq "${zipPath}" .`); + body = fs.readFileSync(zipPath); + fs.unlinkSync(zipPath); + } else { + // Skip directory assets that aren't zip packaging (e.g. 
Docker image contexts) + onProgress?.(`Skipping directory asset: ${asset.source.path} (packaging: ${asset.source.packaging})`); + continue; + } + } else { + body = fs.readFileSync(sourcePath); + } + + for (const dest of Object.values(asset.destinations)) { + const destRegion = dest.region || region; + + // Get credentials — try assuming the publishing role if specified + let s3Credentials = getCredentialProvider(); + if (dest.assumeRoleArn && !dest.assumeRoleArn.includes('${')) { + try { + const sts = new STSClient({ region: destRegion, credentials: getCredentialProvider() }); + const assumed = await sts.send( + new AssumeRoleCommand({ + RoleArn: dest.assumeRoleArn, + RoleSessionName: 'agentcore-import-publish', + }) + ); + if (assumed.Credentials) { + /* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any */ + s3Credentials = { + accessKeyId: assumed.Credentials.AccessKeyId!, + secretAccessKey: assumed.Credentials.SecretAccessKey!, + sessionToken: assumed.Credentials.SessionToken, + } as any; + /* eslint-enable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any */ + } + } catch { + // Fall back to default credentials if role assumption fails + } + } + + const s3 = new S3Client({ region: destRegion, credentials: s3Credentials }); + + onProgress?.(`Uploading ${asset.source.path} → s3://${dest.bucketName}/${dest.objectKey}`); + await s3.send( + new PutObjectCommand({ + Bucket: dest.bucketName, + Key: dest.objectKey, + Body: body, + }) + ); + } + } + } +} diff --git a/src/cli/commands/import/template-utils.ts b/src/cli/commands/import/template-utils.ts new file mode 100644 index 000000000..4e6a516af --- /dev/null +++ b/src/cli/commands/import/template-utils.ts @@ -0,0 +1,233 @@ +import { PRIMARY_RESOURCE_TYPES } from './constants'; + +/** + * A simplified CloudFormation template structure. 
+ */ +export interface CfnTemplate { + AWSTemplateFormatVersion?: string; + Description?: string; + Parameters?: Record; + Mappings?: Record; + Conditions?: Record; + Resources: Record; + Outputs?: Record; + Rules?: Record; + Transform?: unknown; + Metadata?: Record; +} + +export interface CfnResource { + Type: string; + Properties?: Record; + DependsOn?: string | string[]; + DeletionPolicy?: string; + UpdateReplacePolicy?: string; + Condition?: string; + Metadata?: Record; +} + +/** + * Check if a CFN resource type is a primary AgentCore resource. + */ +function isPrimaryResourceType(type: string): boolean { + return PRIMARY_RESOURCE_TYPES.some(t => type.startsWith(t)); +} + +/** + * Recursively replace { "Ref": "" } and { "Fn::GetAtt": ["", ...] } + * references to removed logical IDs with a wildcard placeholder. + * + * Uses "*" because these references often end up in IAM policy Resource fields + * which require ARN format or "*". Phase 3 (agentcore deploy) replaces the + * entire template with the real synthesized values. + */ +function replaceDanglingRefs(value: unknown, removedIds: Set): unknown { + if (value === null || value === undefined) return value; + if (typeof value !== 'object') return value; + + if (Array.isArray(value)) { + return value.map(item => replaceDanglingRefs(item, removedIds)); + } + + const obj = value as Record; + + // Handle { "Ref": "LogicalId" } + if ('Ref' in obj && typeof obj.Ref === 'string' && removedIds.has(obj.Ref)) { + return '*'; + } + + // Handle { "Fn::GetAtt": ["LogicalId", "Attribute"] } + if ('Fn::GetAtt' in obj) { + const getAtt = obj['Fn::GetAtt']; + if (Array.isArray(getAtt) && getAtt.length >= 1 && removedIds.has(getAtt[0] as string)) { + return '*'; + } + } + + // Handle { "Fn::Sub": "...${LogicalId}..." } or { "Fn::Sub": ["...", { ... 
}] } + if ('Fn::Sub' in obj) { + const sub = obj['Fn::Sub']; + if (typeof sub === 'string') { + let replaced = sub; + for (const id of removedIds) { + // eslint-disable-next-line security/detect-non-literal-regexp -- id comes from template logical IDs + replaced = replaced.replace(new RegExp(`\\$\\{${id}[^}]*\\}`, 'g'), '*'); + } + if (replaced !== sub) return { 'Fn::Sub': replaced }; + } + } + + // Recurse into all properties + const result: Record = {}; + for (const [key, val] of Object.entries(obj)) { + result[key] = replaceDanglingRefs(val, removedIds); + } + return result; +} + +/** + * Filter a synthesized CDK template to keep only companion resources. + * Removes all AWS::BedrockAgentCore::* resources and their related Outputs. + * Replaces dangling Ref/Fn::GetAtt references with placeholders. + * + * Used for Phase 1 (UPDATE) to create companion IAM roles and policies + * without the primary resources. + */ +export function filterCompanionOnlyTemplate(synthTemplate: CfnTemplate): CfnTemplate { + const filtered: CfnTemplate = { + ...synthTemplate, + Resources: {}, + Outputs: {}, + }; + + // Collect logical IDs of primary resources to remove + const removedLogicalIds = new Set(); + + for (const [logicalId, resource] of Object.entries(synthTemplate.Resources)) { + if (isPrimaryResourceType(resource.Type)) { + removedLogicalIds.add(logicalId); + } else { + // Deep clone to avoid mutating original + filtered.Resources[logicalId] = JSON.parse(JSON.stringify(resource)) as CfnResource; + } + } + + // Replace dangling Ref/Fn::GetAtt references in companion resources + for (const [logicalId, resource] of Object.entries(filtered.Resources)) { + filtered.Resources[logicalId] = replaceDanglingRefs(resource, removedLogicalIds) as CfnResource; + } + + // Keep outputs that don't reference removed resources + if (synthTemplate.Outputs) { + for (const [outputKey, outputValue] of Object.entries(synthTemplate.Outputs)) { + const outputJson = JSON.stringify(outputValue); + // 
Check if any removed logical ID is referenced in this output + const referencesRemoved = Array.from(removedLogicalIds).some(id => outputJson.includes(id)); + if (!referencesRemoved) { + filtered.Outputs![outputKey] = outputValue; + } + } + } + + // Remove DependsOn references to removed resources + for (const [, resource] of Object.entries(filtered.Resources)) { + if (resource.DependsOn) { + if (typeof resource.DependsOn === 'string') { + if (removedLogicalIds.has(resource.DependsOn)) { + delete resource.DependsOn; + } + } else if (Array.isArray(resource.DependsOn)) { + resource.DependsOn = resource.DependsOn.filter(d => !removedLogicalIds.has(d)); + if (resource.DependsOn.length === 0) { + delete resource.DependsOn; + } + } + } + } + + return filtered; +} + +/** + * Build the import template by adding primary resources to the deployed template. + * Sets DeletionPolicy: Retain on all imported resources. + * Does NOT add any new Outputs (CFN restriction). + */ +export function buildImportTemplate( + deployedTemplate: CfnTemplate, + synthTemplate: CfnTemplate, + logicalIdsToImport: string[] +): CfnTemplate { + const importTemplate = JSON.parse(JSON.stringify(deployedTemplate)) as CfnTemplate; + + for (const logicalId of logicalIdsToImport) { + const resource = synthTemplate.Resources[logicalId]; + if (!resource) { + throw new Error(`Logical ID ${logicalId} not found in synthesized template`); + } + + // Deep clone and set DeletionPolicy: Retain + const importedResource = JSON.parse(JSON.stringify(resource)) as CfnResource; + importedResource.DeletionPolicy = 'Retain'; + importedResource.UpdateReplacePolicy = 'Retain'; + + // Remove DependsOn to avoid issues with resources not yet in the stack + // Phase 3 (agentcore deploy) will add these back + delete importedResource.DependsOn; + + // Keep all properties including AgentRuntimeArtifact so that CFN validation + // passes. 
The CDK assets must be published to S3 before creating the IMPORT + // change set (handled in phase2-import). + + importTemplate.Resources[logicalId] = importedResource; + } + + return importTemplate; +} + +/** + * Find the logical ID of a resource in a synthesized template by its type and a property value. + */ +export function findLogicalIdByProperty( + template: CfnTemplate, + resourceType: string, + propertyName: string, + propertyValue: string +): string | undefined { + // First pass: exact string match (highest confidence) + for (const [logicalId, resource] of Object.entries(template.Resources)) { + if (resource.Type === resourceType && resource.Properties) { + if (resource.Properties[propertyName] === propertyValue) { + return logicalId; + } + } + } + + // Second pass: check intrinsic functions (Fn::Join, Fn::Sub, etc.) + // Use a regex boundary check to avoid false substring matches + // (e.g., "agent1" matching "agent1_v2") + const escaped = propertyValue.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + // eslint-disable-next-line security/detect-non-literal-regexp + const pattern = new RegExp(escaped + '(?=[^a-zA-Z0-9_]|$)'); + + for (const [logicalId, resource] of Object.entries(template.Resources)) { + if (resource.Type === resourceType && resource.Properties) { + const propVal = resource.Properties[propertyName]; + if (typeof propVal === 'object' && propVal !== null) { + if (pattern.test(JSON.stringify(propVal))) { + return logicalId; + } + } + } + } + return undefined; +} + +/** + * Find all logical IDs of a specific resource type in a template. 
+ */ +export function findLogicalIdsByType(template: CfnTemplate, resourceType: string): string[] { + return Object.entries(template.Resources) + .filter(([, resource]) => resource.Type === resourceType) + .map(([logicalId]) => logicalId); +} diff --git a/src/cli/commands/import/types.ts b/src/cli/commands/import/types.ts new file mode 100644 index 000000000..922d3420f --- /dev/null +++ b/src/cli/commands/import/types.ts @@ -0,0 +1,84 @@ +import type { AgentCoreProjectSpec } from '../../../schema'; + +/** + * Parsed representation of a starter toolkit agent from .bedrock_agentcore.yaml. + */ +export interface ParsedStarterToolkitAgent { + name: string; + entrypoint: string; + build: 'CodeZip' | 'Container'; + runtimeVersion?: string; + language: 'python' | 'typescript'; + sourcePath?: string; + networkMode: 'PUBLIC' | 'VPC'; + networkConfig?: { subnets: string[]; securityGroups: string[] }; + protocol: 'HTTP' | 'MCP' | 'A2A'; + enableOtel: boolean; + /** Physical agent runtime ID from the starter toolkit deployment */ + physicalAgentId?: string; + /** Physical agent runtime ARN */ + physicalAgentArn?: string; + /** Whether this agent has a custom JWT authorizer configured (not imported) */ + hasAuthorizerConfig?: boolean; + /** ARN of the execution role from the starter toolkit deployment */ + executionRoleArn?: string; +} + +/** + * Parsed representation of a starter toolkit memory config. + */ +export interface ParsedStarterToolkitMemory { + name: string; + mode: 'STM_ONLY' | 'STM_AND_LTM' | 'NO_MEMORY'; + eventExpiryDays: number; + /** Physical memory ID from the starter toolkit deployment */ + physicalMemoryId?: string; + /** Physical memory ARN */ + physicalMemoryArn?: string; +} + +/** + * Parsed representation of a starter toolkit credential provider. 
+ */ +export interface ParsedStarterToolkitCredential { + /** Credential provider name in Identity service */ + name: string; + /** Provider type: cognito, github, google, salesforce, or api_key */ + providerType: 'oauth' | 'api_key'; +} + +/** + * Full parsed result from the YAML file. + */ +export interface ParsedStarterToolkitConfig { + defaultAgent?: string; + agents: ParsedStarterToolkitAgent[]; + memories: ParsedStarterToolkitMemory[]; + credentials: ParsedStarterToolkitCredential[]; + awsTarget: { + account?: string; + region?: string; + }; +} + +/** + * Resource to be imported via CloudFormation IMPORT change set. + */ +export interface ResourceToImport { + resourceType: string; + logicalResourceId: string; + resourceIdentifier: Record; +} + +/** + * Result of the import command. + */ +export interface ImportResult { + success: boolean; + error?: string; + projectSpec?: AgentCoreProjectSpec; + importedAgents?: string[]; + importedMemories?: string[]; + stackName?: string; + logPath?: string; +} diff --git a/src/cli/commands/import/yaml-parser.ts b/src/cli/commands/import/yaml-parser.ts new file mode 100644 index 000000000..e6b6198d6 --- /dev/null +++ b/src/cli/commands/import/yaml-parser.ts @@ -0,0 +1,245 @@ +import { RUNTIME_TYPE_MAP } from './constants'; +import type { + ParsedStarterToolkitAgent, + ParsedStarterToolkitConfig, + ParsedStarterToolkitCredential, + ParsedStarterToolkitMemory, +} from './types'; +import * as fs from 'node:fs'; +import * as path from 'node:path'; + +/** + * Minimal YAML parser for the starter toolkit config. + * Handles the simple key-value YAML format without needing a full YAML library. + * Falls back to JSON.parse for JSON-format configs. 
+ */ +function parseSimpleYaml(content: string): Record { + // Try JSON first + try { + return JSON.parse(content) as Record; + } catch { + // Not JSON, parse YAML + } + + const result: Record = {}; + const lines = content.split('\n'); + const stack: { indent: number; obj: Record }[] = [{ indent: -1, obj: result }]; + + for (const rawLine of lines) { + // Skip empty lines and comments + const trimmed = rawLine.trim(); + if (!trimmed || trimmed.startsWith('#')) continue; + + // Calculate indent level + const indent = rawLine.search(/\S/); + + // Handle list items (- value or - key: value) + if (trimmed.startsWith('- ')) { + let parentEntry = findParent(stack, indent); + let parentObj = parentEntry.obj; + let keys = Object.keys(parentObj); + let lastKey = keys[keys.length - 1]; + + // If parent is an empty object (created from "key:" with no value), go up one + // level and replace it with an array. This handles "credential_providers:\n - name: X". + if (!lastKey && Object.keys(parentObj).length === 0 && stack.length > 1) { + stack.pop(); + parentEntry = stack[stack.length - 1]!; + parentObj = parentEntry.obj; + keys = Object.keys(parentObj); + lastKey = keys[keys.length - 1]; + } + + if (lastKey) { + if (!Array.isArray(parentObj[lastKey])) { + parentObj[lastKey] = []; + } + const itemContent = trimmed.slice(2).trim(); + const itemColonIdx = itemContent.indexOf(':'); + if (itemColonIdx > 0 && !itemContent.startsWith('http')) { + // List item is a key-value pair (e.g., "- name: Foo") — start a new object + const itemObj: Record = {}; + const itemKey = itemContent.slice(0, itemColonIdx).trim(); + const itemVal = itemContent.slice(itemColonIdx + 1).trim(); + itemObj[itemKey] = itemVal === '' ? {} : parseYamlValue(itemVal); + (parentObj[lastKey] as unknown[]).push(itemObj); + // Push onto stack so subsequent indented lines go into this object. 
+ // Use the same indent as the "- " line so that lines indented further + // (e.g., arn: at indent+2) become children, while the next "- " at the + // same indent triggers findParent to pop this item and start a new one. + stack.push({ indent, obj: itemObj }); + } else { + (parentObj[lastKey] as unknown[]).push(parseYamlValue(itemContent)); + } + } + continue; + } + + const colonIdx = trimmed.indexOf(':'); + if (colonIdx === -1) continue; + + const key = trimmed.slice(0, colonIdx).trim(); + const valueStr = trimmed.slice(colonIdx + 1).trim(); + + // Pop stack to find correct parent + const parent = findParent(stack, indent); + + if (valueStr === '' || valueStr === '|') { + // Nested object + const child: Record = {}; + parent.obj[key] = child; + stack.push({ indent, obj: child }); + } else { + parent.obj[key] = parseYamlValue(valueStr); + } + } + + return result; +} + +function findParent( + stack: { indent: number; obj: Record }[], + indent: number +): { indent: number; obj: Record } { + while (stack.length > 1 && stack[stack.length - 1]!.indent >= indent) { + stack.pop(); + } + return stack[stack.length - 1]!; +} + +function parseYamlValue(value: string): unknown { + if (value === 'null' || value === '~' || value === '') return null; + if (value === 'true') return true; + if (value === 'false') return false; + // Check for quoted strings + if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) { + return value.slice(1, -1); + } + // Check for numbers + const num = Number(value); + if (!isNaN(num) && value.trim() !== '') return num; + return value; +} + +/** + * Parse a .bedrock_agentcore.yaml file into our internal representation. 
+ */ +export function parseStarterToolkitYaml(filePath: string): ParsedStarterToolkitConfig { + const content = fs.readFileSync(filePath, 'utf-8'); + const raw = parseSimpleYaml(content); + const yamlDir = path.dirname(path.resolve(filePath)); + + const agents: ParsedStarterToolkitAgent[] = []; + const memories: ParsedStarterToolkitMemory[] = []; + const credentials: ParsedStarterToolkitCredential[] = []; + let awsTarget: { account?: string; region?: string } = {}; + + const defaultAgent = raw.default_agent as string | undefined; + const agentsMap = raw.agents as Record> | undefined; + + if (agentsMap) { + for (const [agentKey, agentConfig] of Object.entries(agentsMap)) { + const awsConfig = agentConfig.aws as Record | undefined; + const bedrockConfig = agentConfig.bedrock_agentcore as Record | undefined; + const memoryConfig = agentConfig.memory as Record | undefined; + const networkConfig = awsConfig?.network_configuration as Record | undefined; + const protocolConfig = awsConfig?.protocol_configuration as Record | undefined; + const obsConfig = awsConfig?.observability as Record | undefined; + + // Extract AWS target from first agent + if (awsConfig && (!awsTarget.account || !awsTarget.region)) { + awsTarget = { + account: String((awsConfig.account as string) ?? ''), + region: String((awsConfig.region as string) ?? ''), + }; + } + + // Map deployment_type + const deploymentType = String((agentConfig.deployment_type as string) ?? 'container'); + const build = deploymentType === 'direct_code_deploy' ? 'CodeZip' : 'Container'; + + // Map runtime_type + const rawRuntimeType = String((agentConfig.runtime_type as string) ?? 'PYTHON_3_12'); + const runtimeVersion = RUNTIME_TYPE_MAP[rawRuntimeType] ?? 'PYTHON_3_12'; + + // Map network mode + const networkMode = String((networkConfig?.network_mode as string) ?? 
'PUBLIC') as 'PUBLIC' | 'VPC'; + const networkModeConfig = networkConfig?.network_mode_config as Record | undefined; + + // Map protocol + const protocol = String((protocolConfig?.server_protocol as string) ?? 'HTTP') as 'HTTP' | 'MCP' | 'A2A'; + + agents.push({ + name: String((agentConfig.name as string) ?? agentKey), + entrypoint: String((agentConfig.entrypoint as string) ?? 'main.py'), + build, + runtimeVersion, + language: (agentConfig.language as 'python' | 'typescript') ?? 'python', + sourcePath: agentConfig.source_path + ? path.resolve(yamlDir, String(agentConfig.source_path as string)) + : undefined, + networkMode, + networkConfig: + networkMode === 'VPC' && networkModeConfig + ? { + subnets: Array.isArray(networkModeConfig.subnets) ? (networkModeConfig.subnets as string[]) : [], + securityGroups: Array.isArray(networkModeConfig.security_groups) + ? (networkModeConfig.security_groups as string[]) + : [], + } + : undefined, + protocol, + enableOtel: (obsConfig?.enabled as boolean) ?? true, + physicalAgentId: bedrockConfig?.agent_id as string | undefined, + physicalAgentArn: bedrockConfig?.agent_arn as string | undefined, + hasAuthorizerConfig: + agentConfig.authorizer_configuration != null && agentConfig.authorizer_configuration !== 'null', + executionRoleArn: (awsConfig?.execution_role as string) || undefined, + }); + + // Extract memory config per agent — ensure mode is a non-empty string + // (the simple YAML parser turns bare "mode:" into an empty object {}) + if ( + memoryConfig && + typeof memoryConfig.mode === 'string' && + memoryConfig.mode !== 'NO_MEMORY' && + memoryConfig.mode + ) { + const memName = + (memoryConfig.memory_name as string) ?? `${String((agentConfig.name as string) ?? agentKey)}_memory`; + // Avoid duplicate memories + if (!memories.find(m => m.name === memName)) { + memories.push({ + name: memName, + mode: memoryConfig.mode as 'STM_ONLY' | 'STM_AND_LTM', + eventExpiryDays: (memoryConfig.event_expiry_days as number) ?? 
30, + physicalMemoryId: memoryConfig.memory_id as string | undefined, + physicalMemoryArn: memoryConfig.memory_arn as string | undefined, + }); + } + } + + // Extract credential providers (OAuth and API key) + const identityConfig = agentConfig.identity as Record | undefined; + if (identityConfig) { + const providers = identityConfig.credential_providers as Record[] | undefined; + if (Array.isArray(providers)) { + for (const provider of providers) { + const providerName = provider.name as string | undefined; + if (providerName && !credentials.find(c => c.name === providerName)) { + credentials.push({ name: providerName, providerType: 'oauth' }); + } + } + } + } + + // Extract API key credential provider + const apiKeyCredName = agentConfig.api_key_credential_provider_name as string | undefined; + if (apiKeyCredName && !credentials.find(c => c.name === apiKeyCredName)) { + credentials.push({ name: apiKeyCredName, providerType: 'api_key' }); + } + } + } + + return { defaultAgent, agents, memories, credentials, awsTarget }; +} diff --git a/src/cli/operations/deploy/__tests__/pre-deploy-identity.test.ts b/src/cli/operations/deploy/__tests__/pre-deploy-identity.test.ts index b849a0de9..51727a0ad 100644 --- a/src/cli/operations/deploy/__tests__/pre-deploy-identity.test.ts +++ b/src/cli/operations/deploy/__tests__/pre-deploy-identity.test.ts @@ -322,7 +322,13 @@ describe('setupOAuth2Providers', () => { mockUpdateOAuth2Provider.mockResolvedValue({ success: true, result: {} }); const projectSpec = { - credentials: [{ name: 'test-oauth', type: 'OAuthCredentialProvider' }], + credentials: [ + { + name: 'test-oauth', + type: 'OAuthCredentialProvider', + discoveryUrl: 'https://accounts.google.com/.well-known/openid_configuration', + }, + ], }; const result = await setupOAuth2Providers({ @@ -365,7 +371,13 @@ describe('setupOAuth2Providers', () => { mockCreateOAuth2Provider.mockResolvedValue({ success: false, error: 'Creation failed' }); const projectSpec = { - credentials: [{ 
name: 'test-oauth', type: 'OAuthCredentialProvider' }], + credentials: [ + { + name: 'test-oauth', + type: 'OAuthCredentialProvider', + discoveryUrl: 'https://accounts.google.com/.well-known/openid_configuration', + }, + ], }; const result = await setupOAuth2Providers({ diff --git a/src/cli/operations/deploy/pre-deploy-identity.ts b/src/cli/operations/deploy/pre-deploy-identity.ts index f843d0d59..c76a29131 100644 --- a/src/cli/operations/deploy/pre-deploy-identity.ts +++ b/src/cli/operations/deploy/pre-deploy-identity.ts @@ -341,6 +341,16 @@ async function setupSingleOAuth2Provider( }; } + // Imported OAuth providers may not have a discoveryUrl (provider already exists in Identity service). + // Skip create/update since we can't build a valid config without it. + if (!credential.discoveryUrl) { + return { + providerName: credential.name, + status: 'skipped', + error: `No discoveryUrl configured for "${credential.name}". Provider already exists in Identity service — credentials in .env.local will be ignored.`, + }; + } + const params = { name: credential.name, vendor: credential.vendor, diff --git a/src/cli/tui/utils/commands.ts b/src/cli/tui/utils/commands.ts index 677e2e27d..7d9ed2b58 100644 --- a/src/cli/tui/utils/commands.ts +++ b/src/cli/tui/utils/commands.ts @@ -11,7 +11,7 @@ export interface CommandMeta { /** * Commands hidden from TUI help but still available via CLI. */ -const HIDDEN_FROM_TUI = ['help', 'update', 'package', 'logs', 'traces', 'pause', 'resume'] as const; +const HIDDEN_FROM_TUI = ['help', 'update', 'package', 'logs', 'traces', 'pause', 'resume', 'import'] as const; /** * Commands hidden from TUI when inside an existing project. 
diff --git a/src/schema/schemas/__tests__/agentcore-project.test.ts b/src/schema/schemas/__tests__/agentcore-project.test.ts index cb3a3ab98..64aa5ea9e 100644 --- a/src/schema/schemas/__tests__/agentcore-project.test.ts +++ b/src/schema/schemas/__tests__/agentcore-project.test.ts @@ -290,12 +290,12 @@ describe('CredentialSchema', () => { expect(result.success).toBe(true); }); - it('OAuthCredentialProvider without discoveryUrl fails', () => { + it('OAuthCredentialProvider without discoveryUrl succeeds (optional for imported providers)', () => { const result = CredentialSchema.safeParse({ type: 'OAuthCredentialProvider', name: 'MyOAuth', }); - expect(result.success).toBe(false); + expect(result.success).toBe(true); }); it('invalid type fails discriminated union', () => { diff --git a/src/schema/schemas/agent-env.ts b/src/schema/schemas/agent-env.ts index 97acbb5db..ebf1a59a0 100644 --- a/src/schema/schemas/agent-env.ts +++ b/src/schema/schemas/agent-env.ts @@ -166,6 +166,8 @@ export const AgentEnvSpecSchema = z protocol: ProtocolModeSchema.optional(), /** Allowed request headers forwarded to the runtime at invocation time. */ requestHeaderAllowlist: RequestHeaderAllowlistSchema.optional(), + /** ARN of an existing IAM execution role to use instead of creating a new one. 
*/ + executionRoleArn: z.string().optional(), tags: TagsSchema.optional(), }) .superRefine((data, ctx) => { diff --git a/src/schema/schemas/agentcore-project.ts b/src/schema/schemas/agentcore-project.ts index b814933c3..9bebf8b7b 100644 --- a/src/schema/schemas/agentcore-project.ts +++ b/src/schema/schemas/agentcore-project.ts @@ -113,8 +113,8 @@ export type ApiKeyCredential = z.infer; export const OAuthCredentialSchema = z.object({ type: z.literal('OAuthCredentialProvider'), name: CredentialNameSchema, - /** OIDC discovery URL for the OAuth provider */ - discoveryUrl: z.string().url(), + /** OIDC discovery URL for the OAuth provider (optional for imported providers that already exist in Identity service) */ + discoveryUrl: z.string().url().optional(), /** Scopes this credential provider supports */ scopes: z.array(z.string()).optional(), /** Credential provider vendor type */