diff --git a/src/runner/templates/agents/cloudflare/vercel/config.json b/src/runner/templates/agents/cloudflare/vercel/config.json
index ccdd800..e67849c 100644
--- a/src/runner/templates/agents/cloudflare/vercel/config.json
+++ b/src/runner/templates/agents/cloudflare/vercel/config.json
@@ -3,6 +3,7 @@
   "displayName": "Vercel AI SDK (Cloudflare)",
   "type": "agentic",
   "platform": "cloudflare",
+  "streamingMode": "both",
   "dependencies": [
     {
       "package": "ai",
@@ -11,8 +12,27 @@
     {
       "package": "@ai-sdk/openai",
       "version": "latest"
+    },
+    {
+      "package": "@ai-sdk/anthropic",
+      "version": "latest"
     }
   ],
   "versions": ["6.0.116"],
-  "sentryVersions": ["latest"]
+  "sentryVersions": ["latest"],
+  "options": {
+    "agentStyle": ["function", "class"],
+    "provider": [
+      "openai",
+      {
+        "value": "anthropic",
+        "overrides": {
+          "modelOverrides": {
+            "request": "claude-haiku-4-5",
+            "response": "claude-haiku-4-5*"
+          }
+        }
+      }
+    ]
+  }
 }
diff --git a/src/runner/templates/agents/cloudflare/vercel/template.njk b/src/runner/templates/agents/cloudflare/vercel/template.njk
index 3eac2c3..2a24911 100644
--- a/src/runner/templates/agents/cloudflare/vercel/template.njk
+++ b/src/runner/templates/agents/cloudflare/vercel/template.njk
@@ -27,14 +27,33 @@
 {% endblock %}
 
 {% block imports %}
+{% if agentStyle == "class" %}
+import { ToolLoopAgent, tool, jsonSchema, stepCountIs } from "ai";
+{% elif isStreaming %}
+import { streamText, tool, jsonSchema, stepCountIs } from "ai";
+{% else %}
 import { generateText, tool, jsonSchema, stepCountIs } from "ai";
+{% endif %}
+{% if provider == "anthropic" %}
+import { createAnthropic } from "@ai-sdk/anthropic";
+{% else %}
 import { createOpenAI } from "@ai-sdk/openai";
+{% endif %}
 {% endblock %}
 
 {% block dynamic_imports %}
+{% if provider == "anthropic" %}
+  const anthropic = createAnthropic({ apiKey: env.ANTHROPIC_API_KEY });
+{% else %}
   const openai = createOpenAI({ apiKey: env.OPENAI_API_KEY });
+{% endif %}
 {% endblock %}
 
+{# Helper macro for model reference #}
+{% macro modelRef(input) %}
+{% if provider == "anthropic" %}anthropic({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}){% else %}openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}){% endif %}
+{% endmacro %}
+
 {% block test %}
 {% if agent and agent.tools and agent.tools.length > 0 %}
   const tools = {
@@ -70,10 +89,106 @@ import { createOpenAI } from "@ai-sdk/openai";
 
   // Request {{ loop.index }}{% if loop.length > 1 %} of {{ loop.length }}{% endif %}
   try {
+{% if agentStyle == "class" %}
+    // ToolLoopAgent class-based approach
+    const agent = new ToolLoopAgent({
+      model: {{ modelRef(input) | trim }},
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+{% endif %}
+{% if system_content %}
+      instructions: "{{ system_content }}",
+{% endif %}
+      stopWhen: stepCountIs(10),
+      experimental_telemetry: {
+        isEnabled: true,
+        functionId: "{{ agent.name if agent else 'assistant' }}",
+        recordInputs: true,
+        recordOutputs: true,
+      },
+    });
+
+{% if isStreaming %}
+{% if user_content is iterable and user_content is not string %}
+    const { textStream } = await agent.stream({
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+    });
+{% else %}
+    const { textStream } = await agent.stream({
+      prompt: "{{ user_content }}",
+    });
+{% endif %}
+    const chunks = [];
+    for await (const chunk of textStream) {
+      chunks.push(chunk);
+    }
+    console.log("Response:", chunks.join(""));
+{% else %}
+{% if user_content is iterable and user_content is not string %}
+    const { text } = await agent.generate({
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+    });
+{% else %}
+    const { text } = await agent.generate({
+      prompt: "{{ user_content }}",
+    });
+{% endif %}
+    console.log("Response:", text);
+{% endif %}
+{% else %}
+    // Function-based approach
+{% if isStreaming %}
+{% if user_content is iterable and user_content is not string %}
+    const { textStream } = streamText({
+      model: {{ modelRef(input) | trim }},
+{% if system_content %}
+      system: "{{ system_content }}",
+{% endif %}
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+      stopWhen: stepCountIs(10),
+{% endif %}
+      experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true },
+    });
+{% else %}
+    const { textStream } = streamText({
+      model: {{ modelRef(input) | trim }},
+{% if system_content %}
+      system: "{{ system_content }}",
+{% endif %}
+      prompt: "{{ user_content }}",
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+      stopWhen: stepCountIs(10),
+{% endif %}
+      experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true },
+    });
+{% endif %}
+    const chunks = [];
+    for await (const chunk of textStream) {
+      chunks.push(chunk);
+    }
+    console.log("Response:", chunks.join(""));
+{% else %}
 {% if user_content is iterable and user_content is not string %}
-    // Multimodal content - use messages array
     const { text } = await generateText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -90,9 +205,8 @@ import { createOpenAI } from "@ai-sdk/openai";
       experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true },
     });
 {% else %}
-    // Simple text prompt
     const { text } = await generateText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -105,6 +219,8 @@ import { createOpenAI } from "@ai-sdk/openai";
     });
 {% endif %}
     console.log("Response:", text);
+{% endif %}
+{% endif %}
   } catch (error) {
     Sentry.captureException(error);
     console.error("Error:", error.message);
diff --git a/src/runner/templates/agents/nextjs/vercel/config.json b/src/runner/templates/agents/nextjs/vercel/config.json
index 83a80bb..00ed22f 100644
--- a/src/runner/templates/agents/nextjs/vercel/config.json
+++ b/src/runner/templates/agents/nextjs/vercel/config.json
@@ -13,6 +13,10 @@
       "package": "@ai-sdk/openai",
       "version": "latest"
     },
+    {
+      "package": "@ai-sdk/anthropic",
+      "version": "latest"
+    },
     {
       "package": "@sentry/nextjs",
       "version": "sentry"
@@ -23,5 +27,20 @@
     }
   ],
   "versions": ["6.0.116"],
-  "sentryVersions": ["latest"]
+  "sentryVersions": ["latest"],
+  "options": {
+    "agentStyle": ["function", "class"],
+    "provider": [
+      "openai",
+      {
+        "value": "anthropic",
+        "overrides": {
+          "modelOverrides": {
+            "request": "claude-haiku-4-5",
+            "response": "claude-haiku-4-5*"
+          }
+        }
+      }
+    ]
+  }
 }
diff --git a/src/runner/templates/agents/nextjs/vercel/template.njk b/src/runner/templates/agents/nextjs/vercel/template.njk
index 63a9907..1d40094 100644
--- a/src/runner/templates/agents/nextjs/vercel/template.njk
+++ b/src/runner/templates/agents/nextjs/vercel/template.njk
@@ -18,11 +18,24 @@
 {% endmacro %}
 
 {% block dynamic_imports %}
+{% if agentStyle == "class" %}
+const { ToolLoopAgent, tool, jsonSchema, stepCountIs } = await import("ai");
+{% else %}
 const { generateText, streamText, tool, jsonSchema, stepCountIs } = await import("ai");
+{% endif %}
+{% if provider == "anthropic" %}
+const { anthropic } = await import("@ai-sdk/anthropic");
+{% else %}
 const { openai } = await import("@ai-sdk/openai");
+{% endif %}
 console.log('Vercel AI SDK initialized (auto-instrumentation enabled)');
 {% endblock %}
 
+{# Helper macro for model reference #}
+{% macro modelRef(input) %}
+{% if provider == "anthropic" %}anthropic({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}){% else %}openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}){% endif %}
+{% endmacro %}
+
 {% block test %}
 {% if agent and agent.tools and agent.tools.length > 0 %}
   const tools = {
@@ -58,13 +71,76 @@ console.log('Vercel AI SDK initialized (auto-instrumentation enabled)');
 
   // Request {{ loop.index }}{% if loop.length > 1 %} of {{ loop.length }}{% endif %}
   try {
+{% if agentStyle == "class" %}
+    // ToolLoopAgent class-based approach
+    const agent = new ToolLoopAgent({
+      model: {{ modelRef(input) | trim }},
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+{% endif %}
+{% if system_content %}
+      instructions: "{{ system_content }}",
+{% endif %}
+      stopWhen: stepCountIs(10),
+      experimental_telemetry: {
+        isEnabled: true,
+        functionId: "{{ agent.name if agent else 'assistant' }}",
+        recordInputs: true,
+        recordOutputs: true,
+      },
+    });
+
+{% if isStreaming %}
+    console.log('Starting streaming request {{ loop.index }}...');
+
+{% if user_content is iterable and user_content is not string %}
+    const { textStream } = await agent.stream({
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+    });
+{% else %}
+    const { textStream } = await agent.stream({
+      prompt: "{{ user_content }}",
+    });
+{% endif %}
+
+    const chunks = [];
+    for await (const chunk of textStream) {
+      chunks.push(chunk);
+    }
+    console.log("Response {{ loop.index }}:", chunks.join(""));
+{% else %}
+    console.log('Starting blocking request {{ loop.index }}...');
+
+{% if user_content is iterable and user_content is not string %}
+    const { text } = await agent.generate({
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+    });
+{% else %}
+    const { text } = await agent.generate({
+      prompt: "{{ user_content }}",
+    });
+{% endif %}
+    console.log("Response {{ loop.index }}:", text);
+{% endif %}
+{% else %}
+    // Function-based approach
 {% if isStreaming %}
     console.log('Starting streaming request {{ loop.index }}...');
 
 {% if user_content is iterable and user_content is not string %}
     // Multimodal content - use messages array
     const { textStream } = streamText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -87,7 +163,7 @@ console.log('Vercel AI SDK initialized (auto-instrumentation enabled)');
 {% else %}
     // Simple text prompt
     const { textStream } = streamText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -115,7 +191,7 @@ console.log('Vercel AI SDK initialized (auto-instrumentation enabled)');
 {% if user_content is iterable and user_content is not string %}
     // Multimodal content - use messages array
     const { text } = await generateText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -138,7 +214,7 @@ console.log('Vercel AI SDK initialized (auto-instrumentation enabled)');
 {% else %}
     // Simple text prompt
     const { text } = await generateText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -155,6 +231,7 @@ console.log('Vercel AI SDK initialized (auto-instrumentation enabled)');
     });
 {% endif %}
     console.log("Response {{ loop.index }}:", text);
+{% endif %}
 {% endif %}
   } catch (error) {
     Sentry.captureException(error);
diff --git a/src/runner/templates/agents/node/vercel/config.json b/src/runner/templates/agents/node/vercel/config.json
index d8fa5c7..4752527 100644
--- a/src/runner/templates/agents/node/vercel/config.json
+++ b/src/runner/templates/agents/node/vercel/config.json
@@ -3,6 +3,7 @@
   "displayName": "Vercel AI SDK",
   "type": "agentic",
   "platform": "node",
+  "streamingMode": "both",
   "dependencies": [
     {
       "package": "ai",
@@ -11,8 +12,27 @@
     {
       "package": "@ai-sdk/openai",
       "version": "latest"
+    },
+    {
+      "package": "@ai-sdk/anthropic",
+      "version": "latest"
     }
   ],
   "versions": ["6.0.116"],
-  "sentryVersions": ["latest"]
+  "sentryVersions": ["latest"],
+  "options": {
+    "agentStyle": ["function", "class"],
+    "provider": [
+      "openai",
+      {
+        "value": "anthropic",
+        "overrides": {
+          "modelOverrides": {
+            "request": "claude-haiku-4-5",
+            "response": "claude-haiku-4-5*"
+          }
+        }
+      }
+    ]
+  }
 }
diff --git a/src/runner/templates/agents/node/vercel/template.njk b/src/runner/templates/agents/node/vercel/template.njk
index 77efbf2..327bb92 100644
--- a/src/runner/templates/agents/node/vercel/template.njk
+++ b/src/runner/templates/agents/node/vercel/template.njk
@@ -23,10 +23,25 @@
 {% endmacro %}
 
 {% block dynamic_imports %}
+{% if agentStyle == "class" %}
+const { ToolLoopAgent, tool, jsonSchema, stepCountIs } = await import("ai");
+{% elif isStreaming %}
+const { streamText, tool, jsonSchema, stepCountIs } = await import("ai");
+{% else %}
 const { generateText, tool, jsonSchema, stepCountIs } = await import("ai");
+{% endif %}
+{% if provider == "anthropic" %}
+const { anthropic } = await import("@ai-sdk/anthropic");
+{% else %}
 const { openai } = await import("@ai-sdk/openai");
+{% endif %}
 {% endblock %}
 
+{# Helper macro for model reference #}
+{% macro modelRef(input) %}
+{% if provider == "anthropic" %}anthropic({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}){% else %}openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}){% endif %}
+{% endmacro %}
+
 {% block test %}
 {% if agent and agent.tools and agent.tools.length > 0 %}
   const tools = {
@@ -62,10 +77,104 @@ const { openai } = await import("@ai-sdk/openai");
 
   // Request {{ loop.index }}{% if loop.length > 1 %} of {{ loop.length }}{% endif %}
   try {
+{% if agentStyle == "class" %}
+    // ToolLoopAgent class-based approach
+    const agent = new ToolLoopAgent({
+      model: {{ modelRef(input) | trim }},
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+{% endif %}
+{% if system_content %}
+      instructions: "{{ system_content }}",
+{% endif %}
+      stopWhen: stepCountIs(10),
+      experimental_telemetry: {
+        isEnabled: true,
+        functionId: "{{ agent.name if agent else 'assistant' }}",
+        recordInputs: true,
+        recordOutputs: true,
+      },
+    });
+
+{% if isStreaming %}
+{% if user_content is iterable and user_content is not string %}
+    const { textStream } = await agent.stream({
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+    });
+{% else %}
+    const { textStream } = await agent.stream({
+      prompt: "{{ user_content }}",
+    });
+{% endif %}
+    const chunks = [];
+    for await (const chunk of textStream) {
+      chunks.push(chunk);
+    }
+    console.log("Response:", chunks.join(""));
+{% else %}
+{% if user_content is iterable and user_content is not string %}
+    const { text } = await agent.generate({
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+    });
+{% else %}
+    const { text } = await agent.generate({
+      prompt: "{{ user_content }}",
+    });
+{% endif %}
+    console.log("Response:", text);
+{% endif %}
+{% else %}
+    // Function-based approach
+{% if isStreaming %}
+{% if user_content is iterable and user_content is not string %}
+    const { textStream } = streamText({
+      model: {{ modelRef(input) | trim }},
+{% if system_content %}
+      system: "{{ system_content }}",
+{% endif %}
+      messages: [
+        {
+          role: "user",
+          content: {{ renderVercelContent(user_content) }},
+        },
+      ],
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+      stopWhen: stepCountIs(10),
+{% endif %}
+    });
+{% else %}
+    const { textStream } = streamText({
+      model: {{ modelRef(input) | trim }},
+{% if system_content %}
+      system: "{{ system_content }}",
+{% endif %}
+      prompt: "{{ user_content }}",
+{% if agent and agent.tools and agent.tools.length > 0 %}
+      tools,
+      stopWhen: stepCountIs(10),
+{% endif %}
+    });
+{% endif %}
+    const chunks = [];
+    for await (const chunk of textStream) {
+      chunks.push(chunk);
+    }
+    console.log("Response:", chunks.join(""));
+{% else %}
 {% if user_content is iterable and user_content is not string %}
-    // Multimodal content - use messages array
     const { text } = await generateText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -81,9 +190,8 @@ const { openai } = await import("@ai-sdk/openai");
 {% endif %}
     });
 {% else %}
-    // Simple text prompt
     const { text } = await generateText({
-      model: openai({% if causeAPIError %}"invalid-model"{% else %}"{{ input.model }}"{% endif %}),
+      model: {{ modelRef(input) | trim }},
 {% if system_content %}
       system: "{{ system_content }}",
 {% endif %}
@@ -95,6 +203,8 @@ const { openai } = await import("@ai-sdk/openai");
     });
 {% endif %}
     console.log("Response:", text);
+{% endif %}
+{% endif %}
   } catch (error) {
     Sentry.captureException(error);
     console.error("Error:", error.message);