11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -8,6 +8,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]

### Added
- **AIServicePlugin Auto-Detection** — AIServicePlugin now automatically detects and initializes
LLM providers based on environment variables, eliminating the need for manual adapter configuration
in each deployment:
- Auto-detection priority: `AI_GATEWAY_MODEL` → `OPENAI_API_KEY` → `ANTHROPIC_API_KEY` → `GOOGLE_GENERATIVE_AI_API_KEY`
- Graceful fallback to MemoryLLMAdapter when no provider is configured
- Comprehensive logging of selected provider and warnings for missing SDKs
- Supports custom model selection via `AI_MODEL` environment variable
- Consistent behavior across CLI, Vercel, Docker, and custom deployments
- Dynamic import failures are handled as soft errors with automatic fallback
([#1067](https://github.com/objectstack-ai/framework/issues/1067))

- **Metadata Versioning & History** — Comprehensive version history tracking and rollback capabilities
for metadata items. Key features include:
- `MetadataHistoryRecordSchema` defining structure for historical snapshots
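As a consumer-facing illustration of the entry above, a minimal zero-config bootstrap sketch. The `kernel` declaration is a stand-in for the application kernel built in serve.ts below, and the environment variable named in the comment is only an example:

// Sketch only: relies on the new auto-detection, so no adapter argument is passed.
// Assumes e.g. `ANTHROPIC_API_KEY` (and optionally `AI_MODEL`) is exported in the
// shell; with nothing set, the plugin falls back to the echo-mode MemoryLLMAdapter.
import { AIServicePlugin } from '@objectstack/service-ai';

// Stand-in for the application kernel constructed elsewhere (see serve.ts below).
declare const kernel: { use(plugin: unknown): Promise<void> };

await kernel.use(new AIServicePlugin()); // provider resolved from env at init time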
49 changes: 5 additions & 44 deletions packages/cli/src/commands/serve.ts
@@ -344,51 +344,12 @@ export default class Serve extends Command {
if (!hasAIPlugin) {
try {
const aiPkg = '@objectstack/service-ai';
const { AIServicePlugin, VercelLLMAdapter } = await import(/* webpackIgnore: true */ aiPkg);

// Auto-detect LLM provider from environment variables.
// Priority: 1) Vercel AI Gateway 2) Direct provider SDKs 3) MemoryLLMAdapter (echo)
let adapter: any = undefined;

// 1. Vercel AI Gateway — works with any provider via gateway('provider/model')
// Uses OIDC on Vercel, VERCEL_API_KEY locally.
const gatewayModel = process.env.AI_GATEWAY_MODEL; // e.g. 'anthropic/claude-sonnet-4.6'
if (gatewayModel) {
try {
const gatewayPkg = '@ai-sdk/gateway';
const { gateway } = await import(/* webpackIgnore: true */ gatewayPkg);
adapter = new VercelLLMAdapter({ model: gateway(gatewayModel) });
} catch {
// @ai-sdk/gateway not installed
}
}

// 2. Direct provider SDKs
if (!adapter) {
const providerConfigs: Array<{ envKey: string; pkg: string; factory: string; defaultModel: string }> = [
{ envKey: 'OPENAI_API_KEY', pkg: '@ai-sdk/openai', factory: 'openai', defaultModel: 'gpt-4o' },
{ envKey: 'ANTHROPIC_API_KEY', pkg: '@ai-sdk/anthropic', factory: 'anthropic', defaultModel: 'claude-sonnet-4-20250514' },
{ envKey: 'GOOGLE_GENERATIVE_AI_API_KEY', pkg: '@ai-sdk/google', factory: 'google', defaultModel: 'gemini-2.0-flash' },
];

for (const { envKey, pkg, factory, defaultModel } of providerConfigs) {
if (process.env[envKey]) {
try {
const mod = await import(/* webpackIgnore: true */ pkg);
const createModel = mod[factory] ?? mod.default;
if (typeof createModel === 'function') {
const modelId = process.env.AI_MODEL ?? defaultModel;
adapter = new VercelLLMAdapter({ model: createModel(modelId) });
break;
}
} catch {
// Provider SDK not installed — skip
}
}
}
}
const { AIServicePlugin } = await import(/* webpackIgnore: true */ aiPkg);

await kernel.use(new AIServicePlugin(adapter ? { adapter } : undefined));
// AIServicePlugin will auto-detect LLM provider from environment variables
// (AI_GATEWAY_MODEL, OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_GENERATIVE_AI_API_KEY)
// No need to manually construct the adapter here.
await kernel.use(new AIServicePlugin());
trackPlugin('AIService');
} catch {
// @objectstack/service-ai not installed — AI features unavailable
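Deployments that should not rely on env-based detection can still pass an adapter explicitly, which takes precedence over auto-detection. A sketch using the `LLMAdapter` shape exercised in the tests below; the type's export path is an assumption:

import { AIServicePlugin, type LLMAdapter } from '@objectstack/service-ai'; // type export path assumed

// An explicitly passed adapter always wins over env auto-detection
// (see the 'should prefer explicit adapter over auto-detection' test below).
const customAdapter: LLMAdapter = {
  name: 'my-adapter',
  chat: async () => ({ content: 'hello' }),
  complete: async () => ({ content: '' }),
};

declare const kernel: { use(plugin: unknown): Promise<void> }; // stand-in, as above

await kernel.use(new AIServicePlugin({ adapter: customAdapter }));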
6 changes: 3 additions & 3 deletions packages/metadata/src/metadata-history.test.ts
@@ -3,15 +3,15 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { MetadataManager } from './metadata-manager';
import { DatabaseLoader } from './loaders/database-loader';
import { MemoryDriver } from '@objectstack/driver-memory';
import { InMemoryDriver } from '@objectstack/driver-memory';

describe('Metadata History', () => {
let manager: MetadataManager;
let driver: MemoryDriver;
let driver: InMemoryDriver;

beforeEach(async () => {
// Create a fresh in-memory driver and database loader
driver = new MemoryDriver({});
driver = new InMemoryDriver({});

const dbLoader = new DatabaseLoader({
driver,
@@ -42,7 +42,7 @@
if (manager.getHistory) {
const history = await manager.getHistory('object', 'test_object');

expect(history.records.length).toBeGreaterThan(0);

⚠ Check failure on line 45 (GitHub Actions / Test Core): Metadata History > should create history record on metadata creation. AssertionError: expected 0 to be greater than 0 ❯ src/metadata-history.test.ts:45:38
expect(history.records[0].operationType).toBe('create');
expect(history.records[0].version).toBe(1);
}
@@ -76,7 +76,7 @@
if (manager.getHistory) {
const history = await manager.getHistory('object', 'test_object');

expect(history.records.length).toBeGreaterThanOrEqual(2);

⚠ Check failure on line 79 (GitHub Actions / Test Core): Metadata History > should create history record on metadata update. AssertionError: expected 0 to be greater than or equal to 2 ❯ src/metadata-history.test.ts:79:38
expect(history.records[0].operationType).toBe('update');
expect(history.records[0].version).toBe(2);
}
@@ -166,7 +166,7 @@
});

expect(limitedHistory.records.length).toBeLessThanOrEqual(3);
expect(limitedHistory.total).toBeGreaterThanOrEqual(5);

⚠ Check failure on line 169 (GitHub Actions / Test Core): Metadata History > should handle history query with filters. AssertionError: expected 0 to be greater than or equal to 5 ❯ src/metadata-history.test.ts:169:36

// Query with operation type filter
const createHistory = await manager.getHistory('object', 'test_object', {
@@ -194,7 +194,7 @@

// Should only have one history record (the create)
// The second register should be skipped due to identical checksum
expect(history.records.length).toBe(1);

⚠ Check failure on line 197 (GitHub Actions / Test Core): Metadata History > should skip history record when checksum is unchanged. AssertionError: expected +0 to be 1 (Object.is equality; expected 1, received 0) ❯ src/metadata-history.test.ts:197:38
}
});

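Taken together, the assertions above imply this rough shape for the history API. This is a sketch inferred from the tests, not a documented signature; field and option names are only those the tests exercise:

// Inferred from the tests above.
interface MetadataHistoryRecord {
  operationType: 'create' | 'update' | string; // other operation types may exist
  version: number;
}

interface MetadataHistoryResult {
  records: MetadataHistoryRecord[];
  total: number; // total matching records, independent of `limit`
}

// manager.getHistory(type, name, options?) — options seen: { limit }, { operationType }
declare function getHistory(
  type: string,
  name: string,
  options?: { limit?: number; operationType?: string }
): Promise<MetadataHistoryResult>;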
2 changes: 2 additions & 0 deletions packages/metadata/vitest.config.ts
@@ -7,6 +7,8 @@ export default defineConfig({
resolve: {
alias: {
'@objectstack/core': path.resolve(__dirname, '../core/src/index.ts'),
'@objectstack/driver-memory': path.resolve(__dirname, '../plugins/driver-memory/src/index.ts'),
'@objectstack/spec/api': path.resolve(__dirname, '../spec/src/api/index.ts'),
'@objectstack/spec/contracts': path.resolve(__dirname, '../spec/src/contracts/index.ts'),
'@objectstack/spec/data': path.resolve(__dirname, '../spec/src/data/index.ts'),
'@objectstack/spec/kernel': path.resolve(__dirname, '../spec/src/kernel/index.ts'),
25 changes: 25 additions & 0 deletions packages/runtime/vitest.config.ts
@@ -0,0 +1,25 @@
// Copyright (c) 2025 ObjectStack. Licensed under the Apache-2.0 license.

import { defineConfig } from 'vitest/config';
import path from 'node:path';

export default defineConfig({
resolve: {
alias: {
'@objectstack/core': path.resolve(__dirname, '../core/src/index.ts'),
'@objectstack/rest': path.resolve(__dirname, '../rest/src/index.ts'),
'@objectstack/spec/api': path.resolve(__dirname, '../spec/src/api/index.ts'),
'@objectstack/spec/contracts': path.resolve(__dirname, '../spec/src/contracts/index.ts'),
'@objectstack/spec/data': path.resolve(__dirname, '../spec/src/data/index.ts'),
'@objectstack/spec/kernel': path.resolve(__dirname, '../spec/src/kernel/index.ts'),
'@objectstack/spec/system': path.resolve(__dirname, '../spec/src/system/index.ts'),
'@objectstack/spec': path.resolve(__dirname, '../spec/src/index.ts'),
'@objectstack/types': path.resolve(__dirname, '../types/src/index.ts'),
},
},
test: {
globals: true,
environment: 'node',
include: ['src/**/*.test.ts'],
},
});
112 changes: 112 additions & 0 deletions packages/services/service-ai/src/__tests__/ai-service.test.ts
@@ -837,4 +837,116 @@ describe('AIServicePlugin', () => {

expect(ctx.hook).toHaveBeenCalledWith('ai:beforeChat', expect.any(Function));
});

// ── LLM Provider Auto-Detection ─────────────────────────────────

it('should use MemoryLLMAdapter when no env vars are set', async () => {
const plugin = new AIServicePlugin();
const ctx = createMockContext();

// Ensure no LLM provider env vars are set
const oldEnv = { ...process.env };
delete process.env.AI_GATEWAY_MODEL;
delete process.env.OPENAI_API_KEY;
delete process.env.ANTHROPIC_API_KEY;
delete process.env.GOOGLE_GENERATIVE_AI_API_KEY;

try {
await plugin.init(ctx);

const service = ctx.getService<AIService>('ai');
expect(service.adapterName).toBe('memory');

// Verify warning was logged
expect(silentLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('No LLM provider configured')
);
} finally {
// Restore environment
process.env = oldEnv;
}
});

it('should fallback to MemoryLLMAdapter when provider SDK is not installed', async () => {
const plugin = new AIServicePlugin();
const ctx = createMockContext();

const oldEnv = { ...process.env };
// Set env var, but the SDK won't be available in test environment
process.env.OPENAI_API_KEY = 'fake-openai-key';
delete process.env.AI_GATEWAY_MODEL;
delete process.env.ANTHROPIC_API_KEY;
delete process.env.GOOGLE_GENERATIVE_AI_API_KEY;

try {
await plugin.init(ctx);

const service = ctx.getService<AIService>('ai');
// Should fall back to memory because @ai-sdk/openai is not installed
expect(service.adapterName).toBe('memory');

// Verify warning was logged about SDK load failure
expect(silentLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('Failed to load @ai-sdk/openai'),
expect.objectContaining({ error: expect.any(String) })
);

// Verify warning was logged about final fallback
expect(silentLogger.warn).toHaveBeenCalledWith(
expect.stringContaining('No LLM provider configured')
);
} finally {
process.env = oldEnv;
}
});

it('should prefer explicit adapter over auto-detection', async () => {
const customAdapter: LLMAdapter = {
name: 'custom-explicit',
chat: async () => ({ content: 'explicit' }),
complete: async () => ({ content: '' }),
};

const plugin = new AIServicePlugin({ adapter: customAdapter });
const ctx = createMockContext();

const oldEnv = { ...process.env };
process.env.OPENAI_API_KEY = 'fake-openai-key';

try {
await plugin.init(ctx);

const service = ctx.getService<AIService>('ai');
expect(service.adapterName).toBe('custom-explicit');

// Verify it logged as explicitly configured
expect(silentLogger.info).toHaveBeenCalledWith(
expect.stringContaining('explicitly configured')
);
} finally {
process.env = oldEnv;
}
});

it('should log adapter selection', async () => {
const plugin = new AIServicePlugin();
const ctx = createMockContext();

const oldEnv = { ...process.env };
delete process.env.AI_GATEWAY_MODEL;
delete process.env.OPENAI_API_KEY;
delete process.env.ANTHROPIC_API_KEY;
delete process.env.GOOGLE_GENERATIVE_AI_API_KEY;

try {
await plugin.init(ctx);

// Verify adapter selection was logged
expect(silentLogger.info).toHaveBeenCalledWith(
expect.stringContaining('Using LLM adapter')
);
} finally {
process.env = oldEnv;
}
});
});
106 changes: 105 additions & 1 deletion packages/services/service-ai/src/plugin.ts
@@ -12,6 +12,8 @@ import { registerDataTools } from './tools/data-tools.js';
import { registerMetadataTools } from './tools/metadata-tools.js';
import { AgentRuntime } from './agent-runtime.js';
import { DATA_CHAT_AGENT, METADATA_ASSISTANT_AGENT } from './agents/index.js';
import { VercelLLMAdapter } from './adapters/vercel-adapter.js';
import { MemoryLLMAdapter } from './adapters/memory-adapter.js';

/**
* Configuration options for the AIServicePlugin.
@@ -61,6 +63,90 @@ export class AIServicePlugin implements Plugin {
this.options = options;
}

/**
* Auto-detect LLM provider from environment variables.
*
* Priority order:
* 1. AI_GATEWAY_MODEL → Vercel AI Gateway
* 2. OPENAI_API_KEY → OpenAI
* 3. ANTHROPIC_API_KEY → Anthropic
* 4. GOOGLE_GENERATIVE_AI_API_KEY → Google
* 5. Fallback → MemoryLLMAdapter
*
* Returns the adapter and a description for logging.
*/
private async detectAdapter(ctx: PluginContext): Promise<{ adapter: LLMAdapter; description: string }> {
// 1. Vercel AI Gateway — works with any provider via gateway('provider/model')
const gatewayModel = process.env.AI_GATEWAY_MODEL;
if (gatewayModel) {
try {
const gatewayPkg = '@ai-sdk/gateway';
const { gateway } = await import(/* webpackIgnore: true */ gatewayPkg);
const adapter = new VercelLLMAdapter({ model: gateway(gatewayModel) });
return { adapter, description: `Vercel AI Gateway (model: ${gatewayModel})` };
} catch (err) {
ctx.logger.warn(
`[AI] Failed to load @ai-sdk/gateway for AI_GATEWAY_MODEL=${gatewayModel}, trying next provider`,
err instanceof Error ? { error: err.message } : undefined
);
}
}

// 2. Direct provider SDKs
const providerConfigs: Array<{
envKey: string;
pkg: string;
factory: string;
defaultModel: string;
displayName: string;
}> = [
{
envKey: 'OPENAI_API_KEY',
pkg: '@ai-sdk/openai',
factory: 'openai',
defaultModel: 'gpt-4o',
displayName: 'OpenAI'
},
{
envKey: 'ANTHROPIC_API_KEY',
pkg: '@ai-sdk/anthropic',
factory: 'anthropic',
defaultModel: 'claude-sonnet-4-20250514',
displayName: 'Anthropic'
},
{
envKey: 'GOOGLE_GENERATIVE_AI_API_KEY',
pkg: '@ai-sdk/google',
factory: 'google',
defaultModel: 'gemini-2.0-flash',
displayName: 'Google'
},
];

for (const { envKey, pkg, factory, defaultModel, displayName } of providerConfigs) {
if (process.env[envKey]) {
try {
const mod = await import(/* webpackIgnore: true */ pkg);
const createModel = mod[factory] ?? mod.default;
if (typeof createModel === 'function') {
const modelId = process.env.AI_MODEL ?? defaultModel;
const adapter = new VercelLLMAdapter({ model: createModel(modelId) });
return { adapter, description: `${displayName} (model: ${modelId})` };
}
} catch (err) {
ctx.logger.warn(
`[AI] Failed to load ${pkg} for ${envKey}, trying next provider`,
err instanceof Error ? { error: err.message } : undefined
);
}
}
}

// 3. Fallback to MemoryLLMAdapter
ctx.logger.warn('[AI] No LLM provider configured via environment variables. Falling back to MemoryLLMAdapter (echo mode). Set AI_GATEWAY_MODEL, OPENAI_API_KEY, ANTHROPIC_API_KEY, or GOOGLE_GENERATIVE_AI_API_KEY to use a real LLM.');
return { adapter: new MemoryLLMAdapter(), description: 'MemoryLLMAdapter (echo mode - for testing only)' };
Review comment from Copilot AI (Apr 7, 2026) on lines +145 to +147: The final fallback warning is misleading when an env var is set but the provider SDK fails to load (or the factory export isn't a function): it currently logs "No LLM provider configured via environment variables…". Consider changing this to something like "No usable LLM provider could be initialized; falling back to MemoryLLMAdapter" and optionally include which provider/envKey was attempted so production logs are actionable (update the related test expectations accordingly).
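One possible shape of that fix, sketched against the fallback branch above. Variable names are illustrative, and the related test expectations would need the same string update:

// Inside detectAdapter, collect the providers that were tried and failed:
const attempted: string[] = []; // e.g. push `${displayName} (${envKey})` in each catch block

// 3. Fallback to MemoryLLMAdapter, with a warning that reflects what actually happened
ctx.logger.warn(
  attempted.length > 0
    ? `[AI] No usable LLM provider could be initialized (attempted: ${attempted.join(', ')}). Falling back to MemoryLLMAdapter (echo mode).`
    : '[AI] No LLM provider configured via environment variables. Falling back to MemoryLLMAdapter (echo mode). Set AI_GATEWAY_MODEL, OPENAI_API_KEY, ANTHROPIC_API_KEY, or GOOGLE_GENERATIVE_AI_API_KEY to use a real LLM.'
);
return { adapter: new MemoryLLMAdapter(), description: 'MemoryLLMAdapter (echo mode - for testing only)' };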
}

async init(ctx: PluginContext): Promise<void> {
// Check if there is an existing AI service (e.g. from dev-plugin)
let hasExisting = false;
@@ -88,8 +174,26 @@
}
}

// Determine LLM adapter: explicit > auto-detect from env > MemoryLLMAdapter fallback
let adapter: LLMAdapter;
let adapterDescription: string;

if (this.options.adapter) {
// User provided an explicit adapter
adapter = this.options.adapter;
adapterDescription = `${adapter.name} (explicitly configured)`;
} else {
// Auto-detect from environment variables
const detected = await this.detectAdapter(ctx);
adapter = detected.adapter;
adapterDescription = detected.description;
}

// Log the selected adapter
ctx.logger.info(`[AI] Using LLM adapter: ${adapterDescription}`);

const config: AIServiceConfig = {
adapter: this.options.adapter,
adapter,
logger: ctx.logger,
conversationService,
};