Skip to content
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 23 additions & 3 deletions packages/mcp/src/client.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { env } from './env.js';
import { listReposResponseSchema, searchResponseSchema, fileSourceResponseSchema, listCommitsResponseSchema } from './schemas.js';
import { FileSourceRequest, ListReposQueryParams, SearchRequest, ListCommitsQueryParamsSchema } from './types.js';
import { listReposResponseSchema, searchResponseSchema, fileSourceResponseSchema, listCommitsResponseSchema, askCodebaseResponseSchema } from './schemas.js';
import { AskCodebaseRequest, AskCodebaseResponse, FileSourceRequest, ListReposQueryParams, SearchRequest, ListCommitsQueryParamsSchema } from './types.js';
import { isServiceError, ServiceErrorException } from './utils.js';
import { z } from 'zod';

Expand Down Expand Up @@ -106,4 +106,24 @@ export const listCommits = async (queryParams: ListCommitsQueryParamsSchema) =>
const commits = await parseResponse(response, listCommitsResponseSchema);
const totalCount = parseInt(response.headers.get('X-Total-Count') ?? '0', 10);
return { commits, totalCount };
}
}

/**
* Asks a natural language question about the codebase using the Sourcebot AI agent.
* This is a blocking call that runs the full agent loop and returns when complete.
*
* @param request - The question and optional repo filters
* @returns The agent's answer, chat URL, sources, and metadata
*/
Comment thread
coderabbitai[bot] marked this conversation as resolved.
export const askCodebase = async (request: AskCodebaseRequest): Promise<AskCodebaseResponse> => {
const response = await fetch(`${env.SOURCEBOT_HOST}/api/chat/blocking`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {})
},
Comment thread
coderabbitai[bot] marked this conversation as resolved.
body: JSON.stringify(request),
});

return parseResponse(response, askCodebaseResponseSchema);
}
39 changes: 36 additions & 3 deletions packages/mcp/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
import _dedent from "dedent";
import escapeStringRegexp from 'escape-string-regexp';
import { z } from 'zod';
import { getFileSource, listCommits, listRepos, search } from './client.js';
import { askCodebase, getFileSource, listCommits, listRepos, search } from './client.js';
import { env, numberSchema } from './env.js';
import { fileSourceRequestSchema, listCommitsQueryParamsSchema, listReposQueryParamsSchema } from './schemas.js';
import { FileSourceRequest, ListCommitsQueryParamsSchema, ListReposQueryParams, TextContent } from './types.js';
import { askCodebaseRequestSchema, fileSourceRequestSchema, listCommitsQueryParamsSchema, listReposQueryParamsSchema } from './schemas.js';
import { AskCodebaseRequest, FileSourceRequest, ListCommitsQueryParamsSchema, ListReposQueryParams, TextContent } from './types.js';

const dedent = _dedent.withOptions({ alignValues: true });

Expand Down Expand Up @@ -238,7 +238,40 @@ server.tool(
}
);

// Tool: ask_codebase — delegates the user's question to the Sourcebot agent
// via the blocking chat API and relays the answer back to the MCP client.
server.tool(
    "ask_codebase",
    dedent`
    Ask a natural language question about the codebase. This tool uses an AI agent to autonomously search code, read files, and find symbol references/definitions to answer your question.

    The agent will:
    - Analyze your question and determine what context it needs
    - Search the codebase using multiple strategies (code search, symbol lookup, file reading)
    - Synthesize findings into a comprehensive answer with code references

    Returns a detailed answer in markdown format with code references, plus a link to view the full research session (including all tool calls and reasoning) in the Sourcebot web UI.

    This is a blocking operation that may take 30-60+ seconds for complex questions as the agent researches the codebase.
    `,
    askCodebaseRequestSchema.shape,
    async (request: AskCodebaseRequest) => {
        const result = await askCodebase(request);

        // Surface the answer together with a link to the persisted chat so the
        // full research session (tool calls, reasoning) can be reviewed later.
        const text = dedent`
        ${result.answer}

        ---
        **View full research session:** ${result.chatUrl}
        `;

        return {
            content: [{
                type: "text",
                text,
            }],
        };
    }
);

const runServer = async () => {
const transport = new StdioServerTransport();
Expand Down
25 changes: 25 additions & 0 deletions packages/mcp/src/schemas.ts
Original file line number Diff line number Diff line change
Expand Up @@ -272,3 +272,28 @@ export const listCommitsResponseSchema = z.array(z.object({
author_name: z.string(),
author_email: z.string(),
}));

/** Arguments accepted by the `ask_codebase` tool / blocking chat API. */
export const askCodebaseRequestSchema = z.object({
    query: z.string().describe("The query to ask about the codebase."),
    repos: z.array(z.string()).optional().describe("The repositories that are accessible to the agent during the chat. If not provided, all repositories are accessible."),
});

// Describes a single file source referenced by an agent answer.
// NOTE(review): not referenced by askCodebaseResponseSchema below — presumably
// intended for a future `sources` field on the response; confirm before removing.
export const sourceSchema = z.object({
    type: z.literal('file'),
    repo: z.string(),
    path: z.string(),
    name: z.string(),
    language: z.string(),
    revision: z.string(),
});

// Shape of a successful response from `POST /api/chat/blocking`.
// Mirrors the server-side BlockingChatResponse interface (answer/chatId/chatUrl).
export const askCodebaseResponseSchema = z.object({
    answer: z.string().describe("The agent's final answer in markdown format"),
    chatId: z.string().describe("ID of the persisted chat session"),
    chatUrl: z.string().describe("URL to view the chat in the web UI"),
});
5 changes: 5 additions & 0 deletions packages/mcp/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ import {
serviceErrorSchema,
listCommitsQueryParamsSchema,
listCommitsResponseSchema,
askCodebaseRequestSchema,
askCodebaseResponseSchema,
} from "./schemas.js";
import { z } from "zod";

Expand All @@ -34,3 +36,6 @@ export type ServiceError = z.infer<typeof serviceErrorSchema>;

export type ListCommitsQueryParamsSchema = z.infer<typeof listCommitsQueryParamsSchema>;
export type ListCommitsResponse = z.infer<typeof listCommitsResponseSchema>;

// Inferred request/response types for the `ask_codebase` flow.
export type AskCodebaseRequest = z.infer<typeof askCodebaseRequestSchema>;
export type AskCodebaseResponse = z.infer<typeof askCodebaseResponseSchema>;
1 change: 1 addition & 0 deletions packages/web/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@
"codemirror-lang-spreadsheet": "^1.3.0",
"codemirror-lang-zig": "^0.1.0",
"date-fns": "^4.1.0",
"dedent": "^1.7.1",
"embla-carousel-auto-scroll": "^8.3.0",
"embla-carousel-react": "^8.3.0",
"escape-string-regexp": "^5.0.0",
Expand Down
204 changes: 204 additions & 0 deletions packages/web/src/app/api/(server)/chat/blocking/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,204 @@
import { sew } from "@/actions";
import { _getConfiguredLanguageModelsFull, _getAISDKLanguageModelAndOptions, updateChatMessages, generateAndUpdateChatNameFromMessage } from "@/features/chat/actions";
import { SBChatMessage, SearchScope } from "@/features/chat/types";
import { convertLLMOutputToPortableMarkdown, getAnswerPartFromAssistantMessage } from "@/features/chat/utils";
import { ErrorCode } from "@/lib/errorCodes";
import { requestBodySchemaValidationError, ServiceError, ServiceErrorException, serviceErrorResponse } from "@/lib/serviceError";
import { isServiceError } from "@/lib/utils";
import { getBaseUrl } from "@/lib/utils.server";
import { withOptionalAuthV2 } from "@/withAuthV2";
import { ChatVisibility, Prisma } from "@sourcebot/db";
import { createLogger } from "@sourcebot/shared";
import { randomUUID } from "crypto";
import { StatusCodes } from "http-status-codes";
import { headers } from "next/headers";
import { NextRequest, NextResponse } from "next/server";
import { z } from "zod";
import { createMessageStream } from "../route";
import { InferUIMessageChunk, UITools, UIDataTypes, UIMessage } from "ai";
import { apiHandler } from "@/lib/apiHandler";

const logger = createLogger('chat-blocking-api');

/**
 * Request schema for the blocking chat API.
 * This is a simpler interface designed for MCP and other programmatic integrations.
 */
const blockingChatRequestSchema = z.object({
    query: z.string().describe("The query to ask about the codebase."),
    repos: z.array(z.string()).optional().describe("The repositories that are accessible to the agent during the chat. If not provided, all repositories are accessible."),
});

/**
 * Response shape for the blocking chat API.
 */
interface BlockingChatResponse {
    // The agent's final answer, converted to portable markdown.
    answer: string;
    // ID of the persisted chat session.
    chatId: string;
    // Absolute URL for viewing the chat in the web UI.
    chatUrl: string;
}

/**
 * POST /api/chat/blocking
 *
 * A blocking (non-streaming) chat endpoint designed for MCP and other integrations.
 * Creates a chat session, runs the agent to completion, and returns the final answer.
 *
 * The chat session is persisted to the database, allowing users to view the full
 * conversation (including tool calls and reasoning) in the web UI.
 *
 * Request body: { query: string, repos?: string[] } (see blockingChatRequestSchema).
 * Returns: BlockingChatResponse on success, or a serialized ServiceError.
 */
export const POST = apiHandler(async (request: NextRequest) => {
    const requestBody = await request.json();
    const parsed = await blockingChatRequestSchema.safeParseAsync(requestBody);

    if (!parsed.success) {
        return serviceErrorResponse(requestBodySchemaValidationError(parsed.error));
    }

    const { query, repos = [] } = parsed.data;

    const response: BlockingChatResponse | ServiceError = await sew(() =>
        withOptionalAuthV2(async ({ org, user, prisma }) => {
            // Require at least one configured language model before doing any work.
            const configuredModels = await _getConfiguredLanguageModelsFull();
            if (configuredModels.length === 0) {
                return {
                    statusCode: StatusCodes.BAD_REQUEST,
                    errorCode: ErrorCode.INVALID_REQUEST_BODY,
                    message: "No language models are configured. Please configure at least one language model.",
                } satisfies ServiceError;
            }

            // @todo: we should probably have a option of passing the language model
            // into the request body. For now, just use the first configured model.
            const languageModelConfig = configuredModels[0];

            const { model, providerOptions } = await _getAISDKLanguageModelAndOptions(languageModelConfig);
            const modelName = languageModelConfig.displayName ?? languageModelConfig.model;

            // Create the chat session up-front so its id/URL can be returned
            // even though the agent run happens below.
            const chat = await prisma.chat.create({
                data: {
                    orgId: org.id,
                    createdById: user?.id,
                    visibility: ChatVisibility.PRIVATE,
                    messages: [] as unknown as Prisma.InputJsonValue,
                },
            });

            logger.debug(`Starting blocking agent for chat ${chat.id}`, {
                chatId: chat.id,
                query: query.substring(0, 100), // truncate to keep log lines bounded
                model: modelName,
            });

            // The caller's question becomes the single user message that seeds the agent.
            const userMessage: SBChatMessage = {
                id: randomUUID(),
                role: 'user',
                parts: [{ type: 'text', text: query }],
            };

            // Resolve each requested repo name into a search scope. The lookup is
            // scoped to the current org so callers cannot attach repositories that
            // belong to another organization (the chat itself is org-scoped above).
            const selectedSearchScopes = await Promise.all(repos.map(async (repo) => {
                const repoDB = await prisma.repo.findFirst({
                    where: {
                        name: repo,
                        orgId: org.id,
                    },
                });

                if (!repoDB) {
                    throw new ServiceErrorException({
                        statusCode: StatusCodes.BAD_REQUEST,
                        errorCode: ErrorCode.INVALID_REQUEST_BODY,
                        message: `Repository '${repo}' not found.`,
                    })
                }

                return {
                    type: 'repo',
                    value: repoDB.name,
                    name: repoDB.displayName ?? repoDB.name.split('/').pop() ?? repoDB.name,
                    codeHostType: repoDB.external_codeHostType,
                } satisfies SearchScope;
            }));

            // Captured by the stream's onFinish callback once the agent completes.
            let finalMessages: SBChatMessage[] = [];

            const stream = await createMessageStream({
                messages: [userMessage],
                selectedSearchScopes,
                model,
                modelName,
                modelProviderOptions: providerOptions,
                orgId: org.id,
                prisma,
                onFinish: async ({ messages }) => {
                    finalMessages = messages;
                },
            });

            await Promise.all([
                // Consume the stream fully to trigger onFinish.
                blockStreamUntilFinish(stream),
                // Generate and update the chat name concurrently with the agent run.
                generateAndUpdateChatNameFromMessage({
                    chatId: chat.id,
                    languageModelId: languageModelConfig.model,
                    message: query,
                })
            ]);

            // Persist the full conversation (including tool calls) to the chat.
            await updateChatMessages({
                chatId: chat.id,
                messages: finalMessages,
            });

            // Extract the answer text from the assistant message; empty string when
            // the agent produced no answer part.
            const assistantMessage = finalMessages.find(m => m.role === 'assistant');
            const answerPart = assistantMessage
                ? getAnswerPartFromAssistantMessage(assistantMessage, false)
                : undefined;
            const answerText = answerPart?.text ?? '';

            // Convert to portable markdown (replaces @file: references with markdown links).
            const portableAnswer = convertLLMOutputToPortableMarkdown(answerText);

            // Build an absolute URL for viewing the chat in the web UI.
            const headersList = await headers();
            const baseUrl = getBaseUrl(headersList);
            const chatUrl = `${baseUrl}/${org.domain}/chat/${chat.id}`;

            logger.debug(`Completed blocking agent for chat ${chat.id}`, {
                chatId: chat.id,
            });

            return {
                answer: portableAnswer,
                chatId: chat.id,
                chatUrl,
            } satisfies BlockingChatResponse;
        })
    );

    if (isServiceError(response)) {
        return serviceErrorResponse(response);
    }

    return NextResponse.json(response);
});

/**
 * Drains a UI-message chunk stream to completion, discarding every chunk.
 * Used to drive the agent loop so the stream's onFinish callback fires.
 * Releases the reader lock when done (including on error), so the stream
 * is not left permanently locked.
 */
const blockStreamUntilFinish = async <T extends UIMessage<unknown, UIDataTypes, UITools>>(stream: ReadableStream<InferUIMessageChunk<T>>) => {
    const reader = stream.getReader();
    try {
        while (true) {
            const { done } = await reader.read();
            if (done) break;
        }
    } finally {
        reader.releaseLock();
    }
}
Loading