Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions cli/cmd/engine-cli/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,11 @@ var (
commitLogin string
commitEmail string
assignmentID string
enableModelSelection bool
selectedModel string
defaultModel string
availableModels []string
modelVendors []string
)

func main() {
Expand Down Expand Up @@ -88,6 +93,11 @@ func init() {
runCmd.Flags().StringVar(&commitLogin, "commit-login", "engine-cli-user", "Git author name for commits")
runCmd.Flags().StringVar(&commitEmail, "commit-email", "engine-cli@users.noreply.github.com", "Git author email for commits")
runCmd.Flags().StringVar(&assignmentID, "assignment-id", "", "Assignment ID to enable cross-run history persistence")
runCmd.Flags().BoolVar(&enableModelSelection, "enable-model-selection", false, "Enable the model selection feature flag in the job response")
runCmd.Flags().StringVar(&selectedModel, "selected-model", "", "Selected model for this job")
runCmd.Flags().StringVar(&defaultModel, "default-model", "", "Default model for this engine")
runCmd.Flags().StringSliceVar(&availableModels, "available-model", nil, "Available model for this engine (repeatable)")
runCmd.Flags().StringSliceVar(&modelVendors, "model-vendor", nil, "Model vendor for filtering (repeatable, e.g. Anthropic, OpenAI)")

_ = runCmd.MarkFlagRequired("repo")
}
Expand Down Expand Up @@ -149,6 +159,11 @@ func runEngine(cmd *cobra.Command, args []string) error {
BranchName: branchName,
CommitLogin: commitLogin,
CommitEmail: commitEmail,
EnableModelSelection: enableModelSelection,
SelectedModel: selectedModel,
DefaultModel: defaultModel,
AvailableModels: availableModels,
ModelVendors: modelVendors,
}

prNumber := setup.PRNumber
Expand Down Expand Up @@ -267,6 +282,13 @@ func runEngine(cmd *cobra.Command, args []string) error {
GitToken: githubToken,
}

if enableModelSelection {
env.SelectedModel = selectedModel
env.DefaultModel = defaultModel
env.AvailableModels = availableModels
env.ModelVendors = modelVendors
}
Comment on lines +285 to +290
Copy link

Copilot AI Apr 3, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

With --enable-model-selection, the mock job response will set features.model_selection=true even if none of --selected-model, --default-model, or --available-model are provided, resulting in a job payload with the feature enabled but no model information. Consider validating the flag combination and returning an error (or auto-populating a default) to avoid confusing local engine testing.

Copilot uses AI. Check for mistakes.

result := runner.Run(ctx, command, env, runner.Options{WorkingDir: workingDir}, runnerCallbacks)

// Summary
Expand Down
52 changes: 39 additions & 13 deletions cli/internal/runner/runner.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ package runner
import (
"bufio"
"context"
"encoding/json"
"fmt"
"os"
"os/exec"
Expand All @@ -15,13 +16,17 @@ import (

// Environment contains the platform environment variables for the engine.
type Environment struct {
JobID string
APIToken string
APIURL string
JobNonce string
InferenceToken string
InferenceURL string
GitToken string
JobID string
APIToken string
APIURL string
JobNonce string
InferenceToken string
InferenceURL string
GitToken string
SelectedModel string
DefaultModel string
AvailableModels []string
ModelVendors []string
}

// Callbacks contains optional callbacks for runner events.
Expand Down Expand Up @@ -124,18 +129,39 @@ func buildEnv(env Environment, extra map[string]string) []string {
// Add platform environment variables
// Note: We use GITHUB_* prefix for consistency with GitHub platform conventions
platformVars := map[string]string{
"GITHUB_JOB_ID": env.JobID,
"GITHUB_JOB_NONCE": env.JobNonce,
"GITHUB_PLATFORM_API_TOKEN": env.APIToken,
"GITHUB_PLATFORM_API_URL": env.APIURL,
"GITHUB_INFERENCE_TOKEN": env.InferenceToken,
"GITHUB_GIT_TOKEN": env.GitToken,
"GITHUB_JOB_ID": env.JobID,
"GITHUB_JOB_NONCE": env.JobNonce,
"GITHUB_PLATFORM_API_TOKEN": env.APIToken,
"GITHUB_PLATFORM_API_URL": env.APIURL,
"GITHUB_INFERENCE_TOKEN": env.InferenceToken,
"GITHUB_GIT_TOKEN": env.GitToken,
}

if env.InferenceURL != "" {
platformVars["GITHUB_INFERENCE_URL"] = env.InferenceURL
}

if env.SelectedModel != "" {
platformVars["GITHUB_SELECTED_MODEL"] = env.SelectedModel
}

if env.DefaultModel != "" {
platformVars["GITHUB_DEFAULT_MODEL"] = env.DefaultModel
}

if len(env.AvailableModels) > 0 {
// json.Marshal cannot fail for []string, but handle the error defensively.
if encoded, err := json.Marshal(env.AvailableModels); err == nil {
platformVars["GITHUB_AVAILABLE_MODELS"] = string(encoded)
}
}

if len(env.ModelVendors) > 0 {
if encoded, err := json.Marshal(env.ModelVendors); err == nil {
platformVars["GITHUB_MODEL_VENDORS"] = string(encoded)
}
}

for k, v := range platformVars {
result = append(result, fmt.Sprintf("%s=%s", k, v))
}
Expand Down
31 changes: 29 additions & 2 deletions cli/internal/server/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,11 @@ type JobConfig struct {
CommitLogin string
CommitEmail string
MCPProxyURL string
EnableModelSelection bool
SelectedModel string
DefaultModel string
AvailableModels []string
ModelVendors []string
}

// ProgressEvent represents a progress event received from an engine.
Expand Down Expand Up @@ -129,8 +134,8 @@ func (s *MockPlatformServer) Stop(ctx context.Context) error {
}

var (
getJobRegex = regexp.MustCompile(`^/agent/jobs/([^/]+)$`)
progressRegex = regexp.MustCompile(`^/agent/jobs/([^/]+)/progress$`)
getJobRegex = regexp.MustCompile(`^/agent/jobs/([^/]+)$`)
progressRegex = regexp.MustCompile(`^/agent/jobs/([^/]+)/progress$`)
)

func (s *MockPlatformServer) handleRequest(w http.ResponseWriter, r *http.Request) {
Expand Down Expand Up @@ -206,6 +211,28 @@ func (s *MockPlatformServer) handleGetJob(w http.ResponseWriter, r *http.Request
response["mcp_proxy_url"] = s.jobConfig.MCPProxyURL
}

if s.jobConfig.EnableModelSelection {
response["features"] = map[string]any{
"model_selection": true,
}

if s.jobConfig.SelectedModel != "" {
response["selected_model"] = s.jobConfig.SelectedModel
}

if s.jobConfig.DefaultModel != "" {
response["default_model"] = s.jobConfig.DefaultModel
}

if len(s.jobConfig.AvailableModels) > 0 {
response["available_models"] = s.jobConfig.AvailableModels
}

if len(s.jobConfig.ModelVendors) > 0 {
response["model_vendors"] = s.jobConfig.ModelVendors
}
}

if s.callbacks.OnJobFetched != nil {
s.callbacks.OnJobFetched()
}
Expand Down
29 changes: 27 additions & 2 deletions docs/integration-guide.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,12 @@ author: 'Your Name'
# The fully qualified command to run the engine.
# The platform executes this command directly — no implicit runtime setup.
entrypoint: 'node --enable-source-maps dist/index.js'

# Optional: Specify the model vendors for model selection (e.g. 'Anthropic', 'OpenAI').
# When set, the platform uses these to determine the available models for the engine.
vendors:
- 'Anthropic'
- 'OpenAI'
```

> **Note:** This is not a GitHub Action. The platform reads `entrypoint` from `engine.yaml` and runs it directly. All paths in the entrypoint are resolved relative to the engine's root directory.
Expand All @@ -94,6 +100,10 @@ The platform injects these environment variables into the engine process at runt
| `GITHUB_INFERENCE_TOKEN` | Yes | Token used by your inference client / SDK for model calls. |
| `GITHUB_INFERENCE_URL` | Yes | Base URL for the inference API (e.g. Copilot API). Use this along with `GITHUB_INFERENCE_TOKEN` to make LLM inference calls. |
| `GITHUB_GIT_TOKEN` | Yes | Token used for authenticated `git clone` / `git push`. |
| `GITHUB_SELECTED_MODEL` | No | Model selected by the platform for this run. Only set when model selection is enabled. |
| `GITHUB_DEFAULT_MODEL` | No | Default model for the selected engine. Only set when model selection is enabled. |
| `GITHUB_AVAILABLE_MODELS` | No | JSON array of models the engine can choose from (e.g. `["claude-sonnet-4.5","claude-opus-4.1"]`). Only set when model selection is enabled. |
| `GITHUB_MODEL_VENDORS` | No | JSON array of model vendors as defined by the `vendors` field in `engine.yaml` (e.g. `["Anthropic","OpenAI"]`). Only set when model selection is enabled. |

## Step 2: Fetch Job Details

Expand Down Expand Up @@ -143,6 +153,13 @@ Headers:
"branch_name": "copilot/fix-123",
"commit_login": "copilot-bot",
"commit_email": "copilot-bot@users.noreply.github.com",
"features": {
"model_selection": true
},
"selected_model": "claude-sonnet-4.5",
"default_model": "claude-sonnet-4.5",
"available_models": ["claude-sonnet-4.5", "claude-opus-4.1"],
"model_vendors": ["Anthropic"],
"mcp_proxy_url": "http://127.0.0.1:2301"
}
```
Expand All @@ -157,6 +174,11 @@ Headers:
| `branch_name` | Branch to checkout or create. |
| `commit_login` | Git author name for commits. |
| `commit_email` | Git author email for commits. |
| `features` | Optional feature flags. Currently supports `model_selection` (boolean). |
| `selected_model` | Model selected by the platform for this run. Present when `features.model_selection` is `true`. |
| `default_model` | Default model for the selected engine. Present when `features.model_selection` is `true`. |
| `available_models` | List of models the engine can choose from. Present when `features.model_selection` is `true`. |
| `model_vendors` | List of model vendors as defined by the `vendors` field in `engine.yaml` (e.g. `["Anthropic", "OpenAI"]`). Present when `features.model_selection` is `true`. |
| `mcp_proxy_url` | Optional URL of the MCP proxy server. When present, use it to discover user-provided MCP servers. See [User-Provided MCP Servers](#user-provided-mcp-servers). |

Use `GITHUB_INFERENCE_TOKEN` for model calls and `GITHUB_GIT_TOKEN` for git operations; those are bootstrap action inputs, not job response fields.
Expand Down Expand Up @@ -825,7 +847,7 @@ flowchart LR
```

```typescript
import { PlatformClient, cloneRepo, finalizeChanges } from "@github/copilot-engine-sdk";
import { PlatformClient, cloneRepo, finalizeChanges, resolveSelectedModel } from "@github/copilot-engine-sdk";
import { CopilotClient } from "@github/copilot-sdk";

async function main() {
Expand Down Expand Up @@ -862,6 +884,9 @@ async function main() {

// 5. Build system message based on action type
const systemMessage = buildSystemMessage(job.action, job);
const model = resolveSelectedModel(job, {
fallbackModel: "claude-sonnet-4.5",
}) ?? "claude-sonnet-4.5";

// 6. Run your agentic loop with your inference client
const client = new CopilotClient({
Expand All @@ -872,7 +897,7 @@ async function main() {
const mcpServerPath = require.resolve("@github/copilot-engine-sdk/mcp-server");

const session = await client.createSession({
model: "claude-sonnet-4.5",
model,
systemMessage: { content: systemMessage },
mcpServers: {
"engine-tools": {
Expand Down
91 changes: 91 additions & 0 deletions src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -485,6 +485,97 @@ export interface JobDetails {
commit_login: string;
commit_email: string;
mcp_proxy_url?: string;
/** Model selected by the platform for this run. Present when model selection is enabled. */
selected_model?: string;
/** Default model for the selected engine. Present when model selection is enabled. */
default_model?: string;
/** Models the engine can choose from. Present when model selection is enabled. */
available_models?: string[];
/** Model vendors for filtering (e.g. ["Anthropic", "OpenAI"]). Present when model selection is enabled. */
model_vendors?: string[];
/** Feature flags enabled for this job. */
features?: {
/** Whether the platform has enabled model selection for this job. */
model_selection?: boolean;
};
}

/**
 * Report whether the platform has turned on model selection for a job.
 *
 * When this is `false`, engines should fall back to their own hardcoded
 * model choice.
 */
export function isModelSelectionEnabled(job: Pick<JobDetails, "features">): boolean {
  const flags = job.features;
  return flags !== undefined && flags.model_selection === true;
}

/**
* Resolve which model an engine should use for a job.
*
* Returns `undefined` when model selection is not enabled for the job
* (i.e. `features.model_selection` is not `true`), allowing engines that
* do not support model selection to ignore it entirely.
Comment on lines +515 to +517
Copy link

Copilot AI Apr 3, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The JSDoc says this function returns undefined only when model selection is not enabled, but the implementation can also return undefined when model selection is enabled and there are no usable candidates (e.g. missing selected_model, default_model, and no fallback provided). Please either update the doc to reflect this behavior or change the function to always resolve a model (or throw) when features.model_selection === true.

This issue also appears on line 559 of the same file.

Copilot uses AI. Check for mistakes.
*
* When enabled, the selection order is:
* 1) caller preferred model
* 2) model selected by platform (`selected_model`)
* 3) platform-provided engine default (`default_model`)
* 4) caller fallback model
*
* If `available_models` is present the resolved model must appear in that
* list. When no candidate matches, the first available model is returned
* and a warning is logged if `selected_model` was set but missing from the
* list (indicates a platform misconfiguration).
*/
/** Options for {@link resolveSelectedModel}. */
export interface ResolveSelectedModelOptions {
/** Model the engine prefers to use, checked first. */
preferredModel?: string;
/** Model to fall back to when no platform-provided candidate matches. */
fallbackModel?: string;
}

/**
 * Resolve which model an engine should use for a job.
 *
 * Returns `undefined` when model selection is not enabled for the job
 * (i.e. `features.model_selection` is not `true`). It can ALSO return
 * `undefined` when model selection IS enabled but no usable candidate
 * exists: no `selected_model`, no `default_model`, an empty
 * `available_models`, and neither option provided. Callers should apply
 * their own hardcoded default in that case, e.g.
 * `resolveSelectedModel(job, opts) ?? "my-default"`.
 *
 * When enabled, candidates are considered in priority order:
 * 1) `options.preferredModel` (caller preference)
 * 2) `selected_model` (chosen by the platform)
 * 3) `default_model` (platform-provided engine default)
 * 4) `options.fallbackModel`
 *
 * If `available_models` is non-empty, the resolved model must appear in
 * that list. When no candidate matches, the first available model is
 * returned, and a warning is logged if `selected_model` was set but
 * missing from the list (indicates a platform misconfiguration).
 */
export function resolveSelectedModel(
  job: Pick<JobDetails, "selected_model" | "default_model" | "available_models" | "features">,
  options?: ResolveSelectedModelOptions,
): string | undefined {
  // Model selection must be explicitly enabled via feature flag.
  if (job.features?.model_selection !== true) {
    return undefined;
  }

  // Normalize the platform's allow-list: trim entries and drop blanks.
  const availableModels = job.available_models
    ?.map((model) => model.trim())
    .filter((model) => model.length > 0) ?? [];

  // Candidate models in priority order; blank/undefined entries are dropped.
  const candidates = [
    options?.preferredModel,
    job.selected_model,
    job.default_model,
    options?.fallbackModel,
  ].map((model) => model?.trim())
    .filter((model): model is string => Boolean(model && model.length > 0));

  // Without an allow-list the highest-priority candidate wins outright.
  // Note: this is `undefined` when there are no candidates at all.
  if (availableModels.length === 0) {
    return candidates[0];
  }

  // With an allow-list, pick the highest-priority candidate present in it.
  for (const candidate of candidates) {
    if (availableModels.includes(candidate)) {
      return candidate;
    }
  }

  // No candidate matched. Warn when the platform-selected model is absent
  // from the platform's own available list (misconfiguration), then fall
  // back to the first available model.
  const trimmedSelectedModel = job.selected_model?.trim();
  if (trimmedSelectedModel && !availableModels.includes(trimmedSelectedModel)) {
    console.warn(
      `resolveSelectedModel: selected_model "${trimmedSelectedModel}" is not in available_models [${availableModels.join(", ")}]. ` +
      `Falling back to "${availableModels[0]}".`
    );
  }

  return availableModels[0];
}

/**
Expand Down
4 changes: 2 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,9 @@ export type {
// Platform Client
// =============================================================================

export { PlatformClient } from "./client.js";
export { PlatformClient, resolveSelectedModel, isModelSelectionEnabled } from "./client.js";

export type { PlatformClientConfig, ProgressPayload, ProgressRecord, ProgressResponse, SendResult, JobDetails, ProblemStatement } from "./client.js";
export type { PlatformClientConfig, ProgressPayload, ProgressRecord, ProgressResponse, SendResult, JobDetails, ProblemStatement, ResolveSelectedModelOptions } from "./client.js";

// =============================================================================
// MCP Server
Expand Down
Loading