Skip to content

Commit d335ac1

Browse files
Merge upstream continuedev/continue main (47 commits)
Includes: ClawRouter provider, JetBrains stability fixes, security vulnerability patches, docs improvements, and dependency bumps. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2 parents 49b7de0 + 581980e commit d335ac1

88 files changed

Lines changed: 69983 additions & 56683 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.github/workflows/jetbrains-release.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -286,7 +286,7 @@ jobs:
286286
echo "Contents of distributions folder:"
287287
ls
288288
echo "---"
289-
FILENAME=`ls continue-intellij-extension-*.zip`
289+
FILENAME=$(ls continue-intellij-extension-*.zip | head -1)
290290
echo "Filename=${FILENAME}"
291291
unzip "$FILENAME" -d content
292292

.vscode/tasks.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@
120120
},
121121
"options": {
122122
"env": {
123-
"SKIP_INSTALLS": "true"
123+
// "SKIP_INSTALLS": "true" // Can set to true to speed up build scripts, but only after initial installs (otherwise issues with copying e.g. ripgrep, sqlite, etc)
124124
},
125125
"cwd": "${workspaceFolder}/extensions/vscode"
126126
}

binary/package-lock.json

Lines changed: 27679 additions & 22378 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

binary/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
"@types/uuid": "^9.0.8",
3535
"@vercel/ncc": "^0.38.1",
3636
"cross-env": "^7.0.3",
37-
"esbuild": "^0.19.11",
37+
"esbuild": "^0.25.0",
3838
"jest": "^29.7.0",
3939
"pkg": "^5.8.1",
4040
"rimraf": "^5.0.7",
@@ -51,7 +51,7 @@
5151
"ncp": "^2.0.0",
5252
"node-fetch": "^3.3.2",
5353
"system-ca": "^1.0.2",
54-
"tar": "^7.5.10",
54+
"tar": "^7.5.13",
5555
"undici": "^7.24.0",
5656
"uuid": "^9.0.1",
5757
"vectordb": "0.4.20",

binary/test/binary.test.ts

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { SerializedContinueConfig } from "core";
1+
import { ModelDescription, SerializedContinueConfig } from "core";
22
// import Mock from "core/llm/llms/Mock.js";
33
import { FromIdeProtocol, ToIdeProtocol } from "core/protocol/index.js";
44
import { IMessenger } from "core/protocol/messenger";
@@ -67,7 +67,7 @@ describe("Test Suite", () => {
6767
const binaryPath = path.join(binaryDir, `continue-binary${exe}`);
6868
const expectedItems = [
6969
`continue-binary${exe}`,
70-
`esbuild${exe}`,
70+
`rg${exe}`,
7171
"index.node",
7272
"package.json",
7373
"build/Release/node_sqlite3.node",
@@ -144,7 +144,7 @@ describe("Test Suite", () => {
144144
(
145145
messenger as CoreBinaryTcpMessenger<ToIdeProtocol, FromIdeProtocol>
146146
).close();
147-
} else {
147+
} else if (subprocess) {
148148
subprocess.kill();
149149
await new Promise((resolve) => subprocess.on("close", resolve));
150150
await new Promise((resolve) => setTimeout(resolve, 1000));
@@ -235,16 +235,20 @@ describe("Test Suite", () => {
235235
result: { config },
236236
} = await messenger.request("config/getSerializedProfileInfo", undefined);
237237

238-
expect(config!.modelsByRole.chat.some((m) => m.title === model.title)).toBe(
239-
true,
240-
);
238+
expect(
239+
config!.modelsByRole.chat.some(
240+
(m: ModelDescription) => m.title === model.title,
241+
),
242+
).toBe(true);
241243

242244
await messenger.request("config/deleteModel", { title: model.title });
243245
const {
244246
result: { config: configAfterDelete },
245247
} = await messenger.request("config/getSerializedProfileInfo", undefined);
246248
expect(
247-
configAfterDelete!.modelsByRole.chat.some((m) => m.title === model.title),
249+
configAfterDelete!.modelsByRole.chat.some(
250+
(m: ModelDescription) => m.title === model.title,
251+
),
248252
).toBe(false);
249253
});
250254

core/autocomplete/CompletionProvider.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ export class CompletionProvider {
8989
llm.completionOptions.temperature = 0.01;
9090
}
9191

92-
if (llm instanceof OpenAI) {
92+
if (llm instanceof OpenAI && llm.providerName !== "openrouter") {
9393
llm.useLegacyCompletionsEndpoint = true;
9494
}
9595

core/autocomplete/templating/constructPrefixSuffix.ts

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,23 @@
1-
import { IDE } from "../..";
21
import { getRangeInString } from "../../util/ranges";
32
import { languageForFilepath } from "../constants/AutocompleteLanguageInfo";
43
import { AutocompleteInput } from "../util/types";
54

65
/**
76
* We have to handle a few edge cases in getting the entire prefix/suffix for the current file.
8-
* This is entirely prior to finding snippets from other files
7+
* This is entirely prior to finding snippets from other files.
8+
*
9+
* Accepts pre-loaded file contents to avoid a redundant file read
10+
* (the caller already has the contents loaded).
911
*/
10-
export async function constructInitialPrefixSuffix(
12+
export function constructInitialPrefixSuffix(
1113
input: AutocompleteInput,
12-
ide: IDE,
13-
): Promise<{
14+
fileContents: string,
15+
): {
1416
prefix: string;
1517
suffix: string;
16-
}> {
18+
} {
1719
const lang = languageForFilepath(input.filepath);
1820

19-
const fileContents =
20-
input.manuallyPassFileContents ?? (await ide.readFile(input.filepath));
2121
const fileLines = fileContents.split("\n");
2222
let prefix =
2323
getRangeInString(fileContents, {

core/autocomplete/util/HelperVars.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ export class HelperVars {
5353

5454
// Construct full prefix/suffix (a few edge cases handled in here)
5555
const { prefix: fullPrefix, suffix: fullSuffix } =
56-
await constructInitialPrefixSuffix(this.input, this.ide);
56+
constructInitialPrefixSuffix(this.input, this._fileContents);
5757
this._fullPrefix = fullPrefix;
5858
this._fullSuffix = fullSuffix;
5959

core/config/onboarding.ts

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -101,8 +101,17 @@ export function setupProviderConfig(
101101
throw new Error(`Unknown provider: ${provider}`);
102102
}
103103

104-
return {
105-
...config,
106-
models: [...(config.models ?? []), ...newModels],
107-
};
104+
const existingModels = config.models ?? [];
105+
106+
// Update API key on existing models; add new entries for any missing slugs
107+
const updatedModels = existingModels.map((m) => {
108+
if (!("uses" in m)) return m;
109+
const match = newModels.find((n) => n.uses === m.uses);
110+
return match ? { ...m, with: { ...m.with, ...match.with } } : m;
111+
});
112+
const modelsToAdd = newModels.filter(
113+
(n) => !existingModels.some((m) => "uses" in m && m.uses === n.uses),
114+
);
115+
116+
return { ...config, models: [...updatedModels, ...modelsToAdd] };
108117
}

core/config/types.ts

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -534,7 +534,16 @@ declare global {
534534
cacheBehavior?: CacheBehavior;
535535
536536
useLegacyCompletionsEndpoint?: boolean;
537-
537+
538+
/**
539+
* When set to false, forces the OpenAI provider to use the /chat/completions
540+
* endpoint instead of the /responses endpoint for o-series and gpt-5 models.
541+
* This can help avoid "organization must be verified" errors related to
542+
* reasoning summaries and streaming on the Responses API.
543+
* Defaults to true for official OpenAI API.
544+
*/
545+
useResponsesApi?: boolean;
546+
538547
// Embedding options
539548
embeddingId?: string;
540549
maxEmbeddingChunkSize?: number;

0 commit comments

Comments (0)