Skip to content

Commit 34c8ebc

Browse files
committed
refactor: extract pushStreamErrorRow helper in StreamingMessageAggregator
Both stream-error rows in `buildDisplayedMessagesForMessage` (the existing `message.metadata?.error` branch and the new `finishReason === "length"` branch added in #3223) push structurally identical objects, differing only in `id` suffix, `error` string, and `errorType`. The shared parent-message-derived fields (`historyId`, `historySequence`, `model`, `routedThroughGateway`, `timestamp`) were duplicated across both pushes. Extract a local `pushStreamErrorRow` closure that captures the shared fields once. Each branch now reduces to a single call passing the three differing values. Pure refactor — emitted DisplayedMessage objects are identical.
1 parent 467fc24 commit 34c8ebc

1 file changed: 30 additions and 19 deletions

File tree

src/browser/utils/messages/StreamingMessageAggregator.ts

Lines changed: 30 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ import {
2727
type StreamLifecycleSnapshot,
2828
} from "@/common/types/stream";
2929
import type { LanguageModelV2Usage } from "@ai-sdk/provider";
30+
import type { StreamErrorType } from "@/common/types/errors";
3031
import type { TodoItem, StatusSetToolResult, NotifyToolResult } from "@/common/types/tools";
3132
import { completeInProgressTodoItems } from "@/common/utils/todoList";
3233
import { getToolOutputUiOnly } from "@/common/utils/tools/toolOutputUiOnly";
@@ -3064,20 +3065,37 @@ export class StreamingMessageAggregator {
30643065
}
30653066
});
30663067

3067-
// Create stream-error DisplayedMessage if message has error metadata
3068-
// This happens after all parts are displayed, so error appears at the end
3069-
if (message.metadata?.error) {
3068+
// Both stream-error rows (real error metadata + synthesized
3069+
// max_tokens truncation) share the same parent-message-derived
3070+
// fields. Capture them in one place so adding a new branch later
3071+
// can't accidentally drift on `model` / `routedThroughGateway` /
3072+
// `historySequence` / `timestamp`.
3073+
const pushStreamErrorRow = (
3074+
idSuffix: string,
3075+
error: string,
3076+
errorType: StreamErrorType
3077+
): void => {
30703078
displayedMessages.push({
30713079
type: "stream-error",
3072-
id: `${message.id}-error`,
3080+
id: `${message.id}-${idSuffix}`,
30733081
historyId: message.id,
3074-
error: message.metadata.error,
3075-
errorType: message.metadata.errorType ?? "unknown",
3082+
error,
3083+
errorType,
30763084
historySequence,
3077-
model: message.metadata.model,
3085+
model: message.metadata?.model,
30783086
routedThroughGateway: message.metadata?.routedThroughGateway,
30793087
timestamp: baseTimestamp,
30803088
});
3089+
};
3090+
3091+
// Create stream-error DisplayedMessage if message has error metadata
3092+
// This happens after all parts are displayed, so error appears at the end
3093+
if (message.metadata?.error) {
3094+
pushStreamErrorRow(
3095+
"error",
3096+
message.metadata.error,
3097+
message.metadata.errorType ?? "unknown"
3098+
);
30813099
} else if (
30823100
// Stream ended cleanly *but* the provider truncated us at max_tokens.
30833101
// The backend's stream-end path treats this as a successful completion
@@ -3090,19 +3108,12 @@ export class StreamingMessageAggregator {
30903108
!hasActiveStream &&
30913109
message.metadata?.finishReason === "length"
30923110
) {
3093-
displayedMessages.push({
3094-
type: "stream-error",
3095-
id: `${message.id}-length`,
3096-
historyId: message.id,
3097-
error:
3098-
"The model hit its max output token limit before finishing this response. " +
3111+
pushStreamErrorRow(
3112+
"length",
3113+
"The model hit its max output token limit before finishing this response. " +
30993114
"Lower the thinking level (or split the turn into smaller steps) to give it more headroom.",
3100-
errorType: "max_output_tokens",
3101-
historySequence,
3102-
model: message.metadata.model,
3103-
routedThroughGateway: message.metadata?.routedThroughGateway,
3104-
timestamp: baseTimestamp,
3105-
});
3115+
"max_output_tokens"
3116+
);
31063117
}
31073118
}
31083119

Comments (0) — this commit has no comments.