Skip to content

Commit 825b3e3

Browse files
committed
test_runner: avoid hanging on incomplete v8 frames
Signed-off-by: Ali Hassan <ali-hassan27@outlook.com>
1 parent 0fea430 commit 825b3e3

File tree

2 files changed

+118
-18
lines changed

2 files changed

+118
-18
lines changed

lib/internal/test_runner/runner.js

Lines changed: 45 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -248,6 +248,7 @@ class FileTest extends Test {
248248
#rawBuffer = []; // Raw data waiting to be parsed
249249
#rawBufferSize = 0;
250250
#reportedChildren = 0;
251+
#pendingPartialV8Header = false;
251252
failedSubtests = false;
252253

253254
constructor(options) {
@@ -339,6 +340,12 @@ class FileTest extends Test {
339340
}
340341
parseMessage(readData) {
341342
let dataLength = TypedArrayPrototypeGetLength(readData);
343+
if (this.#pendingPartialV8Header) {
344+
readData = Buffer.concat([TypedArrayPrototypeSubarray(v8Header, 0, 1), readData]);
345+
dataLength = TypedArrayPrototypeGetLength(readData);
346+
this.#pendingPartialV8Header = false;
347+
}
348+
342349
if (dataLength === 0) return;
343350
const partialV8Header = readData[dataLength - 1] === v8Header[0];
344351

@@ -349,22 +356,52 @@ class FileTest extends Test {
349356
dataLength--;
350357
}
351358

352-
if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) {
353-
this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]);
354-
} else {
355-
ArrayPrototypePush(this.#rawBuffer, readData);
359+
if (dataLength > 0) {
360+
if (this.#rawBuffer[0] && TypedArrayPrototypeGetLength(this.#rawBuffer[0]) < kSerializedSizeHeader) {
361+
this.#rawBuffer[0] = Buffer.concat([this.#rawBuffer[0], readData]);
362+
} else {
363+
ArrayPrototypePush(this.#rawBuffer, readData);
364+
}
365+
this.#rawBufferSize += dataLength;
366+
this.#processRawBuffer();
356367
}
357-
this.#rawBufferSize += dataLength;
358-
this.#processRawBuffer();
359368

360369
if (partialV8Header) {
361-
ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1));
362-
this.#rawBufferSize++;
370+
this.#pendingPartialV8Header = true;
363371
}
364372
}
365373
#drainRawBuffer() {
374+
if (this.#pendingPartialV8Header) {
375+
ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1));
376+
this.#rawBufferSize++;
377+
this.#pendingPartialV8Header = false;
378+
}
379+
366380
while (this.#rawBuffer.length > 0) {
381+
const prevBufferLength = this.#rawBuffer.length;
382+
const prevBufferSize = this.#rawBufferSize;
367383
this.#processRawBuffer();
384+
385+
if (this.#rawBuffer.length === prevBufferLength &&
386+
this.#rawBufferSize === prevBufferSize) {
387+
const bufferHead = this.#rawBuffer[0];
388+
this.addToReport({
389+
__proto__: null,
390+
type: 'test:stdout',
391+
data: {
392+
__proto__: null,
393+
file: this.name,
394+
message: TypedArrayPrototypeSubarray(bufferHead, 0, 1).toString('utf-8'),
395+
},
396+
});
397+
398+
if (TypedArrayPrototypeGetLength(bufferHead) === 1) {
399+
ArrayPrototypeShift(this.#rawBuffer);
400+
} else {
401+
this.#rawBuffer[0] = TypedArrayPrototypeSubarray(bufferHead, 1);
402+
}
403+
this.#rawBufferSize--;
404+
}
368405
}
369406
}
370407
#processRawBuffer() {

test/parallel/test-runner-v8-deserializer.mjs

Lines changed: 73 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,24 @@ async function toArray(chunks) {
1414
return arr;
1515
}
1616

17-
const chunks = await toArray(serializer([
18-
{ type: 'test:diagnostic', data: { nesting: 0, details: {}, message: 'diagnostic' } },
19-
]));
17+
const diagnosticEvent = {
18+
type: 'test:diagnostic',
19+
data: { nesting: 0, details: {}, message: 'diagnostic' },
20+
};
21+
const chunks = await toArray(serializer([diagnosticEvent]));
2022
const defaultSerializer = new DefaultSerializer();
2123
defaultSerializer.writeHeader();
2224
const headerLength = defaultSerializer.releaseBuffer().length;
25+
const headerOnly = Buffer.from([0xff, 0x0f]);
26+
const oversizedLengthHeader = Buffer.from([0xff, 0x0f, 0x7f, 0xff, 0xff, 0xff]);
27+
const truncatedLengthHeader = Buffer.from([0xff, 0x0f, 0x00, 0x01, 0x00, 0x00]);
28+
29+
function collectStdout(reported) {
30+
return reported
31+
.filter((event) => event.type === 'test:stdout')
32+
.map((event) => event.data.message)
33+
.join('');
34+
}
2335

2436
describe('v8 deserializer', common.mustCall(() => {
2537
let fileTest;
@@ -56,35 +68,86 @@ describe('v8 deserializer', common.mustCall(() => {
5668

5769
it('should deserialize a serialized chunk', async () => {
5870
const reported = await collectReported(chunks);
59-
assert.deepStrictEqual(reported, [
60-
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
61-
]);
71+
assert.deepStrictEqual(reported, [diagnosticEvent]);
6272
});
6373

6474
it('should deserialize a serialized chunk after non-serialized chunk', async () => {
6575
const reported = await collectReported([Buffer.concat([Buffer.from('unknown'), ...chunks])]);
6676
assert.deepStrictEqual(reported, [
6777
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
68-
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
78+
diagnosticEvent,
6979
]);
7080
});
7181

7282
it('should deserialize a serialized chunk before non-serialized output', async () => {
7383
const reported = await collectReported([Buffer.concat([ ...chunks, Buffer.from('unknown')])]);
7484
assert.deepStrictEqual(reported, [
75-
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
85+
diagnosticEvent,
7686
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
7787
]);
7888
});
7989

90+
it('should not hang when buffer starts with v8Header followed by oversized length', async () => {
91+
// Regression test for https://github.com/nodejs/node/issues/62693
92+
// FF 0F is the v8 serializer header; the next 4 bytes are read as a
93+
// big-endian message size. 0x7FFFFFFF far exceeds any actual buffer
94+
// size, causing #processRawBuffer to make no progress and
95+
// #drainRawBuffer to loop forever without the no-progress guard.
96+
const reported = await collectReported([oversizedLengthHeader]);
97+
assert.partialDeepStrictEqual(
98+
reported,
99+
Array.from({ length: reported.length }, () => ({ type: 'test:stdout' })),
100+
);
101+
assert.strictEqual(collectStdout(reported), oversizedLengthHeader.toString('utf8'));
102+
});
103+
104+
it('should flush incomplete v8 frame as stdout and keep prior valid data', async () => {
105+
// A valid non-serialized message followed by bytes that look like
106+
// a v8 header with a truncated/oversized length.
107+
const reported = await collectReported([
108+
Buffer.from('hello'),
109+
truncatedLengthHeader,
110+
]);
111+
assert.strictEqual(collectStdout(reported), `hello${truncatedLengthHeader.toString('utf8')}`);
112+
});
113+
114+
it('should flush v8Header-only bytes as stdout when stream ends', async () => {
115+
// Just the two-byte v8 header with no size field at all.
116+
const reported = await collectReported([headerOnly]);
117+
assert(reported.every((event) => event.type === 'test:stdout'));
118+
assert.strictEqual(collectStdout(reported), headerOnly.toString('utf8'));
119+
});
120+
121+
it('should resync and parse valid messages after false v8 header', async () => {
122+
// A false v8 header (FF 0F + oversized length) followed by a
123+
// legitimate serialized message. The parser must skip the corrupt
124+
// bytes and still deserialize the real message.
125+
const reported = await collectReported([
126+
oversizedLengthHeader,
127+
...chunks,
128+
]);
129+
assert.deepStrictEqual(reported.at(-1), diagnosticEvent);
130+
assert.strictEqual(reported.filter((event) => event.type === 'test:diagnostic').length, 1);
131+
assert.strictEqual(collectStdout(reported), oversizedLengthHeader.toString('utf8'));
132+
});
133+
134+
it('should preserve a false v8 header split across chunks', async () => {
135+
const reported = await collectReported([
136+
oversizedLengthHeader.subarray(0, 1),
137+
oversizedLengthHeader.subarray(1),
138+
]);
139+
assert(reported.every((event) => event.type === 'test:stdout'));
140+
assert.strictEqual(collectStdout(reported), oversizedLengthHeader.toString('utf8'));
141+
});
142+
80143
const headerPosition = headerLength * 2 + 4;
81144
for (let i = 0; i < headerPosition + 5; i++) {
82145
const message = `should deserialize a serialized message split into two chunks {...${i},${i + 1}...}`;
83146
it(message, async () => {
84147
const data = chunks[0];
85148
const reported = await collectReported([data.subarray(0, i), data.subarray(i)]);
86149
assert.deepStrictEqual(reported, [
87-
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
150+
diagnosticEvent,
88151
]);
89152
});
90153

@@ -96,7 +159,7 @@ describe('v8 deserializer', common.mustCall(() => {
96159
]);
97160
assert.deepStrictEqual(reported, [
98161
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
99-
{ data: { nesting: 0, details: {}, message: 'diagnostic' }, type: 'test:diagnostic' },
162+
diagnosticEvent,
100163
{ data: { __proto__: null, file: 'filetest', message: 'unknown' }, type: 'test:stdout' },
101164
]);
102165
}

0 commit comments

Comments (0)