Skip to content

Commit 54c5ff8

Browse files
committed
review william and linting
1 parent a4299cb commit 54c5ff8

2 files changed

Lines changed: 106 additions & 34 deletions

File tree

lib/auth/streamingV4/trailingChecksumTransform.js

Lines changed: 18 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -48,17 +48,20 @@ class TrailingChecksumTransform extends Transform {
4848
continue;
4949
}
5050

51-
const lineBreakIndex = chunk.indexOf('\r');
51+
// we are now looking for the chunk size field
52+
// no need to look further than 10 bytes since the field cannot be bigger
53+
const lineBreakIndex = chunk.subarray(0, 10).indexOf('\r');
54+
const bytesToKeep = lineBreakIndex === -1 ? chunk.byteLength : lineBreakIndex;
55+
if (this.chunkSizeBuffer.byteLength + bytesToKeep > 10) {
56+
this.log.error('chunk size field too big', {
57+
chunkSizeBuffer: this.chunkSizeBuffer.toString('hex'),
58+
truncatedChunk: chunk.subarray(0, 10).toString('hex'),
59+
});
60+
// if bigger, the chunk would be over 5 GB
61+
// returning early to avoid a DoS by memory exhaustion
62+
return callback(errors.InvalidArgument);
63+
}
5264
if (lineBreakIndex === -1) {
53-
if (this.chunkSizeBuffer.byteLength + chunk.byteLength > 10) {
54-
this.log.error('chunk size field too big', {
55-
chunkSizeBuffer: this.chunkSizeBuffer.toString(),
56-
truncatedChunk: chunk.subarray(0, 16).toString(),
57-
});
58-
// if bigger, the chunk would be over 5 GB
59-
// returning early to avoid a DoS by memory exhaustion
60-
return callback(errors.InvalidArgument);
61-
}
6265
// no delimiter, we'll keep the chunk for later
6366
this.chunkSizeBuffer = Buffer.concat([this.chunkSizeBuffer, chunk]);
6467
return callback();
@@ -68,17 +71,11 @@ class TrailingChecksumTransform extends Transform {
6871
chunk = chunk.subarray(lineBreakIndex);
6972

7073
// chunk-size is sent in hex
71-
if (!/^[0-9a-fA-F]+$/.test(this.chunkSizeBuffer.toString())) {
72-
this.log.error('chunk size is not a valid hex number', {
73-
chunkSizeBuffer: this.chunkSizeBuffer.toString(),
74-
});
75-
return callback(errors.InvalidArgument);
76-
}
77-
const dataSize = Number.parseInt(this.chunkSizeBuffer.toString(), 16);
78-
if (Number.isNaN(dataSize)) {
79-
this.log.error('unable to parse chunk size', {
80-
chunkSizeBuffer: this.chunkSizeBuffer.toString(),
81-
});
74+
const chunkSizeStr = this.chunkSizeBuffer.toString();
75+
const dataSize = parseInt(chunkSizeStr, 16);
76+
// we check that the parse consumed the whole field (parseInt silently succeeds on a valid leading prefix and ignores trailing garbage)
77+
if (isNaN(dataSize) || dataSize.toString(16) !== chunkSizeStr.toLowerCase()) {
78+
this.log.error('invalid chunk size', { chunkSizeBuffer: chunkSizeStr });
8279
return callback(errors.InvalidArgument);
8380
}
8481
this.chunkSizeBuffer = Buffer.alloc(0);

tests/unit/auth/TrailingChecksumTransform.js

Lines changed: 88 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
const { errors } = require('arsenal');
12
const assert = require('assert');
23
const async = require('async');
34
const { Readable } = require('stream');
@@ -8,12 +9,12 @@ const { DummyRequestLogger } = require('../helpers');
89
const log = new DummyRequestLogger();
910

1011
// note this is not the correct checksum in objDataWithTrailingChecksum
11-
const objDataWithTrailingChecksum = '10\r\n0123456789abcdef\r\n' +
12+
const objDataWithTrailingChecksum = '10\r\n01234\r6789abcd\r\n\r\n' +
1213
'2\r\n01\r\n' +
1314
'1\r\n2\r\n' +
1415
'd\r\n3456789abcdef\r\n' +
1516
'0\r\nchecksum:xyz=\r\n';
16-
const objDataWithoutTrailingChecksum = '0123456789abcdef0123456789abcdef';
17+
const objDataWithoutTrailingChecksum = '01234\r6789abcd\r\n0123456789abcdef';
1718

1819
class ChunkedReader extends Readable {
1920
constructor(chunks) {
@@ -30,11 +31,16 @@ class ChunkedReader extends Readable {
3031
this.push(this._parts[this._index]);
3132
this._index++;
3233
}
34+
35+
getIndex() {
36+
return this._index;
37+
}
3338
}
3439

3540
describe('TrailingChecksumTransform class', () => {
3641
it('should correctly remove checksums', done => {
37-
const trailingChecksumTransform = new TrailingChecksumTransform(log, err => {
42+
const trailingChecksumTransform = new TrailingChecksumTransform(log);
43+
trailingChecksumTransform.on('error', err => {
3844
assert.strictEqual(err, null);
3945
});
4046
const chunks = [
@@ -43,7 +49,7 @@ describe('TrailingChecksumTransform class', () => {
4349
const chunkedReader = new ChunkedReader(chunks);
4450
chunkedReader.pipe(trailingChecksumTransform);
4551
const outputChunks = [];
46-
trailingChecksumTransform.on('data', (chunk) => outputChunks.push(Buffer.from(chunk)));
52+
trailingChecksumTransform.on('data', chunk => outputChunks.push(Buffer.from(chunk)));
4753
trailingChecksumTransform.on('finish', () => {
4854
const data = Buffer.concat(outputChunks).toString();
4955
assert.strictEqual(data, objDataWithoutTrailingChecksum);
@@ -55,9 +61,10 @@ describe('TrailingChecksumTransform class', () => {
5561
});
5662

5763
// test all bisections of the input string
58-
async.forEach([...Array(objDataWithTrailingChecksum.length).keys()], (i) => {
64+
async.forEach([...Array(objDataWithTrailingChecksum.length).keys()], i => {
5965
it(`should correctly remove checksums, cut at ${i}`, done => {
60-
const trailingChecksumTransform = new TrailingChecksumTransform(log, err => {
66+
const trailingChecksumTransform = new TrailingChecksumTransform(log);
67+
trailingChecksumTransform.on('error', err => {
6168
assert.strictEqual(err, null);
6269
});
6370
const chunks = [
@@ -67,7 +74,7 @@ describe('TrailingChecksumTransform class', () => {
6774
const chunkedReader = new ChunkedReader(chunks);
6875
chunkedReader.pipe(trailingChecksumTransform);
6976
const outputChunks = [];
70-
trailingChecksumTransform.on('data', (chunk) => outputChunks.push(Buffer.from(chunk)));
77+
trailingChecksumTransform.on('data', chunk => outputChunks.push(Buffer.from(chunk)));
7178
trailingChecksumTransform.on('finish', () => {
7279
const data = Buffer.concat(outputChunks).toString();
7380
assert.strictEqual(data, objDataWithoutTrailingChecksum);
@@ -80,10 +87,11 @@ describe('TrailingChecksumTransform class', () => {
8087
});
8188

8289
// test all trisections of the input string
83-
async.forEach([...Array(objDataWithTrailingChecksum.length - 2).keys()], (i) => {
84-
async.forEach([...Array(objDataWithTrailingChecksum.length - i - 2).keys()], (j) => {
90+
async.forEach([...Array(objDataWithTrailingChecksum.length - 2).keys()], i => {
91+
async.forEach([...Array(objDataWithTrailingChecksum.length - i - 2).keys()], j => {
8592
it(`should correctly remove checksums, cut at ${i} and ${i + j + 1}`, done => {
86-
const trailingChecksumTransform = new TrailingChecksumTransform(log, err => {
93+
const trailingChecksumTransform = new TrailingChecksumTransform(log);
94+
trailingChecksumTransform.on('error', err => {
8795
assert.strictEqual(err, null);
8896
});
8997
const chunks = [
@@ -94,7 +102,7 @@ describe('TrailingChecksumTransform class', () => {
94102
const chunkedReader = new ChunkedReader(chunks);
95103
chunkedReader.pipe(trailingChecksumTransform);
96104
const outputChunks = [];
97-
trailingChecksumTransform.on('data', (chunk) => outputChunks.push(Buffer.from(chunk)));
105+
trailingChecksumTransform.on('data', chunk => outputChunks.push(Buffer.from(chunk)));
98106
trailingChecksumTransform.on('finish', () => {
99107
const data = Buffer.concat(outputChunks).toString();
100108
assert.strictEqual(data, objDataWithoutTrailingChecksum);
@@ -108,7 +116,8 @@ describe('TrailingChecksumTransform class', () => {
108116
});
109117

110118
it('should correctly remove checksums, cut at each individual byte', done => {
111-
const trailingChecksumTransform = new TrailingChecksumTransform(log, err => {
119+
const trailingChecksumTransform = new TrailingChecksumTransform(log);
120+
trailingChecksumTransform.on('error', err => {
112121
assert.strictEqual(err, null);
113122
});
114123
const chunks = [];
@@ -118,7 +127,7 @@ describe('TrailingChecksumTransform class', () => {
118127
const chunkedReader = new ChunkedReader(chunks);
119128
chunkedReader.pipe(trailingChecksumTransform);
120129
const outputChunks = [];
121-
trailingChecksumTransform.on('data', (chunk) => outputChunks.push(Buffer.from(chunk)));
130+
trailingChecksumTransform.on('data', chunk => outputChunks.push(Buffer.from(chunk)));
122131
trailingChecksumTransform.on('finish', () => {
123132
const data = Buffer.concat(outputChunks).toString();
124133
assert.strictEqual(data, objDataWithoutTrailingChecksum);
@@ -128,4 +137,70 @@ describe('TrailingChecksumTransform class', () => {
128137
assert.ifError(err);
129138
});
130139
});
140+
141+
it('should return an error if the format does not follow trailing checksum specification', done => {
142+
const trailingChecksumTransform = new TrailingChecksumTransform(log);
143+
const chunks = [
144+
Buffer.from('11\r\n'),
145+
Buffer.alloc(1000000),
146+
Buffer.alloc(1000000),
147+
Buffer.alloc(1000000),
148+
Buffer.alloc(1000000),
149+
Buffer.alloc(1000000),
150+
Buffer.alloc(1000000),
151+
Buffer.alloc(1000000),
152+
Buffer.alloc(1000000),
153+
];
154+
const chunkedReader = new ChunkedReader(chunks);
155+
trailingChecksumTransform.on('error', err => {
156+
assert.deepStrictEqual(err, errors.InvalidArgument);
157+
trailingChecksumTransform.end();
158+
});
159+
let bytesWritten = 0;
160+
trailingChecksumTransform.on('data', (chunk) => {
161+
bytesWritten += chunk.length;
162+
});
163+
trailingChecksumTransform.on('close', () => {
164+
assert.equal(bytesWritten, 17);
165+
// 2 is the minimum but it looks like buffering will fetch additional chunks before the error is emitted
166+
// as long as we do not read the full input stream, this is fine
167+
assert.ok(chunkedReader.getIndex() <= 4);
168+
done();
169+
});
170+
chunkedReader.pipe(trailingChecksumTransform);
171+
});
172+
173+
it('should return early if supplied with an out of specification chunk size', done => {
174+
const trailingChecksumTransform = new TrailingChecksumTransform(log);
175+
const chunks = [
176+
Buffer.from('500000'),
177+
Buffer.from('000000\r\n'),
178+
Buffer.alloc(1000000),
179+
Buffer.alloc(1000000),
180+
Buffer.alloc(1000000),
181+
Buffer.alloc(1000000),
182+
Buffer.alloc(1000000),
183+
Buffer.alloc(1000000),
184+
Buffer.alloc(1000000),
185+
Buffer.alloc(1000000),
186+
Buffer.alloc(1000000),
187+
];
188+
const chunkedReader = new ChunkedReader(chunks);
189+
trailingChecksumTransform.on('error', err => {
190+
assert.deepStrictEqual(err, errors.InvalidArgument);
191+
trailingChecksumTransform.end();
192+
});
193+
let bytesWritten = 0;
194+
trailingChecksumTransform.on('data', (chunk) => {
195+
bytesWritten += chunk.length;
196+
});
197+
trailingChecksumTransform.on('close', () => {
198+
assert.equal(bytesWritten, 0);
199+
// 2 is the minimum but it looks like buffering will fetch additional chunks before the error is emitted
200+
// as long as we do not read the full input stream, this is fine
201+
assert.ok(chunkedReader.getIndex() <= 4);
202+
done();
203+
});
204+
chunkedReader.pipe(trailingChecksumTransform);
205+
});
131206
});

0 commit comments

Comments
 (0)