@@ -4,9 +4,39 @@ const async = require('async');
44const { Readable } = require ( 'stream' ) ;
55
66const TrailingChecksumTransform = require ( '../../../lib/auth/streamingV4/trailingChecksumTransform' ) ;
7- const { stripTrailingChecksumStream } = require ( '../../../lib/api/apiUtils/object/prepareStream' ) ;
87const { DummyRequestLogger } = require ( '../helpers' ) ;
98
// Helper: pipe input chunks through TrailingChecksumTransform, collect output and trailer events.
//
// @param {(Buffer|string)[]} inputChunks - chunks written to the transform in order;
//     strings are converted to Buffers before writing
// @returns {Promise<{data: Buffer, trailers: {name: string, value: string}[], stream: object}>}
//     resolves with the concatenated forwarded payload, the list of 'trailer'
//     events received, and the transform instance; rejects on stream error
function runTransform(inputChunks) {
    const stream = new TrailingChecksumTransform(new DummyRequestLogger());
    return new Promise((resolve, reject) => {
        const output = [];
        const trailerEvents = [];
        stream.on('data', chunk => output.push(Buffer.from(chunk)));
        stream.on('trailer', (name, value) => trailerEvents.push({ name, value }));
        // Resolve on 'end' (readable side fully consumed) rather than 'finish'
        // (writable side done): 'end' fires only after every 'data' and
        // 'trailer' event has been delivered, so the collected output cannot
        // race the resolution.
        stream.on('end', () => resolve({
            data: Buffer.concat(output),
            trailers: trailerEvents,
            stream,
        }));
        stream.on('error', reject);
        for (const chunk of inputChunks) {
            stream.write(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        }
        stream.end();
    });
}
25+
// Helper: expect a stream error from the given input chunks.
//
// @param {(Buffer|string)[]} inputChunks - chunks written to the transform in order;
//     strings are converted to Buffers before writing
// @returns {Promise<Error>} resolves with the emitted error; rejects if the
//     stream finishes cleanly without emitting one
function expectError(inputChunks) {
    const stream = new TrailingChecksumTransform(new DummyRequestLogger());
    return new Promise((resolve, reject) => {
        stream.on('error', resolve);
        stream.on('finish', () => reject(
            new Error('expected error but stream finished cleanly')));
        for (const chunk of inputChunks) {
            // A synchronous error on an earlier chunk destroys the stream
            // (autoDestroy); further write()/end() calls would then throw
            // ERR_STREAM_DESTROYED instead of letting the test observe the
            // original error, so stop writing once the stream is gone.
            if (stream.destroyed) {
                break;
            }
            stream.write(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
        }
        if (!stream.destroyed) {
            stream.end();
        }
        // Put the stream in flowing mode so _flush runs and errors surface.
        stream.resume();
    });
}
39+
1040const log = new DummyRequestLogger ( ) ;
1141
1242// note this is not the correct checksum in objDataWithTrailingChecksum
@@ -174,30 +204,33 @@ describe('TrailingChecksumTransform class', () => {
174204 it ( 'should propagate _flush error via errCb when stream closes without chunked encoding' , done => {
175205 const incompleteData = '10\r\n01234\r6789abcd\r\n\r\n' ;
176206 const source = new ChunkedReader ( [ Buffer . from ( incompleteData ) ] ) ;
177- source . headers = { 'x-amz-content-sha256' : 'STREAMING-UNSIGNED-PAYLOAD-TRAILER' } ;
178- const stream = stripTrailingChecksumStream ( source , log , err => {
179- assert . deepStrictEqual ( err , errors . InvalidArgument ) ;
207+ const stream = new TrailingChecksumTransform ( log ) ;
208+ stream . on ( 'error' , err => {
209+ assert . deepStrictEqual ( err , errors . IncompleteBody ) ;
180210 done ( ) ;
181211 } ) ;
212+ source . pipe ( stream ) ;
182213 stream . resume ( ) ;
183214 } ) ;
184215
185216 it ( 'should propagate _transform error via errCb for invalid chunk size' , done => {
186217 const badData = '500000000000\r\n' ;
187218 const source = new ChunkedReader ( [ Buffer . from ( badData ) ] ) ;
188- source . headers = { 'x-amz-content-sha256' : 'STREAMING-UNSIGNED-PAYLOAD-TRAILER' } ;
189- const stream = stripTrailingChecksumStream ( source , log , err => {
219+ const stream = new TrailingChecksumTransform ( log ) ;
220+ stream . on ( 'error' , err => {
190221 assert . deepStrictEqual ( err , errors . InvalidArgument ) ;
191222 done ( ) ;
192223 } ) ;
224+ source . pipe ( stream ) ;
193225 stream . resume ( ) ;
194226 } ) ;
195227
196- it ( 'should return early if supplied with an out of specification chunk size' , done => {
228+ it ( 'should return early if supplied with an out-of- specification chunk size' , done => {
197229 const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
198230 const chunks = [
199231 Buffer . from ( '500000' ) ,
200232 Buffer . from ( '000000\r\n' ) ,
233+
201234 Buffer . alloc ( 1000000 ) ,
202235 Buffer . alloc ( 1000000 ) ,
203236 Buffer . alloc ( 1000000 ) ,
@@ -227,3 +260,188 @@ describe('TrailingChecksumTransform class', () => {
227260 chunkedReader . pipe ( trailingChecksumTransform ) ;
228261 } ) ;
229262} ) ;
263+
// Exercises the chunked-encoding trailer parser: data forwarding, trailer
// event emission, input-chunk boundary splits, and error paths.
describe('TrailingChecksumTransform trailer parsing and emitting', () => {
    describe('happy path', () => {
        it('single chunk with data and trailer: forwards data, emits trailer name and value', async () => {
            const payload = '5\r\nhello\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AAAAAA==');
        });

        it('multiple data chunks followed by trailer: forwards all data, emits trailer once', async () => {
            const payload = '5\r\nhello\r\n5\r\nworld\r\n0\r\nx-amz-checksum-sha256:AAAAAA==\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.data.toString(), 'helloworld');
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-sha256');
        });

        it('data chunk containing \\r\\n in payload is forwarded correctly', async () => {
            // the declared 7-byte payload embeds a literal \r\n (h e l \r \n l o)
            const payload = '7\r\nhel\r\nlo\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.data.toString(), 'hel\r\nlo');
        });

        it('trailer with whitespace around name and value: name and value are trimmed', async () => {
            const payload = '0\r\n x-amz-checksum-crc32 : AAAAAA== \r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AAAAAA==');
        });

        it('trailer terminated with \\n\\r\\n: trailing \\n stripped from parsed line', async () => {
            const payload = '0\r\nx-amz-checksum-crc32:AAAAAA==\n\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AAAAAA==');
        });

        it('trailer value containing a colon: only first colon used as separator', async () => {
            const payload = '0\r\nx-amz-checksum-crc32:AA:BB==\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AA:BB==');
        });

        it('bytes after trailer \\r\\n are silently discarded', async () => {
            const payload = '5\r\nhello\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r\nextra bytes ignored';
            const res = await runTransform([payload]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 1);
        });
    });

    describe('chunk boundary edge cases', () => {
        it('chunk size field split across two input chunks: parsed correctly', async () => {
            // hex size 'a' = 10 bytes; the size field itself straddles the split
            const first = 'a';
            const second = '\r\nAAAAAAAAAA\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r\n';
            const res = await runTransform([first, second]);
            assert.strictEqual(res.data.toString(), 'AAAAAAAAAA');
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
        });

        it('data bytes split across two input chunks: all forwarded', async () => {
            // the 5-byte 'hello' payload is split after 'hel'
            const first = '5\r\nhel';
            const second = 'lo\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r\n';
            const res = await runTransform([first, second]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 1);
        });

        it('\\r\\n delimiter after chunk size split across two input chunks: parsed correctly', async () => {
            // the CRLF after the size field straddles the split: '5\r' | '\nhello...'
            const first = '5\r';
            const second = '\nhello\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r\n';
            const res = await runTransform([first, second]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 1);
        });

        it('trailer line split across two input chunks: emits trailer correctly', async () => {
            const first = '5\r\nhello\r\n0\r\nx-amz-checksum-';
            const second = 'crc32:AAAAAA==\r\n';
            const res = await runTransform([first, second]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AAAAAA==');
        });

        it('trailer \\r\\n split across two input chunks: emits trailer correctly', async () => {
            const first = '5\r\nhello\r\n0\r\nx-amz-checksum-crc32:AAAAAA==\r';
            const second = '\n';
            const res = await runTransform([first, second]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AAAAAA==');
        });
    });

    describe('zero-size terminator and trailer', () => {
        it('empty trailer line (0\\r\\n\\r\\n): no trailer event emitted, stream closes cleanly', async () => {
            const payload = '5\r\nhello\r\n0\r\n\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.data.toString(), 'hello');
            assert.strictEqual(res.trailers.length, 0);
        });

        it('zero data chunks (only terminator + trailer): no data forwarded, trailer emitted', async () => {
            const payload = '0\r\nx-amz-checksum-crc32:AAAAAA==\r\n';
            const res = await runTransform([payload]);
            assert.strictEqual(res.data.length, 0);
            assert.strictEqual(res.trailers.length, 1);
            assert.strictEqual(res.trailers[0].name, 'x-amz-checksum-crc32');
            assert.strictEqual(res.trailers[0].value, 'AAAAAA==');
        });
    });

    describe('_flush error cases', () => {
        it('stream ends mid-data (no zero-chunk): IncompleteBody error', async () => {
            // a 5-byte chunk is declared but the stream ends after 3 bytes
            const err = await expectError(['5\r\nhel']);
            assert.deepStrictEqual(err, errors.IncompleteBody);
        });

        it('stream ends after zero-chunk with partial trailer content: IncompleteBody error', async () => {
            // the zero-chunk arrived and a trailer started, but its \r\n terminator never did
            const err = await expectError(['0\r\nx-amz-checksum-crc32:AAAAAA==']);
            assert.deepStrictEqual(err, errors.IncompleteBody);
        });

        it('stream ends after zero-chunk with no trailer content: no error', async () => {
            // bare '0\r\n': readingTrailer=true but trailerBuffer is empty, so no error
            const res = await runTransform(['0\r\n']);
            assert.strictEqual(res.data.length, 0);
            assert.strictEqual(res.trailers.length, 0);
        });
    });

    describe('_transform error cases', () => {
        it('chunk size field larger than 10 bytes: InvalidArgument error', async () => {
            // 11 hex digits, one more than the 10-byte field size limit
            const err = await expectError(['12345678901\r\n']);
            assert.deepStrictEqual(err, errors.InvalidArgument);
        });

        it('chunk size is not valid hex: InvalidArgument error', async () => {
            // short enough (2 chars) to pass the size check, but not parseable hex
            const err = await expectError(['zz\r\n']);
            assert.deepStrictEqual(err, errors.InvalidArgument);
        });

        it('chunk size exceeds maximumAllowedPartSize: EntityTooLarge error', async () => {
            // 0x200000000 = 8589934592 > maximumAllowedPartSize (5GB = 0x140000000)
            const err = await expectError(['200000000\r\n']);
            assert.deepStrictEqual(err, errors.EntityTooLarge);
        });

        it('trailer line longer than 1024 bytes: MalformedTrailerError', async () => {
            // zero-chunk followed by a trailer line > 1024 bytes that never terminates
            const longTrailer = 'x'.repeat(1025);
            const err = await expectError([`0\r\n${longTrailer}`]);
            assert.deepStrictEqual(err, errors.MalformedTrailerError);
        });

        it('trailer line missing colon: IncompleteBody error', async () => {
            const err = await expectError(['0\r\nnocolon\r\n']);
            assert.deepStrictEqual(err, errors.IncompleteBody);
        });

        it('trailer line with colon at position 0 (empty name): IncompleteBody error', async () => {
            const err = await expectError(['0\r\n:value\r\n']);
            assert.deepStrictEqual(err, errors.IncompleteBody);
        });
    });
});
0 commit comments