1+ const { errors } = require ( 'arsenal' ) ;
12const assert = require ( 'assert' ) ;
23const async = require ( 'async' ) ;
34const { Readable } = require ( 'stream' ) ;
@@ -8,12 +9,12 @@ const { DummyRequestLogger } = require('../helpers');
89const log = new DummyRequestLogger ( ) ;
910
1011// note this is not the correct checksum in objDataWithTrailingChecksum
11- const objDataWithTrailingChecksum = '10\r\n0123456789abcdef \r\n' +
12+ const objDataWithTrailingChecksum = '10\r\n01234\r6789abcd\r\n \r\n' +
1213 '2\r\n01\r\n' +
1314 '1\r\n2\r\n' +
1415 'd\r\n3456789abcdef\r\n' +
1516 '0\r\nchecksum:xyz=\r\n' ;
16- const objDataWithoutTrailingChecksum = '0123456789abcdef0123456789abcdef ' ;
17+ const objDataWithoutTrailingChecksum = '01234\r6789abcd\r\n0123456789abcdef ' ;
1718
1819class ChunkedReader extends Readable {
1920 constructor ( chunks ) {
@@ -30,11 +31,16 @@ class ChunkedReader extends Readable {
3031 this . push ( this . _parts [ this . _index ] ) ;
3132 this . _index ++ ;
3233 }
34+
    // Returns how many chunks this reader has pushed downstream so far;
    // tests use it to assert the transform stopped consuming input early.
    getIndex() {
        return this._index;
    }
3338}
3439
3540describe ( 'TrailingChecksumTransform class' , ( ) => {
3641 it ( 'should correctly remove checksums' , done => {
37- const trailingChecksumTransform = new TrailingChecksumTransform ( log , err => {
42+ const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
43+ trailingChecksumTransform . on ( 'error' , err => {
3844 assert . strictEqual ( err , null ) ;
3945 } ) ;
4046 const chunks = [
@@ -43,7 +49,7 @@ describe('TrailingChecksumTransform class', () => {
4349 const chunkedReader = new ChunkedReader ( chunks ) ;
4450 chunkedReader . pipe ( trailingChecksumTransform ) ;
4551 const outputChunks = [ ] ;
46- trailingChecksumTransform . on ( 'data' , ( chunk ) => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
52+ trailingChecksumTransform . on ( 'data' , chunk => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
4753 trailingChecksumTransform . on ( 'finish' , ( ) => {
4854 const data = Buffer . concat ( outputChunks ) . toString ( ) ;
4955 assert . strictEqual ( data , objDataWithoutTrailingChecksum ) ;
@@ -55,9 +61,10 @@ describe('TrailingChecksumTransform class', () => {
5561 } ) ;
5662
5763 // test all bisection of the input string
58- async . forEach ( [ ...Array ( objDataWithTrailingChecksum . length ) . keys ( ) ] , ( i ) => {
64+ async . forEach ( [ ...Array ( objDataWithTrailingChecksum . length ) . keys ( ) ] , i => {
5965 it ( `should correctly remove checksums, cut at ${ i } ` , done => {
60- const trailingChecksumTransform = new TrailingChecksumTransform ( log , err => {
66+ const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
67+ trailingChecksumTransform . on ( 'error' , err => {
6168 assert . strictEqual ( err , null ) ;
6269 } ) ;
6370 const chunks = [
@@ -67,7 +74,7 @@ describe('TrailingChecksumTransform class', () => {
6774 const chunkedReader = new ChunkedReader ( chunks ) ;
6875 chunkedReader . pipe ( trailingChecksumTransform ) ;
6976 const outputChunks = [ ] ;
70- trailingChecksumTransform . on ( 'data' , ( chunk ) => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
77+ trailingChecksumTransform . on ( 'data' , chunk => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
7178 trailingChecksumTransform . on ( 'finish' , ( ) => {
7279 const data = Buffer . concat ( outputChunks ) . toString ( ) ;
7380 assert . strictEqual ( data , objDataWithoutTrailingChecksum ) ;
@@ -80,10 +87,11 @@ describe('TrailingChecksumTransform class', () => {
8087 } ) ;
8188
8289 // test all trisection of the input string
83- async . forEach ( [ ...Array ( objDataWithTrailingChecksum . length - 2 ) . keys ( ) ] , ( i ) => {
84- async . forEach ( [ ...Array ( objDataWithTrailingChecksum . length - i - 2 ) . keys ( ) ] , ( j ) => {
90+ async . forEach ( [ ...Array ( objDataWithTrailingChecksum . length - 2 ) . keys ( ) ] , i => {
91+ async . forEach ( [ ...Array ( objDataWithTrailingChecksum . length - i - 2 ) . keys ( ) ] , j => {
8592 it ( `should correctly remove checksums, cut at ${ i } and ${ i + j + 1 } ` , done => {
86- const trailingChecksumTransform = new TrailingChecksumTransform ( log , err => {
93+ const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
94+ trailingChecksumTransform . on ( 'error' , err => {
8795 assert . strictEqual ( err , null ) ;
8896 } ) ;
8997 const chunks = [
@@ -94,7 +102,7 @@ describe('TrailingChecksumTransform class', () => {
94102 const chunkedReader = new ChunkedReader ( chunks ) ;
95103 chunkedReader . pipe ( trailingChecksumTransform ) ;
96104 const outputChunks = [ ] ;
97- trailingChecksumTransform . on ( 'data' , ( chunk ) => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
105+ trailingChecksumTransform . on ( 'data' , chunk => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
98106 trailingChecksumTransform . on ( 'finish' , ( ) => {
99107 const data = Buffer . concat ( outputChunks ) . toString ( ) ;
100108 assert . strictEqual ( data , objDataWithoutTrailingChecksum ) ;
@@ -108,7 +116,8 @@ describe('TrailingChecksumTransform class', () => {
108116 } ) ;
109117
110118 it ( 'should correctly remove checksums, cut at each individual byte' , done => {
111- const trailingChecksumTransform = new TrailingChecksumTransform ( log , err => {
119+ const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
120+ trailingChecksumTransform . on ( 'error' , err => {
112121 assert . strictEqual ( err , null ) ;
113122 } ) ;
114123 const chunks = [ ] ;
@@ -118,7 +127,7 @@ describe('TrailingChecksumTransform class', () => {
118127 const chunkedReader = new ChunkedReader ( chunks ) ;
119128 chunkedReader . pipe ( trailingChecksumTransform ) ;
120129 const outputChunks = [ ] ;
121- trailingChecksumTransform . on ( 'data' , ( chunk ) => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
130+ trailingChecksumTransform . on ( 'data' , chunk => outputChunks . push ( Buffer . from ( chunk ) ) ) ;
122131 trailingChecksumTransform . on ( 'finish' , ( ) => {
123132 const data = Buffer . concat ( outputChunks ) . toString ( ) ;
124133 assert . strictEqual ( data , objDataWithoutTrailingChecksum ) ;
@@ -128,4 +137,70 @@ describe('TrailingChecksumTransform class', () => {
128137 assert . ifError ( err ) ;
129138 } ) ;
130139 } ) ;
140+
141+ it ( 'should return an error if the format does not follow trailing checksum specification' , done => {
142+ const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
143+ const chunks = [
144+ Buffer . from ( '11\r\n' ) ,
145+ Buffer . alloc ( 1000000 ) ,
146+ Buffer . alloc ( 1000000 ) ,
147+ Buffer . alloc ( 1000000 ) ,
148+ Buffer . alloc ( 1000000 ) ,
149+ Buffer . alloc ( 1000000 ) ,
150+ Buffer . alloc ( 1000000 ) ,
151+ Buffer . alloc ( 1000000 ) ,
152+ Buffer . alloc ( 1000000 ) ,
153+ ] ;
154+ const chunkedReader = new ChunkedReader ( chunks ) ;
155+ trailingChecksumTransform . on ( 'error' , err => {
156+ assert . deepStrictEqual ( err , errors . InvalidArgument ) ;
157+ trailingChecksumTransform . end ( ) ;
158+ } ) ;
159+ let bytesWritten = 0 ;
160+ trailingChecksumTransform . on ( 'data' , ( chunk ) => {
161+ bytesWritten += chunk . length ;
162+ } ) ;
163+ trailingChecksumTransform . on ( 'close' , ( ) => {
164+ assert . equal ( bytesWritten , 17 ) ;
165+ // 2 is the minimum but it looks like buffering will fetch additional chunks before the error is emitted
166+ // as long as we do not read the full input stream, this is fine
167+ assert . ok ( chunkedReader . getIndex ( ) <= 4 ) ;
168+ done ( ) ;
169+ } ) ;
170+ chunkedReader . pipe ( trailingChecksumTransform ) ;
171+ } ) ;
172+
173+ it ( 'should return early if supplied with an out of specification chunk size' , done => {
174+ const trailingChecksumTransform = new TrailingChecksumTransform ( log ) ;
175+ const chunks = [
176+ Buffer . from ( '500000' ) ,
177+ Buffer . from ( '000000\r\n' ) ,
178+ Buffer . alloc ( 1000000 ) ,
179+ Buffer . alloc ( 1000000 ) ,
180+ Buffer . alloc ( 1000000 ) ,
181+ Buffer . alloc ( 1000000 ) ,
182+ Buffer . alloc ( 1000000 ) ,
183+ Buffer . alloc ( 1000000 ) ,
184+ Buffer . alloc ( 1000000 ) ,
185+ Buffer . alloc ( 1000000 ) ,
186+ Buffer . alloc ( 1000000 ) ,
187+ ] ;
188+ const chunkedReader = new ChunkedReader ( chunks ) ;
189+ trailingChecksumTransform . on ( 'error' , err => {
190+ assert . deepStrictEqual ( err , errors . InvalidArgument ) ;
191+ trailingChecksumTransform . end ( ) ;
192+ } ) ;
193+ let bytesWritten = 0 ;
194+ trailingChecksumTransform . on ( 'data' , ( chunk ) => {
195+ bytesWritten += chunk . length ;
196+ } ) ;
197+ trailingChecksumTransform . on ( 'close' , ( ) => {
198+ assert . equal ( bytesWritten , 0 ) ;
199+ // 2 is the minimum but it looks like buffering will fetch additional chunks before the error is emitted
200+ // as long as we do not read the full input stream, this is fine
201+ assert . ok ( chunkedReader . getIndex ( ) <= 4 ) ;
202+ done ( ) ;
203+ } ) ;
204+ chunkedReader . pipe ( trailingChecksumTransform ) ;
205+ } ) ;
131206} ) ;
0 commit comments