@@ -1090,18 +1090,37 @@ function Compressor(log, type) {
10901090
10911091 assert ( ( type === 'REQUEST' ) || ( type === 'RESPONSE' ) ) ;
10921092 this . _table = new HeaderTable ( this . _log ) ;
1093+
1094+ this . tableSizeChangePending = false ;
1095+ this . lowestTableSizePending = 0 ;
1096+ this . tableSizeSetting = DEFAULT_HEADER_TABLE_LIMIT ;
10931097}
10941098
10951099// Changing the header table size
// Changing the header table size.
//
// Lower the table's size limit immediately, and remember that a
// dynamic table size update still has to be signaled to the peer.
// Per HPACK, if the limit dips below a previously pending value
// before we get a chance to encode, the peer must be told about the
// lowest point reached as well as the final setting, so we track the
// minimum of all sizes seen while a change is pending.
Compressor.prototype.setTableSizeLimit = function setTableSizeLimit(size) {
  this._table.setSizeLimit(size);

  var noChangeQueued = !this.tableSizeChangePending;
  if (noChangeQueued || (size < this.lowestTableSizePending)) {
    this.lowestTableSizePending = size;
  }

  this.tableSizeSetting = size;
  this.tableSizeChangePending = true;
};
10991108
11001109// `compress` takes a header set, and compresses it using a new `HeaderSetCompressor` stream
11011110// instance. This means that from now on, the advantages of streaming header encoding are lost,
11021111// but the API becomes simpler.
11031112Compressor . prototype . compress = function compress ( headers ) {
11041113 var compressor = new HeaderSetCompressor ( this . _log , this . _table ) ;
1114+
1115+ if ( this . tableSizeChangePending ) {
1116+ if ( this . lowestTableSizePending < this . tableSizeSetting ) {
1117+ compressor . send ( { contextUpdate : true , newMaxSize : this . lowestTableSizePending ,
1118+ name : "" , value : "" , index : 0 } ) ;
1119+ }
1120+ compressor . send ( { contextUpdate : true , newMaxSize : this . tableSizeSetting ,
1121+ name : "" , value : "" , index : 0 } ) ;
1122+ this . tableSizeChangePending = false ;
1123+ }
11051124 var colonHeaders = [ ] ;
11061125 var nonColonHeaders = [ ] ;
11071126
0 commit comments