|
const assert = require('assert');
const async = require('async');
const crypto = require('crypto');
const { storage } = require('arsenal');
const { parseString } = require('xml2js');

const { bucketPut } = require('../../../lib/api/bucketPut');
const initiateMultipartUpload = require('../../../lib/api/initiateMultipartUpload');
const objectPutPart = require('../../../lib/api/objectPutPart');
const constants = require('../../../constants');
const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../helpers');
const DummyRequest = require('../DummyRequest');

// In-memory metadata backend; used to inspect part records the API writes.
const { metadata } = storage.metadata.inMemory.metadata;

const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
const namespace = 'default';
const bucketName = 'checksum-test-bucket';
const objectKey = 'testObject';
// Shadow bucket that holds in-progress multipart-upload state for bucketName.
const mpuBucket = `${constants.mpuBucketPrefix}${bucketName}`;
const partBody = Buffer.from('I am a part body for checksum testing', 'utf8');

// Minimal request object accepted by bucketPut for creating the test bucket.
const bucketPutRequest = {
    bucketName,
    namespace,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    actionImplicitDenies: false,
};
| 32 | + |
/**
 * Build a request object for initiateMultipartUpload against the test bucket.
 *
 * @param {object} [extraHeaders] - extra HTTP headers to merge into the
 *   request (e.g. `x-amz-checksum-algorithm`); later keys win over defaults
 * @returns {object} request object suitable for initiateMultipartUpload
 */
function makeInitiateRequest(extraHeaders = {}) {
    return {
        socket: { remoteAddress: '1.1.1.1' },
        bucketName,
        namespace,
        objectKey,
        headers: {
            host: `${bucketName}.s3.amazonaws.com`,
            ...extraHeaders,
        },
        url: `/${objectKey}?uploads`,
        actionImplicitDenies: false,
    };
}
| 47 | + |
/**
 * Build a DummyRequest for objectPutPart carrying the given part body.
 * The body's MD5 is precomputed and attached as `partHash`, which the API
 * uses as the part's content hash.
 *
 * @param {string} uploadId - multipart upload ID returned by initiate
 * @param {number} partNumber - part number for the upload (1-based)
 * @param {Buffer} body - raw part payload
 * @param {object} [extraHeaders] - extra HTTP headers to merge in
 *   (e.g. `x-amz-checksum-crc32`)
 * @returns {DummyRequest} request object suitable for objectPutPart
 */
function makePutPartRequest(uploadId, partNumber, body, extraHeaders = {}) {
    const md5Hash = crypto.createHash('md5').update(body);
    return new DummyRequest({
        bucketName,
        namespace,
        objectKey,
        headers: {
            host: `${bucketName}.s3.amazonaws.com`,
            ...extraHeaders,
        },
        url: `/${objectKey}?partNumber=${partNumber}&uploadId=${uploadId}`,
        query: { partNumber, uploadId },
        partHash: md5Hash.digest('hex'),
        actionImplicitDenies: false,
    }, body);
}
| 64 | + |
/**
 * Create the test bucket, initiate a multipart upload with the given
 * headers, and hand back the resulting upload ID.
 *
 * @param {object} initiateHeaders - headers for the initiate request
 *   (e.g. `x-amz-checksum-algorithm`)
 * @param {function} cb - callback(err, uploadId)
 * @returns {undefined}
 */
function initiateMPU(initiateHeaders, cb) {
    async.waterfall([
        next => bucketPut(authInfo, bucketPutRequest, log, next),
        (corsHeaders, next) => {
            const req = makeInitiateRequest(initiateHeaders);
            initiateMultipartUpload(authInfo, req, log, next);
        },
        // The initiate response body is XML; parse it to extract the UploadId.
        (result, corsHeaders, next) => parseString(result, next),
    ], (err, json) => {
        if (err) {
            return cb(err);
        }
        return cb(null, json.InitiateMultipartUploadResult.UploadId[0]);
    });
}
| 78 | + |
/**
 * Look up the stored metadata for the first part of the given upload in the
 * in-memory MPU shadow bucket.
 *
 * Part keys start with the uploadId; the MPU "overview" record uses a key
 * starting with 'overview' and is skipped (the second check is a belt-and-
 * braces guard in case uploadId-prefixed overview keys exist).
 *
 * @param {string} uploadId - multipart upload ID
 * @returns {object|null} the part's metadata record, or null if not found
 */
function getPartMetadata(uploadId) {
    const mpuKeys = metadata.keyMaps.get(mpuBucket);
    if (!mpuKeys) {
        return null;
    }
    for (const [key, val] of mpuKeys) {
        if (key.startsWith(uploadId) && !key.startsWith('overview')) {
            return val;
        }
    }
    return null;
}
| 89 | + |
describe('objectPutPart checksum validation', () => {
    beforeEach(() => cleanup());

    describe('algo match validation', () => {
        it('should accept part with matching checksum algo', done => {
            initiateMPU({ 'x-amz-checksum-algorithm': 'crc32' }, (err, uploadId) => {
                assert.ifError(err);
                const request = makePutPartRequest(uploadId, 1, partBody, {
                    'x-amz-checksum-crc32': 'AAAAAA==',
                });
                objectPutPart(authInfo, request, undefined, log, err => {
                    // BadDigest is expected since the checksum value won't
                    // match the body, but NOT InvalidRequest — the algo is accepted.
                    if (err) {
                        assert.notStrictEqual(err.message, 'InvalidRequest');
                    }
                    done();
                });
            });
        });

        it('should reject part with mismatching checksum algo', done => {
            // MPU was initiated with sha256, so a crc32 part header must be
            // rejected with InvalidRequest.
            initiateMPU({ 'x-amz-checksum-algorithm': 'sha256' }, (err, uploadId) => {
                assert.ifError(err);
                const request = makePutPartRequest(uploadId, 1, partBody, {
                    'x-amz-checksum-crc32': 'AAAAAA==',
                });
                objectPutPart(authInfo, request, undefined, log, err => {
                    assert(err, 'Expected an error');
                    assert.strictEqual(err.message, 'InvalidRequest');
                    done();
                });
            });
        });

        it('should accept part with no checksum on non-default MPU', done => {
            initiateMPU({ 'x-amz-checksum-algorithm': 'sha256' }, (err, uploadId) => {
                assert.ifError(err);
                // No checksum header sent
                const request = makePutPartRequest(uploadId, 1, partBody);
                objectPutPart(authInfo, request, undefined, log, err => {
                    assert.ifError(err);
                    done();
                });
            });
        });

        it('should accept any checksum algo on default (no algo specified) MPU', done => {
            initiateMPU({}, (err, uploadId) => {
                assert.ifError(err);
                // Send sha256 checksum even though MPU is default crc64nvme
                const request = makePutPartRequest(uploadId, 1, partBody, {
                    'x-amz-checksum-sha256': 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=',
                });
                objectPutPart(authInfo, request, undefined, log, err => {
                    // BadDigest (wrong value) is fine; InvalidRequest (wrong algo) is not
                    if (err) {
                        assert.notStrictEqual(err.message, 'InvalidRequest');
                    }
                    done();
                });
            });
        });
    });

    describe('checksum stored in part metadata', () => {
        it('should store checksumValue and checksumAlgorithm in part metadata', done => {
            initiateMPU({}, (err, uploadId) => {
                assert.ifError(err);
                const request = makePutPartRequest(uploadId, 1, partBody);
                objectPutPart(authInfo, request, undefined, log, err => {
                    assert.ifError(err);
                    const partMD = getPartMetadata(uploadId);
                    assert(partMD, 'Part metadata should exist');
                    assert(partMD.checksumValue, 'checksumValue should be stored');
                    // Default MPU algo is crc64nvme when none is requested.
                    assert.strictEqual(partMD.checksumAlgorithm, 'crc64nvme');
                    done();
                });
            });
        });

        it('should store the MPU algo checksum when client sends matching algo', done => {
            initiateMPU({ 'x-amz-checksum-algorithm': 'crc64nvme' }, (err, uploadId) => {
                assert.ifError(err);
                const request = makePutPartRequest(uploadId, 1, partBody);
                objectPutPart(authInfo, request, undefined, log, err => {
                    assert.ifError(err);
                    const partMD = getPartMetadata(uploadId);
                    assert(partMD);
                    assert.strictEqual(partMD.checksumAlgorithm, 'crc64nvme');
                    assert(partMD.checksumValue);
                    done();
                });
            });
        });
    });

    describe('dual-checksum', () => {
        // Checksum helpers used to compute the correct digests so that
        // validation passes and we can assert on stored/returned values.
        const { algorithms } = require('../../../lib/api/apiUtils/integrity/validateChecksums');

        it('should store crc64nvme when default MPU and client sends different algo', done => {
            initiateMPU({}, (err, uploadId) => {
                assert.ifError(err);
                // Compute correct crc32 for partBody so validation passes
                const crc32Hash = algorithms.crc32.createHash();
                crc32Hash.update(partBody);
                const crc64Hash = algorithms.crc64nvme.createHash();
                crc64Hash.update(partBody);
                Promise.all([
                    algorithms.crc32.digestFromHash(crc32Hash),
                    algorithms.crc64nvme.digestFromHash(crc64Hash),
                ]).then(([crc32Digest, crc64Digest]) => {
                    const request = makePutPartRequest(uploadId, 1, partBody, {
                        'x-amz-checksum-crc32': crc32Digest,
                    });
                    objectPutPart(authInfo, request, undefined, log, (err, hexDigest, corsHeaders) => {
                        assert.ifError(err);
                        // Response header should be the client's algo (crc32)
                        assert.strictEqual(corsHeaders['x-amz-checksum-crc32'], crc32Digest);
                        // Stored metadata should be crc64nvme with correct value
                        const partMD = getPartMetadata(uploadId);
                        assert(partMD);
                        assert.strictEqual(partMD.checksumAlgorithm, 'crc64nvme');
                        assert.strictEqual(partMD.checksumValue, crc64Digest);
                        done();
                    });
                }).catch(done); // surface digest/assertion errors instead of timing out
            });
        });

        it('should handle dual-checksum with trailer (STREAMING-UNSIGNED-PAYLOAD-TRAILER)', done => {
            initiateMPU({}, (err, uploadId) => {
                assert.ifError(err);
                const hash = algorithms.sha256.createHash();
                hash.update(partBody);
                const crc64Hash = algorithms.crc64nvme.createHash();
                crc64Hash.update(partBody);
                Promise.all([
                    algorithms.sha256.digestFromHash(hash),
                    algorithms.crc64nvme.digestFromHash(crc64Hash),
                ]).then(([sha256Digest, crc64Digest]) => {
                    // Build chunked body with trailing checksum
                    const hexLen = partBody.length.toString(16);
                    const chunkedBody = `${hexLen}\r\n${partBody.toString()}\r\n` +
                        `0\r\nx-amz-checksum-sha256:${sha256Digest}\r\n`;
                    const request = makePutPartRequest(uploadId, 1, Buffer.from(chunkedBody), {
                        'x-amz-content-sha256': 'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
                        'x-amz-trailer': 'x-amz-checksum-sha256',
                    });
                    // parsedContentLength is the decoded (payload) length, not
                    // the chunk-framed length.
                    request.parsedContentLength = partBody.length;
                    objectPutPart(authInfo, request, undefined, log, (err, hexDigest, corsHeaders) => {
                        assert.ifError(err);
                        // Response should echo the client's sha256
                        assert.strictEqual(corsHeaders['x-amz-checksum-sha256'], sha256Digest);
                        // Stored metadata should be crc64nvme with correct value
                        const partMD = getPartMetadata(uploadId);
                        assert(partMD);
                        assert.strictEqual(partMD.checksumAlgorithm, 'crc64nvme');
                        assert.strictEqual(partMD.checksumValue, crc64Digest);
                        done();
                    });
                }).catch(done); // surface digest/assertion errors instead of timing out
            });
        });

        it('should return client-facing checksum in response header for dual-checksum', done => {
            initiateMPU({}, (err, uploadId) => {
                assert.ifError(err);
                const hash = algorithms.sha256.createHash();
                hash.update(partBody);
                // Promise.resolve guards against digestFromHash returning a
                // plain value rather than a promise.
                Promise.resolve(algorithms.sha256.digestFromHash(hash)).then(digest => {
                    const request = makePutPartRequest(uploadId, 1, partBody, {
                        'x-amz-checksum-sha256': digest,
                    });
                    objectPutPart(authInfo, request, undefined, log, (err, hexDigest, corsHeaders) => {
                        assert.ifError(err);
                        assert.strictEqual(corsHeaders['x-amz-checksum-sha256'], digest);
                        // The internal crc64nvme must NOT leak into the response.
                        assert.strictEqual(corsHeaders['x-amz-checksum-crc64nvme'], undefined);
                        done();
                    });
                }).catch(done); // surface digest/assertion errors instead of timing out
            });
        });
    });
});
0 commit comments