Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions constants.js
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,7 @@ const constants = {
'versionId',
'isNull',
'isDeleteMarker',
'bucketOwnerId',
],
unsupportedSignatureChecksums: new Set([
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
Expand Down
4 changes: 4 additions & 0 deletions lib/api/apiUtils/object/createAndStoreObject.js
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,10 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
metadataStoreParams.oldReplayId = objMD.uploadId;
}

if (authInfo.getCanonicalID() !== bucketMD.getOwner()) {
metadataStoreParams.bucketOwnerId = bucketMD.getOwner();
}

/* eslint-disable camelcase */
const dontSkipBackend = externalBackends;
/* eslint-enable camelcase */
Expand Down
3 changes: 3 additions & 0 deletions lib/api/completeMultipartUpload.js
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,9 @@ function completeMultipartUpload(authInfo, request, log, callback) {
};
setSSEHeaders(responseHeaders, serverSideEncryption, kmsKey);
}
if (authInfo.getCanonicalID() !== destBucket.getOwner()) {
metaStoreParams.bucketOwnerId = destBucket.getOwner();
}
return versioningPreprocessing(bucketName,
destBucket, objectKey, objMD, log, (err, options) => {
if (err) {
Expand Down
5 changes: 5 additions & 0 deletions lib/api/objectCopy.js
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,11 @@ function _prepMetadata(request, sourceObjMD, headers, sourceIsDestination,
if (!storeMetadataParams.contentType) {
storeMetadataParams.contentType = sourceObjMD['content-type'];
}

if (authInfo.getCanonicalID() !== destBucketMD.getOwner()) {
storeMetadataParams.bucketOwnerId = destBucketMD.getOwner();
}

return { storeMetadataParams, sourceLocationConstraintName,
backendInfoDest: backendInfoObjDest.backendInfo };
}
Expand Down
1 change: 1 addition & 0 deletions lib/api/objectPutCopyPart.js
Original file line number Diff line number Diff line change
Expand Up @@ -329,6 +329,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
splitter: constants.splitter,
lastModified,
overheadField: constants.overheadField,
ownerId: destBucketMD.getOwner(),
};
return services.metadataStorePart(mpuBucketName,
locations, metaStoreParams, log, err => {
Expand Down
9 changes: 7 additions & 2 deletions lib/services.js
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ const services = {
lastModifiedDate, versioning, versionId, uploadId,
tagging, taggingCopy, replicationInfo, defaultRetention,
dataStoreName, retentionMode, retentionDate, legalHold,
originOp, oldReplayId, deleteNullKey, overheadField } = params;
originOp, oldReplayId, deleteNullKey, overheadField, bucketOwnerId } = params;
log.trace('storing object in metadata');
assert.strictEqual(typeof bucketName, 'string');
const md = new ObjectMD();
Expand Down Expand Up @@ -241,6 +241,10 @@ const services = {
md.setLegalHold(legalHold);
}

if (bucketOwnerId) {
md.setBucketOwnerId(bucketOwnerId);
}

log.trace('object metadata', { omVal: md.getValue() });
// If this is not the completion of a multipart upload or
// the creation of a delete marker, parse the headers to
Expand Down Expand Up @@ -730,7 +734,7 @@ const services = {
metadataStorePart(mpuBucketName, partLocations,
metaStoreParams, log, cb) {
assert.strictEqual(typeof mpuBucketName, 'string');
const { partNumber, contentMD5, size, uploadId, lastModified, splitter, overheadField }
const { partNumber, contentMD5, size, uploadId, lastModified, splitter, overheadField, ownerId }
= metaStoreParams;
const dateModified = typeof lastModified === 'string' ?
lastModified : new Date().toJSON();
Expand All @@ -745,6 +749,7 @@ const services = {
'last-modified': dateModified,
'content-md5': contentMD5,
'content-length': size,
'owner-id': ownerId,
};

const params = {};
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "s3",
"version": "7.70.71",
"version": "7.70.72",
"description": "S3 connector",
"main": "index.js",
"engines": {
Expand All @@ -20,7 +20,7 @@
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@hapi/joi": "^17.1.0",
"arsenal": "git+https://github.com/scality/Arsenal#7.70.46",
"arsenal": "git+https://github.com/scality/Arsenal#7.70.47",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"azure-storage": "^2.1.0",
Expand Down
5 changes: 5 additions & 0 deletions tests/unit/DummyRequest.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,11 @@ class DummyRequest extends http.IncomingMessage {
}
this.push(null);
}

_destroy(err, cb) {
// this is a no-op
cb();
}
}

module.exports = DummyRequest;
96 changes: 96 additions & 0 deletions tests/unit/api/multipartUpload.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ const log = new DummyRequestLogger();
const splitter = constants.splitter;
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
const authInfoOtherAcc = makeAuthInfo('accessKey2');
const namespace = 'default';
const bucketName = 'bucketname';
const lockedBucket = 'objectlockenabledbucket';
Expand Down Expand Up @@ -2309,9 +2310,14 @@ describe('complete mpu with bucket policy', () => {

beforeEach(done => {
cleanup();
sinon.spy(metadataswitch, 'putObjectMD');
bucketPut(authInfo, bucketPutRequest, log, done);
});

afterEach(() => {
sinon.restore();
});

it('should complete with a deny on unrelated object as non root', done => {
const bucketPutPolicyRequest = getPolicyRequest({
Version: '2012-10-17',
Expand Down Expand Up @@ -2378,4 +2384,94 @@ describe('complete mpu with bucket policy', () => {
done();
});
});

// Verifies that when an account other than the bucket owner completes an
// MPU (allowed via bucket policy), the stored object metadata records the
// bucket owner's canonical ID under bucketOwnerId.
it('should set bucketOwnerId if requester is not destination bucket owner', done => {
    const partBody = Buffer.from('I am a part\n', 'utf8');
    // Grant the second account full object access so it can run the MPU.
    const bucketPutPolicyRequest = getPolicyRequest({
        Version: '2012-10-17',
        Statement: [
            {
                Effect: 'Allow',
                Principal: { AWS: `arn:aws:iam::${authInfoOtherAcc.shortid}:root` },
                Action: ['s3:*'],
                Resource: `arn:aws:s3:::${bucketName}/*`,
            },
        ],
    });
    async.waterfall([
        next => bucketPutPolicy(authInfo, bucketPutPolicyRequest, log, next),
        (corsHeaders, next) => initiateMultipartUpload(authInfoOtherAcc,
            initiateReqFixed, log, next),
        (result, corsHeaders, next) => parseString(result, next),
    ],
    (err, json) => {
        // Need to build request in here since do not have uploadId
        // until here
        assert.ifError(err);
        const testUploadId =
            json.InitiateMultipartUploadResult.UploadId[0];
        const md5Hash = crypto.createHash('md5').update(partBody);
        const calculatedHash = md5Hash.digest('hex');
        const partRequest = new DummyRequest(Object.assign({
            bucketName,
            namespace,
            objectKey,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            url: `/${objectKey}?partNumber=1&uploadId=${testUploadId}`,
            query: {
                partNumber: '1',
                uploadId: testUploadId,
            },
            // Note that the body of the post set in the request here does
            // not really matter in this test.
            // The put is not going through the route so the md5 is being
            // calculated above and manually being set in the request below.
            // What is being tested is that the calculatedHash being sent
            // to the API for the part is stored and then used to
            // calculate the final ETag upon completion
            // of the multipart upload.
            calculatedHash,
            socket: {
                remoteAddress: '1.1.1.1',
            },
        }, requestFix), partBody);
        objectPutPart(authInfoOtherAcc, partRequest, undefined, log, err => {
            assert.ifError(err);
            const completeBody = '<CompleteMultipartUpload>' +
                '<Part>' +
                '<PartNumber>1</PartNumber>' +
                `<ETag>"${calculatedHash}"</ETag>` +
                '</Part>' +
                '</CompleteMultipartUpload>';
            const completeRequest = new DummyRequest(Object.assign({
                bucketName,
                namespace,
                objectKey,
                parsedHost: 's3.amazonaws.com',
                url: `/${objectKey}?uploadId=${testUploadId}`,
                headers: { host: `${bucketName}.s3.amazonaws.com` },
                query: { uploadId: testUploadId },
                post: completeBody,
                actionImplicitDenies: false,
                socket: {
                    remoteAddress: '1.1.1.1',
                },
            }, requestFix));
            completeMultipartUpload(authInfoOtherAcc,
                completeRequest, log, err => {
                    assert.ifError(err);
                    // FIX: the auth helper exposes `canonicalID` (capital D),
                    // as used by the sibling tests in this change set;
                    // `authInfo.canonicalId` is undefined, which made this
                    // matcher compare bucketOwnerId against undefined instead
                    // of the actual bucket owner's canonical ID.
                    sinon.assert.calledWith(
                        metadataswitch.putObjectMD.lastCall,
                        bucketName,
                        objectKey,
                        sinon.match({ bucketOwnerId: authInfo.canonicalID }),
                        sinon.match.any,
                        sinon.match.any,
                        sinon.match.any
                    );
                    done();
                });
        });
    });
});
});
});
96 changes: 95 additions & 1 deletion tests/unit/api/objectCopy.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ const sinon = require('sinon');

const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning');
const bucketPutPolicy = require('../../../lib/api/bucketPutPolicy');
const objectPut = require('../../../lib/api/objectPut');
const objectCopy = require('../../../lib/api/objectCopy');
const { ds } = require('arsenal').storage.data.inMemory.datastore;
Expand Down Expand Up @@ -40,6 +41,7 @@ function _createObjectCopyRequest(destBucketName) {
objectKey,
headers: {},
url: `/${destBucketName}/${objectKey}`,
socket: {},
};
return new DummyRequest(params);
}
Expand All @@ -63,6 +65,7 @@ describe('objectCopy with versioning', () => {

before(done => {
cleanup();
sinon.spy(metadata, 'putObjectMD');
async.series([
callback => bucketPut(authInfo, putDestBucketRequest, log,
callback),
Expand Down Expand Up @@ -91,7 +94,10 @@ describe('objectCopy with versioning', () => {
});
});

after(() => cleanup());
after(() => {
metadata.putObjectMD.restore();
cleanup();
});

it('should delete null version when creating new null version, ' +
'even when null version is not the latest version', done => {
Expand All @@ -108,6 +114,94 @@ describe('objectCopy with versioning', () => {
});
});
});

it('should not set bucketOwnerId if requesting account owns dest bucket', done => {
    // Copy performed by the bucket owner itself: no cross-account
    // ownership marker should be persisted on the stored metadata.
    const copyRequest = _createObjectCopyRequest(destBucketName);
    objectCopy(authInfo, copyRequest, sourceBucketName, objectKey,
        undefined, log, err => {
            assert.ifError(err);
            const noOwnerIdMatcher = sinon.match({
                _data: { bucketOwnerId: sinon.match.typeOf('undefined') },
            });
            sinon.assert.calledWith(
                metadata.putObjectMD.lastCall,
                destBucketName,
                objectKey,
                noOwnerIdMatcher,
                sinon.match.any,
                sinon.match.any,
                sinon.match.any
            );
            done();
        });
});

// TODO: S3C-9965
// Skipped because the policy is not checked correctly
// When source bucket policy is checked destination arn is used
// Once S3C-9965 is fixed, unskip: this should verify that a cross-account
// copy records the destination bucket owner's canonical ID in the stored
// object metadata (bucketOwnerId).
it.skip('should set bucketOwnerId if requesting account differs from dest bucket owner', done => {
    // Second account performing the copy; not the owner of either bucket.
    const authInfo2 = makeAuthInfo('accessKey2');
    const testObjectCopyRequest = _createObjectCopyRequest(destBucketName);
    // Policy letting the second account read objects from the source bucket.
    const testPutSrcPolicyRequest = new DummyRequest({
        bucketName: sourceBucketName,
        namespace,
        headers: { host: `${sourceBucketName}.s3.amazonaws.com` },
        url: '/',
        socket: {},
        post: JSON.stringify({
            Version: '2012-10-17',
            Statement: [
                {
                    Sid: 'AllowCrossAccountRead',
                    Effect: 'Allow',
                    Principal: { AWS: `arn:aws:iam::${authInfo2.shortid}:root` },
                    Action: ['s3:GetObject'],
                    Resource: [
                        `arn:aws:s3:::${sourceBucketName}/*`,
                    ],
                },
            ],
        }),
    });
    // Policy letting the second account write objects into the destination bucket.
    const testPutDestPolicyRequest = new DummyRequest({
        bucketName: destBucketName,
        namespace,
        headers: { host: `${destBucketName}.s3.amazonaws.com` },
        url: '/',
        socket: {},
        post: JSON.stringify({
            Version: '2012-10-17',
            Statement: [
                {
                    Sid: 'AllowCrossAccountWrite',
                    Effect: 'Allow',
                    Principal: { AWS: `arn:aws:iam::${authInfo2.shortid}:root` },
                    Action: ['s3:PutObject'],
                    Resource: [
                        `arn:aws:s3:::${destBucketName}/*`,
                    ],
                },
            ],
        }),
    });
    // Apply both policies (as the owner), then copy as the second account
    // and assert the stored metadata carries the owner's canonical ID.
    bucketPutPolicy(authInfo, testPutSrcPolicyRequest, log, err => {
        assert.ifError(err);
        bucketPutPolicy(authInfo, testPutDestPolicyRequest, log, err => {
            assert.ifError(err);
            objectCopy(authInfo2, testObjectCopyRequest, sourceBucketName, objectKey,
                undefined, log, err => {
                    sinon.assert.calledWith(
                        metadata.putObjectMD.lastCall,
                        destBucketName,
                        objectKey,
                        sinon.match({ _data: { bucketOwnerId: authInfo.canonicalID } }),
                        sinon.match.any,
                        sinon.match.any,
                        sinon.match.any
                    );
                    assert.ifError(err);
                    done();
                });
        });
    });
});
});

describe('non-versioned objectCopy', () => {
Expand Down
17 changes: 17 additions & 0 deletions tests/unit/api/objectCopyPart.js
Original file line number Diff line number Diff line change
Expand Up @@ -137,4 +137,21 @@ describe('objectCopyPart', () => {
done();
});
});

it('should set owner-id to the canonicalId of the dest bucket owner', done => {
    // The part's metadata entry in the MPU shadow bucket should carry the
    // destination bucket owner's canonical ID under 'owner-id'.
    const copyPartRequest = _createObjectCopyPartRequest(destBucketName, uploadId);
    const expectedPartKey = `${uploadId}..|..00001`;
    objectPutCopyPart(authInfo, copyPartRequest, sourceBucketName, objectKey, undefined, log, err => {
        assert.ifError(err);
        sinon.assert.calledWith(
            metadataswitch.putObjectMD.lastCall,
            sinon.match.string, // MPU shadow bucket name
            expectedPartKey,
            sinon.match({ 'owner-id': authInfo.canonicalID }),
            sinon.match.any,
            sinon.match.any,
            sinon.match.any
        );
        done();
    });
});
});
Loading
Loading