From 8f64c9c2276da36032c1924f3e4e7f078f4ac0fe Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Mon, 1 Jul 2024 22:04:12 +0200 Subject: [PATCH 01/15] CLDSRV-546: update package for postObject support --- package.json | 2 +- yarn.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 88d99d1dcd..5a234d9e9c 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ "homepage": "https://github.com/scality/S3#readme", "dependencies": { "@hapi/joi": "^17.1.0", - "arsenal": "git+https://github.com/scality/arsenal#7.70.29", + "arsenal": "git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085", "async": "~2.5.0", "aws-sdk": "2.905.0", "azure-storage": "^2.1.0", diff --git a/yarn.lock b/yarn.lock index 57f5a07451..094dec0c38 100644 --- a/yarn.lock +++ b/yarn.lock @@ -499,9 +499,9 @@ arraybuffer.slice@~0.0.7: optionalDependencies: ioctl "^2.0.2" -"arsenal@git+https://github.com/scality/arsenal#7.70.29": +"arsenal@git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085": version "7.70.29" - resolved "git+https://github.com/scality/arsenal#a643a3e6ccbc49327339a285de1d4cb17afcd171" + resolved "git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085" dependencies: "@js-sdsl/ordered-set" "^4.4.2" "@types/async" "^3.2.12" From 4d826c9f36c96733e48d9e64d53ff2e17e27776c Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Mon, 1 Jul 2024 18:54:16 +0200 Subject: [PATCH 02/15] CLDSRV-546: add form data handling to api handler --- lib/api/api.js | 143 ++++++++++++++++++++++++++++++++++++------------- package.json | 1 + yarn.lock | 5 ++ 3 files changed, 112 insertions(+), 37 deletions(-) diff --git a/lib/api/api.js b/lib/api/api.js index 23039807da..98747ed2d1 100644 --- a/lib/api/api.js +++ b/lib/api/api.js @@ -1,5 +1,6 @@ const { auth, errors, policies } = require('arsenal'); const async = require('async'); +const busboy = require('@fastify/busboy'); const bucketDelete = require('./bucketDelete'); const bucketDeleteCors = require('./bucketDeleteCors'); @@ -52,6 +53,7 @@ const objectGetRetention = require('./objectGetRetention'); const objectGetTagging = require('./objectGetTagging'); const objectHead = require('./objectHead'); const objectPut = require('./objectPut'); +const objectPost = require('./objectPost'); const objectPutACL = require('./objectPutACL'); const objectPutLegalHold = require('./objectPutLegalHold'); const objectPutTagging = require('./objectPutTagging'); @@ -112,7 +114,7 @@ const api = { // no need to check auth on website or cors preflight requests if (apiMethod === 'websiteGet' || apiMethod === 'websiteHead' || - apiMethod === 'corsPreflight') { + apiMethod === 'corsPreflight') { request.actionImplicitDenies = false; return this[apiMethod](request, log, callback); } @@ -158,7 +160,7 @@ const api = { // second item checks s3:GetObject(Version)Tagging action if (!authResults[1].isAllowed) { log.trace('get tagging authorization denial ' + - 'from Vault'); + 'from Vault'); returnTagCount = false; } } else { @@ -184,14 +186,101 @@ const api = { } return { returnTagCount, isImplicitDeny }; } + let bb; + let fileEventData = null; + + if (apiMethod === 'objectPost') { + bb = busboy({ headers: request.headers }); + } return async.waterfall([ + next => { + if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') { + return next(null); + } + if (apiMethod === 'objectPost' && request.headers['content-type'].includes('multipart/form-data')) { + writeContinue(request, 
response); + + request.formData = {}; + let totalFieldSize = 0; + const MAX_FIELD_SIZE = 20 * 1024; // 20KB + bb.on('field', (fieldname, val) => { + totalFieldSize += Buffer.byteLength(val, 'utf8'); + if (totalFieldSize > MAX_FIELD_SIZE) { + const err = errors.MaxPostPreDataLengthExceeded; + bb.emit('error', err); + } + // Convert fieldname to lowercase for case-insensitive comparison + const lowerFieldname = fieldname.toLowerCase(); + request.formData[lowerFieldname] = val; + }); + + bb.on('file', (fieldname, file, filename, encoding, mimetype) => { + fileEventData = { fieldname, file, filename, encoding, mimetype }; + return next(null); + }); + + bb.on('finish', () => { + // if fields are found but no file, continue + if (!fileEventData) { + return next(null); + } + return undefined; + }); + + bb.on('error', (err) => { + log.trace('Error processing form data:', { + error: err, + }); + request.unpipe(bb); + return next(err); + }); + + request.pipe(bb); + } else { + // issue 100 Continue to the client + writeContinue(request, response); + const MAX_POST_LENGTH = request.method === 'POST' ? + 1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB + const post = []; + let postLength = 0; + request.on('data', chunk => { + postLength += chunk.length; + // Sanity check on post length + if (postLength <= MAX_POST_LENGTH) { + post.push(chunk); + } + }); + + request.on('error', err => { + log.trace('error receiving request', { + error: err, + }); + return next(errors.InternalError); + }); + + request.on('end', () => { + if (postLength > MAX_POST_LENGTH) { + log.error('body length is too long for request type', + { postLength }); + return next(errors.InvalidRequest); + } + request.post = Buffer.concat(post, postLength).toString(); + return next(null); + }); + } + return undefined; + }, next => auth.server.doAuth( request, log, (err, userInfo, authorizationResults, streamingV4Params) => { if (err) { log.trace('authentication error', { error: err }); return next(err); } + // TODO RING-45960 remove ignore for POST object here + if (apiMethod === 'objectPost') { + return next(null, userInfo, authorizationResults, streamingV4Params); + } return next(null, userInfo, authorizationResults, streamingV4Params); }, 's3', requestContexts), (userInfo, authorizationResults, streamingV4Params, next) => { @@ -200,41 +289,7 @@ const api = { authNames.userName = userInfo.getIAMdisplayName(); } log.addDefaultFields(authNames); - if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') { - return next(null, userInfo, authorizationResults, streamingV4Params); - } - // issue 100 Continue to the client - writeContinue(request, response); - const MAX_POST_LENGTH = request.method === 'POST' ? 
- 1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB - const post = []; - let postLength = 0; - request.on('data', chunk => { - postLength += chunk.length; - // Sanity check on post length - if (postLength <= MAX_POST_LENGTH) { - post.push(chunk); - } - }); - - request.on('error', err => { - log.trace('error receiving request', { - error: err, - }); - return next(errors.InternalError); - }); - - request.on('end', () => { - if (postLength > MAX_POST_LENGTH) { - log.error('body length is too long for request type', - { postLength }); - return next(errors.InvalidRequest); - } - // Convert array of post buffers into one string - request.post = Buffer.concat(post, postLength).toString(); - return next(null, userInfo, authorizationResults, streamingV4Params); - }); - return undefined; + return next(null, userInfo, authorizationResults, streamingV4Params); }, // Tag condition keys require information from CloudServer for evaluation (userInfo, authorizationResults, streamingV4Params, next) => tagConditionKeyAuth( @@ -244,6 +299,10 @@ const api = { apiMethod, log, (err, authResultsWithTags) => { + // TODO RING-45960 remove ignore for POST object here + if (apiMethod === 'objectPost') { + return next(null, userInfo, authorizationResults, streamingV4Params); + } if (err) { log.trace('tag authentication error', { error: err }); return next(err); @@ -271,6 +330,15 @@ const api = { return acc; }, {}); } + if (apiMethod === 'objectPost') { + request._response = response; + if (fileEventData) { + request.fileEventData = fileEventData; + request.headers['content-type'] = fileEventData.mimetype; + } + return this[apiMethod](userInfo, request, streamingV4Params, + log, callback, authorizationResults); + } if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') { request._response = response; return this[apiMethod](userInfo, request, streamingV4Params, @@ -337,6 +405,7 @@ const api = { objectCopy, objectHead, objectPut, + objectPost, objectPutACL, objectPutLegalHold, objectPutTagging, diff --git a/package.json b/package.json index 5a234d9e9c..7e0c5b8811 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ }, "homepage": "https://github.com/scality/S3#readme", "dependencies": { + "@fastify/busboy": "^2.1.1", "@hapi/joi": "^17.1.0", "arsenal": "git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085", "async": "~2.5.0", diff --git a/yarn.lock b/yarn.lock index 094dec0c38..625c5cbf49 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16,6 +16,11 @@ enabled "2.0.x" kuler "^2.0.0" +"@fastify/busboy@^2.1.1": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d" + integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== + "@gar/promisify@^1.0.1": version "1.1.3" resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" From 20a6daa395e415e46ec418cd3fd2de440e44544c Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Mon, 1 Jul 2024 18:56:57 +0200 Subject: [PATCH 03/15] CLDSRV-546: post object action --- .../apiUtils/object/createAndStoreObject.js | 14 +- lib/api/objectPost.js | 135 ++++++++++++++++++ 2 files changed, 147 insertions(+), 2 deletions(-) create mode 100644 lib/api/objectPost.js diff --git a/lib/api/apiUtils/object/createAndStoreObject.js b/lib/api/apiUtils/object/createAndStoreObject.js index 7dc84089bf..62c600d37b 100644 --- a/lib/api/apiUtils/object/createAndStoreObject.js +++ 
b/lib/api/apiUtils/object/createAndStoreObject.js @@ -210,8 +210,18 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo, metadataStoreParams.contentMD5 = constants.emptyFileMd5; return next(null, null, null); } - return dataStore(objectKeyContext, cipherBundle, request, size, - streamingV4Params, backendInfo, log, next); + // Object Post receives a file stream. + // This is to be used to store data instead of the request stream itself. + + let stream; + + if (request.apiMethod === 'objectPost') { + stream = request.fileEventData ? request.fileEventData.file : undefined; + } else { + stream = request; + } + + return dataStore(objectKeyContext, cipherBundle, stream, size, streamingV4Params, backendInfo, log, next); }, function processDataResult(dataGetInfo, calculatedHash, next) { if (dataGetInfo === null || dataGetInfo === undefined) { diff --git a/lib/api/objectPost.js b/lib/api/objectPost.js new file mode 100644 index 0000000000..95b0db335f --- /dev/null +++ b/lib/api/objectPost.js @@ -0,0 +1,135 @@ +const async = require('async'); +const { errors, versioning } = require('arsenal'); +const { PassThrough } = require('stream'); + +const collectCorsHeaders = require('../utilities/collectCorsHeaders'); +const createAndStoreObject = require('./apiUtils/object/createAndStoreObject'); +const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils'); +const { config } = require('../Config'); +const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders'); +const writeContinue = require('../utilities/writeContinue'); +const { overheadField } = require('../../constants'); + + +const versionIdUtils = versioning.VersionID; + + +/** + * POST Object in the requested bucket. Steps include: + * validating metadata for authorization, bucket and object existence etc. 
+ * store object data in datastore upon successful authorization
+ * store object location returned by datastore and
+ * object's (custom) headers in metadata
+ * return the result in final callback
+ *
+ * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
+ * @param {request} request - request object given by router,
+ * includes normalized headers
+ * @param {object | undefined } streamingV4Params - if v4 auth,
+ * object containing accessKey, signatureFromRequest, region, scopeDate,
+ * timestamp, and credentialScope
+ * (to be used for streaming v4 auth if applicable)
+ * @param {object} log - the request logger
+ * @param {Function} callback - final callback to call with the result
+ * @return {undefined}
+ */
+function objectPost(authInfo, request, streamingV4Params, log, callback) {
+    const {
+        headers,
+        method,
+    } = request;
+    let parsedContentLength = 0;
+    const passThroughStream = new PassThrough();
+    const requestType = request.apiMethods || 'objectPost';
+    const valParams = {
+        authInfo,
+        bucketName: request.formData.bucket,
+        objectKey: request.formData.key,
+        requestType,
+        request,
+    };
+    const canonicalID = authInfo.getCanonicalID();
+
+
+    log.trace('owner canonicalID to send to data', { canonicalID });
+    return standardMetadataValidateBucketAndObj(valParams, request.actionImplicitDenies, log,
+        (err, bucket, objMD) => {
+            const responseHeaders = collectCorsHeaders(headers.origin,
+                method, bucket);
+
+            if (err && !err.AccessDenied) {
+                log.trace('error processing request', {
+                    error: err,
+                    method: 'metadataValidateBucketAndObj',
+                });
+                return callback(err, responseHeaders);
+            }
+            if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
+                log.trace('deleted flag on bucket and request ' +
+                    'from non-owner account');
+                return callback(errors.NoSuchBucket);
+            }
+
+            return async.waterfall([
+                function countPOSTFileSize(next) {
+                    if (!request.fileEventData || !request.fileEventData.file) {
+                        return next();
+                    }
+                    request.fileEventData.file.on('data', (chunk) => {
+                        parsedContentLength += chunk.length;
+                        passThroughStream.write(chunk);
+                    });
+
+                    request.fileEventData.file.on('end', () => {
+                        passThroughStream.end();
+                        // Setting the file in the request avoids the need to make changes to createAndStoreObject's
+                        // parameters and thus all its subsequent calls. This is necessary as the stream used to create
+                        // the object is that of the request directly; something we must work around
+                        // to use the file data produced from the multipart form data. 
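+                        // Note: by this point the whole upload has been drained
+                        // into passThroughStream, so parsedContentLength below
+                        // already reflects the complete file size.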
+                        /* eslint-disable no-param-reassign */
+                        request.fileEventData.file = passThroughStream;
+                        /* eslint-enable no-param-reassign */
+                        // Here parsedContentLength will have the total size of the file
+                        // This is used when calculating the size of the object in createAndStoreObject
+                        request.parsedContentLength = parsedContentLength;
+                        return next();
+                    });
+                    return undefined;
+                },
+                function objectCreateAndStore(next) {
+                    writeContinue(request, request._response);
+                    return createAndStoreObject(request.bucketName,
+                        bucket, request.formData.key, objMD, authInfo, canonicalID, null,
+                        request, false, streamingV4Params, overheadField, log, next);
+                },
+            ], (err, storingResult) => {
+                if (err) {
+                    return callback(err, responseHeaders);
+                }
+                setExpirationHeaders(responseHeaders, {
+                    lifecycleConfig: bucket.getLifecycleConfiguration(),
+                    objectParams: {
+                        key: request.formData.key,
+                        date: storingResult.lastModified,
+                        tags: storingResult.tags,
+                    },
+                });
+                if (storingResult) {
+                    // ETag's hex should always be enclosed in quotes
+                    responseHeaders.ETag = `"${storingResult.contentMD5}"`;
+                }
+                const vcfg = bucket.getVersioningConfiguration();
+                const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
+                if (isVersionedObj) {
+                    if (storingResult && storingResult.versionId) {
+                        responseHeaders['x-amz-version-id'] =
+                            versionIdUtils.encode(storingResult.versionId,
+                                config.versionIdEncodingType);
+                    }
+                }
+                return callback(null, responseHeaders);
+            });
+        });
+}
+
+module.exports = objectPost;

From 19f671bbf9cc8e86b300d148ec7f941423d95564 Mon Sep 17 00:00:00 2001
From: Will Toozs
Date: Mon, 1 Jul 2024 18:57:33 +0200
Subject: [PATCH 04/15] CLDSRV-546: debugged prepareStream

---
 lib/api/apiUtils/object/prepareStream.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/api/apiUtils/object/prepareStream.js b/lib/api/apiUtils/object/prepareStream.js
index 7d436dd96b..985ec4efe0 100644
--- a/lib/api/apiUtils/object/prepareStream.js
+++ b/lib/api/apiUtils/object/prepareStream.js
@@ -13,7 +13,7 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
  * the type of request requires them
  */
 function prepareStream(stream, streamingV4Params, log, errCb) {
-    if (stream.headers['x-amz-content-sha256'] ===
+    if (stream && stream.headers && stream.headers['x-amz-content-sha256'] ===
         'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
         if (typeof streamingV4Params !== 'object') {
             // this might happen if the user provided a valid V2

From c4860c3817d2a03f38c071d92e929cfed7dbf2fa Mon Sep 17 00:00:00 2001
From: Will Toozs
Date: Mon, 1 Jul 2024 18:58:21 +0200
Subject: [PATCH 05/15] CLDSRV-546: functional tests

---
 .../aws-node-sdk/test/object/post.js          | 516 ++++++++++++++++++
 1 file changed, 516 insertions(+)
 create mode 100644 tests/functional/aws-node-sdk/test/object/post.js

diff --git a/tests/functional/aws-node-sdk/test/object/post.js b/tests/functional/aws-node-sdk/test/object/post.js
new file mode 100644
index 0000000000..fd639d78cc
--- /dev/null
+++ b/tests/functional/aws-node-sdk/test/object/post.js
@@ -0,0 +1,516 @@
+const xml2js = require('xml2js');
+const axios = require('axios');
+const fs = require('fs');
+const path = require('path');
+const crypto = require('crypto');
+const FormData = require('form-data');
+const assert = require('assert');
+
+const BucketUtility = require('../../lib/utility/bucket-util');
+const getConfig = require('../support/config');
+
+const filename = 'test-file.txt';
+const region = 'us-east-1';
+let ak;
+let sk;
+let s3;
+
+const generateBucketName = () => 
`test-bucket-${crypto.randomBytes(8).toString('hex')}`; + +const formatDate = (date) => { + const year = date.getUTCFullYear(); + const month = (date.getUTCMonth() + 1).toString().padStart(2, '0'); + const day = date.getUTCDate().toString().padStart(2, '0'); + return `${year}${month}${day}`; +}; + +const getSignatureKey = (key, dateStamp, regionName, serviceName) => { + const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest(); + const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest(); + const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest(); + const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest(); + return kSigning; +}; + +const calculateFields = (ak, sk, bucketName, additionalConditions) => { + const service = 's3'; + + const now = new Date(); + const formattedDate = now.toISOString().replace(/[:-]|\.\d{3}/g, ''); + const shortFormattedDate = formatDate(now); + + const credential = `${ak}/${shortFormattedDate}/${region}/${service}/aws4_request`; + const conditionsFields = [ + { bucket: bucketName }, + { key: filename }, + { 'x-amz-credential': credential }, + { 'x-amz-algorithm': 'AWS4-HMAC-SHA256' }, + { 'x-amz-date': formattedDate }, + ]; + if (additionalConditions) { + additionalConditions.forEach(field => { + const key = Object.keys(field)[0]; + const value = field[key]; + const index = conditionsFields.findIndex(condition => condition.hasOwnProperty(key)); + if (index !== -1) { + conditionsFields[index][key] = value; + } else { + conditionsFields.push({ [key]: value }); + } + }); + } + const policy = { + expiration: new Date(new Date().getTime() + 60000).toISOString(), + conditions: conditionsFields, + }; + const policyBase64 = Buffer.from(JSON.stringify(policy)).toString('base64'); + + const signingKey = getSignatureKey(sk, shortFormattedDate, region, service); + const signature = crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex'); + + const returnFields = [ + { name: 'X-Amz-Credential', value: credential }, + { name: 'X-Amz-Algorithm', value: 'AWS4-HMAC-SHA256' }, + { name: 'X-Amz-Signature', value: signature }, + { name: 'X-Amz-Date', value: formattedDate }, + { name: 'Policy', value: policyBase64 }, + { name: 'bucket', value: bucketName }, + { name: 'key', value: filename }, + ]; + if (!additionalConditions) { + return returnFields; + } + if (additionalConditions) { + additionalConditions.forEach(field => { + const key = Object.keys(field)[0]; + const value = field[key]; + const index = returnFields.findIndex(f => f.name === key); + if (index !== -1) { + returnFields[index].value = value; + } else { + returnFields.push({ name: key, value }); + } + }); + } + return returnFields; +}; + + +describe('POST object', () => { + let bucketUtil; + let config; + const testContext = {}; + + before(() => { + config = getConfig('default'); + ak = config.credentials.accessKeyId; + sk = config.credentials.secretAccessKey; + bucketUtil = new BucketUtility('default'); + s3 = bucketUtil.s3; + }); + + beforeEach(done => { + const bucketName = generateBucketName(); + const url = `${config.endpoint}/${bucketName}`; + testContext.bucketName = bucketName; + testContext.url = url; + + const filePath = path.join(__dirname, filename); + const fileContent = 'This is a test file'; + fs.writeFile(filePath, fileContent, err => { + if (err) { + return done(err); + } + + // Create the bucket + return s3.createBucket({ Bucket: bucketName }, async (err) => { + if (err) { + return 
done(err);
+                }
+                return done();
+            });
+        });
+    });
+
+    afterEach(done => {
+        const { bucketName } = testContext;
+        const filePath = path.join(__dirname, filename);
+
+        // Delete the file
+        fs.unlink(filePath, err => {
+            if (err) {
+                return done(err);
+            }
+
+            process.stdout.write('Emptying bucket');
+            return bucketUtil.empty(bucketName)
+                .then(() => {
+                    process.stdout.write('Deleting bucket');
+                    return bucketUtil.deleteOne(bucketName);
+                })
+                .then(() => done())
+                .catch(err => {
+                    if (err.code !== 'NoSuchBucket') {
+                        process.stdout.write('Error in afterEach');
+                        return done(err);
+                    }
+                    return done();
+                });
+        });
+    });
+
+    it('should successfully upload an object using a POST form', done => {
+        const { bucketName, url } = testContext;
+        const fields = calculateFields(ak, sk, bucketName);
+        const formData = new FormData();
+
+        fields.forEach(field => {
+            formData.append(field.name, field.value);
+        });
+
+        formData.append('file', fs.createReadStream(path.join(__dirname, filename)));
+
+        formData.getLength((err, length) => {
+            if (err) {
+                return done(err);
+            }
+
+            return axios.post(url, formData, {
+                headers: {
+                    ...formData.getHeaders(),
+                    'Content-Length': length,
+                },
+            })
+                .then(response => {
+                    assert.equal(response.status, 204);
+                    done();
+                })
+                .catch(err => {
+                    done(err);
+                });
+        });
+    });
+
+    it('should handle error when bucket does not exist', done => {
+        const fakeBucketName = generateBucketName();
+        const tempUrl = `${config.endpoint}/${fakeBucketName}`;
+        const fields = calculateFields(ak, sk, fakeBucketName);
+        const formData = new FormData();
+
+        fields.forEach(field => {
+            formData.append(field.name, field.value);
+        });
+
+        formData.append('file', fs.createReadStream(path.join(__dirname, filename)));
+
+        formData.getLength((err, length) => {
+            if (err) {
+                return done(err);
+            }
+
+            return axios.post(tempUrl, formData, {
+                headers: {
+                    ...formData.getHeaders(),
+                    'Content-Length': length,
+                },
+            })
+                .then(() => {
+                    done(new Error('Expected error but got success response'));
+                })
+                .catch(err => {
+                    assert.equal(err.response.status, 404);
+                    done();
+                });
+        });
+    });
+
+    it('should successfully upload a larger file to S3 using a POST form', done => {
+        const { bucketName, url } = testContext;
+        const largeFileName = 'large-test-file.txt';
+        const largeFilePath = path.join(__dirname, largeFileName);
+        const largeFileContent = 'This is a larger test file'.repeat(10000); // Simulate a larger file
+
+        fs.writeFile(largeFilePath, largeFileContent, err => {
+            if (err) {
+                return done(err);
+            }
+
+            const fields = calculateFields(ak, sk, bucketName, [{ key: largeFileName }]);
+            const formData = new FormData();
+
+            fields.forEach(field => {
+                formData.append(field.name, field.value);
+            });
+
+            formData.append('file', fs.createReadStream(largeFilePath));
+
+            return formData.getLength((err, length) => {
+                if (err) {
+                    return done(err);
+                }
+
+                return axios.post(url, formData, {
+                    headers: {
+                        ...formData.getHeaders(),
+                        'Content-Length': length,
+                    },
+                })
+                    .then(response => {
+                        assert.equal(response.status, 204);
+                        s3.listObjectsV2({ Bucket: bucketName }, (err, data) => {
+                            if (err) {
+                                fs.unlink(largeFilePath, () => done(err)); // Clean up and propagate the error
+                                return;
+                            }
+
+                            const uploadedFile = data.Contents.find(item => item.Key === path.basename(largeFileName));
+                            assert(uploadedFile, 'Uploaded file should exist in the bucket');
+                            assert.equal(uploadedFile.Size, Buffer.byteLength(largeFileContent),
+                                'File size should match');
+
+                            fs.unlink(largeFilePath, done); // Clean up the large file
+                        });
+                    })
+                    .catch(err => {
fs.unlink(largeFilePath, () => done(err)); // Clean up and propagate the error + }); + }); + }); + }); + + it('should be able to post an empty file and verify its existence', done => { + const { bucketName, url } = testContext; + const emptyFilePath = path.join(__dirname, 'empty-file.txt'); + + // Create an empty file + fs.writeFile(emptyFilePath, '', err => { + if (err) { + return done(err); + } + + const fields = calculateFields(ak, sk, bucketName); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(emptyFilePath)); + + return formData.getLength((err, length) => { + if (err) { + return done(err); + } + + return axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(response => { + assert.equal(response.status, 204); + + // Check if the object exists using listObjects + return s3.listObjects({ Bucket: bucketName, Prefix: filename }, (err, data) => { + if (err) { + return done(err); + } + + const fileExists = data.Contents.some(item => item.Key === filename); + + const file = data.Contents.find(item => item.Key === filename); + assert.equal(file.Size, 0); + + if (!fileExists) { + return done(new Error('File does not exist in S3')); + } + + // Clean up: delete the empty file locally and from S3 + return fs.unlink(emptyFilePath, err => { + if (err) { + return done(err); + } + + return s3.deleteObject({ Bucket: bucketName, Key: filename }, err => { + if (err) { + return done(err); + } + + return done(); + }); + }); + }); + }) + .catch(err => { + done(err); + }); + }); + }); + }); + + it('should handle error when file is missing', done => { + const { bucketName, url } = testContext; + const fields = calculateFields(ak, sk, bucketName); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + return axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(() => { + done(new Error('Expected error but got success response')); + }) + .catch(err => { + assert.equal(err.response.status, 400); + done(); + }); + }); + }); + + it('should upload an object with key slash', done => { + const { bucketName, url } = testContext; + const slashKey = '/'; + const fields = calculateFields(ak, sk, bucketName, [{ key: slashKey }]); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, filename))); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + return axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(response => { + assert.equal(response.status, 204); + done(); + }) + .catch(err => { + done(err); + }); + }); + }); + + it('should return an error if form data (excluding file) exceeds 20KB', done => { + const { bucketName, url } = testContext; + const fields = calculateFields(ak, sk, bucketName); + + // Add additional fields to make form data exceed 20KB + const largeValue = 'A'.repeat(1024); // 1KB value + for (let i = 0; i < 21; i++) { // Add 21 fields of 1KB each to exceed 20KB + fields.push({ name: `field${i}`, value: largeValue }); + } + + const formData = new FormData(); + + fields.forEach(field => { + 
formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, filename))); + + return formData.getLength((err, length) => { + if (err) { + return done(err); + } + + return axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(() => { + done(new Error('Request should not succeed with form data exceeding 20KB')); + }) + .catch(err => { + assert.ok(err.response, 'Error should be returned by axios'); + + // Parse the XML error response + xml2js.parseString(err.response.data, (err, result) => { + if (err) { + return done(err); + } + + const error = result.Error; + assert.equal(error.Code[0], 'MaxPostPreDataLengthExceeded'); + assert.equal(error.Message[0], + 'Your POST request fields preceeding the upload file was too large.'); + return done(); + }); + }); + }); + }); + + it('should successfully upload an object with bucket versioning enabled and verify version ID', done => { + const { url, bucketName } = testContext; + + // Enable versioning on the bucket + const versioningParams = { + Bucket: bucketName, + VersioningConfiguration: { + Status: 'Enabled', + }, + }; + + return s3.putBucketVersioning(versioningParams, (err) => { + if (err) { + return done(err); + } + + const fields = calculateFields(ak, sk, bucketName, [{ bucket: bucketName }]); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt'))); + + return formData.getLength((err, length) => { + if (err) { + return done(err); + } + + return axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(response => { + assert.equal(response.status, 204); + + // Verify version ID is present in the response + const versionId = response.headers['x-amz-version-id']; + assert.ok(versionId, 'Version ID should be present in the response headers'); + done(); + }) + .catch(err => { + done(err); + }); + }); + }); + }); +}); + From d18f9ecd708f49357bea05a70a9f54be28abfe0b Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Mon, 1 Jul 2024 18:58:43 +0200 Subject: [PATCH 06/15] CLDSRV-546: unit tests --- tests/unit/api/objectPost.js | 439 +++++++++++++++++++++++++++++++++++ tests/unit/helpers.js | 13 ++ 2 files changed, 452 insertions(+) create mode 100644 tests/unit/api/objectPost.js diff --git a/tests/unit/api/objectPost.js b/tests/unit/api/objectPost.js new file mode 100644 index 0000000000..74664755d0 --- /dev/null +++ b/tests/unit/api/objectPost.js @@ -0,0 +1,439 @@ +const assert = require('assert'); +const async = require('async'); +const moment = require('moment'); +const { errors } = require('arsenal'); +const sinon = require('sinon'); + +const { bucketPut } = require('../../../lib/api/bucketPut'); +const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock'); +const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning'); +const { cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils } + = require('../helpers'); +const { ds } = require('arsenal').storage.data.inMemory.datastore; +const metadata = require('../metadataswitch'); +const objectPost = require('../../../lib/api/objectPost'); +const { objectLockTestUtils } = require('../helpers'); +const DummyRequest = require('../DummyRequest'); +const mpuUtils = require('../utils/mpuUtils'); +const any = sinon.match.any; + +const log = new 
DummyRequestLogger(); +const canonicalID = 'accessKey1'; +const authInfo = makeAuthInfo(canonicalID); +const bucketName = 'bucketname123'; +const postBody = Buffer.from('I am a body', 'utf8'); +const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a'; +const mockDate = new Date(2050, 10, 12); +const testPutBucketRequest = new DummyRequest({ + bucketName, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + url: '/', +}); +const testPutBucketRequestLock = new DummyRequest({ + bucketName, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-bucket-object-lock-enabled': 'true', + }, + url: '/', +}); + +const originalputObjectMD = metadata.putObjectMD; +const objectName = 'objectName'; + +let testPostObjectRequest; +const enableVersioningRequest = + versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled'); +const suspendVersioningRequest = + versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended'); + +describe('objectPost API', () => { + beforeEach(() => { + cleanup(); + sinon.spy(metadata, 'putObjectMD'); + testPostObjectRequest = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + url: '/', + }, postBody); + }); + + afterEach(() => { + sinon.restore(); + metadata.putObjectMD = originalputObjectMD; + }); + + it('should return an error if the bucket does not exist', done => { + objectPost(authInfo, testPostObjectRequest, undefined, log, err => { + assert.deepStrictEqual(err, errors.NoSuchBucket); + done(); + }); + }); + + it('should successfully post an object', done => { + const testPostObjectRequest = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: {}, + url: '/', + calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==', + }, postBody); + + bucketPut(authInfo, testPutBucketRequest, log, () => { + objectPost(authInfo, testPostObjectRequest, undefined, log, + (err, resHeaders) => { + assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`); + metadata.getObjectMD(bucketName, objectName, + {}, log, (err, md) => { + assert(md); + assert + .strictEqual(md['content-md5'], correctMD5); + done(); + }); + }); + }); + }); + + const mockModes = ['GOVERNANCE', 'COMPLIANCE']; + mockModes.forEach(mockMode => { + it(`should post an object with valid date & ${mockMode} mode`, done => { + const testPostObjectRequest = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: { + 'x-amz-object-lock-retain-until-date': mockDate, + 'x-amz-object-lock-mode': mockMode, + }, + url: '/', + calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==', + }, postBody); + bucketPut(authInfo, testPutBucketRequestLock, log, () => { + objectPost(authInfo, testPostObjectRequest, undefined, log, + (err, headers) => { + assert.ifError(err); + assert.strictEqual(headers.ETag, `"${correctMD5}"`); + metadata.getObjectMD(bucketName, objectName, {}, log, + (err, md) => { + const mode = md.retentionMode; + const retainUntilDate = md.retentionDate; + assert.ifError(err); + assert(md); + assert.strictEqual(mode, mockMode); + assert.strictEqual(retainUntilDate, mockDate); + done(); + }); + }); + }); + }); + }); + + const formatTime = time => time.slice(0, 20); + + const testObjectLockConfigs = [ + { + testMode: 'COMPLIANCE', + val: 30, + type: 'Days', + }, + { + testMode: 'GOVERNANCE', + val: 5, + type: 'Years', + }, + ]; + testObjectLockConfigs.forEach(config 
=> { + const { testMode, type, val } = config; + it('should put an object with default retention if object does not ' + + 'have retention configuration but bucket has', done => { + const testPostObjectRequest = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: {}, + url: '/', + calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==', + }, postBody); + + const testObjLockRequest = { + bucketName, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + post: objectLockTestUtils.generateXml(testMode, val, type), + }; + + bucketPut(authInfo, testPutBucketRequestLock, log, () => { + bucketPutObjectLock(authInfo, testObjLockRequest, log, () => { + objectPost(authInfo, testPostObjectRequest, undefined, log, + (err, headers) => { + assert.ifError(err); + assert.strictEqual(headers.ETag, `"${correctMD5}"`); + metadata.getObjectMD(bucketName, objectName, {}, + log, (err, md) => { + const mode = md.retentionMode; + const retainDate = md.retentionDate; + const date = moment(); + const days + = type === 'Days' ? val : val * 365; + const expectedDate + = date.add(days, 'days'); + assert.ifError(err); + assert.strictEqual(mode, testMode); + assert.strictEqual(formatTime(retainDate), + formatTime(expectedDate.toISOString())); + done(); + }); + }); + }); + }); + }); + }); + + + it('should successfully put an object with legal hold ON', done => { + const request = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: { + 'x-amz-object-lock-legal-hold': 'ON', + }, + url: '/', + calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==', + }, postBody); + + bucketPut(authInfo, testPutBucketRequestLock, log, () => { + objectPost(authInfo, request, undefined, log, (err, headers) => { + assert.ifError(err); + assert.strictEqual(headers.ETag, `"${correctMD5}"`); + metadata.getObjectMD(bucketName, objectName, {}, log, + (err, md) => { + assert.ifError(err); + assert.strictEqual(md.legalHold, true); + done(); + }); + }); + }); + }); + + it('should successfully put an object with legal hold OFF', done => { + const request = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: { + 'x-amz-object-lock-legal-hold': 'OFF', + }, + url: '/', + calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==', + }, postBody); + + bucketPut(authInfo, testPutBucketRequestLock, log, () => { + objectPost(authInfo, request, undefined, log, (err, headers) => { + assert.ifError(err); + assert.strictEqual(headers.ETag, `"${correctMD5}"`); + metadata.getObjectMD(bucketName, objectName, {}, log, + (err, md) => { + assert.ifError(err); + assert(md); + assert.strictEqual(md.legalHold, false); + done(); + }); + }); + }); + }); + + it('should not leave orphans in data when overwriting an object', done => { + const testPostObjectRequest2 = new DummyRequest({ + bucketName, + formData: { + bucket: bucketName, + key: objectName, + }, + fileEventData: {}, + headers: {}, + url: '/', + }, Buffer.from('I am another body', 'utf8')); + + bucketPut(authInfo, testPutBucketRequest, log, () => { + objectPost(authInfo, testPostObjectRequest, + undefined, log, () => { + objectPost(authInfo, testPostObjectRequest2, undefined, + log, + () => { + // orphan objects don't get deleted + // until the next tick + // in memory + setImmediate(() => { + // Data store starts at index 1 + assert.strictEqual(ds[0], undefined); + assert.strictEqual(ds[1], undefined); + assert.deepStrictEqual(ds[2].value, 
+ Buffer.from('I am another body', 'utf8')); + done(); + }); + }); + }); + }); + }); + + it('should not leave orphans in data when overwriting an multipart upload object', done => { + bucketPut(authInfo, testPutBucketRequest, log, () => { + mpuUtils.createMPU('default', bucketName, objectName, log, + (err, testUploadId) => { + objectPost(authInfo, testPostObjectRequest, undefined, log, err => { + assert.ifError(err); + sinon.assert.calledWith(metadata.putObjectMD, + any, any, any, sinon.match({ oldReplayId: testUploadId }), any, any); + done(); + }); + }); + }); + }); + + describe('objectPost API with versioning', () => { + beforeEach(() => { + cleanup(); + }); + + const objData = ['foo0', 'foo1', 'foo2'].map(str => + Buffer.from(str, 'utf8')); + const testPostObjectRequests = objData.map(data => versioningTestUtils + .createPostObjectRequest(bucketName, objectName, data)); + + it('should delete latest version when creating new null version ' + + 'if latest version is null version', done => { + async.series([ + callback => bucketPut(authInfo, testPutBucketRequest, log, + callback), + // putting null version by putting obj before versioning configured + callback => objectPost(authInfo, testPostObjectRequests[0], undefined, + log, err => { + versioningTestUtils.assertDataStoreValues(ds, [objData[0]]); + callback(err); + }), + callback => bucketPutVersioning(authInfo, suspendVersioningRequest, + log, callback), + // creating new null version by putting obj after ver suspended + callback => objectPost(authInfo, testPostObjectRequests[1], + undefined, log, err => { + // wait until next tick since mem backend executes + // deletes in the next tick + setImmediate(() => { + // old null version should be deleted + versioningTestUtils.assertDataStoreValues(ds, + [undefined, objData[1]]); + callback(err); + }); + }), + // create another null version + callback => objectPost(authInfo, testPostObjectRequests[2], + undefined, log, err => { + setImmediate(() => { + // old null version should be deleted + versioningTestUtils.assertDataStoreValues(ds, + [undefined, undefined, objData[2]]); + callback(err); + }); + }), + ], done); + }); + + describe('when null version is not the latest version', () => { + const objData = ['foo0', 'foo1', 'foo2'].map(str => + Buffer.from(str, 'utf8')); + const testPostObjectRequests = objData.map(data => versioningTestUtils + .createPostObjectRequest(bucketName, objectName, data)); + beforeEach(done => { + async.series([ + callback => bucketPut(authInfo, testPutBucketRequest, log, + callback), + // putting null version: put obj before versioning configured + callback => objectPost(authInfo, testPostObjectRequests[0], + undefined, log, callback), + callback => bucketPutVersioning(authInfo, + enableVersioningRequest, log, callback), + // put another version: + callback => objectPost(authInfo, testPostObjectRequests[1], + undefined, log, callback), + callback => bucketPutVersioning(authInfo, + suspendVersioningRequest, log, callback), + ], err => { + if (err) { + return done(err); + } + versioningTestUtils.assertDataStoreValues(ds, + objData.slice(0, 2)); + return done(); + }); + }); + + it('should still delete null version when creating new null version', + done => { + objectPost(authInfo, testPostObjectRequests[2], undefined, + log, err => { + assert.ifError(err, `Unexpected err: ${err}`); + setImmediate(() => { + // old null version should be deleted after putting + // new null version + versioningTestUtils.assertDataStoreValues(ds, + [undefined, objData[1], objData[2]]); + 
done(err);
+                        });
+                    });
+                });
+        });
+
+        it('should return BadDigest error and not leave orphans in data when ' +
+        'contentMD5 and completedHash do not match', done => {
+            const testPostObjectRequests = new DummyRequest({
+                bucketName,
+                formData: {
+                    bucket: bucketName,
+                    key: objectName,
+                },
+                fileEventData: {},
+                headers: {},
+                url: '/',
+                contentMD5: 'vnR+tLdVF79rPPfF+7YvOg==',
+            }, Buffer.from('I am another body', 'utf8'));
+
+            bucketPut(authInfo, testPutBucketRequest, log, () => {
+                objectPost(authInfo, testPostObjectRequests, undefined, log,
+                    err => {
+                        assert.deepStrictEqual(err, errors.BadDigest);
+                        // orphan objects don't get deleted
+                        // until the next tick
+                        // in memory
+                        setImmediate(() => {
+                            // Data store starts at index 1
+                            assert.strictEqual(ds[0], undefined);
+                            assert.strictEqual(ds[1], undefined);
+                            done();
+                        });
+                    });
+            });
+        });
+    });
+});
+
diff --git a/tests/unit/helpers.js b/tests/unit/helpers.js
index d4c017003d..1277e9110f 100644
--- a/tests/unit/helpers.js
+++ b/tests/unit/helpers.js
@@ -374,6 +374,19 @@ const versioningTestUtils = {
         };
         return new DummyRequest(params, body);
     },
+    createPostObjectRequest: (bucketName, keyName, body) => {
+        const params = {
+            bucketName,
+            formData: {
+                bucket: bucketName,
+                key: keyName,
+            },
+            fileEventData: {},
+            headers: {},
+            url: '/',
+        };
+        return new DummyRequest(params, body);
+    },
     createBucketPutVersioningReq: (bucketName, status) => {
         const request = {
             bucketName,

From f10b5541fd7b1fef338e2cc1508068bece3684b7 Mon Sep 17 00:00:00 2001
From: Will Toozs
Date: Tue, 2 Jul 2024 12:40:57 +0200
Subject: [PATCH 07/15] CLDSRV-551: add auth field checking in api handler

---
 lib/api/api.js | 33 ++++++++++++++++++++++++++++-----
 1 file changed, 28 insertions(+), 5 deletions(-)

diff --git a/lib/api/api.js b/lib/api/api.js
index 98747ed2d1..7ec128c00d 100644
--- a/lib/api/api.js
+++ b/lib/api/api.js
@@ -70,6 +70,7 @@ const validateQueryAndHeaders = require('../utilities/validateQueryAndHeaders');
 const parseCopySource = require('./apiUtils/object/parseCopySource');
 const { tagConditionKeyAuth } = require('./apiUtils/authorization/tagConditionKeys');
 const checkHttpHeadersSize = require('./apiUtils/object/checkHttpHeadersSize');
+const { decryptToken } = require('./apiUtils/object/continueToken');
 
 const monitoringMap = policies.actionMaps.actionMonitoringMapS3;
 
@@ -201,6 +202,11 @@ const api = {
             if (apiMethod === 'objectPost' && request.headers['content-type'].includes('multipart/form-data')) {
                 writeContinue(request, response);
 
+                let algoOK = false;
+                let credOK = false;
+                let dateOK = false;
+                let sigOK = false;
+                let policyOK = false;
                 request.formData = {};
                 let totalFieldSize = 0;
                 const MAX_FIELD_SIZE = 20 * 1024; // 20KB
@@ -213,19 +219,36 @@ const api = {
                     // Convert fieldname to lowercase for case-insensitive comparison
                     const lowerFieldname = fieldname.toLowerCase();
                     request.formData[lowerFieldname] = val;
+                    if (lowerFieldname === 'x-amz-algorithm') {
+                        algoOK = true;
+                    }
+                    if (lowerFieldname === 'x-amz-credential') {
+                        credOK = true;
+                    }
+                    if (lowerFieldname === 'x-amz-date') {
+                        dateOK = true;
+                    }
+                    if (lowerFieldname === 'x-amz-signature') {
+                        sigOK = true;
+                    }
+                    if (lowerFieldname === 'policy') {
+                        policyOK = true;
+                    }
                 });
 
                 bb.on('file', (fieldname, file, filename, encoding, mimetype) => {
                     fileEventData = { fieldname, file, filename, encoding, mimetype };
-                    return next(null);
+                    if (algoOK && credOK && dateOK && sigOK && policyOK) {
+                        return next(null);
+                    }
                 });
 
                 bb.on('finish', () => {
-                    // if fields are found but no file, 
continue - if (!fileEventData) { - return next(null); + if (!algoOK || !credOK || !dateOK || !sigOK || !policyOK) { + return next(errors.InvalidRequest); } - return undefined; + // if fields are found but no file, continue + return next(null); }); bb.on('error', (err) => { From 456d5b79e3b7c4675fda20828e84e7d46e94db17 Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Tue, 2 Jul 2024 12:51:37 +0200 Subject: [PATCH 08/15] fixup: api handler --- lib/api/api.js | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lib/api/api.js b/lib/api/api.js index 7ec128c00d..bdb17b55dd 100644 --- a/lib/api/api.js +++ b/lib/api/api.js @@ -300,10 +300,6 @@ const api = { log.trace('authentication error', { error: err }); return next(err); } - // TODO RING-45960 remove ignore for POST object here - if (apiMethod === 'objectPost') { - return next(null, userInfo, authorizationResults, streamingV4Params); - } return next(null, userInfo, authorizationResults, streamingV4Params); }, 's3', requestContexts), (userInfo, authorizationResults, streamingV4Params, next) => { @@ -322,10 +318,6 @@ const api = { apiMethod, log, (err, authResultsWithTags) => { - // TODO RING-45960 remove ignore for POST object here - if (apiMethod === 'objectPost') { - return next(null, userInfo, authorizationResults, streamingV4Params); - } if (err) { log.trace('tag authentication error', { error: err }); return next(err); From 471902ebdef0eeaa4cc5c12627c123ccb6c2f242 Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Tue, 2 Jul 2024 15:45:48 +0200 Subject: [PATCH 09/15] CLDSRV-551: functional tests --- .../aws-node-sdk/test/object/post.js | 202 ++++++++++++++++++ 1 file changed, 202 insertions(+) diff --git a/tests/functional/aws-node-sdk/test/object/post.js b/tests/functional/aws-node-sdk/test/object/post.js index fd639d78cc..d3dccc9bbc 100644 --- a/tests/functional/aws-node-sdk/test/object/post.js +++ b/tests/functional/aws-node-sdk/test/object/post.js @@ -512,5 +512,207 @@ describe('POST object', () => { }); }); }); + + it('should handle error when signature is invalid', done => { + const { url, bucketName } = testContext; + const fields = calculateFields(ak, sk, bucketName); + fields.push({ name: 'X-Amz-Signature', value: 'invalid-signature' }); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt'))); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(() => { + done(new Error('Expected error but got success response')); + }) + .catch(err => { + assert.equal(err.response.status, 403); + done(); + }); + }); + }); + + it('should return an error when signature includes invalid data', done => { + const { url, bucketName } = testContext; + let fields = calculateFields(ak, sk, bucketName); + const laterThanNow = new Date(new Date().getTime() + 60000); + const shortFormattedDate = formatDate(laterThanNow); + + const signingKey = getSignatureKey(sk, shortFormattedDate, 'ap-east-1', 's3'); + const signature = crypto.createHmac('sha256', signingKey).update(fields.find(field => + field.name === 'Policy').value).digest('hex'); + + // Modify the signature to be invalid + fields = fields.map(field => { + if (field.name === 'X-Amz-Signature') { + return { name: 'X-Amz-Signature', value: signature }; + } + return field; + }); + + const formData = new FormData(); + + 
fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt'))); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(() => { + done(new Error('Request should not succeed with an invalid signature')); + }) + .catch(err => { + assert.ok(err.response, 'Error should be returned by axios'); + + // Parse the XML error response + xml2js.parseString(err.response.data, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + const error = result.Error; + assert.equal( + error.Code[0], + 'SignatureDoesNotMatch', + 'Expected SignatureDoesNotMatch error code' + ); + done(); + }); + }); + }); + }); + + it('should return an error for invalid keys', done => { + const { url, bucketName } = testContext; + const invalidAccessKeyId = 'INVALIDACCESSKEY'; + const invalidSecretAccessKey = 'INVALIDSECRETKEY'; + let fields = calculateFields(invalidAccessKeyId, invalidSecretAccessKey, bucketName); + + // Modify the signature to be invalid + fields = fields.map(field => { + if (field.name === 'X-Amz-Signature') { + return { name: 'X-Amz-Signature', value: 'invalid-signature' }; + } + return field; + }); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt'))); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(() => { + done(new Error('Request should not succeed with an invalid keys')); + }) + .catch(err => { + assert.ok(err.response, 'Error should be returned by axios'); + + // Parse the XML error response + xml2js.parseString(err.response.data, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + const error = result.Error; + assert.equal(error.Code[0], 'InvalidAccessKeyId', 'Expected InvalidAccessKeyId error code'); + done(); + }); + }); + }); + }); + + it('should return an error for invalid credential', done => { + const { url, bucketName } = testContext; + let fields = calculateFields(ak, sk, bucketName); + const laterThanNow = new Date(new Date().getTime() + 60000); + const shortFormattedDate = formatDate(laterThanNow); + + const credential = `${ak}/${shortFormattedDate}/ap-east-1/s3/aws4_request`; + + // Modify the signature to be invalid + fields = fields.map(field => { + if (field.name === 'X-Amz-Credential') { + return { name: 'X-Amz-Credential', value: credential }; + } + return field; + }); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt'))); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }) + .then(() => { + done(new Error('Request should not succeed with an invalid credential')); + }) + .catch(err => { + assert.ok(err.response, 'Error should be returned by axios'); + + // Parse the XML error response + xml2js.parseString(err.response.data, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + const error = result.Error; + 
assert.equal(error.Code[0], 'InvalidArgument', 'Expected InvalidArgument error code'); + done(); + }); + }); + }); + }); }); From 9534fe4cc65a81209d52dbb3b1b7ff1a582c9b35 Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Thu, 4 Jul 2024 14:40:42 +0200 Subject: [PATCH 10/15] fixup: api: remove policy decrypt --- lib/api/api.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/api/api.js b/lib/api/api.js index bdb17b55dd..afa1551840 100644 --- a/lib/api/api.js +++ b/lib/api/api.js @@ -70,7 +70,6 @@ const validateQueryAndHeaders = require('../utilities/validateQueryAndHeaders'); const parseCopySource = require('./apiUtils/object/parseCopySource'); const { tagConditionKeyAuth } = require('./apiUtils/authorization/tagConditionKeys'); const checkHttpHeadersSize = require('./apiUtils/object/checkHttpHeadersSize'); -const { decryptToken } = require('./apiUtils/object/continueToken'); const monitoringMap = policies.actionMaps.actionMonitoringMapS3; From 9f8df2f24ba01837c199c9a9803f45d1472d6f5b Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Thu, 4 Jul 2024 19:09:31 +0200 Subject: [PATCH 11/15] fixup: api: add fileevent check for no file data --- lib/api/api.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/api/api.js b/lib/api/api.js index afa1551840..493dd30c08 100644 --- a/lib/api/api.js +++ b/lib/api/api.js @@ -247,7 +247,9 @@ const api = { return next(errors.InvalidRequest); } // if fields are found but no file, continue - return next(null); + if (!fileEventData) { + return next(null); + } }); bb.on('error', (err) => { From a4b10edad0be177301f159b4207d66f0343779cc Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Thu, 4 Jul 2024 19:10:24 +0200 Subject: [PATCH 12/15] fixup: func tests: auth functional tests --- .../aws-node-sdk/test/object/post.js | 23 +++++++++++++------ 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/tests/functional/aws-node-sdk/test/object/post.js b/tests/functional/aws-node-sdk/test/object/post.js index d3dccc9bbc..ef3005e3d2 100644 --- a/tests/functional/aws-node-sdk/test/object/post.js +++ b/tests/functional/aws-node-sdk/test/object/post.js @@ -60,7 +60,8 @@ const calculateFields = (ak, sk, bucketName, additionalConditions) => { }); } const policy = { - expiration: new Date(new Date().getTime() + 60000).toISOString(), + // 15 minutes from now + expiration: new Date(new Date().getTime() + 15 * 60 * 1000).toISOString(), conditions: conditionsFields, }; const policyBase64 = Buffer.from(JSON.stringify(policy)).toString('base64'); @@ -515,6 +516,7 @@ describe('POST object', () => { it('should handle error when signature is invalid', done => { const { url, bucketName } = testContext; + const fields = calculateFields(ak, sk, bucketName); fields.push({ name: 'X-Amz-Signature', value: 'invalid-signature' }); const formData = new FormData(); @@ -530,18 +532,25 @@ describe('POST object', () => { return done(err); } - axios.post(url, formData, { + return axios.post(url, formData, { headers: { ...formData.getHeaders(), 'Content-Length': length, }, }) - .then(() => { - done(new Error('Expected error but got success response')); - }) + .then(() => done(new Error('Expected error but got success response'))) .catch(err => { assert.equal(err.response.status, 403); - done(); + return xml2js.parseString(err.response.data, (err, result) => { + if (err) { + return done(err); + } + + const error = result.Error; + assert.equal(error.Code[0], 'SignatureDoesNotMatch'); + assert.equal(error.Message[0], 'The request signature we calculated does not match the 
signature you provided.'); + return done(); + }); }); }); }); @@ -666,7 +675,7 @@ describe('POST object', () => { const laterThanNow = new Date(new Date().getTime() + 60000); const shortFormattedDate = formatDate(laterThanNow); - const credential = `${ak}/${shortFormattedDate}/ap-east-1/s3/aws4_request`; + const credential = `${ak}/${shortFormattedDate}/eu-west-1/blabla/aws4_request`; // Modify the signature to be invalid fields = fields.map(field => { From 413d0e374bff6dd6724bc7a50a8fd10e21e01b2b Mon Sep 17 00:00:00 2001 From: Will Toozs Date: Thu, 4 Jul 2024 19:20:29 +0200 Subject: [PATCH 13/15] lint --- lib/api/api.js | 7 ++-- .../aws-node-sdk/test/object/post.js | 35 ++++++++----------- 2 files changed, 18 insertions(+), 24 deletions(-) diff --git a/lib/api/api.js b/lib/api/api.js index 493dd30c08..cdd59b7fc0 100644 --- a/lib/api/api.js +++ b/lib/api/api.js @@ -240,16 +240,15 @@ const api = { if (algoOK && credOK && dateOK && sigOK && policyOK) { return next(null); } + return undefined; }); bb.on('finish', () => { - if (!algoOK || !credOK || !dateOK || !sigOK || !policyOK) { - return next(errors.InvalidRequest); - } // if fields are found but no file, continue - if (!fileEventData) { + if ((algoOK && credOK && dateOK && sigOK && policyOK) && !fileEventData) { return next(null); } + return next(errors.InvalidRequest); }); bb.on('error', (err) => { diff --git a/tests/functional/aws-node-sdk/test/object/post.js b/tests/functional/aws-node-sdk/test/object/post.js index ef3005e3d2..ce341c3bd2 100644 --- a/tests/functional/aws-node-sdk/test/object/post.js +++ b/tests/functional/aws-node-sdk/test/object/post.js @@ -548,7 +548,8 @@ describe('POST object', () => { const error = result.Error; assert.equal(error.Code[0], 'SignatureDoesNotMatch'); - assert.equal(error.Message[0], 'The request signature we calculated does not match the signature you provided.'); + assert.equal(error.Message[0], + 'The request signature we calculated does not match the signature you provided.'); return done(); }); }); @@ -581,25 +582,23 @@ describe('POST object', () => { formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt'))); - formData.getLength((err, length) => { + return formData.getLength((err, length) => { if (err) { return done(err); } - axios.post(url, formData, { + return axios.post(url, formData, { headers: { ...formData.getHeaders(), 'Content-Length': length, }, }) - .then(() => { - done(new Error('Request should not succeed with an invalid signature')); - }) + .then(() => done(new Error('Request should not succeed with an invalid signature'))) .catch(err => { assert.ok(err.response, 'Error should be returned by axios'); // Parse the XML error response - xml2js.parseString(err.response.data, (parseErr, result) => { + return xml2js.parseString(err.response.data, (parseErr, result) => { if (parseErr) { return done(parseErr); } @@ -610,7 +609,7 @@ describe('POST object', () => { 'SignatureDoesNotMatch', 'Expected SignatureDoesNotMatch error code' ); - done(); + return done(); }); }); }); @@ -643,27 +642,25 @@ describe('POST object', () => { return done(err); } - axios.post(url, formData, { + return axios.post(url, formData, { headers: { ...formData.getHeaders(), 'Content-Length': length, }, }) - .then(() => { - done(new Error('Request should not succeed with an invalid keys')); - }) + .then(() => done(new Error('Request should not succeed with an invalid keys'))) .catch(err => { assert.ok(err.response, 'Error should be returned by axios'); // Parse the XML error response - 
From df8edfdec043b4495e952113b6667d72dcd86370 Mon Sep 17 00:00:00 2001
From: Will Toozs
Date: Fri, 5 Jul 2024 14:16:01 +0200
Subject: [PATCH 14/15] fixup: api handler fix

---
 lib/api/api.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/api/api.js b/lib/api/api.js
index cdd59b7fc0..26251f5f1e 100644
--- a/lib/api/api.js
+++ b/lib/api/api.js
@@ -248,7 +248,7 @@ const api = {
                         if ((algoOK && credOK && dateOK && sigOK && policyOK) && !fileEventData) {
                             return next(null);
                         }
-                        return next(errors.InvalidRequest);
+                        return undefined;
                     });
 
                     bb.on('error', (err) => {
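
With this change the busboy wiring settles into its final shape: 'field' events validate each form field and advance the waterfall as soon as every check passes, the 'file' event hands the stream off once the file part arrives, and 'finish' only has to cover the fields-only case. The switch to returning undefined here and in the lint pass above presumably avoids signalling the waterfall twice after another event has already called next. A minimal illustrative sketch of that event flow, not the CloudServer source: checkField is a hypothetical stand-in for the real signature, policy, and date checks, and the once() guard plays the role of the return-undefined bookkeeping:

    const busboy = require('@fastify/busboy');

    // hypothetical stand-in for the real per-field validation
    function checkField(flags, name, value) {
        switch (name) {
        case 'x-amz-algorithm': flags.algoOK = value === 'AWS4-HMAC-SHA256'; break;
        case 'x-amz-credential': flags.credOK = value.length > 0; break;
        case 'x-amz-date': flags.dateOK = value.length > 0; break;
        case 'x-amz-signature': flags.sigOK = value.length > 0; break;
        case 'policy': flags.policyOK = value.length > 0; break;
        default: break;
        }
    }

    function handlePostForm(request, next) {
        const bb = busboy({ headers: request.headers });
        const flags = { algoOK: false, credOK: false, dateOK: false, sigOK: false, policyOK: false };
        const allOK = () => flags.algoOK && flags.credOK && flags.dateOK
            && flags.sigOK && flags.policyOK;
        let sawFile = false;
        let called = false;
        // guard so next() fires exactly once across all busboy events
        const once = (err, file) => {
            if (!called) {
                called = true;
                next(err, file || null);
            }
        };

        bb.on('field', (fieldname, val) => {
            checkField(flags, fieldname.toLowerCase(), val);
            if (allOK()) {
                once(null); // every auth field checked out; proceed
            }
        });
        bb.on('file', (fieldname, file) => {
            sawFile = true;
            once(null, file); // file part arrived; hand the stream off
        });
        bb.on('finish', () => {
            if (allOK() && !sawFile) {
                once(null); // a fields-only POST is still a valid request
            }
        });
        bb.on('error', err => once(err));
        request.pipe(bb);
    }
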
From 824b593a19f1f4017fed719f8868fa3a6d26fcc9 Mon Sep 17 00:00:00 2001
From: Will Toozs
Date: Fri, 5 Jul 2024 14:31:46 +0200
Subject: [PATCH 15/15] fixup: arsenal package

---
 package.json | 4 +++-
 yarn.lock    | 4 ++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/package.json b/package.json
index 7e0c5b8811..b509a41df9 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,7 @@
   "dependencies": {
     "@fastify/busboy": "^2.1.1",
     "@hapi/joi": "^17.1.0",
-    "arsenal": "git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085",
+    "arsenal": "git+https://github.com/scality/arsenal#b00aea282244cd90efc745941e03d0be7e734fc7",
     "async": "~2.5.0",
     "aws-sdk": "2.905.0",
     "azure-storage": "^2.1.0",
@@ -61,6 +61,8 @@
   },
   "scripts": {
     "ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
+    "ft_post": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post.js",
+    "ft_post_aws": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post-copy.js",
     "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
     "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket",
     "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support",
diff --git a/yarn.lock b/yarn.lock
index 625c5cbf49..72e27aaa21 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -504,9 +504,9 @@ arraybuffer.slice@~0.0.7:
   optionalDependencies:
     ioctl "^2.0.2"
 
-"arsenal@git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085":
+"arsenal@git+https://github.com/scality/arsenal#b00aea282244cd90efc745941e03d0be7e734fc7":
   version "7.70.29"
-  resolved "git+https://github.com/scality/arsenal#1016c270856ffae3071cba0237cee079888d3085"
+  resolved "git+https://github.com/scality/arsenal#b00aea282244cd90efc745941e03d0be7e734fc7"
   dependencies:
     "@js-sdsl/ordered-set" "^4.4.2"
     "@types/async" "^3.2.12"
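
For reference, the two new package.json scripts give the POST object feature its own test entry points. Assuming a locally running CloudServer with the usual functional-test credentials configured, the expected invocations would be:

    yarn ft_post        # tests/functional/aws-node-sdk/test/object/post.js
    yarn ft_post_aws    # tests/functional/aws-node-sdk/test/object/post-copy.js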