diff --git a/documentation/BucketInfoModelVersion.md b/documentation/BucketInfoModelVersion.md
index 240ad1762..c2ab8d311 100644
--- a/documentation/BucketInfoModelVersion.md
+++ b/documentation/BucketInfoModelVersion.md
@@ -285,3 +285,16 @@ this._quotaMax = quotaMax || 0;
 ### Usage
 
 Used to store bucket quota
+
+## Model version 18
+
+### Properties Added
+
+```javascript
+private _bucketLoggingStatus?: BucketLoggingStatus;
+```
+
+### Usage
+
+Used for bucket logging
+https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerLogs.html
diff --git a/lib/models/BucketInfo.ts b/lib/models/BucketInfo.ts
index 5c993cfad..750015d07 100644
--- a/lib/models/BucketInfo.ts
+++ b/lib/models/BucketInfo.ts
@@ -11,11 +11,12 @@ import { ACL as OACL } from './ObjectMD';
 import { areTagsValid, BucketTag } from '../s3middleware/tagging';
 import { VeeamCapability, VeeamSOSApiSchema, VeeamSOSApiSerializable } from './Veeam';
 import { AzureInfoMetadata } from './BucketAzureInfo';
+import BucketLoggingStatus from './BucketLoggingStatus';
 
 // WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
 // BucketInfoModelVersion.md can be found in documentation/ at the root
 // of this repository
-const modelVersion = 17;
+const modelVersion = 18;
 
 export type CORS = {
     id: string;
@@ -78,6 +79,7 @@ export type BucketMetadata = {
     tags: Array<BucketTag>,
     capabilities?: Capabilities,
     quotaMax: bigint | number,
+    bucketLoggingStatus?: BucketLoggingStatus,
 };
 
 export type BucketMetadataJSON = Omit<BucketMetadata, 'quotaMax'> & {
@@ -115,6 +117,7 @@ export default class BucketInfo implements BucketMetadata {
     private _ingestion?: { status: 'enabled' | 'disabled' };
     private _capabilities?: Capabilities;
     private _quotaMax: bigint;
+    private _bucketLoggingStatus?: BucketLoggingStatus;
 
     /**
      * Represents all bucket information.
@@ -201,6 +204,7 @@ export default class BucketInfo implements BucketMetadata {
         tags?: Array<BucketTag> | [],
         capabilities?: Capabilities,
         quotaMax?: bigint | number,
+        bucketLoggingStatus?: BucketLoggingStatus,
     ) {
         assert.strictEqual(typeof name, 'string');
         assert.strictEqual(typeof owner, 'string');
@@ -327,6 +331,10 @@ export default class BucketInfo implements BucketMetadata {
         }
         assert.strictEqual(areTagsValid(tags), true);
 
+        if (bucketLoggingStatus) {
+            assert(bucketLoggingStatus instanceof BucketLoggingStatus);
+        }
+
         // IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
         this._acl = aclInstance;
         this._name = name;
@@ -353,6 +361,7 @@ export default class BucketInfo implements BucketMetadata {
         this._objectLockConfiguration = objectLockConfiguration;
         this._notificationConfiguration = notificationConfiguration;
         this._tags = tags;
+        this._bucketLoggingStatus = bucketLoggingStatus;
         this._capabilities = capabilities && {
             ...capabilities,
@@ -401,6 +410,7 @@ export default class BucketInfo implements BucketMetadata {
                     VeeamCapability.serialize(this._capabilities.VeeamSOSApi),
             },
             quotaMax: this._quotaMax.toString(),
+            bucketLoggingStatus: this._bucketLoggingStatus,
         };
         const final = this._websiteConfiguration
             ? {
@@ -433,6 +443,8 @@
         };
         const websiteConfig = obj.websiteConfiguration ?
             new WebsiteConfiguration(obj.websiteConfiguration) : undefined;
+        const bucketLoggingStatus = obj.bucketLoggingStatus ?
+            new BucketLoggingStatus((obj.bucketLoggingStatus as any)._loggingEnabled) : undefined;
         return new BucketInfo(obj.name, obj.owner, obj.ownerDisplayName,
             obj.creationDate, obj.mdBucketModelVersion, obj.acl,
             obj.transient, obj.deleted, obj.serverSideEncryption,
@@ -441,7 +453,7 @@
             obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
             obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
             obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags,
-            capabilities, BigInt(obj.quotaMax || 0n));
+            capabilities, BigInt(obj.quotaMax || 0n), bucketLoggingStatus);
     }
 
     /**
@@ -474,7 +486,7 @@
             data._isNFS, data._ingestion, data._azureInfo,
             data._objectLockEnabled, data._objectLockConfiguration,
             data._notificationConfiguration, data._tags, capabilities,
-            BigInt(data._quotaMax || 0n));
+            BigInt(data._quotaMax || 0n), data._bucketLoggingStatus);
     }
 
     /**
@@ -484,6 +496,8 @@
      * @return Return an BucketInfo
      */
     static fromJson(data: BucketMetadataJSON) {
+        const bucketLoggingStatus = data.bucketLoggingStatus ?
+            new BucketLoggingStatus((data.bucketLoggingStatus as any)._loggingEnabled) : undefined;
         return new BucketInfo(data.name, data.owner, data.ownerDisplayName,
             data.creationDate, data.mdBucketModelVersion, data.acl,
             data.transient, data.deleted, data.serverSideEncryption,
@@ -497,7 +511,7 @@
                 ...data.capabilities,
                 VeeamSOSApi: data.capabilities?.VeeamSOSApi &&
                     VeeamCapability.parse(data.capabilities?.VeeamSOSApi),
-            }, BigInt(data.quotaMax || 0n));
+            }, BigInt(data.quotaMax || 0n), bucketLoggingStatus);
     }
 
     /**
@@ -1070,4 +1084,22 @@
         this._quotaMax = BigInt(quota || 0n);
         return this;
     }
+
+    /**
+     * Get bucket logging status
+     * @returns - bucket logging status
+     */
+    getBucketLoggingStatus() : BucketLoggingStatus | undefined {
+        return this._bucketLoggingStatus;
+    }
+
+    /**
+     * Set bucket logging status
+     * @param bucketLoggingStatus - bucket logging status
+     * @returns - this
+     */
+    setBucketLoggingStatus(bucketLoggingStatus : BucketLoggingStatus) {
+        this._bucketLoggingStatus = bucketLoggingStatus;
+        return this;
+    }
 }
diff --git a/lib/models/BucketLoggingStatus.ts b/lib/models/BucketLoggingStatus.ts
new file mode 100644
index 000000000..26b129345
--- /dev/null
+++ b/lib/models/BucketLoggingStatus.ts
@@ -0,0 +1,193 @@
+import { parseString } from 'xml2js';
+import errors, { ArsenalError, errorInstances } from '../errors';
+
+/** BucketLoggingStatus constants, not documented by AWS but found via testing */
+const TARGET_BUCKET_MIN_LENGTH = 3;
+const TARGET_BUCKET_MAX_LENGTH = 255;
+const TARGET_PREFIX_MAX_LENGTH = 800;
+
+/**
+ * Format of xml request:
+ * https://docs.aws.amazon.com/AmazonS3/latest/API/API_LoggingEnabled.html
+ * https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketLogging.html
+ *
+<?xml version="1.0" encoding="UTF-8"?>
+<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+   <LoggingEnabled>
+      <TargetBucket>string</TargetBucket>
+      <TargetGrants>
+         <Grant>
+            <Grantee>
+               <DisplayName>string</DisplayName>
+               <EmailAddress>string</EmailAddress>
+               <ID>string</ID>
+               <xsi:type>string</xsi:type>
+               <URI>string</URI>
+            </Grantee>
+            <Permission>string</Permission>
+         </Grant>
+      </TargetGrants>
+      <TargetObjectKeyFormat>
+         <PartitionedPrefix>
+            <PartitionDateSource>string</PartitionDateSource>
+         </PartitionedPrefix>
+         <SimplePrefix>
+         </SimplePrefix>
+      </TargetObjectKeyFormat>
+      <TargetPrefix>string</TargetPrefix>
+   </LoggingEnabled>
+</BucketLoggingStatus>
+*/
+
+export type LoggingEnabled = {
+    TargetBucket: string;
+    TargetPrefix: string;
+    // TargetGrants and TargetObjectKeyFormat are not implemented.
+};
+
+export default class BucketLoggingStatus {
+    private _loggingEnabled?: LoggingEnabled;
+
+    constructor(loggingEnabled?: LoggingEnabled) {
+        this._loggingEnabled = loggingEnabled;
+    }
+
+    getLoggingEnabled(): LoggingEnabled | undefined {
+        return this._loggingEnabled;
+    }
+
+    toXML(): string {
+        let loggingEnabledXML = "";
+        if (this._loggingEnabled) {
+            loggingEnabledXML = `<LoggingEnabled>
+    <TargetBucket>${this._loggingEnabled.TargetBucket}</TargetBucket>
+    <TargetPrefix>${this._loggingEnabled.TargetPrefix}</TargetPrefix>
+</LoggingEnabled>
+`;
+        }
+
+        return `<?xml version="1.0" encoding="UTF-8"?>
+<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+${loggingEnabledXML}
+</BucketLoggingStatus>
+`;
+    }
+
+    static fromXML(
+        data: string,
+    ): { error?: { arsenalError: ArsenalError, details: any }; res?: BucketLoggingStatus; } {
+        let parsed, parseError;
+
+        try {
+            parseString(data, (err: any, res: any) => {
+                parseError = err;
+                parsed = res;
+            });
+
+            if (parseError) {
+                return {
+                    error: { arsenalError: errors.MalformedXML, details: parseError },
+                };
+            }
+        } catch (err) {
+            return {
+                error: { arsenalError: errors.MalformedXML, details: err },
+            };
+        }
+
+        if (!parsed) {
+            return {
+                error: { arsenalError: errors.MalformedXML, details: 'request xml is undefined or empty' },
+            };
+        }
+
+        if (!parsed.BucketLoggingStatus) {
+            return {
+                error: { arsenalError: errors.MalformedXML, details: 'missing BucketLoggingStatus root' },
+            };
+        }
+
+        let loggingEnabled: LoggingEnabled | undefined = undefined;
+        if (parsed.BucketLoggingStatus.LoggingEnabled) {
+            const loggingEnabledData = parsed.BucketLoggingStatus.LoggingEnabled[0];
+
+            if (
+                !Object.prototype.hasOwnProperty.call(loggingEnabledData, 'TargetBucket') ||
+                loggingEnabledData.TargetBucket === null ||
+                loggingEnabledData.TargetBucket === undefined
+            ) {
+                return {
+                    error: {
+                        arsenalError: errors.MalformedXML,
+                        details: 'missing TargetBucket field in LoggingEnabled',
+                    },
+                };
+            } else if (loggingEnabledData.TargetBucket[0].length < TARGET_BUCKET_MIN_LENGTH) {
+                return {
+                    error: {
+                        arsenalError: errors.InvalidBucketName,
+                        details: `TargetBucket field length < ${TARGET_BUCKET_MIN_LENGTH}`,
+                    },
+                };
+            } else if (loggingEnabledData.TargetBucket[0].length > TARGET_BUCKET_MAX_LENGTH) {
+                return {
+                    error: {
+                        arsenalError: errors.InvalidBucketName,
+                        details: `TargetBucket field length > ${TARGET_BUCKET_MAX_LENGTH}`,
+                    },
+                };
+            }
+
+            if (
+                !Object.prototype.hasOwnProperty.call(loggingEnabledData, 'TargetPrefix') ||
+                loggingEnabledData.TargetPrefix === null ||
+                loggingEnabledData.TargetPrefix === undefined
+            ) {
+                return {
+                    error: {
+                        arsenalError: errors.MalformedXML,
+                        details: 'missing TargetPrefix field in LoggingEnabled',
+                    },
+                };
+            } else if (loggingEnabledData.TargetPrefix[0].length > TARGET_PREFIX_MAX_LENGTH) {
+                return {
+                    error: {
+                        arsenalError: errorInstances.InvalidArgument
+                            .customizeDescription(`Field exceeds ${TARGET_PREFIX_MAX_LENGTH} bytes`)
+                            .addMetadataEntry('invalidArguments',
+                                [{ ArgumentName: 'TargetPrefix', ArgumentValue: loggingEnabledData.TargetPrefix[0] }]),
+                        details: `TargetPrefix field length > ${TARGET_PREFIX_MAX_LENGTH}`,
+                    },
+                };
+            }
+
+            if (loggingEnabledData.TargetGrants) {
+                return {
+                    error: {
+                        arsenalError: errors.NotImplemented,
+                        details: 'TargetGrants field in LoggingEnabled is not implemented',
+                    },
+                };
+            }
+
+            if (loggingEnabledData.TargetObjectKeyFormat) {
+                return {
+                    error: {
+                        arsenalError: errors.NotImplemented,
+                        details: 'TargetObjectKeyFormat field in LoggingEnabled is not implemented',
+                    },
+                };
+            }
+
+            loggingEnabled = {
+                TargetBucket: loggingEnabledData.TargetBucket[0],
+                TargetPrefix: loggingEnabledData.TargetPrefix[0],
+            };
+        }
+
+        return {
+            error: undefined,
+            res: new BucketLoggingStatus(loggingEnabled),
+        };
+    }
+};
diff --git a/lib/models/index.ts b/lib/models/index.ts
index 6c36bfea6..1655ba571 100644
--- a/lib/models/index.ts
+++ b/lib/models/index.ts
@@ -13,4 +13,5 @@ export { default as ObjectMDArchive } from './ObjectMDArchive';
 export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
 export { default as ObjectMDLocation } from './ObjectMDLocation';
 export { default as ReplicationConfiguration } from './ReplicationConfiguration';
+export { default as BucketLoggingStatus } from './BucketLoggingStatus';
 export * as WebsiteConfiguration from './WebsiteConfiguration';
diff --git a/lib/policyEvaluator/utils/actionMaps.ts b/lib/policyEvaluator/utils/actionMaps.ts
index 7fbb00b99..d0274beb4 100644
--- a/lib/policyEvaluator/utils/actionMaps.ts
+++ b/lib/policyEvaluator/utils/actionMaps.ts
@@ -18,6 +18,7 @@ const sharedActionMap = {
     bucketGetVersioning: 's3:GetBucketVersioning',
     bucketGetWebsite: 's3:GetBucketWebsite',
     bucketGetTagging: 's3:GetBucketTagging',
+    bucketGetLogging: 's3:GetBucketLogging',
     bucketHead: 's3:ListBucket',
     bucketPutACL: 's3:PutBucketAcl',
     bucketPutCors: 's3:PutBucketCORS',
@@ -30,6 +31,7 @@ const sharedActionMap = {
     bucketPutVersioning: 's3:PutBucketVersioning',
     bucketPutWebsite: 's3:PutBucketWebsite',
     bucketPutTagging: 's3:PutBucketTagging',
+    bucketPutLogging: 's3:PutBucketLogging',
     bypassGovernanceRetention: 's3:BypassGovernanceRetention',
     listMultipartUploads: 's3:ListBucketMultipartUploads',
     listParts: 's3:ListMultipartUploadParts',
@@ -121,6 +123,7 @@ const actionMonitoringMapS3 = {
     bucketGetEncryption: 'GetBucketEncryption',
     bucketGetWebsite: 'GetBucketWebsite',
     bucketGetTagging: 'GetBucketTagging',
+    bucketGetLogging: 'GetBucketLogging',
     bucketHead: 'HeadBucket',
     bucketPut: 'CreateBucket',
     bucketPutACL: 'PutBucketAcl',
@@ -134,6 +137,7 @@ const actionMonitoringMapS3 = {
     bucketPutEncryption: 'PutBucketEncryption',
     bucketPutWebsite: 'PutBucketWebsite',
     bucketPutTagging: 'PutBucketTagging',
+    bucketPutLogging: 'PutBucketLogging',
     completeMultipartUpload: 'CompleteMultipartUpload',
     initiateMultipartUpload: 'CreateMultipartUpload',
     listMultipartUploads: 'ListMultipartUploads',
diff --git a/lib/s3routes/routes/routeGET.ts b/lib/s3routes/routes/routeGET.ts
index 7b87f1aba..9a9ed9bb2 100644
--- a/lib/s3routes/routes/routeGET.ts
+++ b/lib/s3routes/routes/routeGET.ts
@@ -62,6 +62,8 @@ export default function routerGET(
                 call('metadataSearch');
             } else if (query.quota !== undefined) {
                 call('bucketGetQuota');
+            } else if (query.logging !== undefined) {
+                call('bucketGetLogging');
             } else {
                 // GET bucket
                 call('bucketGet');
diff --git a/lib/s3routes/routes/routePUT.ts b/lib/s3routes/routes/routePUT.ts
index 5e6c450b0..85b1bbdc1 100644
--- a/lib/s3routes/routes/routePUT.ts
+++ b/lib/s3routes/routes/routePUT.ts
@@ -105,6 +105,13 @@ export default function routePUT(
                     return routesUtils.responseNoBody(err, corsHeaders,
                         response, 200, log);
                 });
+        } else if (query.logging !== undefined) {
+            api.callApiMethod('bucketPutLogging', request, response, log,
+                (err, resHeaders) => {
+                    routesUtils.statsReport500(err, statsClient);
+                    return routesUtils.responseNoBody(err, resHeaders,
+                        response, 200, log);
+                });
         } else if (query.quota !== undefined) {
             api.callApiMethod('bucketUpdateQuota', request, response, log,
                 (err, resHeaders) => {
diff --git a/tests/unit/models/BucketInfo.spec.js b/tests/unit/models/BucketInfo.spec.js
index eb05bbde6..dcd346999 100644
--- a/tests/unit/models/BucketInfo.spec.js
+++ b/tests/unit/models/BucketInfo.spec.js
@@ -3,6 +3,7 @@
 const BucketInfo = require('../../../lib/models/BucketInfo').default;
 const { WebsiteConfiguration } = require('../../../lib/models/WebsiteConfiguration');
 const { VeeamCapacityInfo } = require('../../../lib/models/Veeam');
+const BucketLoggingStatus = require('../../../lib/models/BucketLoggingStatus').default;
 
 // create variables to populate dummyBucket
 const bucketName = 'nameOfBucket';
@@ -232,6 +233,11 @@ const testBucketCapabilities = {
 
 const testBucketQuota = 100000n;
 
+const testBucketLoggingStatus = new BucketLoggingStatus({
+    TargetBucket: 'target-bucket',
+    TargetPrefix: 'logs/',
+});
+
 // create a dummy bucket to test getters and setters
 Object.keys(acl).forEach(
     aclObj => describe(`different acl configurations : ${aclObj}`, () => {
@@ -257,6 +263,7 @@ Object.keys(acl).forEach(
             testBucketTagging,
             testBucketCapabilities,
             testBucketQuota,
+            testBucketLoggingStatus,
         );
 
         describe('serialize/deSerialize on BucketInfo class', () => {
@@ -304,6 +311,7 @@ Object.keys(acl).forEach(
                     } : undefined,
                 } : undefined,
                 quotaMax: dummyBucket._quotaMax.toString(),
+                bucketLoggingStatus: dummyBucket._bucketLoggingStatus,
             };
             assert.strictEqual(serialized, JSON.stringify(bucketInfos));
             done();
@@ -354,6 +362,7 @@ Object.keys(acl).forEach(
                 _tags: dummyBucket._tags,
                 _capabilities: dummyBucket._capabilities,
                 _quotaMax: dummyBucket._quotaMax,
+                _bucketLoggingStatus: dummyBucket._bucketLoggingStatus,
             };
             const fromObj = BucketInfo.fromObj(dataObj);
             assert(fromObj instanceof BucketInfo);
@@ -506,6 +515,10 @@ Object.keys(acl).forEach(
                 assert.deepStrictEqual(dummyBucket.getCapability('VeeamSOSApi'),
                     testBucketCapabilities.VeeamSOSApi);
             });
+            it('getBucketLoggingStatus should return bucket logging status', () => {
+                assert.deepStrictEqual(dummyBucket.getBucketLoggingStatus(),
+                    testBucketLoggingStatus);
+            });
         });
 
         describe('setters on BucketInfo class', () => {
@@ -720,6 +733,15 @@ Object.keys(acl).forEach(
                 assert.deepStrictEqual(
                     dummyBucket.getQuota(), 0n);
             });
+            it('setBucketLoggingStatus should set bucket logging status', () => {
+                const newLoggingStatus = new BucketLoggingStatus({
+                    TargetBucket: 'new-target-bucket',
+                    TargetPrefix: 'new-logs/',
+                });
+                dummyBucket.setBucketLoggingStatus(newLoggingStatus);
+                assert.deepStrictEqual(
+                    dummyBucket.getBucketLoggingStatus(), newLoggingStatus);
+            });
         });
     }),
 );
@@ -851,3 +873,120 @@ describe('ingest', () => {
         assert.strictEqual(dummyBucket.isIngestionEnabled(), false);
     });
 });
+
+describe('bucketLoggingStatus', () => {
+    it('should set bucketLoggingStatus if provided during bucket creation', () => {
+        const loggingStatus = new BucketLoggingStatus({
+            TargetBucket: 'log-bucket',
+            TargetPrefix: 'access-logs/',
+        });
+        const dummyBucket = new BucketInfo(
+            bucketName, owner, ownerDisplayName, testDate,
+            BucketInfo.currentModelVersion(), acl[emptyAcl],
+            false, false, {
+                cryptoScheme: 1,
+                algorithm: 'sha1',
+                masterKeyId: 'somekey',
+                mandatory: true,
+            }, testVersioningConfiguration,
+            testLocationConstraint,
+            testWebsiteConfiguration,
+            testCorsConfiguration,
+            testReplicationConfiguration,
+            testLifecycleConfiguration,
+            testBucketPolicy,
+            testUid, undefined, true, undefined, undefined,
+            false, undefined, undefined, undefined,
+            undefined, undefined, loggingStatus);
+        assert.deepStrictEqual(dummyBucket.getBucketLoggingStatus(), loggingStatus);
+    });
+
+    it('should have bucketLoggingStatus as undefined if not provided', () => {
+        const dummyBucket = new BucketInfo(
+            bucketName, owner, ownerDisplayName, testDate,
+            BucketInfo.currentModelVersion(), acl[emptyAcl],
+            false, false, {
+                cryptoScheme: 1,
+                algorithm: 'sha1',
+                masterKeyId: 'somekey',
+                mandatory: true,
+            }, testVersioningConfiguration,
+            testLocationConstraint,
+            testWebsiteConfiguration,
+            testCorsConfiguration,
+            testReplicationConfiguration,
+            testLifecycleConfiguration,
+            testBucketPolicy,
+            testUid, undefined, true);
+        assert.strictEqual(dummyBucket.getBucketLoggingStatus(), undefined);
+    });
+
+    it('should throw assertion error if bucketLoggingStatus is not an instance of BucketLoggingStatus', () => {
+        assert.throws(() => {
+            new BucketInfo(
+                bucketName, owner, ownerDisplayName, testDate,
+                BucketInfo.currentModelVersion(), acl[emptyAcl],
+                false, false, {
+                    cryptoScheme: 1,
+                    algorithm: 'sha1',
+                    masterKeyId: 'somekey',
+                    mandatory: true,
+                },
+                undefined, undefined, undefined, undefined,
+                undefined, undefined, undefined, undefined,
+                undefined, undefined, undefined, undefined,
+                undefined, undefined, undefined, undefined,
+                undefined, { TargetBucket: 'bucket', TargetPrefix: 'logs/' });
+        }, /AssertionError/);
+    });
+
+    it('should serialize and deserialize bucketLoggingStatus correctly', () => {
+        const loggingStatus = new BucketLoggingStatus({
+            TargetBucket: 'serialization-test-bucket',
+            TargetPrefix: 'serialization-logs/',
+        });
+        const dummyBucket = new BucketInfo(
+            bucketName, owner, ownerDisplayName, testDate,
+            BucketInfo.currentModelVersion(), acl[emptyAcl],
+            false, false, {
+                cryptoScheme: 1,
+                algorithm: 'sha1',
+                masterKeyId: 'somekey',
+                mandatory: true,
+            }, testVersioningConfiguration,
+            testLocationConstraint,
+            testWebsiteConfiguration,
+            testCorsConfiguration,
+            testReplicationConfiguration,
+            testLifecycleConfiguration,
+            testBucketPolicy,
+            testUid, undefined, true, undefined, undefined,
+            false, undefined, undefined, undefined,
+            undefined, undefined, loggingStatus);
+
+        const serialized = dummyBucket.serialize();
+        const deserialized = BucketInfo.deSerialize(serialized);
+
+        assert.deepStrictEqual(
+            deserialized.getBucketLoggingStatus()?.getLoggingEnabled(),
+            loggingStatus.getLoggingEnabled()
+        );
+    });
+
+    it('should handle undefined bucketLoggingStatus during serialization', () => {
+        const dummyBucket = new BucketInfo(
+            bucketName, owner, ownerDisplayName, testDate,
+            BucketInfo.currentModelVersion(), acl[emptyAcl],
+            false, false, {
+                cryptoScheme: 1,
+                algorithm: 'sha1',
+                masterKeyId: 'somekey',
+                mandatory: true,
+            });
+
+        const serialized = dummyBucket.serialize();
+        const deserialized = BucketInfo.deSerialize(serialized);
+
+        assert.strictEqual(deserialized.getBucketLoggingStatus(), undefined);
+    });
+});
diff --git a/tests/unit/models/BucketLoggingStatus.spec.js b/tests/unit/models/BucketLoggingStatus.spec.js
new file mode 100644
index 000000000..88e831c4e
--- /dev/null
+++ b/tests/unit/models/BucketLoggingStatus.spec.js
@@ -0,0 +1,310 @@
+const assert = require('assert');
+const BucketLoggingStatus = require('../../../lib/models/BucketLoggingStatus').default;
+const { parseString } = require('xml2js');
+
+describe('BucketLoggingStatus', () => {
+    describe('Constructor', () => {
+        it('should initialize with undefined when no parameters provided', () => {
+            const config = new BucketLoggingStatus();
+            assert.strictEqual(config.getLoggingEnabled(), undefined);
+        });
+
+        it('should initialize with LoggingEnabled when provided', () => {
+            const loggingEnabled = {
+                TargetBucket: 'my-bucket',
+                TargetPrefix: 'logs/',
+            };
+            const config = new BucketLoggingStatus(loggingEnabled);
+            assert.deepStrictEqual(config.getLoggingEnabled(), loggingEnabled);
+        });
+    });
+
+    describe('toXML', () => {
+        it('should generate XML without LoggingEnabled when not configured', done => {
+            const config = new BucketLoggingStatus();
+            const xml = config.toXML();
+
+            // Parse the XML and check that BucketLoggingStatus is empty (no LoggingEnabled)
+            parseString(xml, { explicitArray: false }, (err, result) => {
+                assert.ifError(err);
+                assert(result.BucketLoggingStatus);
+                // Should not have LoggingEnabled property
+                assert.strictEqual(result.BucketLoggingStatus.LoggingEnabled, undefined);
+                done();
+            });
+        });
+
+        it('should generate XML with LoggingEnabled when configured', done => {
+            const loggingEnabled = {
+                TargetBucket: 'my-log-bucket',
+                TargetPrefix: 'logs/2025/',
+            };
+            const config = new BucketLoggingStatus(loggingEnabled);
+            const xml = config.toXML();
+
+            parseString(xml, { explicitArray: false }, (err, result) => {
+                assert.ifError(err);
+                assert(result.BucketLoggingStatus);
+                const logging = result.BucketLoggingStatus.LoggingEnabled;
+                assert(logging);
+                assert.strictEqual(logging.TargetBucket, 'my-log-bucket');
+                assert.strictEqual(logging.TargetPrefix, 'logs/2025/');
+                done();
+            });
+        });
+    });
+
+    describe('fromXML', () => {
+        describe('Success cases', () => {
+            it('should parse XML with LoggingEnabled', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>loggingbucket</TargetBucket>' +
+                    '<TargetPrefix>my-app-logs/2025/</TargetPrefix>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert.strictEqual(result.error, undefined);
+                assert(result.res instanceof BucketLoggingStatus);
+                const loggingEnabled = result.res.getLoggingEnabled();
+                assert.strictEqual(loggingEnabled.TargetBucket, 'loggingbucket');
+                assert.strictEqual(loggingEnabled.TargetPrefix, 'my-app-logs/2025/');
+            });
+
+            it('should parse XML without LoggingEnabled (empty config)', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert.strictEqual(result.error, undefined);
+                assert(result.res instanceof BucketLoggingStatus);
+                assert.strictEqual(result.res.getLoggingEnabled(), undefined);
+            });
+
+            it('should parse XML without LoggingEnabled (self closing)', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert.strictEqual(result.error, undefined);
+                assert(result.res instanceof BucketLoggingStatus);
+                assert.strictEqual(result.res.getLoggingEnabled(), undefined);
+            });
+
+            it('should parse XML with empty TargetPrefix', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>loggingbucket</TargetBucket>' +
+                    '<TargetPrefix></TargetPrefix>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert.strictEqual(result.error, undefined);
+                assert(result.res instanceof BucketLoggingStatus);
+                const loggingEnabled = result.res.getLoggingEnabled();
+                assert.strictEqual(loggingEnabled.TargetBucket, 'loggingbucket');
+                assert.strictEqual(loggingEnabled.TargetPrefix, '');
+            });
+        });
+
+        describe('Error cases - MalformedXML', () => {
+            it('should return error for invalid XML', () => {
+                const xml = 'not valid xml';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.MalformedXML, true);
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error for empty string', () => {
+                const xml = '';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.MalformedXML, true);
+                assert.strictEqual(result.error.details, 'request xml is undefined or empty');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error for XML without BucketLoggingStatus root tag', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<LoggingEnabled></LoggingEnabled>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.MalformedXML, true);
+                assert.strictEqual(result.error.details, 'missing BucketLoggingStatus root');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error when TargetBucket is missing', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetPrefix>logs/</TargetPrefix>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.MalformedXML, true);
+                assert.strictEqual(result.error.details, 'missing TargetBucket field in LoggingEnabled');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error when TargetPrefix is missing', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>bucket</TargetBucket>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.MalformedXML, true);
+                assert.strictEqual(result.error.details, 'missing TargetPrefix field in LoggingEnabled');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error when TargetBucket length is less than 3', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>ab</TargetBucket>' +
+                    '<TargetPrefix>logs/</TargetPrefix>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.InvalidBucketName, true);
+                assert.strictEqual(result.error.details, 'TargetBucket field length < 3');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error when TargetBucket length is greater than 255', () => {
+                const longBucketName = 'a'.repeat(256);
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    `<TargetBucket>${longBucketName}</TargetBucket>` +
+                    '<TargetPrefix>logs/</TargetPrefix>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.InvalidBucketName, true);
+                assert.strictEqual(result.error.details, 'TargetBucket field length > 255');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return error when TargetPrefix length is greater than 800', () => {
+                const longPrefix = 'a'.repeat(801);
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>bucket</TargetBucket>' +
+                    `<TargetPrefix>${longPrefix}</TargetPrefix>` +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.InvalidArgument, true);
+                assert.strictEqual(result.error.details, 'TargetPrefix field length > 800');
+                assert.strictEqual(result.res, undefined);
+            });
+        });
+
+        describe('Error cases - NotImplemented', () => {
+            it('should return NotImplemented error when TargetGrants is present', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>bucket</TargetBucket>' +
+                    '<TargetPrefix>logs/</TargetPrefix>' +
+                    '<TargetGrants><Grant><Grantee><ID>user123</ID></Grantee>' +
+                    '<Permission>READ</Permission></Grant></TargetGrants>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.NotImplemented, true);
+                assert.strictEqual(result.error.details,
+                    'TargetGrants field in LoggingEnabled is not implemented');
+                assert.strictEqual(result.res, undefined);
+            });
+
+            it('should return NotImplemented error when TargetObjectKeyFormat is present', () => {
+                const xml = '<?xml version="1.0" encoding="UTF-8"?>' +
+                    '<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
+                    '<LoggingEnabled>' +
+                    '<TargetBucket>bucket</TargetBucket>' +
+                    '<TargetPrefix>logs/</TargetPrefix>' +
+                    '<TargetObjectKeyFormat><SimplePrefix/></TargetObjectKeyFormat>' +
+                    '</LoggingEnabled>' +
+                    '</BucketLoggingStatus>';
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert(result.error);
+                assert.strictEqual(result.error.arsenalError.NotImplemented, true);
+                assert.strictEqual(result.error.details,
+                    'TargetObjectKeyFormat field in LoggingEnabled is not implemented');
+                assert.strictEqual(result.res, undefined);
+            });
+        });
+
+        describe('Round-trip conversions', () => {
+            it('should successfully round-trip with LoggingEnabled', () => {
+                const loggingEnabled = {
+                    TargetBucket: 'test-bucket',
+                    TargetPrefix: 'app/logs/2025/',
+                };
+                const config1 = new BucketLoggingStatus(loggingEnabled);
+                const xml = config1.toXML();
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert.strictEqual(result.error, undefined);
+                assert(result.res instanceof BucketLoggingStatus);
+                const parsed = result.res.getLoggingEnabled();
+                assert.strictEqual(parsed.TargetBucket, loggingEnabled.TargetBucket);
+                assert.strictEqual(parsed.TargetPrefix, loggingEnabled.TargetPrefix);
+            });
+
+            it('should successfully round-trip without LoggingEnabled', () => {
+                const config1 = new BucketLoggingStatus();
+                const xml = config1.toXML();
+
+                const result = BucketLoggingStatus.fromXML(xml);
+
+                assert.strictEqual(result.error, undefined);
+                assert(result.res instanceof BucketLoggingStatus);
+                assert.strictEqual(result.res.getLoggingEnabled(), undefined);
+            });
+        });
+    });
+});
diff --git a/tests/unit/s3routes/routeGET.spec.js b/tests/unit/s3routes/routeGET.spec.js
index a20e1521a..169e45106 100644
--- a/tests/unit/s3routes/routeGET.spec.js
+++ b/tests/unit/s3routes/routeGET.spec.js
@@ -90,6 +90,17 @@ describe('routerGET', () => {
         );
     });
 
+    it('should call bucketGetLogging when query.logging is present', () => {
+        request.bucketName = 'bucketName';
+        request.query = { logging: '' };
+
+        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
+
+        expect(api.callApiMethod).toHaveBeenCalledWith(
+            'bucketGetLogging', request, response, log, expect.any(Function),
+        );
+    });
+
     it('should handle objectGet with responseStreamData when no query is present for an object', () => {
         request.bucketName = 'bucketName';
         request.objectKey = 'objectKey';
diff --git a/tests/unit/s3routes/routePUT.spec.js b/tests/unit/s3routes/routePUT.spec.js
index ba4cf8336..47617a15b 100644
--- a/tests/unit/s3routes/routePUT.spec.js
+++ b/tests/unit/s3routes/routePUT.spec.js
@@ -236,6 +236,18 @@ describe('routePUT', () => {
         );
     });
 
+    it('should call bucketPutLogging when query.logging is set', () => {
+        request.bucketName = 'test-bucket';
+        request.query = { logging: '' };
+        api.callApiMethod = jest.fn();
+
+        routePUT(request, response, api, log, statsClient);
+
+        expect(api.callApiMethod).toHaveBeenCalledWith(
+            'bucketPutLogging', request, response, log, expect.any(Function),
+        );
+    });
+
     it('should return BadRequest when content-length is invalid for PUT bucket', () => {
         request.bucketName = 'test-bucket';
         request.query = {};