4 changes: 2 additions & 2 deletions packages/@aws-cdk/aws-ec2/test/cfn-init-element.test.ts
@@ -664,15 +664,15 @@ describe('InitSource', () => {

test('fromS3Object uses object URL', () => {
// GIVEN
- const bucket = s3.Bucket.fromBucketName(stack, 'bucket', 'MyBucket');
+ const bucket = s3.Bucket.fromBucketName(stack, 'bucket', 'mybucket');
const source = ec2.InitSource.fromS3Object('/tmp/foo', bucket, 'myKey');

// WHEN
const rendered = getElementConfig(source, InitPlatform.LINUX);

// THEN
expect(rendered).toEqual({
- '/tmp/foo': expect.stringContaining('/MyBucket/myKey'),
+ '/tmp/foo': expect.stringContaining('/mybucket/myKey'),
});
});

2 changes: 1 addition & 1 deletion packages/@aws-cdk/aws-ec2/test/cfn-init.test.ts
@@ -667,7 +667,7 @@ class SingletonLocationSythesizer extends DefaultStackSynthesizer {
public addFileAsset(_asset: FileAssetSource): FileAssetLocation {
const httpUrl = 'https://MyBucket.s3.amazonaws.com/MyAsset';
return {
- bucketName: 'MyAssetBucket',
+ bucketName: 'myassetbucket',
objectKey: 'MyAssetFile',
httpUrl,
s3ObjectUrl: httpUrl,
8 changes: 4 additions & 4 deletions packages/@aws-cdk/aws-glue/test/code.test.ts
@@ -17,7 +17,7 @@ describe('Code', () => {
let bucket: s3.IBucket;

test('with valid bucket name and key and bound by job sets the right path and grants the job permissions to read from it', () => {
- bucket = s3.Bucket.fromBucketName(stack, 'Bucket', 'bucketName');
+ bucket = s3.Bucket.fromBucketName(stack, 'Bucket', 'bucketname');
script = glue.Code.fromBucket(bucket, key);
new glue.Job(stack, 'Job1', {
executable: glue.JobExecutable.pythonShell({
@@ -29,7 +29,7 @@

Template.fromStack(stack).hasResourceProperties('AWS::Glue::Job', {
Command: {
- ScriptLocation: 's3://bucketName/script',
+ ScriptLocation: 's3://bucketname/script',
},
});

@@ -53,7 +53,7 @@
{
Ref: 'AWS::Partition',
},
- ':s3:::bucketName',
+ ':s3:::bucketname',
],
],
},
@@ -65,7 +65,7 @@
{
Ref: 'AWS::Partition',
},
- ':s3:::bucketName/script',
+ ':s3:::bucketname/script',
],
],
},
2 changes: 1 addition & 1 deletion packages/@aws-cdk/aws-glue/test/job-executable.test.ts
@@ -31,7 +31,7 @@ describe('JobExecutable', () => {

beforeEach(() => {
stack = new cdk.Stack();
- bucket = s3.Bucket.fromBucketName(stack, 'Bucket', 'bucketName');
+ bucket = s3.Bucket.fromBucketName(stack, 'Bucket', 'bucketname');
script = glue.Code.fromBucket(bucket, 'script.py');
});

28 changes: 14 additions & 14 deletions packages/@aws-cdk/aws-glue/test/job.test.ts
@@ -55,7 +55,7 @@ describe('Job', () => {

describe('new', () => {
const className = 'com.amazon.test.ClassName';
- const codeBucketName = 'bucketName';
+ const codeBucketName = 'bucketname';
const codeBucketAccessStatement = {
Action: [
's3:GetObject*',
@@ -166,7 +166,7 @@ describe('Job', () => {
Template.fromStack(stack).hasResourceProperties('AWS::Glue::Job', {
Command: {
Name: 'glueetl',
- ScriptLocation: 's3://bucketName/script',
+ ScriptLocation: 's3://bucketname/script',
},
Role: {
'Fn::GetAtt': [
@@ -383,7 +383,7 @@ describe('Job', () => {
});

describe('with bucket provided', () => {
- const sparkUIBucketName = 'sparkBucketName';
+ const sparkUIBucketName = 'sparkbucketname';
let sparkUIBucket: s3.IBucket;

beforeEach(() => {
@@ -420,7 +420,7 @@ describe('Job', () => {
{
Ref: 'AWS::Partition',
},
- ':s3:::sparkBucketName',
+ ':s3:::sparkbucketname',
],
],
},
@@ -432,7 +432,7 @@ describe('Job', () => {
{
Ref: 'AWS::Partition',
},
- ':s3:::sparkBucketName/*',
+ ':s3:::sparkbucketname/*',
],
],
},
@@ -460,7 +460,7 @@ describe('Job', () => {
});

describe('with bucket and path provided', () => {
- const sparkUIBucketName = 'sparkBucketName';
+ const sparkUIBucketName = 'sparkbucketname';
const prefix = 'some/path/';
let sparkUIBucket: s3.IBucket;

@@ -516,7 +516,7 @@ describe('Job', () => {
Template.fromStack(stack).hasResourceProperties('AWS::Glue::Job', {
Command: {
Name: 'glueetl',
- ScriptLocation: 's3://bucketName/script',
+ ScriptLocation: 's3://bucketname/script',
},
Role: {
'Fn::GetAtt': [
@@ -614,7 +614,7 @@ describe('Job', () => {
GlueVersion: '2.0',
Command: {
Name: 'glueetl',
- ScriptLocation: 's3://bucketName/script',
+ ScriptLocation: 's3://bucketname/script',
PythonVersion: '3',
},
Role: {
@@ -625,9 +625,9 @@ describe('Job', () => {
},
DefaultArguments: {
'--job-language': 'python',
- '--extra-jars': 's3://bucketName/file1.jar,s3://bucketName/file2.jar',
- '--extra-py-files': 's3://bucketName/file1.py,s3://bucketName/file2.py',
- '--extra-files': 's3://bucketName/file1.txt,s3://bucketName/file2.txt',
+ '--extra-jars': 's3://bucketname/file1.jar,s3://bucketname/file2.jar',
+ '--extra-py-files': 's3://bucketname/file1.py,s3://bucketname/file2.py',
+ '--extra-files': 's3://bucketname/file1.txt,s3://bucketname/file2.txt',
'--user-jars-first': 'true',
},
});
@@ -649,7 +649,7 @@ describe('Job', () => {
GlueVersion: '2.0',
Command: {
Name: 'gluestreaming',
- ScriptLocation: 's3://bucketName/script',
+ ScriptLocation: 's3://bucketname/script',
},
Role: {
'Fn::GetAtt': [
@@ -660,8 +660,8 @@ describe('Job', () => {
DefaultArguments: {
'--job-language': 'scala',
'--class': 'com.amazon.test.ClassName',
- '--extra-jars': 's3://bucketName/file1.jar,s3://bucketName/file2.jar',
- '--extra-files': 's3://bucketName/file1.txt,s3://bucketName/file2.txt',
+ '--extra-jars': 's3://bucketname/file1.jar,s3://bucketname/file2.jar',
+ '--extra-files': 's3://bucketname/file1.txt,s3://bucketname/file2.txt',
'--user-jars-first': 'true',
},
});
@@ -336,7 +336,7 @@ describe('CloudWatch Events', () => {
test('onCloudTrailPutObject contains the Bucket ARN itself when path is undefined', () => {
const stack = new cdk.Stack();
const bucket = s3.Bucket.fromBucketAttributes(stack, 'Bucket', {
- bucketName: 'MyBucket',
+ bucketName: 'mybucket',
});
bucket.onCloudTrailPutObject('PutRule', {
target: {
@@ -363,7 +363,7 @@ describe('CloudWatch Events', () => {
{
'Ref': 'AWS::Partition',
},
- ':s3:::MyBucket',
+ ':s3:::mybucket',
],
],
},
@@ -378,7 +378,7 @@ describe('CloudWatch Events', () => {
test("onCloudTrailPutObject contains the path when it's provided", () => {
const stack = new cdk.Stack();
const bucket = s3.Bucket.fromBucketAttributes(stack, 'Bucket', {
- bucketName: 'MyBucket',
+ bucketName: 'mybucket',
});
bucket.onCloudTrailPutObject('PutRule', {
target: {
@@ -406,7 +406,7 @@ describe('CloudWatch Events', () => {
{
'Ref': 'AWS::Partition',
},
- ':s3:::MyBucket/my/path.zip',
+ ':s3:::mybucket/my/path.zip',
],
],
},
@@ -421,7 +421,7 @@ describe('CloudWatch Events', () => {
test('onCloudTrailWriteObject matches on events CompleteMultipartUpload, CopyObject, and PutObject', () => {
const stack = new cdk.Stack();
const bucket = s3.Bucket.fromBucketAttributes(stack, 'Bucket', {
- bucketName: 'MyBucket',
+ bucketName: 'mybucket',
});
bucket.onCloudTrailWriteObject('OnCloudTrailWriteObjectRule', {
target: {
@@ -449,7 +449,7 @@ describe('CloudWatch Events', () => {
test('onCloudTrailWriteObject matches on the requestParameter bucketName when the path is not provided', () => {
const stack = new cdk.Stack();
const bucket = s3.Bucket.fromBucketAttributes(stack, 'Bucket', {
- bucketName: 'MyBucket',
+ bucketName: 'mybucket',
});
bucket.onCloudTrailWriteObject('OnCloudTrailWriteObjectRule', {
target: {
@@ -476,7 +476,7 @@ describe('CloudWatch Events', () => {
test('onCloudTrailWriteObject matches on the requestParameters bucketName and key when the path is provided', () => {
const stack = new cdk.Stack();
const bucket = s3.Bucket.fromBucketAttributes(stack, 'Bucket', {
- bucketName: 'MyBucket',
+ bucketName: 'mybucket',
});
bucket.onCloudTrailWriteObject('OnCloudTrailWriteObjectRule', {
target: {
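The lowercase fixture names introduced in the test files above follow from the bucket.ts change below: Bucket.fromBucketAttributes, which fromBucketName delegates to, now runs the supplied bucket name through validation, so mixed-case names such as 'MyBucket' throw at construction time. A minimal sketch of the new behavior, assuming the v1-style @aws-cdk/core and @aws-cdk/aws-s3 packages used by these tests; the construct IDs are illustrative:

import * as cdk from '@aws-cdk/core';
import * as s3 from '@aws-cdk/aws-s3';

const stack = new cdk.Stack();

// Lowercase names, as used in the updated fixtures, pass validation.
s3.Bucket.fromBucketAttributes(stack, 'ImportedOk', { bucketName: 'mybucket' });

// A mixed-case name now fails the same checks that `new s3.Bucket(...)` applies.
try {
  s3.Bucket.fromBucketAttributes(stack, 'ImportedBad', { bucketName: 'MyBucket' });
} catch (e) {
  // "Invalid S3 bucket name (value: MyBucket)" plus the violated rules, one per line.
  console.log((e as Error).message);
}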
90 changes: 48 additions & 42 deletions packages/@aws-cdk/aws-s3/lib/bucket.ts
@@ -1392,6 +1392,7 @@ export class Bucket extends BucketBase {
if (!bucketName) {
throw new Error('Bucket name is required');
}
+ Bucket.validateBucketName(bucketName);

const newUrlFormat = attrs.bucketWebsiteNewUrlFormat === undefined
? false
@@ -1430,6 +1431,52 @@ export class Bucket extends BucketBase {
});
}

+ /**
+ * Throws an exception if the given bucket name is not valid.
+ *
+ * @param physicalName name of the bucket.
+ */
+ public static validateBucketName(physicalName: string): void {
+ const bucketName = physicalName;
+ if (!bucketName || Token.isUnresolved(bucketName)) {
+ // the name is a late-bound value, not a defined string,
+ // so skip validation
+ return;
+ }
+
+ const errors: string[] = [];
+
+ // Rules codified from https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
+ if (bucketName.length < 3 || bucketName.length > 63) {
+ errors.push('Bucket name must be at least 3 and no more than 63 characters');
+ }
+ const charsetMatch = bucketName.match(/[^a-z0-9.-]/);
+ if (charsetMatch) {
+ errors.push('Bucket name must only contain lowercase characters and the symbols, period (.) and dash (-) '
+ + `(offset: ${charsetMatch.index})`);
+ }
+ if (!/[a-z0-9]/.test(bucketName.charAt(0))) {
+ errors.push('Bucket name must start and end with a lowercase character or number '
+ + '(offset: 0)');
+ }
+ if (!/[a-z0-9]/.test(bucketName.charAt(bucketName.length - 1))) {
+ errors.push('Bucket name must start and end with a lowercase character or number '
+ + `(offset: ${bucketName.length - 1})`);
+ }
+ const consecSymbolMatch = bucketName.match(/\.-|-\.|\.\./);
+ if (consecSymbolMatch) {
+ errors.push('Bucket name must not have dash next to period, or period next to dash, or consecutive periods '
+ + `(offset: ${consecSymbolMatch.index})`);
+ }
+ if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(bucketName)) {
+ errors.push('Bucket name must not resemble an IP address');
+ }
+
+ if (errors.length > 0) {
+ throw new Error(`Invalid S3 bucket name (value: ${bucketName})${EOL}${errors.join(EOL)}`);
+ }
+ }
+
public readonly bucketArn: string;
public readonly bucketName: string;
public readonly bucketDomainName: string;
@@ -1458,7 +1505,7 @@ export class Bucket extends BucketBase {

const { bucketEncryption, encryptionKey } = this.parseEncryption(props);

- this.validateBucketName(this.physicalName);
+ Bucket.validateBucketName(this.physicalName);

const websiteConfiguration = this.renderWebsiteConfiguration(props);
this.isWebsite = (websiteConfiguration !== undefined);
@@ -1596,47 +1643,6 @@ export class Bucket extends BucketBase {
this.addToResourcePolicy(statement);
}

- private validateBucketName(physicalName: string): void {
- const bucketName = physicalName;
- if (!bucketName || Token.isUnresolved(bucketName)) {
- // the name is a late-bound value, not a defined string,
- // so skip validation
- return;
- }
-
- const errors: string[] = [];
-
- // Rules codified from https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
- if (bucketName.length < 3 || bucketName.length > 63) {
- errors.push('Bucket name must be at least 3 and no more than 63 characters');
- }
- const charsetMatch = bucketName.match(/[^a-z0-9.-]/);
- if (charsetMatch) {
- errors.push('Bucket name must only contain lowercase characters and the symbols, period (.) and dash (-) '
- + `(offset: ${charsetMatch.index})`);
- }
- if (!/[a-z0-9]/.test(bucketName.charAt(0))) {
- errors.push('Bucket name must start and end with a lowercase character or number '
- + '(offset: 0)');
- }
- if (!/[a-z0-9]/.test(bucketName.charAt(bucketName.length - 1))) {
- errors.push('Bucket name must start and end with a lowercase character or number '
- + `(offset: ${bucketName.length - 1})`);
- }
- const consecSymbolMatch = bucketName.match(/\.-|-\.|\.\./);
- if (consecSymbolMatch) {
- errors.push('Bucket name must not have dash next to period, or period next to dash, or consecutive periods '
- + `(offset: ${consecSymbolMatch.index})`);
- }
- if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(bucketName)) {
- errors.push('Bucket name must not resemble an IP address');
- }
-
- if (errors.length > 0) {
- throw new Error(`Invalid S3 bucket name (value: ${bucketName})${EOL}${errors.join(EOL)}`);
- }
- }
-
/**
* Set up key properties and return the Bucket encryption property from the
* user's configuration.
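Because validateBucketName is now a public static method, the same rules can also be checked directly, without creating or importing a bucket construct. A minimal sketch under the same assumptions as the previous example; the bucket names are illustrative:

import { Bucket } from '@aws-cdk/aws-s3';

// Accepted: lowercase, 3 to 63 characters, starts and ends with a letter or digit,
// no '..', '.-' or '-.', and not shaped like an IP address.
Bucket.validateBucketName('my-valid-bucket');

// Rejected: the uppercase characters and the underscore violate the charset rule.
let message: string | undefined;
try {
  Bucket.validateBucketName('My_Bucket');
} catch (e) {
  // Starts with "Invalid S3 bucket name (value: My_Bucket)", followed by each violated rule on its own line.
  message = (e as Error).message;
}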