From e82b84d9efa731e43de40c45bc66e8c7ca30a4b9 Mon Sep 17 00:00:00 2001
From: Eric Shepherd
Date: Tue, 1 Oct 2024 20:52:17 -0400
Subject: [PATCH] S3 bucket names rename project (#4291)

* S3 bucket name change project

Our doc standard now requires the use of specific, reserved bucket names
throughout documentation and code examples. This commit replaces every
bucket name I could find with one of those reserved names.

---------

Co-authored-by: Nate Prewitt
---
 docs/source/guide/clients.rst                 |  4 +-
 docs/source/guide/collections.rst             | 10 +-
 docs/source/guide/error-handling.rst          |  2 +-
 docs/source/guide/events.rst                  | 93 ++++++++++---------
 docs/source/guide/migration.rst               |  4 +-
 docs/source/guide/migrations3.rst             | 18 ++--
 docs/source/guide/paginators.rst              |  8 +-
 docs/source/guide/quickstart.rst              |  7 +-
 docs/source/guide/resources.rst               | 12 +--
 .../guide/s3-example-access-permissions.rst   |  2 +-
 .../guide/s3-example-bucket-policies.rst      |  4 +-
 .../guide/s3-example-configuring-buckets.rst  |  2 +-
 .../source/guide/s3-example-download-file.rst |  4 +-
 .../guide/s3-example-static-web-host.rst      |  6 +-
 docs/source/guide/s3-presigned-urls.rst       |  4 +-
 docs/source/guide/s3-uploading-files.rst      | 10 +-
 docs/source/guide/s3.rst                      |  6 +-
 docs/source/guide/ses-rules.rst               |  2 +-
 18 files changed, 100 insertions(+), 98 deletions(-)

diff --git a/docs/source/guide/clients.rst b/docs/source/guide/clients.rst
index 52ce0fcc69..791c81b8c8 100644
--- a/docs/source/guide/clients.rst
+++ b/docs/source/guide/clients.rst
@@ -105,8 +105,8 @@ from its list of possible waiters::
 Then to actually start waiting, you must call the waiter's ``wait()`` method
 with the method's appropriate parameters passed in::
 
-    # Begin waiting for the S3 bucket, mybucket, to exist
-    s3_bucket_exists_waiter.wait(Bucket='mybucket')
+    # Begin waiting for the S3 bucket, amzn-s3-demo-bucket, to exist
+    s3_bucket_exists_waiter.wait(Bucket='amzn-s3-demo-bucket')
 
 Multithreading or multiprocessing with clients
 ----------------------------------------------
diff --git a/docs/source/guide/collections.rst b/docs/source/guide/collections.rst
index 1420620d52..6d66399cef 100644
--- a/docs/source/guide/collections.rst
+++ b/docs/source/guide/collections.rst
@@ -34,7 +34,7 @@ the following conditions:
 
 * **Batch actions (see below)**::
 
-    s3.Bucket('my-bucket').objects.delete()
+    s3.Bucket('amzn-s3-demo-bucket').objects.delete()
 
 Filtering
 ---------
@@ -124,11 +124,11 @@ Some collections support batch actions, which are actions that operate on an
 entire page of results at a time. They will automatically handle pagination::
 
-    # S3 delete everything in `my-bucket`
-    s3 = boto3.resource('s3')
-    s3.Bucket('my-bucket').objects.delete()
+    # S3 delete everything in `amzn-s3-demo-bucket`
+    s3 = boto3.resource('s3')
+    s3.Bucket('amzn-s3-demo-bucket').objects.delete()
 
 .. danger::
 
-    The above example will **completely erase all data** in the ``my-bucket``
-    bucket! Please be careful with batch actions.
+    The above example will **completely erase all data** in the
+    ``amzn-s3-demo-bucket`` bucket! Please be careful with batch actions.
diff --git a/docs/source/guide/error-handling.rst b/docs/source/guide/error-handling.rst
index afdbdae2d1..bd83d7cc29 100644
--- a/docs/source/guide/error-handling.rst
+++ b/docs/source/guide/error-handling.rst
@@ -231,7 +231,7 @@ Using Amazon S3 as an example resource service, you can use the client’s excep
 
     client = boto3.resource('s3')
 
     try:
-        client.create_bucket(BucketName='myTestBucket')
+        client.create_bucket(Bucket='amzn-s3-demo-bucket')
     except client.meta.client.exceptions.BucketAlreadyExists as err:
         print("Bucket {} already exists!".format(err.response['Error']['BucketName']))
diff --git a/docs/source/guide/events.rst b/docs/source/guide/events.rst
index 9788e66153..1015fb7642 100644
--- a/docs/source/guide/events.rst
+++ b/docs/source/guide/events.rst
@@ -14,17 +14,16 @@ Boto3's event system.
 An introduction to the event system
 -----------------------------------
 
-Boto3's event system allows users to register a function to
-a specific event. Then once the running program reaches a line that
-emits that specific event, Boto3 will call every function
-registered to the event in the order in which they were registered.
-When Boto3 calls each of these registered functions,
-it will call each of them with a specific set of
-keyword arguments that are associated with that event.
-Then once the registered function
-is called, the function may modify the keyword arguments passed to that
-function or return a value.
-Here is an example of how the event system works::
+Boto3's event system allows users to register a function to a specific event.
+Then once the running program reaches a line that emits that specific event,
+Boto3 will call every function registered to the event in the order in which
+they were registered.
+
+When Boto3 calls each of these registered functions, it will call each of them
+with a specific set of keyword arguments that are associated with that event.
+Then once the registered function is called, the function may modify the
+keyword arguments passed to that function or return a value. Here is an
+example of how the event system works::
 
     import boto3
 
@@ -37,19 +36,19 @@ Here is an example of how the event system works::
     def add_my_bucket(params, **kwargs):
         # Add the name of the bucket you want to default to.
         if 'Bucket' not in params:
-            params['Bucket'] = 'mybucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket'
 
     # Register the function to an event
     event_system.register('provide-client-params.s3.ListObjectsV2', add_my_bucket)
 
     response = s3.list_objects_v2()
 
-In this example, the handler ``add_my_bucket``
-is registered such that the handler will inject the
-value ``'mybucket'`` for the ``Bucket`` parameter whenever the
-``list_objects_v2`` client call is made without the ``Bucket`` parameter. Note
-that if the same ``list_objects_v2`` call is made without the ``Bucket``
-parameter and the registered handler, it will result in a validation error.
+In this example, the handler ``add_my_bucket`` is registered such that the
+handler will inject the value ``'amzn-s3-demo-bucket'`` for the ``Bucket``
+parameter whenever the ``list_objects_v2`` client call is made without the
+``Bucket`` parameter. Note that if the same ``list_objects_v2`` call is made
+without the ``Bucket`` parameter and the registered handler, it will result in
+a validation error.
 
 Here are the takeaways from this example:
 
@@ -103,11 +102,11 @@ its hierarchical structure::
 
     def add_my_general_bucket(params, **kwargs):
         if 'Bucket' not in params:
-            params['Bucket'] = 'mybucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket1'
 
     def add_my_specific_bucket(params, **kwargs):
         if 'Bucket' not in params:
-            params['Bucket'] = 'myspecificbucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket2'
 
     event_system.register('provide-client-params.s3', add_my_general_bucket)
     event_system.register('provide-client-params.s3.ListObjectsV2', add_my_specific_bucket)
@@ -116,17 +115,18 @@ its hierarchical structure::
     put_obj_response = s3.put_object(Key='mykey', Body=b'my body')
 
 In this example, the ``list_objects_v2`` method call will use the
-``'myspecificbucket'`` for the bucket instead of ``'mybucket'`` because
-the ``add_my_specific_bucket`` method was registered to the
-``'provide-client-params.s3.ListObjectsV2'`` event which is more specific than
-the ``'provide-client-params.s3'`` event. Thus, the
+``'amzn-s3-demo-bucket2'`` for the bucket instead of
+``'amzn-s3-demo-bucket1'`` because the ``add_my_specific_bucket`` method was
+registered to the ``'provide-client-params.s3.ListObjectsV2'`` event, which is
+more specific than the ``'provide-client-params.s3'`` event. Thus, the
 ``add_my_specific_bucket`` function is called before the
 ``add_my_general_bucket`` function is called when the event is emitted.
-However for the ``put_object`` call, the bucket used is ``'mybucket'``. This
-is because the event emitted for the ``put_object`` client call is
-``'provide-client-params.s3.PutObject'`` and the ``add_my_general_bucket``
-method is called via its registration to ``'provide-client-params.s3'``. The
+However, for the ``put_object`` call, the bucket used is
+``'amzn-s3-demo-bucket1'``. This is because the event emitted for the
+``put_object`` client call is ``'provide-client-params.s3.PutObject'`` and the
+``add_my_general_bucket`` method is called via its registration to
+``'provide-client-params.s3'``. The
 ``'provide-client-params.s3.ListObjectsV2'`` event is never emitted so the
 registered ``add_my_specific_bucket`` function is never called.
@@ -147,7 +147,7 @@ of using wildcards in the event system::
 
     def add_my_wildcard_bucket(params, **kwargs):
         if 'Bucket' not in params:
-            params['Bucket'] = 'mybucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket'
 
     event_system.register('provide-client-params.s3.*', add_my_wildcard_bucket)
     response = s3.list_objects_v2()
@@ -184,11 +184,11 @@ to another client's event system::
 
     def add_my_bucket(params, **kwargs):
         if 'Bucket' not in params:
-            params['Bucket'] = 'mybucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket1'
 
     def add_my_other_bucket(params, **kwargs):
         if 'Bucket' not in params:
-            params['Bucket'] = 'myotherbucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket2'
 
     client1.meta.events.register(
         'provide-client-params.s3.ListObjectsV2', add_my_bucket)
@@ -200,10 +200,10 @@ to another client's event system::
 
 Thanks to the isolation of clients' event systems, ``client1`` will inject
-``'mybucket'`` for its ``list_objects_v2`` method call while ``client2`` will
-inject ``'myotherbucket'`` for its ``list_objects_v2`` method call because
-``add_my_bucket`` was registered to ``client1`` while ``add_my_other_bucket``
-was registered to ``client2``.
+``'amzn-s3-demo-bucket1'`` for its ``list_objects_v2`` method call while
+``client2`` will inject ``'amzn-s3-demo-bucket2'`` for its ``list_objects_v2``
+method call because ``add_my_bucket`` was registered to ``client1`` while
+``add_my_other_bucket`` was registered to ``client2``.
 
 
 Boto3 specific events
@@ -212,13 +212,14 @@
 Boto3 emits a set of events that users can register to customize clients or
 resources and modify the behavior of method calls.
 
-Here is a table of events that users of Boto3 can register handlers to. More information
-about each event can be found in the corresponding sections below:
+Here is a table of events that users of Boto3 can register handlers to. More
+information about each event can be found in the corresponding sections below:
 
 .. note::
 
-    Events with a ``*`` in their order number are conditionally emitted while all others are always emitted.
-    An explanation of all 3 conditional events is provided below.
+    Events with a ``*`` in their order number are conditionally emitted while
+    all others are always emitted. An explanation of all 3 conditional events is
+    provided below.
 
     ``2 *`` - ``creating-resource-class`` is emitted ONLY when using a service resource.
@@ -440,7 +441,7 @@ about each event can be found in the corresponding sections below:
     def add_my_bucket(params, **kwargs):
         # Add the name of the bucket you want to default to.
         if 'Bucket' not in params:
-            params['Bucket'] = 'mybucket'
+            params['Bucket'] = 'amzn-s3-demo-bucket'
 
     # Register the function to an event
     event_system.register('provide-client-params.s3.ListObjectsV2', add_my_bucket)
@@ -551,13 +552,13 @@ about each event can be found in the corresponding sections below:
     # Register the function to an event
     event_system.register('request-created.s3.ListObjectsV2', inspect_request_created)
 
-    response = s3.list_objects_v2(Bucket='my-bucket')
+    response = s3.list_objects_v2(Bucket='amzn-s3-demo-bucket')
 
 This should output::
 
     Request Info:
       method: GET
-      url: https://my-bucket.s3 ...
+      url: https://amzn-s3-demo-bucket.s3 ...
       data: ...
       params: { ... }
       auth_path: ...
@@ -682,9 +683,9 @@ about each event can be found in the corresponding sections below:
 ``'after-call.service-name.operation-name'``
 
 :Description:
-    This event is emitted just after the service client makes an API call.
-    This event allows developers to postprocess or inspect the API response according to the
-    specific requirements of their application if needed.
+    This event is emitted just after the service client makes an API call. This
+    event allows developers to postprocess or inspect the API response according
+    to the specific requirements of their application if needed.
 
 :Keyword Arguments Emitted:
 
@@ -720,7 +721,7 @@ about each event can be found in the corresponding sections below:
     # Register the function to an event
     event_system.register('after-call.s3.ListObjectsV2', print_after_call_args)
 
-    s3.list_objects_v2(Bucket='my-bucket')
+    s3.list_objects_v2(Bucket='amzn-s3-demo-bucket')
 
 This should output::
 
diff --git a/docs/source/guide/migration.rst b/docs/source/guide/migration.rst
index 11c8deb9ce..b331712290 100644
--- a/docs/source/guide/migration.rst
+++ b/docs/source/guide/migration.rst
@@ -34,10 +34,10 @@ Second, while every service now uses the runtime-generated low-level client, som
 
     # High-level connections & resource objects
     from boto.s3.bucket import Bucket
     s3_conn = boto.connect_s3()
-    boto2_bucket = Bucket('mybucket')
+    boto2_bucket = Bucket('amzn-s3-demo-bucket')
 
     s3 = boto3.resource('s3')
-    boto3_bucket = s3.Bucket('mybucket')
+    boto3_bucket = s3.Bucket('amzn-s3-demo-bucket')
 
 Installation and configuration
 ------------------------------
diff --git a/docs/source/guide/migrations3.rst b/docs/source/guide/migrations3.rst
index 68be4aaef6..42759d5db3 100644
--- a/docs/source/guide/migrations3.rst
+++ b/docs/source/guide/migrations3.rst
@@ -21,12 +21,12 @@ Creating a bucket
 Creating a bucket in Boto 2 and Boto3 is very similar, except that in Boto3 all action parameters must be passed via keyword arguments and a bucket configuration must be specified manually::
 
     # Boto 2.x
-    s3_connection.create_bucket('mybucket')
-    s3_connection.create_bucket('mybucket', location=Location.USWest)
+    s3_connection.create_bucket('amzn-s3-demo-bucket')
+    s3_connection.create_bucket('amzn-s3-demo-bucket', location=Location.USWest)
 
     # Boto3
-    s3.create_bucket(Bucket='mybucket')
-    s3.create_bucket(Bucket='mybucket', CreateBucketConfiguration={
+    s3.create_bucket(Bucket='amzn-s3-demo-bucket')
+    s3.create_bucket(Bucket='amzn-s3-demo-bucket', CreateBucketConfiguration={
         'LocationConstraint': 'us-west-1'})
 
 Storing data
@@ -39,7 +39,7 @@ Storing data
 Storing data from a file, stream, or string is easy::
 
     # Boto 2.x
    from boto.s3.key import Key
    key = Key(bucket)
     key.set_contents_from_file('/tmp/hello.txt')
 
     # Boto3
-    s3.Object('mybucket', 'hello.txt').put(Body=open('/tmp/hello.txt', 'rb'))
+    s3.Object('amzn-s3-demo-bucket', 'hello.txt').put(Body=open('/tmp/hello.txt', 'rb'))
 
 Accessing a bucket
@@ -47,15 +47,15 @@ Accessing a bucket
 Getting a bucket is easy with Boto3's resources, however these do not automatically validate whether a bucket exists::
 
     # Boto 2.x
-    bucket = s3_connection.get_bucket('mybucket', validate=False)
-    exists = s3_connection.lookup('mybucket')
+    bucket = s3_connection.get_bucket('amzn-s3-demo-bucket', validate=False)
+    exists = s3_connection.lookup('amzn-s3-demo-bucket')
 
     # Boto3
     import botocore
-    bucket = s3.Bucket('mybucket')
+    bucket = s3.Bucket('amzn-s3-demo-bucket')
     exists = True
     try:
-        s3.meta.client.head_bucket(Bucket='mybucket')
+        s3.meta.client.head_bucket(Bucket='amzn-s3-demo-bucket')
     except botocore.exceptions.ClientError as e:
         # If a client error is thrown, then check that it was a 404 error.
         # If it was a 404 error, then the bucket does not exist.
diff --git a/docs/source/guide/paginators.rst b/docs/source/guide/paginators.rst
index 76804b3076..0de9962102 100644
--- a/docs/source/guide/paginators.rst
+++ b/docs/source/guide/paginators.rst
@@ -32,7 +32,7 @@ underlying API operation.
 The ``paginate`` method then returns an iterable
 
     paginator = client.get_paginator('list_objects_v2')
 
     # Create a PageIterator from the Paginator
-    page_iterator = paginator.paginate(Bucket='my-bucket')
+    page_iterator = paginator.paginate(Bucket='amzn-s3-demo-bucket')
 
     for page in page_iterator:
         print(page['Contents'])
@@ -47,7 +47,7 @@ the pages of API operation results. The ``paginate`` method accepts a
 pagination::
 
     paginator = client.get_paginator('list_objects_v2')
-    page_iterator = paginator.paginate(Bucket='my-bucket',
+    page_iterator = paginator.paginate(Bucket='amzn-s3-demo-bucket',
                                        PaginationConfig={'MaxItems': 10})
 
 ``MaxItems``
@@ -82,7 +82,7 @@ to the client::
 
     client = boto3.client('s3', region_name='us-west-2')
     paginator = client.get_paginator('list_objects_v2')
-    operation_parameters = {'Bucket': 'my-bucket',
+    operation_parameters = {'Bucket': 'amzn-s3-demo-bucket',
                             'Prefix': 'foo/baz'}
     page_iterator = paginator.paginate(**operation_parameters)
     for page in page_iterator:
@@ -103,7 +103,7 @@ JMESPath expressions that are applied to each page of results through the
 
     client = boto3.client('s3', region_name='us-west-2')
     paginator = client.get_paginator('list_objects_v2')
-    page_iterator = paginator.paginate(Bucket='my-bucket')
+    page_iterator = paginator.paginate(Bucket='amzn-s3-demo-bucket')
     filtered_iterator = page_iterator.search("Contents[?Size > `100`][]")
     for key_data in filtered_iterator:
         print(key_data)
diff --git a/docs/source/guide/quickstart.rst b/docs/source/guide/quickstart.rst
index 55af0f7d92..74ba8dc087 100644
--- a/docs/source/guide/quickstart.rst
+++ b/docs/source/guide/quickstart.rst
@@ -140,12 +140,13 @@ Now that you have an ``s3`` resource, you can send requests to the service.
     for bucket in s3.buckets.all():
         print(bucket.name)
 
-You can also upload and download binary data. For example, the following uploads a new file to S3,
-assuming that the bucket ``my-bucket`` already exists::
+You can also upload and download binary data. For example, the following
+uploads a new file to S3, assuming that the bucket ``amzn-s3-demo-bucket``
+already exists::
 
     # Upload a new file
     with open('test.jpg', 'rb') as data:
-        s3.Bucket('my-bucket').put_object(Key='test.jpg', Body=data)
+        s3.Bucket('amzn-s3-demo-bucket').put_object(Key='test.jpg', Body=data)
 
 :ref:`guide_resources` and :ref:`guide_collections` are covered in more detail
 in the following sections.
diff --git a/docs/source/guide/resources.rst b/docs/source/guide/resources.rst
index 5baf5aab24..998251132c 100644
--- a/docs/source/guide/resources.rst
+++ b/docs/source/guide/resources.rst
@@ -48,12 +48,12 @@ instantiation will result in an exception. Examples of identifiers::
     print(queue.url)
 
     # S3 Object (bucket_name and key are identifiers)
-    obj = s3.Object(bucket_name='boto3', key='test.py')
+    obj = s3.Object(bucket_name='amzn-s3-demo-bucket', key='test.py')
     print(obj.bucket_name)
     print(obj.key)
 
     # Raises exception, missing identifier: key!
-    obj = s3.Object(bucket_name='boto3')
+    obj = s3.Object(bucket_name='amzn-s3-demo-bucket')
 
 Identifiers may also be passed as positional arguments::
@@ -70,9 +70,9 @@ Identifiers also play a role in resource instance equality.
 For two instances of a resource to be considered equal, their
 identifiers must be equal::
 
-    >>> bucket1 = s3.Bucket('boto3')
-    >>> bucket2 = s3.Bucket('boto3')
-    >>> bucket3 = s3.Bucket('some-other-bucket')
+    >>> bucket1 = s3.Bucket('amzn-s3-demo-bucket1')
+    >>> bucket2 = s3.Bucket('amzn-s3-demo-bucket1')
+    >>> bucket3 = s3.Bucket('amzn-s3-demo-bucket3')
 
     >>> bucket1 == bucket2
     True
@@ -128,7 +128,7 @@ of actions::
     message.delete()
 
     # S3 Object
-    obj = s3.Object(bucket_name='boto3', key='test.py')
+    obj = s3.Object(bucket_name='amzn-s3-demo-bucket', key='test.py')
     response = obj.get()
     data = response['Body'].read()
 
diff --git a/docs/source/guide/s3-example-access-permissions.rst b/docs/source/guide/s3-example-access-permissions.rst
index 9dbf51317e..a1d33a7c35 100644
--- a/docs/source/guide/s3-example-access-permissions.rst
+++ b/docs/source/guide/s3-example-access-permissions.rst
@@ -27,5 +27,5 @@ The example retrieves the current access control list of an S3 bucket.
 
     # Retrieve a bucket's ACL
     s3 = boto3.client('s3')
-    result = s3.get_bucket_acl(Bucket='my-bucket')
+    result = s3.get_bucket_acl(Bucket='amzn-s3-demo-bucket')
     print(result)
diff --git a/docs/source/guide/s3-example-bucket-policies.rst b/docs/source/guide/s3-example-bucket-policies.rst
index a9d9d042a2..4d84043ad7 100644
--- a/docs/source/guide/s3-example-bucket-policies.rst
+++ b/docs/source/guide/s3-example-bucket-policies.rst
@@ -31,7 +31,7 @@ the bucket name.
 
     # Retrieve the policy of the specified bucket
     s3 = boto3.client('s3')
-    result = s3.get_bucket_policy(Bucket='BUCKET_NAME')
+    result = s3.get_bucket_policy(Bucket='amzn-s3-demo-bucket')
     print(result['Policy'])
 
 
@@ -50,7 +50,7 @@ stored in the bucket identified by the ``bucket_name`` variable.
     import json
 
     # Create a bucket policy
-    bucket_name = 'BUCKET_NAME'
+    bucket_name = 'amzn-s3-demo-bucket'
     bucket_policy = {
         'Version': '2012-10-17',
         'Statement': [{
diff --git a/docs/source/guide/s3-example-configuring-buckets.rst b/docs/source/guide/s3-example-configuring-buckets.rst
index c404a8bd26..36b8a28295 100644
--- a/docs/source/guide/s3-example-configuring-buckets.rst
+++ b/docs/source/guide/s3-example-configuring-buckets.rst
@@ -75,5 +75,5 @@ method.
 
     # Set the CORS configuration
     s3 = boto3.client('s3')
-    s3.put_bucket_cors(Bucket='BUCKET_NAME',
+    s3.put_bucket_cors(Bucket='amzn-s3-demo-bucket',
                        CORSConfiguration=cors_configuration)
diff --git a/docs/source/guide/s3-example-download-file.rst b/docs/source/guide/s3-example-download-file.rst
index 138bb97d82..a4208585fa 100644
--- a/docs/source/guide/s3-example-download-file.rst
+++ b/docs/source/guide/s3-example-download-file.rst
@@ -24,7 +24,7 @@ download and the filename to save the file to.
 
     import boto3
 
     s3 = boto3.client('s3')
-    s3.download_file('BUCKET_NAME', 'OBJECT_NAME', 'FILE_NAME')
+    s3.download_file('amzn-s3-demo-bucket', 'OBJECT_NAME', 'FILE_NAME')
 
 
 The ``download_fileobj`` method accepts a writeable file-like object. The file
@@ -34,7 +34,7 @@ object must be opened in binary mode, not text mode.
     s3 = boto3.client('s3')
     with open('FILE_NAME', 'wb') as f:
-        s3.download_fileobj('BUCKET_NAME', 'OBJECT_NAME', f)
+        s3.download_fileobj('amzn-s3-demo-bucket', 'OBJECT_NAME', f)
 
 
 Like their upload cousins, the download methods are provided by the
diff --git a/docs/source/guide/s3-example-static-web-host.rst b/docs/source/guide/s3-example-static-web-host.rst
index 0ab1b376fc..7d19ad4fb4 100644
--- a/docs/source/guide/s3-example-static-web-host.rst
+++ b/docs/source/guide/s3-example-static-web-host.rst
@@ -28,7 +28,7 @@ Retrieve a bucket's website configuration by calling the AWS SDK for Python
 
     # Retrieve the website configuration
     s3 = boto3.client('s3')
-    result = s3.get_bucket_website(Bucket='BUCKET_NAME')
+    result = s3.get_bucket_website(Bucket='amzn-s3-demo-website-bucket')
 
 
 Set a website configuration
@@ -48,7 +48,7 @@ A bucket's website configuration can be set by calling the
 
     # Set the website configuration
     s3 = boto3.client('s3')
-    s3.put_bucket_website(Bucket='BUCKET_NAME',
+    s3.put_bucket_website(Bucket='amzn-s3-demo-website-bucket',
                           WebsiteConfiguration=website_configuration)
 
 
@@ -62,4 +62,4 @@ A bucket's website configuration can be deleted by calling the
 
     # Delete the website configuration
     s3 = boto3.client('s3')
-    s3.delete_bucket_website(Bucket='BUCKET_NAME')
+    s3.delete_bucket_website(Bucket='amzn-s3-demo-website-bucket')
diff --git a/docs/source/guide/s3-presigned-urls.rst b/docs/source/guide/s3-presigned-urls.rst
index d99a55a26f..2ca25a54e5 100644
--- a/docs/source/guide/s3-presigned-urls.rst
+++ b/docs/source/guide/s3-presigned-urls.rst
@@ -65,7 +65,7 @@ perform a GET request.
 
     import requests    # To install: pip install requests
 
-    url = create_presigned_url('BUCKET_NAME', 'OBJECT_NAME')
+    url = create_presigned_url('amzn-s3-demo-bucket', 'OBJECT_NAME')
     if url is not None:
         response = requests.get(url)
 
@@ -176,7 +176,7 @@ presigned POST URL to perform a POST request to upload a file to S3.
 
     # Generate a presigned S3 POST URL
     object_name = 'OBJECT_NAME'
-    response = create_presigned_post('BUCKET_NAME', object_name)
+    response = create_presigned_post('amzn-s3-demo-bucket', object_name)
     if response is None:
         exit(1)
diff --git a/docs/source/guide/s3-uploading-files.rst b/docs/source/guide/s3-uploading-files.rst
index 91abadf215..0f66e09ba2 100644
--- a/docs/source/guide/s3-uploading-files.rst
+++ b/docs/source/guide/s3-uploading-files.rst
@@ -58,7 +58,7 @@ object must be opened in binary mode, not text mode.
 
     s3 = boto3.client('s3')
     with open("FILE_NAME", "rb") as f:
-        s3.upload_fileobj(f, "BUCKET_NAME", "OBJECT_NAME")
+        s3.upload_fileobj(f, "amzn-s3-demo-bucket", "OBJECT_NAME")
 
 
 The ``upload_file`` and ``upload_fileobj`` methods are provided by the S3
@@ -82,7 +82,7 @@ object.
 .. code-block:: python
 
     s3.upload_file(
-        'FILE_NAME', 'BUCKET_NAME', 'OBJECT_NAME',
+        'FILE_NAME', 'amzn-s3-demo-bucket', 'OBJECT_NAME',
         ExtraArgs={'Metadata': {'mykey': 'myvalue'}}
     )
 
@@ -93,7 +93,7 @@ list) value 'public-read' to the S3 object.
 .. code-block:: python
 
     s3.upload_file(
-        'FILE_NAME', 'BUCKET_NAME', 'OBJECT_NAME',
+        'FILE_NAME', 'amzn-s3-demo-bucket', 'OBJECT_NAME',
         ExtraArgs={'ACL': 'public-read'}
     )
 
@@ -103,7 +103,7 @@ The ``ExtraArgs`` parameter can also be used to set custom or multiple ACLs.
 .. code-block:: python
 
     s3.upload_file(
-        'FILE_NAME', 'BUCKET_NAME', 'OBJECT_NAME',
+        'FILE_NAME', 'amzn-s3-demo-bucket', 'OBJECT_NAME',
         ExtraArgs={
             'GrantRead': 'uri="http://acs.amazonaws.com/groups/global/AllUsers"',
             'GrantFullControl': 'id="01234567890abcdefg"',
@@ -129,7 +129,7 @@ instance's ``__call__`` method will be invoked intermittently.
 
 .. code-block:: python
 
     s3.upload_file(
-        'FILE_NAME', 'BUCKET_NAME', 'OBJECT_NAME',
+        'FILE_NAME', 'amzn-s3-demo-bucket', 'OBJECT_NAME',
         Callback=ProgressPercentage('FILE_NAME')
     )
 
diff --git a/docs/source/guide/s3.rst b/docs/source/guide/s3.rst
index dea8bb04ef..fce010615b 100644
--- a/docs/source/guide/s3.rst
+++ b/docs/source/guide/s3.rst
@@ -50,7 +50,7 @@ if the file size is larger than the threshold specified in the
 
     # Perform the transfer
     s3 = boto3.client('s3')
-    s3.upload_file('FILE_NAME', 'BUCKET_NAME', 'OBJECT_NAME', Config=config)
+    s3.upload_file('FILE_NAME', 'amzn-s3-demo-bucket', 'OBJECT_NAME', Config=config)
 
 
 Concurrent transfer operations
@@ -70,7 +70,7 @@ value; to increase usage, increase it.
 
     # Download an S3 object
     s3 = boto3.client('s3')
-    s3.download_file('BUCKET_NAME', 'OBJECT_NAME', 'FILE_NAME', Config=config)
+    s3.download_file('amzn-s3-demo-bucket', 'OBJECT_NAME', 'FILE_NAME', Config=config)
 
 
 Threads
@@ -88,4 +88,4 @@ the value of the ``max_concurrency`` attribute is ignored.
 
     config = TransferConfig(use_threads=False)
     s3 = boto3.client('s3')
-    s3.download_file('BUCKET_NAME', 'OBJECT_NAME', 'FILE_NAME', Config=config)
+    s3.download_file('amzn-s3-demo-bucket', 'OBJECT_NAME', 'FILE_NAME', Config=config)
diff --git a/docs/source/guide/ses-rules.rst b/docs/source/guide/ses-rules.rst
index f78108b0b1..a6dc4bf592 100644
--- a/docs/source/guide/ses-rules.rst
+++ b/docs/source/guide/ses-rules.rst
@@ -106,7 +106,7 @@ Example
             'Actions' : [
                 {
                     'S3Action' : {
-                        'BucketName' : 'S3_BUCKET_NAME',
+                        'BucketName' : 'amzn-s3-demo-bucket',
                         'ObjectKeyPrefix': 'SES_email'
                     }
                 }