From d0e44f4719d0b44543e78422ac12c9d2b4950fda Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 13 Jul 2016 10:15:46 -0700 Subject: [PATCH 001/197] Renaming storage gcloud samples folder. (#418) --- customer_supplied_keys.py | 115 +++++++++++++++++++++++++++++++++ customer_supplied_keys_test.py | 24 +++++++ 2 files changed, 139 insertions(+) create mode 100644 customer_supplied_keys.py create mode 100644 customer_supplied_keys_test.py diff --git a/customer_supplied_keys.py b/customer_supplied_keys.py new file mode 100644 index 000000000..1e9e6f888 --- /dev/null +++ b/customer_supplied_keys.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Command-line sample app demonstrating customer-supplied encryption keys. + +This sample demonstrates uploading an object while supplying an encryption key, +and retrieving that object's contents using gcloud API. The sample uses +the default credential and project. To review their values, run this command: + $ gcloud info + +This sample is used on this page: + https://cloud.google.com/storage/docs/encryption#customer-supplied + +For more information, see the README.md under /storage. +""" + +import argparse +import base64 +import filecmp +import os +import tempfile + +from gcloud import storage + +# An AES256 encryption key. It must be exactly 256 bits (32 bytes). You can +# (and should) generate your own encryption key. 
os.urandom(32) is a good way +# to accomplish this with Python. +# +# Although these keys are provided here for simplicity, please remember +# that it is a bad idea to store your encryption keys in your source code. +ENCRYPTION_KEY = os.urandom(32) + + +def upload_object(storage_client, + bucket_name, + filename, + object_name, + encryption_key): + """Uploads an object, specifying a custom encryption key. + + Args: + storage_client: gcloud client to access cloud storage + bucket_name: name of the destination bucket + filename: name of file to be uploaded + object_name: name of resulting object + encryption_key: encryption key to encrypt the object, + either 32 raw bytes or a string of 32 bytes. + """ + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(object_name) + with open(filename, 'rb') as f: + blob.upload_from_file(f, encryption_key=encryption_key) + + +def download_object(storage_client, + bucket_name, + object_name, + filename, + encryption_key): + """Downloads an object protected by a custom encryption key. + + Args: + storage_client: gcloud client to access cloud storage + bucket_name: name of the source bucket + object_name: name of the object to be downloaded + filename: name of the resulting file + encryption_key: the encryption key that the object is encrypted by, + either 32 raw bytes or a string of 32 bytes. 
+ """ + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(object_name) + with open(filename, 'wb') as f: + blob.download_to_file(f, encryption_key=encryption_key) + + +def main(bucket, filename): + storage_client = storage.Client() + print('Uploading object gs://{}/{} using encryption key (base64 formatted)' + ' {}'.format(bucket, filename, base64.encodestring(ENCRYPTION_KEY))) + upload_object(storage_client, bucket, filename, filename, ENCRYPTION_KEY) + print('Downloading it back') + with tempfile.NamedTemporaryFile(mode='w+b') as tmpfile: + download_object( + storage_client, + bucket, + object_name=filename, + filename=tmpfile.name, + encryption_key=ENCRYPTION_KEY) + assert filecmp.cmp(filename, tmpfile.name), \ + 'Downloaded file has different content from the original file.' + print('Done') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('bucket', help='Your Cloud Storage bucket.') + parser.add_argument('filename', help='A file to upload and download.') + + args = parser.parse_args() + + main(args.bucket, args.filename) diff --git a/customer_supplied_keys_test.py b/customer_supplied_keys_test.py new file mode 100644 index 000000000..c5449a27f --- /dev/null +++ b/customer_supplied_keys_test.py @@ -0,0 +1,24 @@ +# Copyright 2016, Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +from customer_supplied_keys import main + + +def test_main(cloud_config, capsys): + main(cloud_config.storage_bucket, __file__) + out, err = capsys.readouterr() + + assert not re.search(r'Downloaded file [!]=', out) + assert re.search(r'Uploading.*Downloading.*Done', out, re.DOTALL) From 9e4c4e81492e8419ef810ee8aceef31b95bd9b0e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 14 Jul 2016 10:53:21 -0700 Subject: [PATCH 002/197] Add gcloud-based storage usage samples. (#419) --- customer_supplied_keys.py | 20 +++---- manage_blobs.py | 109 ++++++++++++++++++++++++++++++++++++++ manage_blobs_test.py | 57 ++++++++++++++++++++ requirements.txt | 1 + 4 files changed, 173 insertions(+), 14 deletions(-) create mode 100644 manage_blobs.py create mode 100644 manage_blobs_test.py create mode 100644 requirements.txt diff --git a/customer_supplied_keys.py b/customer_supplied_keys.py index 1e9e6f888..0ac70266f 100644 --- a/customer_supplied_keys.py +++ b/customer_supplied_keys.py @@ -43,10 +43,7 @@ ENCRYPTION_KEY = os.urandom(32) -def upload_object(storage_client, - bucket_name, - filename, - object_name, +def upload_object(storage_client, bucket_name, filename, object_name, encryption_key): """Uploads an object, specifying a custom encryption key. @@ -60,14 +57,10 @@ def upload_object(storage_client, """ bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(object_name) - with open(filename, 'rb') as f: - blob.upload_from_file(f, encryption_key=encryption_key) + blob.upload_from_filename(filename, encryption_key=encryption_key) -def download_object(storage_client, - bucket_name, - object_name, - filename, +def download_object(storage_client, bucket_name, object_name, filename, encryption_key): """Downloads an object protected by a custom encryption key. 
@@ -81,8 +74,7 @@ def download_object(storage_client, """ bucket = storage_client.get_bucket(bucket_name) blob = bucket.blob(object_name) - with open(filename, 'wb') as f: - blob.download_to_file(f, encryption_key=encryption_key) + blob.download_to_filename(filename, encryption_key=encryption_key) def main(bucket, filename): @@ -98,8 +90,8 @@ def main(bucket, filename): object_name=filename, filename=tmpfile.name, encryption_key=ENCRYPTION_KEY) - assert filecmp.cmp(filename, tmpfile.name), \ - 'Downloaded file has different content from the original file.' + assert filecmp.cmp(filename, tmpfile.name), ( + 'Downloaded file has different content from the original file.') print('Done') diff --git a/manage_blobs.py b/manage_blobs.py new file mode 100644 index 000000000..82dfc0383 --- /dev/null +++ b/manage_blobs.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python + +# Copyright (C) 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Command-line sample application for simple CRUD management of blobs in a +given bucket. + +For more information, see the README.md under /storage. 
+""" + +import argparse + +from gcloud import storage + + +def list_blobs(bucket_name): + """Lists all the blobs in the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + blobs = bucket.list_blobs() + + for blob in blobs: + print(blob.name) + + +def upload_blob(bucket_name, source_file_name, destination_blob_name): + """Uploads a file to the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(destination_blob_name) + + blob.upload_from_filename(source_file_name) + + print('File {} uploaded to {}.'.format( + source_file_name, + destination_blob_name)) + + +def download_blob(bucket_name, source_blob_name, destination_file_name): + """Downloads a blob from the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(source_blob_name) + + blob.download_to_filename(destination_file_name) + + print('Blob {} downloaded to {}.'.format( + source_blob_name, + destination_file_name)) + + +def delete_blob(bucket_name, blob_name): + """Deletes a blob from the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.delete() + + print('Blob {} deleted.'.format(blob_name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('bucket_name', help='Your cloud storage bucket.') + + subparsers = parser.add_subparsers(dest='command') + subparsers.add_parser('list', help=list_blobs.__doc__) + + upload_parser = subparsers.add_parser('upload', help=upload_blob.__doc__) + upload_parser.add_argument('source_file_name') + upload_parser.add_argument('destination_blob_name') + + download_parser = subparsers.add_parser( + 'download', help=download_blob.__doc__) + download_parser.add_argument('source_blob_name') + download_parser.add_argument('destination_file_name') + + delete_parser = 
subparsers.add_parser('delete', help=delete_blob.__doc__) + delete_parser.add_argument('blob_name') + + args = parser.parse_args() + + if args.command == 'list': + list_blobs(args.bucket_name) + elif args.command == 'upload': + upload_blob( + args.bucket_name, + args.source_file_name, + args.destination_blob_name) + elif args.command == 'download': + download_blob( + args.bucket_name, + args.source_blob_name, + args.destination_file_name) + elif args.command == 'delete': + delete_blob(args.bucket_name, args.blob_name) diff --git a/manage_blobs_test.py b/manage_blobs_test.py new file mode 100644 index 000000000..972d939ae --- /dev/null +++ b/manage_blobs_test.py @@ -0,0 +1,57 @@ +# Copyright 2016, Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import tempfile + +from gcloud import storage +import manage_blobs +import pytest + + +@pytest.fixture +def test_blob(cloud_config): + bucket = storage.Client().bucket(cloud_config.storage_bucket) + blob = bucket.blob('manage_blobs_test_sigil') + blob.upload_from_string('Hello, is it me you\'re looking for?') + return blob.name + + +def test_list_blobs(test_blob, cloud_config, capsys): + manage_blobs.list_blobs(cloud_config.storage_bucket) + out, _ = capsys.readouterr() + assert test_blob in out + + +def test_upload_blob(cloud_config): + with tempfile.NamedTemporaryFile() as source_file: + source_file.write(b'test') + + manage_blobs.upload_blob( + cloud_config.storage_bucket, + source_file.name, 'test_upload_blob') + + +def test_download_blob(test_blob, cloud_config): + with tempfile.NamedTemporaryFile() as dest_file: + manage_blobs.download_blob( + cloud_config.storage_bucket, + test_blob, + dest_file.name) + + assert dest_file.read() + + +def test_delete_blob(test_blob, cloud_config): + manage_blobs.delete_blob( + cloud_config.storage_bucket, + test_blob) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..868847aeb --- /dev/null +++ b/requirements.txt @@ -0,0 +1 @@ +gcloud==0.17.0 From 625e4dad36adc6bff69dfddf5f9adb5c7d9935ad Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 15 Jul 2016 12:31:34 -0700 Subject: [PATCH 003/197] Refactor cloud client storage samples. 
(#421) --- customer_supplied_keys.py | 107 ---------------------- customer_supplied_keys_test.py | 24 ----- encryption.py | 161 +++++++++++++++++++++++++++++++++ encryption_test.py | 67 ++++++++++++++ manage_blobs.py | 16 ++-- manage_blobs_test.py | 23 +++-- 6 files changed, 251 insertions(+), 147 deletions(-) delete mode 100644 customer_supplied_keys.py delete mode 100644 customer_supplied_keys_test.py create mode 100644 encryption.py create mode 100644 encryption_test.py diff --git a/customer_supplied_keys.py b/customer_supplied_keys.py deleted file mode 100644 index 0ac70266f..000000000 --- a/customer_supplied_keys.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Command-line sample app demonstrating customer-supplied encryption keys. - -This sample demonstrates uploading an object while supplying an encryption key, -and retrieving that object's contents using gcloud API. The sample uses -the default credential and project. To review their values, run this command: - $ gcloud info - -This sample is used on this page: - https://cloud.google.com/storage/docs/encryption#customer-supplied - -For more information, see the README.md under /storage. -""" - -import argparse -import base64 -import filecmp -import os -import tempfile - -from gcloud import storage - -# An AES256 encryption key. It must be exactly 256 bits (32 bytes). 
You can -# (and should) generate your own encryption key. os.urandom(32) is a good way -# to accomplish this with Python. -# -# Although these keys are provided here for simplicity, please remember -# that it is a bad idea to store your encryption keys in your source code. -ENCRYPTION_KEY = os.urandom(32) - - -def upload_object(storage_client, bucket_name, filename, object_name, - encryption_key): - """Uploads an object, specifying a custom encryption key. - - Args: - storage_client: gcloud client to access cloud storage - bucket_name: name of the destination bucket - filename: name of file to be uploaded - object_name: name of resulting object - encryption_key: encryption key to encrypt the object, - either 32 raw bytes or a string of 32 bytes. - """ - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(object_name) - blob.upload_from_filename(filename, encryption_key=encryption_key) - - -def download_object(storage_client, bucket_name, object_name, filename, - encryption_key): - """Downloads an object protected by a custom encryption key. - - Args: - storage_client: gcloud client to access cloud storage - bucket_name: name of the source bucket - object_name: name of the object to be downloaded - filename: name of the resulting file - encryption_key: the encryption key that the object is encrypted by, - either 32 raw bytes or a string of 32 bytes. 
- """ - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(object_name) - blob.download_to_filename(filename, encryption_key=encryption_key) - - -def main(bucket, filename): - storage_client = storage.Client() - print('Uploading object gs://{}/{} using encryption key (base64 formatted)' - ' {}'.format(bucket, filename, base64.encodestring(ENCRYPTION_KEY))) - upload_object(storage_client, bucket, filename, filename, ENCRYPTION_KEY) - print('Downloading it back') - with tempfile.NamedTemporaryFile(mode='w+b') as tmpfile: - download_object( - storage_client, - bucket, - object_name=filename, - filename=tmpfile.name, - encryption_key=ENCRYPTION_KEY) - assert filecmp.cmp(filename, tmpfile.name), ( - 'Downloaded file has different content from the original file.') - print('Done') - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('bucket', help='Your Cloud Storage bucket.') - parser.add_argument('filename', help='A file to upload and download.') - - args = parser.parse_args() - - main(args.bucket, args.filename) diff --git a/customer_supplied_keys_test.py b/customer_supplied_keys_test.py deleted file mode 100644 index c5449a27f..000000000 --- a/customer_supplied_keys_test.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2016, Google, Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import re - -from customer_supplied_keys import main - - -def test_main(cloud_config, capsys): - main(cloud_config.storage_bucket, __file__) - out, err = capsys.readouterr() - - assert not re.search(r'Downloaded file [!]=', out) - assert re.search(r'Uploading.*Downloading.*Done', out, re.DOTALL) diff --git a/encryption.py b/encryption.py new file mode 100644 index 000000000..571b91b21 --- /dev/null +++ b/encryption.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python + +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to upload and download encrypted blobs +(objects) in Google Cloud Storage. + +Use `generate-encryption-key` to generate an example key: + + python encryption.py generate-encryption-key + +Then use the key to upload and download files encrypted with a custom key. + +For more information, see the README.md under /storage and the documentation +at https://cloud.google.com/storage/docs/encryption. +""" + +import argparse +import base64 +import os + +from gcloud import storage + + +def generate_encryption_key(): + """Generates a 256 bit (32 byte) AES encryption key and prints the + base64 representation. + + This is included for demonstration purposes. You should generate your own + key. Please remember that encryption keys should be handled with a + comprehensive security policy. 
+ """ + key = os.urandom(32) + encoded_key = base64.b64encode(key).decode('utf-8') + print('Base 64 encoded encryption key: {}'.format(encoded_key)) + + +def upload_encrypted_blob(bucket_name, source_file_name, + destination_blob_name, base64_encryption_key): + """Uploads a file to a Google Cloud Storage bucket using a custom + encryption key. + + The file will be encrypted by Google Cloud Storage and only + retrievable using the provided encryption key. + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(destination_blob_name) + + # Encryption key must be an AES256 key represented as a bytestring with + # 32 bytes. Since it's passed in as a base64 encoded string, it needs + # to be decoded. + encryption_key = base64.b64decode(base64_encryption_key) + + blob.upload_from_filename( + source_file_name, encryption_key=encryption_key) + + print('File {} uploaded to {}.'.format( + source_file_name, + destination_blob_name)) + + +def download_encrypted_blob(bucket_name, source_blob_name, + destination_file_name, base64_encryption_key): + """Downloads a previously-encrypted blob from Google Cloud Storage. + + The encryption key provided must be the same key provided when uploading + the blob. + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(source_blob_name) + + # Encryption key must be an AES256 key represented as a bytestring with + # 32 bytes. Since it's passed in as a base64 encoded string, it needs + # to be decoded. 
+ encryption_key = base64.b64decode(base64_encryption_key) + + blob.download_to_filename( + destination_file_name, encryption_key=encryption_key) + + print('Blob {} downloaded to {}.'.format( + source_blob_name, + destination_file_name)) + + +def rotate_encryption_key(bucket_name, blob_name, base64_encryption_key, + base64_new_encryption_key): + """Performs a key rotation by re-writing an encrypted blob with a new + encryption key.""" + raise NotImplementedError( + 'This is currently not available using the Cloud Client Library.') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + subparsers = parser.add_subparsers(dest='command') + + subparsers.add_parser( + 'generate-encryption-key', help=generate_encryption_key.__doc__) + + upload_parser = subparsers.add_parser( + 'upload', help=upload_encrypted_blob.__doc__) + upload_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + upload_parser.add_argument('source_file_name') + upload_parser.add_argument('destination_blob_name') + upload_parser.add_argument('base64_encryption_key') + + download_parser = subparsers.add_parser( + 'download', help=download_encrypted_blob.__doc__) + download_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + download_parser.add_argument('source_blob_name') + download_parser.add_argument('destination_file_name') + download_parser.add_argument('base64_encryption_key') + + rotate_parser = subparsers.add_parser( + 'rotate', help=rotate_encryption_key.__doc__) + rotate_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + download_parser.add_argument('blob_name') + download_parser.add_argument('base64_encryption_key') + download_parser.add_argument('base64_new_encryption_key') + + args = parser.parse_args() + + if args.command == 'generate-encryption-key': + generate_encryption_key() + elif args.command == 'upload': + 
upload_encrypted_blob( + args.bucket_name, + args.source_file_name, + args.destination_blob_name, + args.base64_encryption_key) + elif args.command == 'download': + download_encrypted_blob( + args.bucket_name, + args.source_blob_name, + args.destination_file_name, + args.base64_encryption_key) + elif args.command == 'rotate': + rotate_encryption_key( + args.bucket_name, + args.blob_name, + args.base64_encryption_key, + args.base64_new_encryption_key) diff --git a/encryption_test.py b/encryption_test.py new file mode 100644 index 000000000..4ebea22d1 --- /dev/null +++ b/encryption_test.py @@ -0,0 +1,67 @@ +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +import tempfile + +import encryption +from gcloud import storage +import pytest + +TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=' +TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) + + +def test_generate_encryption_key(capsys): + encryption.generate_encryption_key() + out, _ = capsys.readouterr() + encoded_key = out.split(':', 1).pop().strip() + key = base64.b64decode(encoded_key) + assert len(key) == 32, 'Returned key should be 32 bytes' + + +def test_upload_encrypted_blob(cloud_config): + with tempfile.NamedTemporaryFile() as source_file: + source_file.write(b'test') + + encryption.upload_encrypted_blob( + cloud_config.storage_bucket, + source_file.name, + 'test_encrypted_upload_blob', + TEST_ENCRYPTION_KEY) + + +@pytest.fixture +def test_blob(cloud_config): + """Provides a pre-existing blob in the test bucket.""" + bucket = storage.Client().bucket(cloud_config.storage_bucket) + blob = bucket.blob('encrption_test_sigil') + content = 'Hello, is it me you\'re looking for?' + blob.upload_from_string( + content, + encryption_key=TEST_ENCRYPTION_KEY_DECODED) + return blob.name, content + + +def test_download_blob(test_blob, cloud_config): + test_blob_name, test_blob_content = test_blob + with tempfile.NamedTemporaryFile() as dest_file: + encryption.download_encrypted_blob( + cloud_config.storage_bucket, + test_blob_name, + dest_file.name, + TEST_ENCRYPTION_KEY) + + downloaded_content = dest_file.read().decode('utf-8') + assert downloaded_content == test_blob_content diff --git a/manage_blobs.py b/manage_blobs.py index 82dfc0383..8116c8ea6 100644 --- a/manage_blobs.py +++ b/manage_blobs.py @@ -1,22 +1,24 @@ #!/usr/bin/env python -# Copyright (C) 2016 Google Inc. +# Copyright 2016 Google, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Command-line sample application for simple CRUD management of blobs in a -given bucket. -For more information, see the README.md under /storage. +"""This application demonstrates how to perform basic operations on blobs +(objects) in a Google Cloud Storage bucket. + +For more information, see the README.md under /storage and the documentation +at https://cloud.google.com/storage/docs. """ import argparse @@ -73,7 +75,9 @@ def delete_blob(bucket_name, blob_name): if __name__ == '__main__': - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('bucket_name', help='Your cloud storage bucket.') subparsers = parser.add_subparsers(dest='command') diff --git a/manage_blobs_test.py b/manage_blobs_test.py index 972d939ae..8c78930a4 100644 --- a/manage_blobs_test.py +++ b/manage_blobs_test.py @@ -1,4 +1,5 @@ -# Copyright 2016, Google, Inc. +# Copyright 2016 Google, Inc. +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -18,14 +19,6 @@ import pytest -@pytest.fixture -def test_blob(cloud_config): - bucket = storage.Client().bucket(cloud_config.storage_bucket) - blob = bucket.blob('manage_blobs_test_sigil') - blob.upload_from_string('Hello, is it me you\'re looking for?') - return blob.name - - def test_list_blobs(test_blob, cloud_config, capsys): manage_blobs.list_blobs(cloud_config.storage_bucket) out, _ = capsys.readouterr() @@ -38,7 +31,17 @@ def test_upload_blob(cloud_config): manage_blobs.upload_blob( cloud_config.storage_bucket, - source_file.name, 'test_upload_blob') + source_file.name, + 'test_upload_blob') + + +@pytest.fixture +def test_blob(cloud_config): + """Provides a pre-existing blob in the test bucket.""" + bucket = storage.Client().bucket(cloud_config.storage_bucket) + blob = bucket.blob('manage_blobs_test_sigil') + blob.upload_from_string('Hello, is it me you\'re looking for?') + return blob.name def test_download_blob(test_blob, cloud_config): From 677d2cf4c83879da66a9630f4ff30bded4870ce6 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 29 Jul 2016 11:23:20 -0700 Subject: [PATCH 004/197] Add more storage samples for the cloud client libraries. 
(#432) --- encryption_test.py | 2 +- manage_blobs.py | 113 ---------------- manage_blobs_test.py | 60 --------- snippets.py | 300 +++++++++++++++++++++++++++++++++++++++++++ snippets_test.py | 126 ++++++++++++++++++ 5 files changed, 427 insertions(+), 174 deletions(-) delete mode 100644 manage_blobs.py delete mode 100644 manage_blobs_test.py create mode 100644 snippets.py create mode 100644 snippets_test.py diff --git a/encryption_test.py b/encryption_test.py index 4ebea22d1..ddef282b8 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -46,7 +46,7 @@ def test_upload_encrypted_blob(cloud_config): def test_blob(cloud_config): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(cloud_config.storage_bucket) - blob = bucket.blob('encrption_test_sigil') + blob = bucket.blob('encryption_test_sigil') content = 'Hello, is it me you\'re looking for?' blob.upload_from_string( content, diff --git a/manage_blobs.py b/manage_blobs.py deleted file mode 100644 index 8116c8ea6..000000000 --- a/manage_blobs.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on blobs -(objects) in a Google Cloud Storage bucket. - -For more information, see the README.md under /storage and the documentation -at https://cloud.google.com/storage/docs. 
-""" - -import argparse - -from gcloud import storage - - -def list_blobs(bucket_name): - """Lists all the blobs in the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - blobs = bucket.list_blobs() - - for blob in blobs: - print(blob.name) - - -def upload_blob(bucket_name, source_file_name, destination_blob_name): - """Uploads a file to the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(destination_blob_name) - - blob.upload_from_filename(source_file_name) - - print('File {} uploaded to {}.'.format( - source_file_name, - destination_blob_name)) - - -def download_blob(bucket_name, source_blob_name, destination_file_name): - """Downloads a blob from the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(source_blob_name) - - blob.download_to_filename(destination_file_name) - - print('Blob {} downloaded to {}.'.format( - source_blob_name, - destination_file_name)) - - -def delete_blob(bucket_name, blob_name): - """Deletes a blob from the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.delete() - - print('Blob {} deleted.'.format(blob_name)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('bucket_name', help='Your cloud storage bucket.') - - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('list', help=list_blobs.__doc__) - - upload_parser = subparsers.add_parser('upload', help=upload_blob.__doc__) - upload_parser.add_argument('source_file_name') - upload_parser.add_argument('destination_blob_name') - - download_parser = subparsers.add_parser( - 'download', help=download_blob.__doc__) - download_parser.add_argument('source_blob_name') - 
download_parser.add_argument('destination_file_name') - - delete_parser = subparsers.add_parser('delete', help=delete_blob.__doc__) - delete_parser.add_argument('blob_name') - - args = parser.parse_args() - - if args.command == 'list': - list_blobs(args.bucket_name) - elif args.command == 'upload': - upload_blob( - args.bucket_name, - args.source_file_name, - args.destination_blob_name) - elif args.command == 'download': - download_blob( - args.bucket_name, - args.source_blob_name, - args.destination_file_name) - elif args.command == 'delete': - delete_blob(args.bucket_name, args.blob_name) diff --git a/manage_blobs_test.py b/manage_blobs_test.py deleted file mode 100644 index 8c78930a4..000000000 --- a/manage_blobs_test.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import tempfile - -from gcloud import storage -import manage_blobs -import pytest - - -def test_list_blobs(test_blob, cloud_config, capsys): - manage_blobs.list_blobs(cloud_config.storage_bucket) - out, _ = capsys.readouterr() - assert test_blob in out - - -def test_upload_blob(cloud_config): - with tempfile.NamedTemporaryFile() as source_file: - source_file.write(b'test') - - manage_blobs.upload_blob( - cloud_config.storage_bucket, - source_file.name, - 'test_upload_blob') - - -@pytest.fixture -def test_blob(cloud_config): - """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) - blob = bucket.blob('manage_blobs_test_sigil') - blob.upload_from_string('Hello, is it me you\'re looking for?') - return blob.name - - -def test_download_blob(test_blob, cloud_config): - with tempfile.NamedTemporaryFile() as dest_file: - manage_blobs.download_blob( - cloud_config.storage_bucket, - test_blob, - dest_file.name) - - assert dest_file.read() - - -def test_delete_blob(test_blob, cloud_config): - manage_blobs.delete_blob( - cloud_config.storage_bucket, - test_blob) diff --git a/snippets.py b/snippets.py new file mode 100644 index 000000000..0f6b88cb3 --- /dev/null +++ b/snippets.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python + +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This application demonstrates how to perform basic operations on blobs +(objects) in a Google Cloud Storage bucket. + +For more information, see the README.md under /storage and the documentation +at https://cloud.google.com/storage/docs. +""" + +import argparse +import datetime + +from gcloud import storage + + +def create_bucket(bucket_name): + """Creates a new bucket.""" + storage_client = storage.Client() + bucket = storage_client.create_bucket(bucket_name) + print('Bucket {} created'.format(bucket.name)) + + +def delete_bucket(bucket_name): + """Deletes a bucket. The bucket must be empty.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.delete() + print('Bucket {} deleted'.format(bucket.name)) + + +def list_blobs(bucket_name): + """Lists all the blobs in the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + blobs = bucket.list_blobs() + + for blob in blobs: + print(blob.name) + + +def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): + """Lists all the blobs in the bucket that begin with the prefix. + + This can be used to list all blobs in a "folder", e.g. "public/". + + The delimiter argument can be used to restrict the results to only the + "files" in the given "folder". Without the delimiter, the entire tree under + the prefix is returned. 
For example, given these blobs: + + /a/1.txt + /a/b/2.txt + + If you just specify prefix = '/a', you'll get back: + + /a/1.txt + /a/b/2.txt + + However, if you specify prefix='/a' and delimiter='/', you'll get back: + + /a/1.txt + + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + blobs = bucket.list_blobs(prefix=prefix, delimiter=delimiter) + + print('Blobs:') + for blob in blobs: + print(blob.name) + + if delimiter: + print('Prefixes:') + for prefix in blobs.prefixes: + print(prefix) + + +def upload_blob(bucket_name, source_file_name, destination_blob_name): + """Uploads a file to the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(destination_blob_name) + + blob.upload_from_filename(source_file_name) + + print('File {} uploaded to {}.'.format( + source_file_name, + destination_blob_name)) + + +def download_blob(bucket_name, source_blob_name, destination_file_name): + """Downloads a blob from the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(source_blob_name) + + blob.download_to_filename(destination_file_name) + + print('Blob {} downloaded to {}.'.format( + source_blob_name, + destination_file_name)) + + +def delete_blob(bucket_name, blob_name): + """Deletes a blob from the bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.delete() + + print('Blob {} deleted.'.format(blob_name)) + + +def blob_metadata(bucket_name, blob_name): + """Prints out a blob's metadata.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.get_blob(blob_name) + + print('Blob: {}'.format(blob.name)) + print('Bucket: {}'.format(blob.bucket.name)) + print('Storage class: {}'.format(blob.storage_class)) + print('ID: {}'.format(blob.id)) + print('Size: {} 
bytes'.format(blob.size)) + print('Updated: {}'.format(blob.updated)) + print('Generation: {}'.format(blob.generation)) + print('Metageneration: {}'.format(blob.metageneration)) + print('Etag: {}'.format(blob.etag)) + print('Owner: {}'.format(blob.owner)) + print('Component count: {}'.format(blob.component_count)) + print('Crc32c: {}'.format(blob.crc32c)) + print('md5_hash: {}'.format(blob.md5_hash)) + print('Cache-control: {}'.format(blob.cache_control)) + print('Content-type: {}'.format(blob.content_type)) + print('Content-disposition: {}'.format(blob.content_disposition)) + print('Content-encoding: {}'.format(blob.content_encoding)) + print('Content-language: {}'.format(blob.content_language)) + print('Metadata: {}'.format(blob.metadata)) + + +def make_blob_public(bucket_name, blob_name): + """Makes a blob publicly accessible.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.make_public() + + print('Blob {} is publicly accessible at {}'.format( + blob.name, blob.public_url)) + + +def generate_signed_url(bucket_name, blob_name): + """Generates a signed URL for a blob. + + Note that this method requires a service account key file. You can not use + this if you are using Application Default Credentials from Google Compute + Engine or from the Google Cloud SDK. + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + url = blob.generate_signed_url( + # This URL is valid for 1 hour + expiration=datetime.timedelta(hours=1), + # Allow GET requests using this URL. 
+ method='GET') + + print('The signed url for {} is {}'.format(blob.name, url)) + + +def rename_blob(bucket_name, blob_name, new_name): + """Renames a blob.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + new_blob = bucket.rename_blob(blob, new_name) + + print('Blob {} has been renamed to {}'.format( + blob.name, new_blob.name)) + + +def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): + """Copies a blob from one bucket to another with a new name.""" + storage_client = storage.Client() + source_bucket = storage_client.get_bucket(bucket_name) + source_blob = source_bucket.blob(blob_name) + destination_bucket = storage_client.get_bucket(new_bucket_name) + + new_blob = source_bucket.copy_blob( + source_blob, destination_bucket, new_blob_name) + + print('Blob {} in bucket {} copied to blob {} in bucket {}.'.format( + source_blob.name, source_bucket.name, new_blob.name, + destination_bucket.name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('bucket_name', help='Your cloud storage bucket.') + + subparsers = parser.add_subparsers(dest='command') + subparsers.add_parser('create-bucket', help=create_bucket.__doc__) + subparsers.add_parser('delete-bucket', help=delete_bucket.__doc__) + subparsers.add_parser('list', help=list_blobs.__doc__) + + list_with_prefix_parser = subparsers.add_parser( + 'list-with-prefix', help=list_blobs_with_prefix.__doc__) + list_with_prefix_parser.add_argument('prefix') + list_with_prefix_parser.add_argument('--delimiter', default=None) + + upload_parser = subparsers.add_parser('upload', help=upload_blob.__doc__) + upload_parser.add_argument('source_file_name') + upload_parser.add_argument('destination_blob_name') + + download_parser = subparsers.add_parser( + 'download', help=download_blob.__doc__) + 
 download_parser.add_argument('source_blob_name') + download_parser.add_argument('destination_file_name') + + delete_parser = subparsers.add_parser('delete', help=delete_blob.__doc__) + delete_parser.add_argument('blob_name') + + metadata_parser = subparsers.add_parser( + 'metadata', help=blob_metadata.__doc__) + metadata_parser.add_argument('blob_name') + + make_public_parser = subparsers.add_parser( + 'make-public', help=make_blob_public.__doc__) + make_public_parser.add_argument('blob_name') + + signed_url_parser = subparsers.add_parser( + 'signed-url', help=generate_signed_url.__doc__) + signed_url_parser.add_argument('blob_name') + + rename_parser = subparsers.add_parser('rename', help=rename_blob.__doc__) + rename_parser.add_argument('blob_name') + rename_parser.add_argument('new_name') + + copy_parser = subparsers.add_parser('copy', help=copy_blob.__doc__) + copy_parser.add_argument('blob_name') + copy_parser.add_argument('new_bucket_name') + copy_parser.add_argument('new_blob_name') + + args = parser.parse_args() + + if args.command == 'create-bucket': + create_bucket(args.bucket_name) + elif args.command == 'delete-bucket': + delete_bucket(args.bucket_name) + elif args.command == 'list': + list_blobs(args.bucket_name) + elif args.command == 'list-with-prefix': + list_blobs_with_prefix(args.bucket_name, args.prefix, args.delimiter) + elif args.command == 'upload': + upload_blob( + args.bucket_name, + args.source_file_name, + args.destination_blob_name) + elif args.command == 'download': + download_blob( + args.bucket_name, + args.source_blob_name, + args.destination_file_name) + elif args.command == 'delete': + delete_blob(args.bucket_name, args.blob_name) + elif args.command == 'metadata': + blob_metadata(args.bucket_name, args.blob_name) + elif args.command == 'make-public': + make_blob_public(args.bucket_name, args.blob_name) + elif args.command == 'signed-url': + generate_signed_url(args.bucket_name, args.blob_name) + elif args.command == 'rename': 
rename_blob(args.bucket_name, args.blob_name, args.new_name) + elif args.command == 'copy': + copy_blob( + args.bucket_name, + args.blob_name, + args.new_bucket_name, + args.new_blob_name) diff --git a/snippets_test.py b/snippets_test.py new file mode 100644 index 000000000..b38aa4380 --- /dev/null +++ b/snippets_test.py @@ -0,0 +1,126 @@ +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import tempfile + +import gcloud +from gcloud import storage +import pytest +import requests +import snippets + + +@pytest.fixture +def test_blob(cloud_config): + """Provides a pre-existing blob in the test bucket.""" + bucket = storage.Client().bucket(cloud_config.storage_bucket) + blob = bucket.blob('storage_snippets_test_sigil') + blob.upload_from_string('Hello, is it me you\'re looking for?') + return blob + + +def test_list_blobs(test_blob, cloud_config, capsys): + snippets.list_blobs(cloud_config.storage_bucket) + out, _ = capsys.readouterr() + assert test_blob.name in out + + +def test_list_blobs_with_prefix(test_blob, cloud_config, capsys): + snippets.list_blobs_with_prefix( + cloud_config.storage_bucket, + prefix='storage_snippets') + out, _ = capsys.readouterr() + assert test_blob.name in out + + +def test_upload_blob(cloud_config): + with tempfile.NamedTemporaryFile() as source_file: + source_file.write(b'test') + + snippets.upload_blob( + cloud_config.storage_bucket, + source_file.name, + 'test_upload_blob') + + +def 
test_download_blob(test_blob, cloud_config): + with tempfile.NamedTemporaryFile() as dest_file: + snippets.download_blob( + cloud_config.storage_bucket, + test_blob.name, + dest_file.name) + + assert dest_file.read() + + +def test_blob_metadata(test_blob, cloud_config, capsys): + snippets.blob_metadata(cloud_config.storage_bucket, test_blob.name) + out, _ = capsys.readouterr() + assert test_blob.name in out + + +def test_delete_blob(test_blob, cloud_config): + snippets.delete_blob( + cloud_config.storage_bucket, + test_blob.name) + + +def test_make_blob_public(test_blob, cloud_config): + snippets.make_blob_public( + cloud_config.storage_bucket, + test_blob.name) + + r = requests.get(test_blob.public_url) + assert r.text == 'Hello, is it me you\'re looking for?' + + +def test_generate_signed_url(test_blob, cloud_config, capsys): + snippets.generate_signed_url( + cloud_config.storage_bucket, + test_blob.name) + + out, _ = capsys.readouterr() + url = out.rsplit().pop() + + r = requests.get(url) + assert r.text == 'Hello, is it me you\'re looking for?' 
+ + +def test_rename_blob(test_blob, cloud_config): + bucket = storage.Client().bucket(cloud_config.storage_bucket) + + try: + bucket.delete_blob('test_rename_blob') + except gcloud.exceptions.NotFound: + pass + + snippets.rename_blob(bucket.name, test_blob.name, 'test_rename_blob') + + assert bucket.get_blob('test_rename_blob') is not None + assert bucket.get_blob(test_blob.name) is None + + +def test_copy_blob(test_blob, cloud_config): + bucket = storage.Client().bucket(cloud_config.storage_bucket) + + try: + bucket.delete_blob('test_copy_blob') + except gcloud.exceptions.NotFound: + pass + + snippets.copy_blob( + bucket.name, test_blob.name, bucket.name, 'test_copy_blob') + + assert bucket.get_blob('test_copy_blob') is not None + assert bucket.get_blob(test_blob.name) is not None From 667c262e85050a3a2274f0352b4837d33a741140 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 16 Aug 2016 13:32:42 -0700 Subject: [PATCH 005/197] Auto-update dependencies. (#456) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 868847aeb..2beeafe63 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -gcloud==0.17.0 +gcloud==0.18.1 From a9d1ebc18532d90d5bba7045136b1be682ef0b58 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 19 Aug 2016 13:56:28 -0700 Subject: [PATCH 006/197] Fix import order lint errors Change-Id: Ieaf7237fc6f925daec46a07d2e81a452b841198a --- encryption_test.py | 3 ++- snippets_test.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/encryption_test.py b/encryption_test.py index ddef282b8..52d3e6d15 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -15,10 +15,11 @@ import base64 import tempfile -import encryption from gcloud import storage import pytest +import encryption + TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=' TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) diff --git a/snippets_test.py 
b/snippets_test.py index b38aa4380..f215d2754 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -18,6 +18,7 @@ from gcloud import storage import pytest import requests + import snippets From 146086130566202d7d665ca6bc7fd32e3f858a68 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 31 Aug 2016 13:24:24 -0700 Subject: [PATCH 007/197] Add storage acl samples Change-Id: Ib44f9bb42bf0c0607e64905a26369f06ea5fb231 --- acl.py | 255 ++++++++++++++++++++++++++++++++++++++++++++++++++++ acl_test.py | 135 ++++++++++++++++++++++++++++ 2 files changed, 390 insertions(+) create mode 100644 acl.py create mode 100644 acl_test.py diff --git a/acl.py b/acl.py new file mode 100644 index 000000000..6fc653ada --- /dev/null +++ b/acl.py @@ -0,0 +1,255 @@ +#!/usr/bin/env python + +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to manage access control lists (acls) in +Google Cloud Storage. + +For more information, see the README.md under /storage and the documentation +at https://cloud.google.com/storage/docs/encryption. 
+""" + +import argparse + +from gcloud import storage + + +def get_bucket_acl(bucket_name): + """Prints out a bucket's access control list.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + for entry in bucket.acl: + print('{}: {}'.format(entry['role'], entry['entity'])) + + +def get_bucket_acl_for_user(bucket_name, user_email): + """Prints out a bucket's access control list for a given user.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # get the roles for different types of entities. + roles = bucket.acl.user(user_email).get_roles() + + print(roles) + + +def set_bucket_acl(bucket_name, user_email): + """Adds a user as an owner on the given bucket.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # grant access to different types of entities. You can also use + # `grant_read` or `grant_write` to grant different roles. + bucket.acl.user(user_email).grant_owner() + bucket.acl.save() + + print('Added user {} as an owner on bucket {}.'.format( + user_email, bucket_name)) + + +def remove_bucket_acl(bucket_name, user_email): + """Removes a user from the access control list of the given bucket.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # remove access for different types of entities. 
+ bucket.acl.user(user_email).revoke_read() + bucket.acl.user(user_email).revoke_write() + bucket.acl.user(user_email).revoke_owner() + bucket.acl.save() + + print('Removed user {} from bucket {}.'.format( + user_email, bucket_name)) + + +def set_bucket_default_acl(bucket_name, user_email): + """Adds a user as an owner in the given bucket's default object access + control list.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # grant access to different types of entities. You can also use + # `grant_read` or `grant_write` to grant different roles. + bucket.default_object_acl.user(user_email).grant_owner() + bucket.default_object_acl.save() + + print('Added user {} as an owner in the default acl on bucket {}.'.format( + user_email, bucket_name)) + + +def remove_bucket_default_acl(bucket_name, user_email): + """Removes a user from the access control list of the given bucket's + default object access control list.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # remove access for different types of entities. 
+ bucket.default_object_acl.user(user_email).revoke_read() + bucket.default_object_acl.user(user_email).revoke_write() + bucket.default_object_acl.user(user_email).revoke_owner() + bucket.default_object_acl.save() + + print('Removed user {} from the default acl of bucket {}.'.format( + user_email, bucket_name)) + + +def get_blob_acl(bucket_name, blob_name): + """Prints out a blob's access control list.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + for entry in blob.acl: + print('{}: {}'.format(entry['role'], entry['entity'])) + + +def get_blob_acl_for_user(bucket_name, blob_name, user_email): + """Prints out a bucket's access control list for a given user.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # get the roles for different types of entities. + roles = blob.acl.user(user_email).get_roles() + + print(roles) + + +def set_blob_acl(bucket_name, blob_name, user_email): + """Adds a user as an owner on the given blob.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # grant access to different types of entities. You can also use + # `grant_read` or `grant_write` to grant different roles. 
+ blob.acl.user(user_email).grant_owner() + blob.acl.save() + + print('Added user {} as an owner on blob {} in bucket {}.'.format( + user_email, blob_name, bucket_name)) + + +def remove_blob_acl(bucket_name, blob_name, user_email): + """Removes a user from the access control list of the given blob in the + given bucket.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # remove access for different types of entities. + blob.acl.user(user_email).revoke_read() + blob.acl.user(user_email).revoke_write() + blob.acl.user(user_email).revoke_owner() + blob.acl.save() + + print('Removed user {} from blob {} in bucket {}.'.format( + user_email, blob_name, bucket_name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + subparsers = parser.add_subparsers(dest='command') + + get_bucket_acl_parser = subparsers.add_parser( + 'get-bucket-acl', help=get_bucket_acl.__doc__) + get_bucket_acl_parser.add_argument('bucket_name') + + get_bucket_acl_for_user_parser = subparsers.add_parser( + 'get-bucket-acl-for-user', help=get_bucket_acl.__doc__) + get_bucket_acl_for_user_parser.add_argument('bucket_name') + get_bucket_acl_for_user_parser.add_argument('user_email') + + set_bucket_acl_parser = subparsers.add_parser( + 'set-bucket-acl', help=set_bucket_acl.__doc__) + set_bucket_acl_parser.add_argument('bucket_name') + set_bucket_acl_parser.add_argument('user_email') + + remove_bucket_acl_parser = subparsers.add_parser( + 'remove-bucket-acl', help=remove_bucket_acl.__doc__) + remove_bucket_acl_parser.add_argument('bucket_name') + remove_bucket_acl_parser.add_argument('user_email') + + set_bucket_default_acl_parser = subparsers.add_parser( + 'set-bucket-default-acl', help=set_bucket_default_acl.__doc__) + 
set_bucket_default_acl_parser.add_argument('bucket_name') + set_bucket_default_acl_parser.add_argument('user_email') + + remove_bucket_default_acl_parser = subparsers.add_parser( + 'remove-bucket-default-acl', help=remove_bucket_default_acl.__doc__) + remove_bucket_default_acl_parser.add_argument('bucket_name') + remove_bucket_default_acl_parser.add_argument('user_email') + + get_blob_acl_parser = subparsers.add_parser( + 'get-blob-acl', help=get_blob_acl.__doc__) + get_blob_acl_parser.add_argument('bucket_name') + get_blob_acl_parser.add_argument('blob_name') + + get_blob_acl_for_user_parser = subparsers.add_parser( + 'get-blob-acl-for-user', help=get_blob_acl_for_user.__doc__) + get_blob_acl_for_user_parser.add_argument('bucket_name') + get_blob_acl_for_user_parser.add_argument('blob_name') + get_blob_acl_for_user_parser.add_argument('user_email') + + set_blob_acl_parser = subparsers.add_parser( + 'set-blob-acl', help=set_blob_acl.__doc__) + set_blob_acl_parser.add_argument('bucket_name') + set_blob_acl_parser.add_argument('blob_name') + set_blob_acl_parser.add_argument('user_email') + + remove_blob_acl_parser = subparsers.add_parser( + 'remove-blob-acl', help=remove_blob_acl.__doc__) + remove_blob_acl_parser.add_argument('bucket_name') + remove_blob_acl_parser.add_argument('blob_name') + remove_blob_acl_parser.add_argument('user_email') + + args = parser.parse_args() + + if args.command == 'get-bucket-acl': + get_bucket_acl(args.bucket_name) + elif args.command == 'get-bucket-acl-for-user': + get_bucket_acl_for_user(args.bucket_name, args.user_email) + elif args.command == 'set-bucket-acl': + set_bucket_acl(args.bucket_name, args.user_email) + elif args.command == 'remove-bucket-acl': + remove_bucket_acl(args.bucket_name, args.user_email) + elif args.command == 'set-bucket-default-acl': + set_bucket_default_acl(args.bucket_name, args.user_email) + elif args.command == 'remove-bucket-default-acl': + remove_bucket_default_acl(args.bucket_name, args.user_email) + 
elif args.command == 'get-blob-acl': + get_blob_acl(args.bucket_name, args.blob_name) + elif args.command == 'get-blob-acl-for-user': + get_blob_acl_for_user( + args.bucket_name, args.blob_name, args.user_email) + elif args.command == 'set-blob-acl': + set_blob_acl(args.bucket_name, args.blob_name, args.user_email) + elif args.command == 'remove-blob-acl': + remove_blob_acl(args.bucket_name, args.blob_name, args.user_email) diff --git a/acl_test.py b/acl_test.py new file mode 100644 index 000000000..6315d105a --- /dev/null +++ b/acl_test.py @@ -0,0 +1,135 @@ +# Copyright 2016 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gcloud import storage +import gcloud.storage.acl +import pytest + +import acl + +# Typically we'd use a @example.com address, but GCS requires a real Google +# account. 
+TEST_EMAIL = 'jonwayne@google.com' + + +@pytest.fixture +def test_bucket(cloud_config): + """Yields a bucket that resets its acl after the test completes.""" + bucket = storage.Client().bucket(cloud_config.storage_bucket) + acl = gcloud.storage.acl.BucketACL(bucket) + object_default_acl = gcloud.storage.acl.DefaultObjectACL(bucket) + acl.reload() + object_default_acl.reload() + yield bucket + acl.save() + object_default_acl.save() + + +@pytest.fixture +def test_blob(cloud_config): + """Yields a blob that resets its acl after the test completes.""" + bucket = storage.Client().bucket(cloud_config.storage_bucket) + blob = bucket.blob('storage_acl_test_sigil') + blob.upload_from_string('Hello, is it me you\'re looking for?') + acl = gcloud.storage.acl.ObjectACL(blob) + acl.reload() + yield blob + acl.save() + + +def test_get_bucket_acl(cloud_config, capsys): + acl.get_bucket_acl(cloud_config.storage_bucket) + out, _ = capsys.readouterr() + assert out + + +def test_get_bucket_acl_for_user(test_bucket, cloud_config, capsys): + test_bucket.acl.user(TEST_EMAIL).grant_owner() + test_bucket.acl.save() + + acl.get_bucket_acl_for_user(cloud_config.storage_bucket, TEST_EMAIL) + + out, _ = capsys.readouterr() + assert 'OWNER' in out + + +def test_set_bucket_acl(test_bucket, cloud_config): + acl.set_bucket_acl(cloud_config.storage_bucket, TEST_EMAIL) + + test_bucket.acl.reload() + assert 'OWNER' in test_bucket.acl.user(TEST_EMAIL).get_roles() + + +def test_remove_bucket_acl(test_bucket, cloud_config): + test_bucket.acl.user(TEST_EMAIL).grant_owner() + test_bucket.acl.save() + + acl.remove_bucket_acl(cloud_config.storage_bucket, TEST_EMAIL) + + test_bucket.acl.reload() + assert 'OWNER' not in test_bucket.acl.user(TEST_EMAIL).get_roles() + + +def test_set_bucket_default_acl(test_bucket, cloud_config): + acl.set_bucket_default_acl(cloud_config.storage_bucket, TEST_EMAIL) + + test_bucket.default_object_acl.reload() + roles = 
test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() + assert 'OWNER' in roles + + +def test_remove_bucket_default_acl(test_bucket, cloud_config): + test_bucket.acl.user(TEST_EMAIL).grant_owner() + test_bucket.acl.save() + + acl.remove_bucket_default_acl(cloud_config.storage_bucket, TEST_EMAIL) + + test_bucket.default_object_acl.reload() + roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() + assert 'OWNER' not in roles + + +def test_get_blob_acl(test_blob, cloud_config, capsys): + acl.get_blob_acl(cloud_config.storage_bucket, test_blob.name) + out, _ = capsys.readouterr() + assert out + + +def test_get_blob_acl_for_user(test_blob, cloud_config, capsys): + test_blob.acl.user(TEST_EMAIL).grant_owner() + test_blob.acl.save() + + acl.get_blob_acl_for_user( + cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + + out, _ = capsys.readouterr() + assert 'OWNER' in out + + +def test_set_blob_acl(test_blob, cloud_config): + acl.set_blob_acl(cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + + test_blob.acl.reload() + assert 'OWNER' in test_blob.acl.user(TEST_EMAIL).get_roles() + + +def test_remove_blob_acl(test_blob, cloud_config): + test_blob.acl.user(TEST_EMAIL).grant_owner() + test_blob.acl.save() + + acl.remove_blob_acl( + cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + + test_blob.acl.reload() + assert 'OWNER' not in test_blob.acl.user(TEST_EMAIL).get_roles() From 430c4bb95b5be31d969988bd89d4011c579f95e3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 31 Aug 2016 14:44:11 -0700 Subject: [PATCH 008/197] Address review comments Change-Id: I94973a839f38ef3d1ec657c3c79f666eca56728b --- acl.py | 188 ++++++++++++++++++++++++++++------------------------ acl_test.py | 40 +++++------ 2 files changed, 123 insertions(+), 105 deletions(-) diff --git a/acl.py b/acl.py index 6fc653ada..68547e09c 100644 --- a/acl.py +++ b/acl.py @@ -26,7 +26,7 @@ from gcloud import storage -def get_bucket_acl(bucket_name): +def 
print_bucket_acl(bucket_name): """Prints out a bucket's access control list.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) @@ -35,11 +35,12 @@ def get_bucket_acl(bucket_name): print('{}: {}'.format(entry['role'], entry['entity'])) -def get_bucket_acl_for_user(bucket_name, user_email): +def print_bucket_acl_for_user(bucket_name, user_email): """Prints out a bucket's access control list for a given user.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) + # Reload fetches the current ACL from Cloud Storage. bucket.acl.reload() # You can also use `group`, `domain`, `all_authenticated` and `all` to @@ -49,14 +50,18 @@ def get_bucket_acl_for_user(bucket_name, user_email): print(roles) -def set_bucket_acl(bucket_name, user_email): +def add_bucket_owner(bucket_name, user_email): """Adds a user as an owner on the given bucket.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # grant access to different types of entities. You can also use - # `grant_read` or `grant_write` to grant different roles. + # Reload fetches the current ACL from Cloud Storage. + bucket.acl.reload() + + # You can also use `group()`, `domain()`, `all_authenticated()` and `all()` + # to grant access to different types of entities. + # You can also use `grant_read()` or `grant_write()` to grant different + # roles. bucket.acl.user(user_email).grant_owner() bucket.acl.save() @@ -64,11 +69,14 @@ def set_bucket_acl(bucket_name, user_email): user_email, bucket_name)) -def remove_bucket_acl(bucket_name, user_email): +def remove_bucket_owner(bucket_name, user_email): """Removes a user from the access control list of the given bucket.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) + # Reload fetches the current ACL from Cloud Storage. 
+ bucket.acl.reload() + # You can also use `group`, `domain`, `all_authenticated` and `all` to # remove access for different types of entities. bucket.acl.user(user_email).revoke_read() @@ -80,12 +88,15 @@ def remove_bucket_acl(bucket_name, user_email): user_email, bucket_name)) -def set_bucket_default_acl(bucket_name, user_email): +def add_bucket_default_owner(bucket_name, user_email): """Adds a user as an owner in the given bucket's default object access control list.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) + # Reload fetches the current ACL from Cloud Storage. + bucket.acl.reload() + # You can also use `group`, `domain`, `all_authenticated` and `all` to # grant access to different types of entities. You can also use # `grant_read` or `grant_write` to grant different roles. @@ -96,12 +107,15 @@ def set_bucket_default_acl(bucket_name, user_email): user_email, bucket_name)) -def remove_bucket_default_acl(bucket_name, user_email): +def remove_bucket_default_owner(bucket_name, user_email): """Removes a user from the access control list of the given bucket's default object access control list.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) + # Reload fetches the current ACL from Cloud Storage. + bucket.acl.reload() + # You can also use `group`, `domain`, `all_authenticated` and `all` to # remove access for different types of entities. 
bucket.default_object_acl.user(user_email).revoke_read() @@ -113,7 +127,7 @@ def remove_bucket_default_acl(bucket_name, user_email): user_email, bucket_name)) -def get_blob_acl(bucket_name, blob_name): +def print_blob_acl(bucket_name, blob_name): """Prints out a blob's access control list.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) @@ -123,12 +137,13 @@ def get_blob_acl(bucket_name, blob_name): print('{}: {}'.format(entry['role'], entry['entity'])) -def get_blob_acl_for_user(bucket_name, blob_name, user_email): - """Prints out a bucket's access control list for a given user.""" +def print_blob_acl_for_user(bucket_name, blob_name, user_email): + """Prints out a blob's access control list for a given user.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) + # Reload fetches the current ACL from Cloud Storage. blob.acl.reload() # You can also use `group`, `domain`, `all_authenticated` and `all` to @@ -138,12 +153,15 @@ def get_blob_acl_for_user(bucket_name, blob_name, user_email): print(roles) -def set_blob_acl(bucket_name, blob_name, user_email): +def add_blob_owner(bucket_name, blob_name, user_email): """Adds a user as an owner on the given blob.""" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) + # Reload fetches the current ACL from Cloud Storage. + blob.acl.reload() + # You can also use `group`, `domain`, `all_authenticated` and `all` to # grant access to different types of entities. You can also use # `grant_read` or `grant_write` to grant different roles. 
@@ -154,7 +172,7 @@ def set_blob_acl(bucket_name, blob_name, user_email): user_email, blob_name, bucket_name)) -def remove_blob_acl(bucket_name, blob_name, user_email): +def remove_blob_owner(bucket_name, blob_name, user_email): """Removes a user from the access control list of the given blob in the given bucket.""" storage_client = storage.Client() @@ -178,78 +196,78 @@ def remove_blob_acl(bucket_name, blob_name, user_email): formatter_class=argparse.RawDescriptionHelpFormatter) subparsers = parser.add_subparsers(dest='command') - get_bucket_acl_parser = subparsers.add_parser( - 'get-bucket-acl', help=get_bucket_acl.__doc__) - get_bucket_acl_parser.add_argument('bucket_name') - - get_bucket_acl_for_user_parser = subparsers.add_parser( - 'get-bucket-acl-for-user', help=get_bucket_acl.__doc__) - get_bucket_acl_for_user_parser.add_argument('bucket_name') - get_bucket_acl_for_user_parser.add_argument('user_email') - - set_bucket_acl_parser = subparsers.add_parser( - 'set-bucket-acl', help=set_bucket_acl.__doc__) - set_bucket_acl_parser.add_argument('bucket_name') - set_bucket_acl_parser.add_argument('user_email') - - remove_bucket_acl_parser = subparsers.add_parser( - 'remove-bucket-acl', help=remove_bucket_acl.__doc__) - remove_bucket_acl_parser.add_argument('bucket_name') - remove_bucket_acl_parser.add_argument('user_email') - - set_bucket_default_acl_parser = subparsers.add_parser( - 'set-bucket-default-acl', help=set_bucket_default_acl.__doc__) - set_bucket_default_acl_parser.add_argument('bucket_name') - set_bucket_default_acl_parser.add_argument('user_email') - - remove_bucket_default_acl_parser = subparsers.add_parser( - 'remove-bucket-default-acl', help=remove_bucket_default_acl.__doc__) - remove_bucket_default_acl_parser.add_argument('bucket_name') - remove_bucket_default_acl_parser.add_argument('user_email') - - get_blob_acl_parser = subparsers.add_parser( - 'get-blob-acl', help=get_blob_acl.__doc__) - get_blob_acl_parser.add_argument('bucket_name') - 
get_blob_acl_parser.add_argument('blob_name') - - get_blob_acl_for_user_parser = subparsers.add_parser( - 'get-blob-acl-for-user', help=get_blob_acl_for_user.__doc__) - get_blob_acl_for_user_parser.add_argument('bucket_name') - get_blob_acl_for_user_parser.add_argument('blob_name') - get_blob_acl_for_user_parser.add_argument('user_email') - - set_blob_acl_parser = subparsers.add_parser( - 'set-blob-acl', help=set_blob_acl.__doc__) - set_blob_acl_parser.add_argument('bucket_name') - set_blob_acl_parser.add_argument('blob_name') - set_blob_acl_parser.add_argument('user_email') - - remove_blob_acl_parser = subparsers.add_parser( - 'remove-blob-acl', help=remove_blob_acl.__doc__) - remove_blob_acl_parser.add_argument('bucket_name') - remove_blob_acl_parser.add_argument('blob_name') - remove_blob_acl_parser.add_argument('user_email') + print_bucket_acl_parser = subparsers.add_parser( + 'print-bucket-acl', help=print_bucket_acl.__doc__) + print_bucket_acl_parser.add_argument('bucket_name') + + print_bucket_acl_for_user_parser = subparsers.add_parser( + 'print-bucket-acl-for-user', help=print_bucket_acl.__doc__) + print_bucket_acl_for_user_parser.add_argument('bucket_name') + print_bucket_acl_for_user_parser.add_argument('user_email') + + add_bucket_owner_parser = subparsers.add_parser( + 'add-bucket-owner', help=add_bucket_owner.__doc__) + add_bucket_owner_parser.add_argument('bucket_name') + add_bucket_owner_parser.add_argument('user_email') + + remove_bucket_owner_parser = subparsers.add_parser( + 'remove-bucket-owner', help=remove_bucket_owner.__doc__) + remove_bucket_owner_parser.add_argument('bucket_name') + remove_bucket_owner_parser.add_argument('user_email') + + add_bucket_default_owner_parser = subparsers.add_parser( + 'add-bucket-default-owner', help=add_bucket_default_owner.__doc__) + add_bucket_default_owner_parser.add_argument('bucket_name') + add_bucket_default_owner_parser.add_argument('user_email') + + remove_bucket_default_owner_parser = 
subparsers.add_parser( + 'remove-bucket-default-owner', help=remove_bucket_default_owner.__doc__) + remove_bucket_default_owner_parser.add_argument('bucket_name') + remove_bucket_default_owner_parser.add_argument('user_email') + + print_blob_acl_parser = subparsers.add_parser( + 'print-blob-acl', help=print_blob_acl.__doc__) + print_blob_acl_parser.add_argument('bucket_name') + print_blob_acl_parser.add_argument('blob_name') + + print_blob_acl_for_user_parser = subparsers.add_parser( + 'print-blob-acl-for-user', help=print_blob_acl_for_user.__doc__) + print_blob_acl_for_user_parser.add_argument('bucket_name') + print_blob_acl_for_user_parser.add_argument('blob_name') + print_blob_acl_for_user_parser.add_argument('user_email') + + add_blob_owner_parser = subparsers.add_parser( + 'add-blob-owner', help=add_blob_owner.__doc__) + add_blob_owner_parser.add_argument('bucket_name') + add_blob_owner_parser.add_argument('blob_name') + add_blob_owner_parser.add_argument('user_email') + + remove_blob_owner_parser = subparsers.add_parser( + 'remove-blob-owner', help=remove_blob_owner.__doc__) + remove_blob_owner_parser.add_argument('bucket_name') + remove_blob_owner_parser.add_argument('blob_name') + remove_blob_owner_parser.add_argument('user_email') args = parser.parse_args() - if args.command == 'get-bucket-acl': - get_bucket_acl(args.bucket_name) - elif args.command == 'get-bucket-acl-for-user': - get_bucket_acl_for_user(args.bucket_name, args.user_email) - elif args.command == 'set-bucket-acl': - set_bucket_acl(args.bucket_name, args.user_email) - elif args.command == 'remove-bucket-acl': - remove_bucket_acl(args.bucket_name, args.user_email) - elif args.command == 'set-bucket-default-acl': - set_bucket_default_acl(args.bucket_name, args.user_email) - elif args.command == 'remove-bucket-default-acl': - remove_bucket_default_acl(args.bucket_name, args.user_email) - elif args.command == 'get-blob-acl': - get_blob_acl(args.bucket_name, args.blob_name) - elif args.command == 
'get-blob-acl-for-user': - get_blob_acl_for_user( + if args.command == 'print-bucket-acl': + print_bucket_acl(args.bucket_name) + elif args.command == 'print-bucket-acl-for-user': + print_bucket_acl_for_user(args.bucket_name, args.user_email) + elif args.command == 'add-bucket-owner': + add_bucket_owner(args.bucket_name, args.user_email) + elif args.command == 'remove-bucket-owner': + remove_bucket_owner(args.bucket_name, args.user_email) + elif args.command == 'add-bucket-default-owner': + add_bucket_default_owner(args.bucket_name, args.user_email) + elif args.command == 'remove-bucket-default-owner': + remove_bucket_default_owner(args.bucket_name, args.user_email) + elif args.command == 'print-blob-acl': + print_blob_acl(args.bucket_name, args.blob_name) + elif args.command == 'print-blob-acl-for-user': + print_blob_acl_for_user( args.bucket_name, args.blob_name, args.user_email) - elif args.command == 'set-blob-acl': - set_blob_acl(args.bucket_name, args.blob_name, args.user_email) - elif args.command == 'remove-blob-acl': - remove_blob_acl(args.bucket_name, args.blob_name, args.user_email) + elif args.command == 'add-blob-owner': + add_blob_owner(args.bucket_name, args.blob_name, args.user_email) + elif args.command == 'remove-blob-owner': + remove_blob_owner(args.bucket_name, args.blob_name, args.user_email) diff --git a/acl_test.py b/acl_test.py index 6315d105a..eca1570d8 100644 --- a/acl_test.py +++ b/acl_test.py @@ -48,87 +48,87 @@ def test_blob(cloud_config): acl.save() -def test_get_bucket_acl(cloud_config, capsys): - acl.get_bucket_acl(cloud_config.storage_bucket) +def test_print_bucket_acl(cloud_config, capsys): + acl.print_bucket_acl(cloud_config.storage_bucket) out, _ = capsys.readouterr() assert out -def test_get_bucket_acl_for_user(test_bucket, cloud_config, capsys): +def test_print_bucket_acl_for_user(test_bucket, cloud_config, capsys): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - 
acl.get_bucket_acl_for_user(cloud_config.storage_bucket, TEST_EMAIL) + acl.print_bucket_acl_for_user(cloud_config.storage_bucket, TEST_EMAIL) out, _ = capsys.readouterr() assert 'OWNER' in out -def test_set_bucket_acl(test_bucket, cloud_config): - acl.set_bucket_acl(cloud_config.storage_bucket, TEST_EMAIL) +def test_add_bucket_owner(test_bucket, cloud_config): + acl.add_bucket_owner(cloud_config.storage_bucket, TEST_EMAIL) test_bucket.acl.reload() assert 'OWNER' in test_bucket.acl.user(TEST_EMAIL).get_roles() -def test_remove_bucket_acl(test_bucket, cloud_config): +def test_remove_bucket_owner(test_bucket, cloud_config): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_acl(cloud_config.storage_bucket, TEST_EMAIL) + acl.remove_bucket_owner(cloud_config.storage_bucket, TEST_EMAIL) test_bucket.acl.reload() assert 'OWNER' not in test_bucket.acl.user(TEST_EMAIL).get_roles() -def test_set_bucket_default_acl(test_bucket, cloud_config): - acl.set_bucket_default_acl(cloud_config.storage_bucket, TEST_EMAIL) +def test_add_bucket_default_owner(test_bucket, cloud_config): + acl.add_bucket_default_owner(cloud_config.storage_bucket, TEST_EMAIL) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() assert 'OWNER' in roles -def test_remove_bucket_default_acl(test_bucket, cloud_config): +def test_remove_bucket_default_owner(test_bucket, cloud_config): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_default_acl(cloud_config.storage_bucket, TEST_EMAIL) + acl.remove_bucket_default_owner(cloud_config.storage_bucket, TEST_EMAIL) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() assert 'OWNER' not in roles -def test_get_blob_acl(test_blob, cloud_config, capsys): - acl.get_blob_acl(cloud_config.storage_bucket, test_blob.name) +def test_print_blob_acl(test_blob, cloud_config, capsys): + 
acl.print_blob_acl(cloud_config.storage_bucket, test_blob.name) out, _ = capsys.readouterr() assert out -def test_get_blob_acl_for_user(test_blob, cloud_config, capsys): +def test_print_blob_acl_for_user(test_blob, cloud_config, capsys): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() - acl.get_blob_acl_for_user( + acl.print_blob_acl_for_user( cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) out, _ = capsys.readouterr() assert 'OWNER' in out -def test_set_blob_acl(test_blob, cloud_config): - acl.set_blob_acl(cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) +def test_add_blob_owner(test_blob, cloud_config): + acl.add_blob_owner(cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) test_blob.acl.reload() assert 'OWNER' in test_blob.acl.user(TEST_EMAIL).get_roles() -def test_remove_blob_acl(test_blob, cloud_config): +def test_remove_blob_owner(test_blob, cloud_config): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() - acl.remove_blob_acl( + acl.remove_blob_owner( cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) test_blob.acl.reload() From 79aa1f5ab16344fdcbcd6a0ab62abaa9e1e7ed09 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 1 Sep 2016 10:32:44 -0700 Subject: [PATCH 009/197] Fix lint issue Change-Id: Ie9cf585303931f200a763d691906ad56221105fd --- acl.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/acl.py b/acl.py index 68547e09c..324f51b2d 100644 --- a/acl.py +++ b/acl.py @@ -221,7 +221,8 @@ def remove_blob_owner(bucket_name, blob_name, user_email): add_bucket_default_owner_parser.add_argument('user_email') remove_bucket_default_owner_parser = subparsers.add_parser( - 'remove-bucket-default-owner', help=remove_bucket_default_owner.__doc__) + 'remove-bucket-default-owner', + help=remove_bucket_default_owner.__doc__) remove_bucket_default_owner_parser.add_argument('bucket_name') remove_bucket_default_owner_parser.add_argument('user_email') From 
93b7b07bb110e8c0b766a0bc8702319154accf2f Mon Sep 17 00:00:00 2001 From: DPE bot Date: Fri, 23 Sep 2016 09:48:46 -0700 Subject: [PATCH 010/197] Auto-update dependencies. (#540) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2beeafe63..dfb42aaaa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -gcloud==0.18.1 +gcloud==0.18.2 From 2a0b60a4e15889039863b1115200dcd3e09ae303 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 26 Sep 2016 11:34:45 -0700 Subject: [PATCH 011/197] Auto-update dependencies. (#542) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index dfb42aaaa..97a207d3a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -gcloud==0.18.2 +gcloud==0.18.3 From deff0979246aa089258728819fd4b911523f0e0f Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 29 Sep 2016 20:51:47 -0700 Subject: [PATCH 012/197] Move to google-cloud (#544) --- acl.py | 2 +- acl_test.py | 10 +++++----- encryption.py | 2 +- encryption_test.py | 2 +- requirements.txt | 2 +- snippets.py | 2 +- snippets_test.py | 8 ++++---- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/acl.py b/acl.py index 324f51b2d..d742ae428 100644 --- a/acl.py +++ b/acl.py @@ -23,7 +23,7 @@ import argparse -from gcloud import storage +from google.cloud import storage def print_bucket_acl(bucket_name): diff --git a/acl_test.py b/acl_test.py index eca1570d8..3197b4ea0 100644 --- a/acl_test.py +++ b/acl_test.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from gcloud import storage -import gcloud.storage.acl +from google.cloud import storage +import google.cloud.storage.acl import pytest import acl @@ -27,8 +27,8 @@ def test_bucket(cloud_config): """Yields a bucket that resets its acl after the test completes.""" bucket = storage.Client().bucket(cloud_config.storage_bucket) - acl = gcloud.storage.acl.BucketACL(bucket) - object_default_acl = gcloud.storage.acl.DefaultObjectACL(bucket) + acl = google.cloud.storage.acl.BucketACL(bucket) + object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket) acl.reload() object_default_acl.reload() yield bucket @@ -42,7 +42,7 @@ def test_blob(cloud_config): bucket = storage.Client().bucket(cloud_config.storage_bucket) blob = bucket.blob('storage_acl_test_sigil') blob.upload_from_string('Hello, is it me you\'re looking for?') - acl = gcloud.storage.acl.ObjectACL(blob) + acl = google.cloud.storage.acl.ObjectACL(blob) acl.reload() yield blob acl.save() diff --git a/encryption.py b/encryption.py index 571b91b21..d15be17c2 100644 --- a/encryption.py +++ b/encryption.py @@ -31,7 +31,7 @@ import base64 import os -from gcloud import storage +from google.cloud import storage def generate_encryption_key(): diff --git a/encryption_test.py b/encryption_test.py index 52d3e6d15..28fa5e3b8 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -15,7 +15,7 @@ import base64 import tempfile -from gcloud import storage +from google.cloud import storage import pytest import encryption diff --git a/requirements.txt b/requirements.txt index 97a207d3a..303fed123 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -gcloud==0.18.3 +google-cloud-storage==0.20.0 diff --git a/snippets.py b/snippets.py index 0f6b88cb3..c6c69d6ae 100644 --- a/snippets.py +++ b/snippets.py @@ -24,7 +24,7 @@ import argparse import datetime -from gcloud import storage +from google.cloud import storage def create_bucket(bucket_name): diff --git a/snippets_test.py b/snippets_test.py index 
f215d2754..a657894be 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -14,8 +14,8 @@ import tempfile -import gcloud -from gcloud import storage +from google.cloud import storage +import google.cloud.exceptions import pytest import requests @@ -103,7 +103,7 @@ def test_rename_blob(test_blob, cloud_config): try: bucket.delete_blob('test_rename_blob') - except gcloud.exceptions.NotFound: + except google.cloud.exceptions.exceptions.NotFound: pass snippets.rename_blob(bucket.name, test_blob.name, 'test_rename_blob') @@ -117,7 +117,7 @@ def test_copy_blob(test_blob, cloud_config): try: bucket.delete_blob('test_copy_blob') - except gcloud.exceptions.NotFound: + except google.cloud.exceptions.NotFound: pass snippets.copy_blob( From 197a0d571fea3964e4a487ab61e158164af4994f Mon Sep 17 00:00:00 2001 From: Jason Dobry Date: Wed, 5 Oct 2016 09:56:04 -0700 Subject: [PATCH 013/197] Add new "quickstart" samples (#547) --- quickstart.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 quickstart.py diff --git a/quickstart.py b/quickstart.py new file mode 100644 index 000000000..9aff9b214 --- /dev/null +++ b/quickstart.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def run_quickstart(): + # [START storage_quickstart] + # Imports the Google Cloud client library + from google.cloud import storage + + # Instantiates a client + storage_client = storage.Client() + + # The name for the new bucket + bucket_name = 'my-new-bucket' + + # Creates the new bucket + bucket = storage_client.create_bucket(bucket_name) + + print('Bucket {} created.'.format(bucket.name)) + # [END storage_quickstart] + + +if __name__ == '__main__': + run_quickstart() From a6edaffb6a9ef3df7b0b84a25e92ac127e61b4ed Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 12 Oct 2016 10:48:57 -0700 Subject: [PATCH 014/197] Quickstart tests (#569) * Add tests for quickstarts * Update secrets --- quickstart_test.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 quickstart_test.py diff --git a/quickstart_test.py b/quickstart_test.py new file mode 100644 index 000000000..cb0503972 --- /dev/null +++ b/quickstart_test.py @@ -0,0 +1,28 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import quickstart + + +@mock.patch('google.cloud.storage.client.Client.create_bucket') +def test_quickstart(create_bucket_mock, capsys): + # Unlike other quickstart tests, this one mocks out the creation + # because buckets are expensive, globally-namespaced object. 
+ create_bucket_mock.return_value = mock.sentinel.bucket + + quickstart.run_quickstart() + + create_bucket_mock.assert_called_with('my-new-bucket') From 73df899517d22a806c0f743f0da671a9fb31e8d4 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 20 Oct 2016 10:01:15 -0700 Subject: [PATCH 015/197] Add basic readme generator (#580) --- README.rst | 235 ++++++++++++++++++++++++++++++++++++++++++++++++++ README.rst.in | 26 ++++++ 2 files changed, 261 insertions(+) create mode 100644 README.rst create mode 100644 README.rst.in diff --git a/README.rst b/README.rst new file mode 100644 index 000000000..ea49a4af3 --- /dev/null +++ b/README.rst @@ -0,0 +1,235 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Storage Python Samples +=============================================================================== + +This directory contains samples for Google Cloud Storage. `Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. + + +.. _Google Cloud Storage: https://cloud.google.com/storage/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +this means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes `_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. 
To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _gce-auth: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] + bucket_name + {create-bucket,delete-bucket,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} + ... + + This application demonstrates how to perform basic operations on blobs + (objects) in a Google Cloud Storage bucket. + + For more information, see the README.md under /storage and the documentation + at https://cloud.google.com/storage/docs. + + positional arguments: + bucket_name Your cloud storage bucket. + {create-bucket,delete-bucket,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} + create-bucket Creates a new bucket. + delete-bucket Deletes a bucket. The bucket must be empty. 
+ list Lists all the blobs in the bucket. + list-with-prefix Lists all the blobs in the bucket that begin with the + prefix. This can be used to list all blobs in a + "folder", e.g. "public/". The delimiter argument can + be used to restrict the results to only the "files" in + the given "folder". Without the delimiter, the entire + tree under the prefix is returned. For example, given + these blobs: /a/1.txt /a/b/2.txt If you just specify + prefix = '/a', you'll get back: /a/1.txt /a/b/2.txt + However, if you specify prefix='/a' and delimiter='/', + you'll get back: /a/1.txt + upload Uploads a file to the bucket. + download Downloads a blob from the bucket. + delete Deletes a blob from the bucket. + metadata Prints out a blob's metadata. + make-public Makes a blob publicly accessible. + signed-url Generates a signed URL for a blob. Note that this + method requires a service account key file. You can + not use this if you are using Application Default + Credentials from Google Compute Engine or from the + Google Cloud SDK. + rename Renames a blob. + copy Renames a blob. + + optional arguments: + -h, --help show this help message and exit + + +Access Control Lists ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python acl.py + + usage: acl.py [-h] + {print-bucket-acl,print-bucket-acl-for-user,add-bucket-owner,remove-bucket-owner,add-bucket-default-owner,remove-bucket-default-owner,print-blob-acl,print-blob-acl-for-user,add-blob-owner,remove-blob-owner} + ... + + This application demonstrates how to manage access control lists (acls) in + Google Cloud Storage. + + For more information, see the README.md under /storage and the documentation + at https://cloud.google.com/storage/docs/encryption. 
+ + positional arguments: + {print-bucket-acl,print-bucket-acl-for-user,add-bucket-owner,remove-bucket-owner,add-bucket-default-owner,remove-bucket-default-owner,print-blob-acl,print-blob-acl-for-user,add-blob-owner,remove-blob-owner} + print-bucket-acl Prints out a bucket's access control list. + print-bucket-acl-for-user + Prints out a bucket's access control list. + add-bucket-owner Adds a user as an owner on the given bucket. + remove-bucket-owner + Removes a user from the access control list of the + given bucket. + add-bucket-default-owner + Adds a user as an owner in the given bucket's default + object access control list. + remove-bucket-default-owner + Removes a user from the access control list of the + given bucket's default object access control list. + print-blob-acl Prints out a blob's access control list. + print-blob-acl-for-user + Prints out a blob's access control list for a given + user. + add-blob-owner Adds a user as an owner on the given blob. + remove-blob-owner Removes a user from the access control list of the + given blob in the given bucket. + + optional arguments: + -h, --help show this help message and exit + + +Customer-Supplied Encryption ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python encryption.py + + usage: encryption.py [-h] {generate-encryption-key,upload,download,rotate} ... + + This application demonstrates how to upload and download encrypted blobs + (objects) in Google Cloud Storage. + + Use `generate-encryption-key` to generate an example key: + + python encryption.py generate-encryption-key + + Then use the key to upload and download files encrypted with a custom key. + + For more information, see the README.md under /storage and the documentation + at https://cloud.google.com/storage/docs/encryption. 
+ + positional arguments: + {generate-encryption-key,upload,download,rotate} + generate-encryption-key + Generates a 256 bit (32 byte) AES encryption key and + prints the base64 representation. This is included for + demonstration purposes. You should generate your own + key. Please remember that encryption keys should be + handled with a comprehensive security policy. + upload Uploads a file to a Google Cloud Storage bucket using + a custom encryption key. The file will be encrypted by + Google Cloud Storage and only retrievable using the + provided encryption key. + download Downloads a previously-encrypted blob from Google + Cloud Storage. The encryption key provided must be the + same key provided when uploading the blob. + rotate Performs a key rotation by re-writing an encrypted + blob with a new encryption key. + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python `_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source `_ and `report issues `_. + +.. ccl-docs: https://googlecloudplatform.github.io/google-cloud-python/ +.. ccl-source: https://github.com/GoogleCloudPlatform/google-cloud-python +.. ccl-issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/README.rst.in b/README.rst.in new file mode 100644 index 000000000..83cf9a5e3 --- /dev/null +++ b/README.rst.in @@ -0,0 +1,26 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Storage + short_name: Cloud Storage + url: https://cloud.google.com/storage/docs + description: > + `Google Cloud Storage`_ allows world-wide storage and retrieval of any + amount of data at any time. 
+ +setup: +- auth +- install_deps + +samples: +- name: Snippets + file: snippets.py + show_help: true +- name: Access Control Lists + file: acl.py + show_help: true +- name: Customer-Supplied Encryption + file: encryption.py + show_help: true + +cloud_client_library: true From 2e3f667e58158ff095207ce9b00db9f504194e63 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Oct 2016 11:03:17 -0700 Subject: [PATCH 016/197] Generate readmes for most service samples (#599) --- README.rst | 35 ++++++++++++++++++++++++++--------- README.rst.in | 2 ++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/README.rst b/README.rst index ea49a4af3..89f536544 100644 --- a/README.rst +++ b/README.rst @@ -6,6 +6,8 @@ Google Cloud Storage Python Samples This directory contains samples for Google Cloud Storage. `Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. + + .. _Google Cloud Storage: https://cloud.google.com/storage/docs Setup @@ -16,7 +18,7 @@ Authentication ++++++++++++++ Authentication is typically done through `Application Default Credentials`_, -this means you do not have to change the code to authenticate as long as +which means you do not have to change the code to authenticate as long as your environment has credentials. You have a few options for setting up authentication: @@ -29,7 +31,7 @@ authentication: #. When running on App Engine or Compute Engine, credentials are already set-up. However, you may need to configure your Compute Engine instance - with `additional scopes `_. + with `additional scopes`_. #. You can create a `Service Account key file`_. This file can be used to authenticate to Google Cloud Platform services from any environment. To use @@ -41,7 +43,7 @@ authentication: export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json .. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. 
_gce-auth: https://cloud.google.com/compute/docs/authentication#using +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using .. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount Install Dependencies @@ -68,6 +70,18 @@ Install Dependencies Samples ------------------------------------------------------------------------------- +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -223,13 +237,16 @@ To run this sample: The client library ------------------------------------------------------------------------------- -This sample uses the `Google Cloud Client Library for Python `_. +This sample uses the `Google Cloud Client Library for Python`_. You can read the documentation for more details on API usage and use GitHub -to `browse the source `_ and `report issues `_. - -.. ccl-docs: https://googlecloudplatform.github.io/google-cloud-python/ -.. ccl-source: https://github.com/GoogleCloudPlatform/google-cloud-python -.. ccl-issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues .. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/README.rst.in b/README.rst.in index 83cf9a5e3..2a6e37ff7 100644 --- a/README.rst.in +++ b/README.rst.in @@ -13,6 +13,8 @@ setup: - install_deps samples: +- name: Quickstart + file: quickstart.py - name: Snippets file: snippets.py show_help: true From 8141e0ab1f08198a8d5aed1092e85fc523c33732 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 15 Nov 2016 14:58:27 -0800 Subject: [PATCH 017/197] Update samples to support latest Google Cloud Python (#656) --- encryption.py | 10 ++++------ encryption_test.py | 5 ++--- requirements.txt | 2 +- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/encryption.py b/encryption.py index d15be17c2..782e70b5c 100644 --- a/encryption.py +++ b/encryption.py @@ -62,10 +62,9 @@ def upload_encrypted_blob(bucket_name, source_file_name, # Encryption key must be an AES256 key represented as a bytestring with # 32 bytes. Since it's passed in as a base64 encoded string, it needs # to be decoded. - encryption_key = base64.b64decode(base64_encryption_key) + blob.encryption_key = base64.b64decode(base64_encryption_key) - blob.upload_from_filename( - source_file_name, encryption_key=encryption_key) + blob.upload_from_filename(source_file_name) print('File {} uploaded to {}.'.format( source_file_name, @@ -86,10 +85,9 @@ def download_encrypted_blob(bucket_name, source_blob_name, # Encryption key must be an AES256 key represented as a bytestring with # 32 bytes. Since it's passed in as a base64 encoded string, it needs # to be decoded. 
- encryption_key = base64.b64decode(base64_encryption_key) + blob.encryption_key = base64.b64decode(base64_encryption_key) - blob.download_to_filename( - destination_file_name, encryption_key=encryption_key) + blob.download_to_filename(destination_file_name) print('Blob {} downloaded to {}.'.format( source_blob_name, diff --git a/encryption_test.py b/encryption_test.py index 28fa5e3b8..926be643c 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -49,9 +49,8 @@ def test_blob(cloud_config): bucket = storage.Client().bucket(cloud_config.storage_bucket) blob = bucket.blob('encryption_test_sigil') content = 'Hello, is it me you\'re looking for?' - blob.upload_from_string( - content, - encryption_key=TEST_ENCRYPTION_KEY_DECODED) + blob.encryption_key = TEST_ENCRYPTION_KEY_DECODED + blob.upload_from_string(content) return blob.name, content diff --git a/requirements.txt b/requirements.txt index 303fed123..025b3c37a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -google-cloud-storage==0.20.0 +google-cloud-storage==0.21.0 From 3510b2dad9e91e85ee6065669fb5eca7c372bce4 Mon Sep 17 00:00:00 2001 From: ryanmats Date: Thu, 17 Nov 2016 15:10:24 -0800 Subject: [PATCH 018/197] Edited upload/download to perform encryption properly (#667) --- encryption.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/encryption.py b/encryption.py index 782e70b5c..ce10634cc 100644 --- a/encryption.py +++ b/encryption.py @@ -32,6 +32,7 @@ import os from google.cloud import storage +from google.cloud.storage import Blob def generate_encryption_key(): @@ -57,12 +58,11 @@ def upload_encrypted_blob(bucket_name, source_file_name, """ storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(destination_blob_name) - # Encryption key must be an AES256 key represented as a bytestring with # 32 bytes. Since it's passed in as a base64 encoded string, it needs # to be decoded. 
- blob.encryption_key = base64.b64decode(base64_encryption_key) + encryption_key = base64.b64decode(base64_encryption_key) + blob = Blob(destination_blob_name, bucket, encryption_key=encryption_key) blob.upload_from_filename(source_file_name) @@ -80,12 +80,11 @@ def download_encrypted_blob(bucket_name, source_blob_name, """ storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(source_blob_name) - # Encryption key must be an AES256 key represented as a bytestring with # 32 bytes. Since it's passed in as a base64 encoded string, it needs # to be decoded. - blob.encryption_key = base64.b64decode(base64_encryption_key) + encryption_key = base64.b64decode(base64_encryption_key) + blob = Blob(source_blob_name, bucket, encryption_key=encryption_key) blob.download_to_filename(destination_file_name) @@ -131,9 +130,9 @@ def rotate_encryption_key(bucket_name, blob_name, base64_encryption_key, 'rotate', help=rotate_encryption_key.__doc__) rotate_parser.add_argument( 'bucket_name', help='Your cloud storage bucket.') - download_parser.add_argument('blob_name') - download_parser.add_argument('base64_encryption_key') - download_parser.add_argument('base64_new_encryption_key') + rotate_parser.add_argument('blob_name') + rotate_parser.add_argument('base64_encryption_key') + rotate_parser.add_argument('base64_new_encryption_key') args = parser.parse_args() From 3f9071f81d3c2e92b99b2da77aefcf79d3a49a2f Mon Sep 17 00:00:00 2001 From: Ryan Matsumoto Date: Tue, 22 Nov 2016 12:12:06 -0800 Subject: [PATCH 019/197] Storage Encryption Key Rotation Sample using Veneer + Tests (#672) --- encryption.py | 23 +++++++++++++++++++++-- encryption_test.py | 28 ++++++++++++++++++++++++++-- 2 files changed, 47 insertions(+), 4 deletions(-) diff --git a/encryption.py b/encryption.py index ce10634cc..04718cc9d 100644 --- a/encryption.py +++ b/encryption.py @@ -97,8 +97,27 @@ def rotate_encryption_key(bucket_name, blob_name, base64_encryption_key, 
base64_new_encryption_key): """Performs a key rotation by re-writing an encrypted blob with a new encryption key.""" - raise NotImplementedError( - 'This is currently not available using the Cloud Client Library.') + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + current_encryption_key = base64.b64decode(base64_encryption_key) + new_encryption_key = base64.b64decode(base64_new_encryption_key) + + # Both source_blob and destination_blob refer to the same storage object, + # but destination_blob has the new encryption key. + source_blob = Blob( + blob_name, bucket, encryption_key=current_encryption_key) + destination_blob = Blob( + blob_name, bucket, encryption_key=new_encryption_key) + + token = None + + while True: + token, bytes_rewritten, total_bytes = destination_blob.rewrite( + source_blob, token=token) + if token is None: + break + + print('Key rotation complete for Blob {}'.format(blob_name)) if __name__ == '__main__': diff --git a/encryption_test.py b/encryption_test.py index 926be643c..aec91e50e 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -16,13 +16,18 @@ import tempfile from google.cloud import storage +from google.cloud.storage import Blob import pytest import encryption + TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=' TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) +TEST_ENCRYPTION_KEY_2 = 'o4OD7SWCaPjfeEGhAY+YCgMdY9UW+OJ8mvfWD9lNtO4=' +TEST_ENCRYPTION_KEY_2_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY_2) + def test_generate_encryption_key(capsys): encryption.generate_encryption_key() @@ -47,9 +52,9 @@ def test_upload_encrypted_blob(cloud_config): def test_blob(cloud_config): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(cloud_config.storage_bucket) - blob = bucket.blob('encryption_test_sigil') + blob = Blob('encryption_test_sigil', + bucket, encryption_key=TEST_ENCRYPTION_KEY_DECODED) content = 'Hello, is it me 
you\'re looking for?' - blob.encryption_key = TEST_ENCRYPTION_KEY_DECODED blob.upload_from_string(content) return blob.name, content @@ -65,3 +70,22 @@ def test_download_blob(test_blob, cloud_config): downloaded_content = dest_file.read().decode('utf-8') assert downloaded_content == test_blob_content + + +def test_rotate_encryption_key(test_blob, cloud_config): + test_blob_name, test_blob_content = test_blob + encryption.rotate_encryption_key( + cloud_config.storage_bucket, + test_blob_name, + TEST_ENCRYPTION_KEY, + TEST_ENCRYPTION_KEY_2) + + with tempfile.NamedTemporaryFile() as dest_file: + encryption.download_encrypted_blob( + cloud_config.storage_bucket, + test_blob_name, + dest_file.name, + TEST_ENCRYPTION_KEY_2) + + downloaded_content = dest_file.read().decode('utf-8') + assert downloaded_content == test_blob_content From 0f5037b94238ba06e97aa20de8160a76aa86c0ae Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 13 Dec 2016 09:54:02 -0800 Subject: [PATCH 020/197] Auto-update dependencies. 
(#715) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 025b3c37a..a12f0fc30 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -google-cloud-storage==0.21.0 +google-cloud-storage==0.22.0 From 003648d166e046002a18e8fdd4188744b4ac415a Mon Sep 17 00:00:00 2001 From: BrandonY Date: Mon, 27 Mar 2017 12:30:45 -0700 Subject: [PATCH 021/197] Adds storage Pub/Sub notification polling tutorial (#875) --- README.rst | 39 ++++++++++++ README.rst.in | 3 + notification_polling.py | 117 +++++++++++++++++++++++++++++++++++ notification_polling_test.py | 47 ++++++++++++++ requirements.txt | 1 + 5 files changed, 207 insertions(+) create mode 100644 notification_polling.py create mode 100644 notification_polling_test.py diff --git a/README.rst b/README.rst index 89f536544..218893be5 100644 --- a/README.rst +++ b/README.rst @@ -232,6 +232,45 @@ To run this sample: -h, --help show this help message and exit +Notification Polling ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python notification_polling.py + + usage: notification_polling.py [-h] [--project PROJECT] subscription + + This application demonstrates how to poll for GCS notifications from a Cloud + Pub/Sub subscription, parse the incoming message, and acknowledge the + successful processing of the message. This application will work with any + subscription configured for pull rather than push notifications. If you do not + already have notifications configured, you may consult the docs at + https://cloud.google.com/storage/docs/reporting-changes or follow the steps + below: 1. Activate the Google Cloud Pub/Sub API, if you have not already done + so. https://console.cloud.google.com/flows/enableapi?apiid=pubsub 2. Create a + Google Cloud Storage bucket: $ gsutil mb gs://testbucket 3. 
Create a Cloud + Pub/Sub topic and publish bucket notifications there: $ gsutil notification + create -f json -t testtopic gs://testbucket 4. Create a subscription for your + new topic: $ gcloud beta pubsub subscriptions create testsubscription + --topic=testtopic 5. Run this program: $ python notification_polling + testsubscription 6. While the program is running, upload and delete some files + in the testbucket bucket (you could use the console or gsutil) and watch as + changes scroll by in the app. + + positional arguments: + subscription The ID of the Pub/Sub subscription + + optional arguments: + -h, --help show this help message and exit + --project PROJECT The project of the subscription, if not in your default + project + + The client library diff --git a/README.rst.in b/README.rst.in index 2a6e37ff7..aa9690abe 100644 --- a/README.rst.in +++ b/README.rst.in @@ -24,5 +24,8 @@ samples: - name: Customer-Supplied Encryption file: encryption.py show_help: true +- name: Notification Polling + file: notification_polling.py + show_help: true cloud_client_library: true diff --git a/notification_polling.py b/notification_polling.py new file mode 100644 index 000000000..481a98e66 --- /dev/null +++ b/notification_polling.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python + +# Copyright 2017 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This application demonstrates how to poll for GCS notifications from a +Cloud Pub/Sub subscription, parse the incoming message, and acknowledge the +successful processing of the message. + +This application will work with any subscription configured for pull rather +than push notifications. If you do not already have notifications configured, +you may consult the docs at +https://cloud.google.com/storage/docs/reporting-changes or follow the steps +below: + +1. Activate the Google Cloud Pub/Sub API, if you have not already done so. + https://console.cloud.google.com/flows/enableapi?apiid=pubsub + +2. Create a Google Cloud Storage bucket: + $ gsutil mb gs://testbucket + +3. Create a Cloud Pub/Sub topic and publish bucket notifications there: + $ gsutil notification create -f json -t testtopic gs://testbucket + +4. Create a subscription for your new topic: + $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic + +5. Run this program: + $ python notification_polling testsubscription + +6. While the program is running, upload and delete some files in the testbucket + bucket (you could use the console or gsutil) and watch as changes scroll by + in the app. 
+""" + +import argparse +import json +import sys + +from google.cloud import pubsub + + +def summarize(message): + # [START parse_message] + data = message.data + attributes = message.attributes + + event_type = attributes['eventType'] + bucket_id = attributes['bucketId'] + object_id = attributes['objectId'] + generation = attributes['objectGeneration'] + description = ( + '\tEvent type: {event_type}\n' + '\tBucket ID: {bucket_id}\n' + '\tObject ID: {object_id}\n' + '\tGeneration: {generation}\n').format( + event_type=event_type, + bucket_id=bucket_id, + object_id=object_id, + generation=generation) + + payload_format = attributes['payloadFormat'] + if payload_format == 'JSON_API_V1': + object_metadata = json.loads(data) + size = object_metadata['size'] + content_type = object_metadata['contentType'] + metageneration = object_metadata['metageneration'] + description += ( + '\tContent type: {content_type}\n' + '\tSize: {object_size}\n' + '\tMetageneration: {metageneration}\n').format( + content_type=content_type, + object_size=size, + metageneration=metageneration) + return description + # [END parse_message] + + +def poll_notifications(subscription_id): + """Polls a Cloud Pub/Sub subscription for new GCS events for display.""" + # [BEGIN poll_notifications] + client = pubsub.Client() + subscription = pubsub.subscription.Subscription( + subscription_id, client=client) + + if not subscription.exists(): + sys.stderr.write('Cannot find subscription {0}\n'.format(sys.argv[1])) + return + + print('Polling for messages. 
Press ctrl+c to exit.') + while True: + pulled = subscription.pull(max_messages=100) + for ack_id, message in pulled: + print('Received message {0}:\n{1}'.format( + message.message_id, summarize(message))) + subscription.acknowledge([ack_id]) + # [END poll_notifications] + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__) + parser.add_argument('subscription', + help='The ID of the Pub/Sub subscription') + args = parser.parse_args() + poll_notifications(args.subscription) diff --git a/notification_polling_test.py b/notification_polling_test.py new file mode 100644 index 000000000..e21e5b60c --- /dev/null +++ b/notification_polling_test.py @@ -0,0 +1,47 @@ +# Copyright 2017 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from google.cloud.pubsub.message import Message + +from notification_polling import summarize + + +MESSAGE_ID = 12345 + + +def test_parse_json_message(): + attributes = { + 'eventType': 'OBJECT_FINALIZE', + 'bucketId': 'mybucket', + 'objectId': 'myobject', + 'objectGeneration': 1234567, + 'resource': 'projects/_/buckets/mybucket/objects/myobject#1234567', + 'notificationConfig': ('projects/_/buckets/mybucket/' + 'notificationConfigs/5'), + 'payloadFormat': 'JSON_API_V1'} + data = ('{' + ' "size": 12345,' + ' "contentType": "text/html",' + ' "metageneration": 1' + '}') + message = Message(data, MESSAGE_ID, attributes=attributes) + assert summarize(message) == ( + '\tEvent type: OBJECT_FINALIZE\n' + '\tBucket ID: mybucket\n' + '\tObject ID: myobject\n' + '\tGeneration: 1234567\n' + '\tContent type: text/html\n' + '\tSize: 12345\n' + '\tMetageneration: 1\n') diff --git a/requirements.txt b/requirements.txt index a12f0fc30..7b8c63145 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ google-cloud-storage==0.22.0 +google-cloud-pubsub==0.22.0 From b17b759aa0d9ae31551c5e209796934b49e5e15e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 4 Apr 2017 16:08:30 -0700 Subject: [PATCH 022/197] Remove cloud config fixture (#887) * Remove cloud config fixture * Fix client secrets * Fix bigtable instance --- acl_test.py | 51 ++++++++++++++++++++++++---------------------- encryption_test.py | 20 ++++++++++-------- snippets_test.py | 47 ++++++++++++++++++++++-------------------- 3 files changed, 63 insertions(+), 55 deletions(-) diff --git a/acl_test.py b/acl_test.py index 3197b4ea0..0c9fd1a50 100644 --- a/acl_test.py +++ b/acl_test.py @@ -12,21 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from google.cloud import storage import google.cloud.storage.acl import pytest import acl +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] # Typically we'd use a @example.com address, but GCS requires a real Google # account. TEST_EMAIL = 'jonwayne@google.com' @pytest.fixture -def test_bucket(cloud_config): +def test_bucket(): """Yields a bucket that resets its acl after the test completes.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) acl = google.cloud.storage.acl.BucketACL(bucket) object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket) acl.reload() @@ -37,9 +40,9 @@ def test_bucket(cloud_config): @pytest.fixture -def test_blob(cloud_config): +def test_blob(): """Yields a blob that resets its acl after the test completes.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) blob = bucket.blob('storage_acl_test_sigil') blob.upload_from_string('Hello, is it me you\'re looking for?') acl = google.cloud.storage.acl.ObjectACL(blob) @@ -48,88 +51,88 @@ def test_blob(cloud_config): acl.save() -def test_print_bucket_acl(cloud_config, capsys): - acl.print_bucket_acl(cloud_config.storage_bucket) +def test_print_bucket_acl(capsys): + acl.print_bucket_acl(BUCKET) out, _ = capsys.readouterr() assert out -def test_print_bucket_acl_for_user(test_bucket, cloud_config, capsys): +def test_print_bucket_acl_for_user(test_bucket, capsys): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.print_bucket_acl_for_user(cloud_config.storage_bucket, TEST_EMAIL) + acl.print_bucket_acl_for_user(BUCKET, TEST_EMAIL) out, _ = capsys.readouterr() assert 'OWNER' in out -def test_add_bucket_owner(test_bucket, cloud_config): - acl.add_bucket_owner(cloud_config.storage_bucket, TEST_EMAIL) +def test_add_bucket_owner(test_bucket): + acl.add_bucket_owner(BUCKET, TEST_EMAIL) test_bucket.acl.reload() assert 'OWNER' in 
test_bucket.acl.user(TEST_EMAIL).get_roles() -def test_remove_bucket_owner(test_bucket, cloud_config): +def test_remove_bucket_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_owner(cloud_config.storage_bucket, TEST_EMAIL) + acl.remove_bucket_owner(BUCKET, TEST_EMAIL) test_bucket.acl.reload() assert 'OWNER' not in test_bucket.acl.user(TEST_EMAIL).get_roles() -def test_add_bucket_default_owner(test_bucket, cloud_config): - acl.add_bucket_default_owner(cloud_config.storage_bucket, TEST_EMAIL) +def test_add_bucket_default_owner(test_bucket): + acl.add_bucket_default_owner(BUCKET, TEST_EMAIL) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() assert 'OWNER' in roles -def test_remove_bucket_default_owner(test_bucket, cloud_config): +def test_remove_bucket_default_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_default_owner(cloud_config.storage_bucket, TEST_EMAIL) + acl.remove_bucket_default_owner(BUCKET, TEST_EMAIL) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() assert 'OWNER' not in roles -def test_print_blob_acl(test_blob, cloud_config, capsys): - acl.print_blob_acl(cloud_config.storage_bucket, test_blob.name) +def test_print_blob_acl(test_blob, capsys): + acl.print_blob_acl(BUCKET, test_blob.name) out, _ = capsys.readouterr() assert out -def test_print_blob_acl_for_user(test_blob, cloud_config, capsys): +def test_print_blob_acl_for_user(test_blob, capsys): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() acl.print_blob_acl_for_user( - cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + BUCKET, test_blob.name, TEST_EMAIL) out, _ = capsys.readouterr() assert 'OWNER' in out -def test_add_blob_owner(test_blob, cloud_config): - acl.add_blob_owner(cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) +def 
test_add_blob_owner(test_blob): + acl.add_blob_owner(BUCKET, test_blob.name, TEST_EMAIL) test_blob.acl.reload() assert 'OWNER' in test_blob.acl.user(TEST_EMAIL).get_roles() -def test_remove_blob_owner(test_blob, cloud_config): +def test_remove_blob_owner(test_blob): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() acl.remove_blob_owner( - cloud_config.storage_bucket, test_blob.name, TEST_EMAIL) + BUCKET, test_blob.name, TEST_EMAIL) test_blob.acl.reload() assert 'OWNER' not in test_blob.acl.user(TEST_EMAIL).get_roles() diff --git a/encryption_test.py b/encryption_test.py index aec91e50e..4db6e6cb0 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -13,6 +13,7 @@ # limitations under the License. import base64 +import os import tempfile from google.cloud import storage @@ -21,6 +22,7 @@ import encryption +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=' TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) @@ -37,21 +39,21 @@ def test_generate_encryption_key(capsys): assert len(key) == 32, 'Returned key should be 32 bytes' -def test_upload_encrypted_blob(cloud_config): +def test_upload_encrypted_blob(): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b'test') encryption.upload_encrypted_blob( - cloud_config.storage_bucket, + BUCKET, source_file.name, 'test_encrypted_upload_blob', TEST_ENCRYPTION_KEY) @pytest.fixture -def test_blob(cloud_config): +def test_blob(): """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) blob = Blob('encryption_test_sigil', bucket, encryption_key=TEST_ENCRYPTION_KEY_DECODED) content = 'Hello, is it me you\'re looking for?' 
@@ -59,11 +61,11 @@ def test_blob(cloud_config): return blob.name, content -def test_download_blob(test_blob, cloud_config): +def test_download_blob(test_blob): test_blob_name, test_blob_content = test_blob with tempfile.NamedTemporaryFile() as dest_file: encryption.download_encrypted_blob( - cloud_config.storage_bucket, + BUCKET, test_blob_name, dest_file.name, TEST_ENCRYPTION_KEY) @@ -72,17 +74,17 @@ def test_download_blob(test_blob, cloud_config): assert downloaded_content == test_blob_content -def test_rotate_encryption_key(test_blob, cloud_config): +def test_rotate_encryption_key(test_blob): test_blob_name, test_blob_content = test_blob encryption.rotate_encryption_key( - cloud_config.storage_bucket, + BUCKET, test_blob_name, TEST_ENCRYPTION_KEY, TEST_ENCRYPTION_KEY_2) with tempfile.NamedTemporaryFile() as dest_file: encryption.download_encrypted_blob( - cloud_config.storage_bucket, + BUCKET, test_blob_name, dest_file.name, TEST_ENCRYPTION_KEY_2) diff --git a/snippets_test.py b/snippets_test.py index a657894be..a44e8ebf1 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import tempfile from google.cloud import storage @@ -21,74 +22,76 @@ import snippets +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] + @pytest.fixture -def test_blob(cloud_config): +def test_blob(): """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(cloud_config.storage_bucket) + bucket = storage.Client().bucket(BUCKET) blob = bucket.blob('storage_snippets_test_sigil') blob.upload_from_string('Hello, is it me you\'re looking for?') return blob -def test_list_blobs(test_blob, cloud_config, capsys): - snippets.list_blobs(cloud_config.storage_bucket) +def test_list_blobs(test_blob, capsys): + snippets.list_blobs(BUCKET) out, _ = capsys.readouterr() assert test_blob.name in out -def test_list_blobs_with_prefix(test_blob, cloud_config, capsys): +def test_list_blobs_with_prefix(test_blob, capsys): snippets.list_blobs_with_prefix( - cloud_config.storage_bucket, + BUCKET, prefix='storage_snippets') out, _ = capsys.readouterr() assert test_blob.name in out -def test_upload_blob(cloud_config): +def test_upload_blob(): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b'test') snippets.upload_blob( - cloud_config.storage_bucket, + BUCKET, source_file.name, 'test_upload_blob') -def test_download_blob(test_blob, cloud_config): +def test_download_blob(test_blob): with tempfile.NamedTemporaryFile() as dest_file: snippets.download_blob( - cloud_config.storage_bucket, + BUCKET, test_blob.name, dest_file.name) assert dest_file.read() -def test_blob_metadata(test_blob, cloud_config, capsys): - snippets.blob_metadata(cloud_config.storage_bucket, test_blob.name) +def test_blob_metadata(test_blob, capsys): + snippets.blob_metadata(BUCKET, test_blob.name) out, _ = capsys.readouterr() assert test_blob.name in out -def test_delete_blob(test_blob, cloud_config): +def test_delete_blob(test_blob): snippets.delete_blob( - cloud_config.storage_bucket, + BUCKET, test_blob.name) -def test_make_blob_public(test_blob, cloud_config): 
+def test_make_blob_public(test_blob): snippets.make_blob_public( - cloud_config.storage_bucket, + BUCKET, test_blob.name) r = requests.get(test_blob.public_url) assert r.text == 'Hello, is it me you\'re looking for?' -def test_generate_signed_url(test_blob, cloud_config, capsys): +def test_generate_signed_url(test_blob, capsys): snippets.generate_signed_url( - cloud_config.storage_bucket, + BUCKET, test_blob.name) out, _ = capsys.readouterr() @@ -98,8 +101,8 @@ def test_generate_signed_url(test_blob, cloud_config, capsys): assert r.text == 'Hello, is it me you\'re looking for?' -def test_rename_blob(test_blob, cloud_config): - bucket = storage.Client().bucket(cloud_config.storage_bucket) +def test_rename_blob(test_blob): + bucket = storage.Client().bucket(BUCKET) try: bucket.delete_blob('test_rename_blob') @@ -112,8 +115,8 @@ def test_rename_blob(test_blob, cloud_config): assert bucket.get_blob(test_blob.name) is None -def test_copy_blob(test_blob, cloud_config): - bucket = storage.Client().bucket(cloud_config.storage_bucket) +def test_copy_blob(test_blob): + bucket = storage.Client().bucket(BUCKET) try: bucket.delete_blob('test_copy_blob') From 21f7db6fece929498393784c0638b689b3d1cc03 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 24 Apr 2017 13:12:09 -0700 Subject: [PATCH 023/197] Auto-update dependencies. (#914) * Auto-update dependencies. 
* xfail the error reporting test * Fix lint --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 7b8c63145..6a14b5cdf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==0.22.0 -google-cloud-pubsub==0.22.0 +google-cloud-storage==1.0.0 +google-cloud-pubsub==0.24.0 From fba8c2d4f5984090130ade9bb6b66ef20894970d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Apr 2017 09:54:41 -0700 Subject: [PATCH 024/197] Re-generate all readmes --- README.rst | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index 218893be5..070cf263a 100644 --- a/README.rst +++ b/README.rst @@ -26,7 +26,7 @@ authentication: .. code-block:: bash - gcloud beta auth application-default login + gcloud auth application-default login #. When running on App Engine or Compute Engine, credentials are already @@ -243,7 +243,7 @@ To run this sample: $ python notification_polling.py - usage: notification_polling.py [-h] [--project PROJECT] subscription + usage: notification_polling.py [-h] subscription This application demonstrates how to poll for GCS notifications from a Cloud Pub/Sub subscription, parse the incoming message, and acknowledge the @@ -263,12 +263,10 @@ To run this sample: changes scroll by in the app. 
positional arguments: - subscription The ID of the Pub/Sub subscription + subscription The ID of the Pub/Sub subscription optional arguments: - -h, --help show this help message and exit - --project PROJECT The project of the subscription, if not in your default - project + -h, --help show this help message and exit From afd5ed6e5c4f96150f89b938215412c4fd9269a8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 28 Apr 2017 12:17:50 -0700 Subject: [PATCH 025/197] Add bucket-level IAM samples (#919) * Add bucket-level IAM samples * Address review comments --- iam.py | 95 ++++++++++++++++++++++++++++++++++++++++++++++++ iam_test.py | 45 +++++++++++++++++++++++ requirements.txt | 4 +- 3 files changed, 142 insertions(+), 2 deletions(-) create mode 100644 iam.py create mode 100644 iam_test.py diff --git a/iam.py b/iam.py new file mode 100644 index 000000000..ba20bc1dd --- /dev/null +++ b/iam.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python + +# Copyright 2017 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to get and set IAM policies on Google +Cloud Storage buckets. + +For more information, see the documentation at +https://cloud.google.com/storage/docs/access-control/using-iam-permissions. 
+""" + +import argparse + +from google.cloud import storage + + +def view_bucket_iam_members(bucket_name): + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy() + + for role in policy: + members = policy[role] + print('Role: {}, Members: {}'.format(role, members)) + + +def add_bucket_iam_member(bucket_name, role, member): + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy() + + policy[role].add(member) + + bucket.set_iam_policy(policy) + + print('Added {} with role {} to {}.'.format( + member, role, bucket_name)) + + +def remove_bucket_iam_member(bucket_name, role, member): + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy() + + policy[role].discard(member) + + bucket.set_iam_policy(policy) + + print('Removed {} with role {} from {}.'.format( + member, role, bucket_name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('bucket_name', help='Your Cloud Storage bucket name.') + subparsers = parser.add_subparsers(dest='command') + + subparsers.add_parser( + 'view-bucket-iam-members', help=view_bucket_iam_members.__doc__) + + add_member_parser = subparsers.add_parser( + 'add-bucket-iam-member', help=add_bucket_iam_member.__doc__) + add_member_parser.add_argument('role') + add_member_parser.add_argument('member') + + remove_member_parser = subparsers.add_parser( + 'remove-bucket-iam-member', help=remove_bucket_iam_member.__doc__) + remove_member_parser.add_argument('role') + remove_member_parser.add_argument('member') + + args = parser.parse_args() + + if args.command == 'view-bucket-iam-members': + view_bucket_iam_members(args.bucket_name) + elif args.command == 'add-bucket-iam-member': + add_bucket_iam_member(args.bucket_name, args.role, args.member) + 
elif args.command == 'remove-bucket-iam-member': + remove_bucket_iam_member(args.bucket_name, args.role, args.member) diff --git a/iam_test.py b/iam_test.py new file mode 100644 index 000000000..0c823afa0 --- /dev/null +++ b/iam_test.py @@ -0,0 +1,45 @@ +# Copyright 2017 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from google.cloud import storage +import pytest + +import iam + +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +MEMBER = 'group:dpebot@google.com' +ROLE = 'roles/storage.legacyBucketReader' + + +@pytest.fixture +def bucket(): + yield storage.Client().bucket(BUCKET) + + +def test_view_bucket_iam_members(): + iam.view_bucket_iam_members(BUCKET) + + +def test_add_bucket_iam_member(bucket): + iam.add_bucket_iam_member( + BUCKET, ROLE, MEMBER) + assert MEMBER in bucket.get_iam_policy()[ROLE] + + +def test_remove_bucket_iam_member(bucket): + iam.remove_bucket_iam_member( + BUCKET, ROLE, MEMBER) + assert MEMBER not in bucket.get_iam_policy()[ROLE] diff --git a/requirements.txt b/requirements.txt index 6a14b5cdf..3e0a3ba4b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.0.0 -google-cloud-pubsub==0.24.0 +google-cloud-storage==1.1.0 +google-cloud-pubsub==0.25.0 From be5c9291ad0625bfc284c2c6d76204da9707bee9 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 2 May 2017 10:58:55 -0700 Subject: [PATCH 026/197] Auto-update dependencies. 
(#927) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3e0a3ba4b..340884fcb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.1.0 +google-cloud-storage==1.1.1 google-cloud-pubsub==0.25.0 From e7b7ccd3e4a97bb173e9369ea46029dcdec1d343 Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Tue, 23 May 2017 17:01:25 -0700 Subject: [PATCH 027/197] Fix README rst links (#962) * Fix README rst links * Update all READMEs --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 070cf263a..77722f160 100644 --- a/README.rst +++ b/README.rst @@ -278,11 +278,11 @@ This sample uses the `Google Cloud Client Library for Python`_. You can read the documentation for more details on API usage and use GitHub to `browse the source`_ and `report issues`_. -.. Google Cloud Client Library for Python: +.. _Google Cloud Client Library for Python: https://googlecloudplatform.github.io/google-cloud-python/ -.. browse the source: +.. _browse the source: https://github.com/GoogleCloudPlatform/google-cloud-python -.. report issues: +.. _report issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues From 299847eac184f4a8c77ab8b8b492345d2070cde7 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 27 Jun 2017 12:41:15 -0700 Subject: [PATCH 028/197] Auto-update dependencies. (#1004) * Auto-update dependencies. 
* Fix natural language samples * Fix pubsub iam samples * Fix language samples * Fix bigquery samples --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 340884fcb..296b75b78 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.1.1 -google-cloud-pubsub==0.25.0 +google-cloud-storage==1.2.0 +google-cloud-pubsub==0.26.0 From 8e71bd5b47a3a1f6727ecb6a094fa3af0626651c Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 1 Aug 2017 12:14:37 -0700 Subject: [PATCH 029/197] Add bucket label samples (#1045) --- snippets.py | 50 ++++++++++++++++++++++++++++++++++++++++++++++++ snippets_test.py | 21 ++++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/snippets.py b/snippets.py index c6c69d6ae..3d490891a 100644 --- a/snippets.py +++ b/snippets.py @@ -23,6 +23,7 @@ import argparse import datetime +import pprint from google.cloud import storage @@ -42,6 +43,45 @@ def delete_bucket(bucket_name): print('Bucket {} deleted'.format(bucket.name)) +def get_bucket_labels(bucket_name): + """Prints out a bucket's labels.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + labels = bucket.labels + pprint.pprint(labels) + + +def add_bucket_label(bucket_name): + """Add a label to a bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + labels = bucket.labels + labels['example'] = 'label' + bucket.labels = labels + bucket.patch() + + print('Updated labels on {}.'.format(bucket.name)) + pprint.pprint(bucket.labels) + + +def remove_bucket_label(bucket_name): + """Remove a label from a bucket.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + labels = bucket.labels + + if 'example' in labels: + del labels['example'] + + bucket.labels = labels + bucket.patch() + + print('Updated labels on {}.'.format(bucket.name)) + 
pprint.pprint(bucket.labels) + + def list_blobs(bucket_name): """Lists all the blobs in the bucket.""" storage_client = storage.Client() @@ -222,6 +262,10 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): subparsers = parser.add_subparsers(dest='command') subparsers.add_parser('create-bucket', help=create_bucket.__doc__) subparsers.add_parser('delete-bucket', help=delete_bucket.__doc__) + subparsers.add_parser('get-bucket-labels', help=get_bucket_labels.__doc__) + subparsers.add_parser('add-bucket-label', help=add_bucket_label.__doc__) + subparsers.add_parser( + 'remove-bucket-label', help=remove_bucket_label.__doc__) subparsers.add_parser('list', help=list_blobs.__doc__) list_with_prefix_parser = subparsers.add_parser( @@ -268,6 +312,12 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): create_bucket(args.bucket_name) elif args.command == 'delete-bucket': delete_bucket(args.bucket_name) + if args.command == 'get-bucket-labels': + get_bucket_labels(args.bucket_name) + if args.command == 'add-bucket-label': + add_bucket_label(args.bucket_name) + if args.command == 'remove-bucket-label': + remove_bucket_label(args.bucket_name) elif args.command == 'list': list_blobs(args.bucket_name) elif args.command == 'list-with-prefix': diff --git a/snippets_test.py b/snippets_test.py index a44e8ebf1..bf5ecb8e1 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -25,6 +25,27 @@ BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +def test_get_bucket_labels(): + snippets.get_bucket_labels(BUCKET) + + +def test_add_bucket_label(capsys): + snippets.add_bucket_label(BUCKET) + out, _ = capsys.readouterr() + assert 'example' in out + + +@pytest.mark.xfail( + reason=( + 'https://github.com/GoogleCloudPlatform' + '/google-cloud-python/issues/3711')) +def test_remove_bucket_label(capsys): + snippets.add_bucket_label(BUCKET) + snippets.remove_bucket_label(BUCKET) + out, _ = capsys.readouterr() + assert '{}' in out + + @pytest.fixture def 
test_blob(): """Provides a pre-existing blob in the test bucket.""" From 2dc31067ebae3e6d2809eb8cb5cceca9dede0ac2 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 7 Aug 2017 10:04:55 -0700 Subject: [PATCH 030/197] Auto-update dependencies. (#1055) * Auto-update dependencies. * Explicitly use latest bigtable client Change-Id: Id71e9e768f020730e4ca9514a0d7ebaa794e7d9e * Revert language update for now Change-Id: I8867f154e9a5aae00d0047c9caf880e5e8f50c53 * Remove pdb. smh Change-Id: I5ff905fadc026eebbcd45512d4e76e003e3b2b43 --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 296b75b78..6a12236c3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.2.0 -google-cloud-pubsub==0.26.0 +google-cloud-storage==1.3.0 +google-cloud-pubsub==0.27.0 From 453206ead138108b5dceb50b163d2c5abd23393d Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 8 Aug 2017 08:51:01 -0700 Subject: [PATCH 031/197] Auto-update dependencies. (#1057) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6a12236c3..6ea7a6a1a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.3.0 +google-cloud-storage==1.3.1 google-cloud-pubsub==0.27.0 From dc7f8c7e6fe6c20ff1f69fe9d7bb8d070d9d07c5 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Wed, 16 Aug 2017 09:34:13 -0700 Subject: [PATCH 032/197] Auto-update dependencies. 
(#1073) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6ea7a6a1a..d31cb7cb2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.3.1 +google-cloud-storage==1.3.2 google-cloud-pubsub==0.27.0 From 201867dbb8493a68e8f26932332cec671bb181d8 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 29 Aug 2017 16:53:02 -0700 Subject: [PATCH 033/197] Auto-update dependencies. (#1093) * Auto-update dependencies. * Fix storage notification poll sample Change-Id: I6afbc79d15e050531555e4c8e51066996717a0f3 * Fix spanner samples Change-Id: I40069222c60d57e8f3d3878167591af9130895cb * Drop coverage because it's not useful Change-Id: Iae399a7083d7866c3c7b9162d0de244fbff8b522 * Try again to fix flaky logging test Change-Id: I6225c074701970c17c426677ef1935bb6d7e36b4 --- notification_polling.py | 32 ++++++++++++++++---------------- notification_polling_test.py | 18 +++++++++++------- requirements.txt | 4 ++-- 3 files changed, 29 insertions(+), 25 deletions(-) diff --git a/notification_polling.py b/notification_polling.py index 481a98e66..0580eaea5 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -46,14 +46,14 @@ import argparse import json -import sys +import time -from google.cloud import pubsub +from google.cloud import pubsub_v1 def summarize(message): # [START parse_message] - data = message.data + data = message.data.decode('utf-8') attributes = message.attributes event_type = attributes['eventType'] @@ -87,24 +87,24 @@ def summarize(message): # [END parse_message] -def poll_notifications(subscription_id): +def poll_notifications(project, subscription_name): """Polls a Cloud Pub/Sub subscription for new GCS events for display.""" # [BEGIN poll_notifications] - client = pubsub.Client() - subscription = pubsub.subscription.Subscription( - subscription_id, client=client) + subscriber = pubsub_v1.SubscriberClient() + subscription_path = 
subscriber.subscription_path( + project, subscription_name) - if not subscription.exists(): - sys.stderr.write('Cannot find subscription {0}\n'.format(sys.argv[1])) - return + def callback(message): + print('Received message:\n{1}'.format(summarize(message))) + message.ack() - print('Polling for messages. Press ctrl+c to exit.') + subscriber.subscribe(subscription_path, callback=callback) + + # The subscriber is non-blocking, so we must keep the main thread from + # exiting to allow it to process messages in the background. + print('Listening for messages on {}'.format(subscription_path)) while True: - pulled = subscription.pull(max_messages=100) - for ack_id, message in pulled: - print('Received message {0}:\n{1}'.format( - message.message_id, summarize(message))) - subscription.acknowledge([ack_id]) + time.sleep(60) # [END poll_notifications] diff --git a/notification_polling_test.py b/notification_polling_test.py index e21e5b60c..b816bd9df 100644 --- a/notification_polling_test.py +++ b/notification_polling_test.py @@ -13,7 +13,8 @@ # limitations under the License. 
-from google.cloud.pubsub.message import Message +from google.cloud.pubsub_v1.subscriber.message import Message +import mock from notification_polling import summarize @@ -31,12 +32,15 @@ def test_parse_json_message(): 'notificationConfig': ('projects/_/buckets/mybucket/' 'notificationConfigs/5'), 'payloadFormat': 'JSON_API_V1'} - data = ('{' - ' "size": 12345,' - ' "contentType": "text/html",' - ' "metageneration": 1' - '}') - message = Message(data, MESSAGE_ID, attributes=attributes) + data = (b'{' + b' "size": 12345,' + b' "contentType": "text/html",' + b' "metageneration": 1' + b'}') + message = Message( + mock.Mock(data=data, attributes=attributes), + MESSAGE_ID, + mock.Mock()) assert summarize(message) == ( '\tEvent type: OBJECT_FINALIZE\n' '\tBucket ID: mybucket\n' diff --git a/requirements.txt b/requirements.txt index d31cb7cb2..3769a9db7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.3.2 -google-cloud-pubsub==0.27.0 +google-cloud-storage==1.4.0 +google-cloud-pubsub==0.28.2 From 5f81798092574b569d58a1d43cf4d7aec948bd32 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 12 Sep 2017 12:26:44 -0700 Subject: [PATCH 034/197] Auto-update dependencies. 
(#1097) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3769a9db7..11a282e7b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.4.0 -google-cloud-pubsub==0.28.2 +google-cloud-pubsub==0.28.3 From 782de8732680562a46f78bc61288d15e770f49c3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 18 Sep 2017 11:04:05 -0700 Subject: [PATCH 035/197] Update all generated readme auth instructions (#1121) Change-Id: I03b5eaef8b17ac3dc3c0339fd2c7447bd3e11bd2 --- README.rst | 40 +++++++++++----------------------------- 1 file changed, 11 insertions(+), 29 deletions(-) diff --git a/README.rst b/README.rst index 77722f160..f88c51766 100644 --- a/README.rst +++ b/README.rst @@ -17,34 +17,12 @@ Setup Authentication ++++++++++++++ -Authentication is typically done through `Application Default Credentials`_, -which means you do not have to change the code to authenticate as long as -your environment has credentials. You have a few options for setting up -authentication: +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. -#. When running locally, use the `Google Cloud SDK`_ - - .. code-block:: bash - - gcloud auth application-default login - - -#. When running on App Engine or Compute Engine, credentials are already - set-up. However, you may need to configure your Compute Engine instance - with `additional scopes`_. - -#. You can create a `Service Account key file`_. This file can be used to - authenticate to Google Cloud Platform services from any environment. To use - the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to - the path to the key file, for example: - - .. code-block:: bash - - export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json - -.. 
_Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using -.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started Install Dependencies ++++++++++++++++++++ @@ -95,7 +73,7 @@ To run this sample: usage: snippets.py [-h] bucket_name - {create-bucket,delete-bucket,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} + {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} ... This application demonstrates how to perform basic operations on blobs @@ -106,9 +84,13 @@ To run this sample: positional arguments: bucket_name Your cloud storage bucket. - {create-bucket,delete-bucket,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} + {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} create-bucket Creates a new bucket. delete-bucket Deletes a bucket. The bucket must be empty. + get-bucket-labels Prints out a bucket's labels. + add-bucket-label Add a label to a bucket. + remove-bucket-label + Remove a label from a bucket. list Lists all the blobs in the bucket. list-with-prefix Lists all the blobs in the bucket that begin with the prefix. This can be used to list all blobs in a From 4b7bc6fc1e52b6650479fca1893e9070f03acd76 Mon Sep 17 00:00:00 2001 From: BrandonY Date: Fri, 22 Sep 2017 13:46:42 -0700 Subject: [PATCH 036/197] Fix TypeError when running Storage notification polling exmaple. 
(#1135) * Adds storage Pub/Sub notification polling tutorial * Fix formatting and add some tests * Auto-generate README * Simplify implementation, remove classes * Simplified example, removed de-duping * regenerate README * Remove explicit project parameter. * Fix notification TypeError on start. * Fix linter error. * Fix ordered list ordinals. * Rerun nox readmegen. --- README.rst | 49 +++++++++++++++++++++++++++-------------- notification_polling.py | 26 ++++++++++++++-------- 2 files changed, 50 insertions(+), 25 deletions(-) diff --git a/README.rst b/README.rst index f88c51766..4d9e0a152 100644 --- a/README.rst +++ b/README.rst @@ -225,26 +225,43 @@ To run this sample: $ python notification_polling.py - usage: notification_polling.py [-h] subscription + usage: notification_polling.py [-h] project subscription - This application demonstrates how to poll for GCS notifications from a Cloud - Pub/Sub subscription, parse the incoming message, and acknowledge the - successful processing of the message. This application will work with any - subscription configured for pull rather than push notifications. If you do not - already have notifications configured, you may consult the docs at + This application demonstrates how to poll for GCS notifications from a + Cloud Pub/Sub subscription, parse the incoming message, and acknowledge the + successful processing of the message. + + This application will work with any subscription configured for pull rather + than push notifications. If you do not already have notifications configured, + you may consult the docs at https://cloud.google.com/storage/docs/reporting-changes or follow the steps - below: 1. Activate the Google Cloud Pub/Sub API, if you have not already done - so. https://console.cloud.google.com/flows/enableapi?apiid=pubsub 2. Create a - Google Cloud Storage bucket: $ gsutil mb gs://testbucket 3. 
Create a Cloud - Pub/Sub topic and publish bucket notifications there: $ gsutil notification - create -f json -t testtopic gs://testbucket 4. Create a subscription for your - new topic: $ gcloud beta pubsub subscriptions create testsubscription - --topic=testtopic 5. Run this program: $ python notification_polling - testsubscription 6. While the program is running, upload and delete some files - in the testbucket bucket (you could use the console or gsutil) and watch as - changes scroll by in the app. + below: + + 1. First, follow the common setup steps for these snippets, specically + configuring auth and installing dependencies. See the README's "Setup" + section. + + 2. Activate the Google Cloud Pub/Sub API, if you have not already done so. + https://console.cloud.google.com/flows/enableapi?apiid=pubsub + + 3. Create a Google Cloud Storage bucket: + $ gsutil mb gs://testbucket + + 4. Create a Cloud Pub/Sub topic and publish bucket notifications there: + $ gsutil notification create -f json -t testtopic gs://testbucket + + 5. Create a subscription for your new topic: + $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic + + 6. Run this program: + $ python notification_polling my-project-id testsubscription + + 7. While the program is running, upload and delete some files in the testbucket + bucket (you could use the console or gsutil) and watch as changes scroll by + in the app. positional arguments: + project The ID of the project that owns the subscription subscription The ID of the Pub/Sub subscription optional arguments: diff --git a/notification_polling.py b/notification_polling.py index 0580eaea5..73b921de4 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -24,22 +24,26 @@ https://cloud.google.com/storage/docs/reporting-changes or follow the steps below: -1. Activate the Google Cloud Pub/Sub API, if you have not already done so. +1. 
First, follow the common setup steps for these snippets, specically + configuring auth and installing dependencies. See the README's "Setup" + section. + +2. Activate the Google Cloud Pub/Sub API, if you have not already done so. https://console.cloud.google.com/flows/enableapi?apiid=pubsub -2. Create a Google Cloud Storage bucket: +3. Create a Google Cloud Storage bucket: $ gsutil mb gs://testbucket -3. Create a Cloud Pub/Sub topic and publish bucket notifications there: +4. Create a Cloud Pub/Sub topic and publish bucket notifications there: $ gsutil notification create -f json -t testtopic gs://testbucket -4. Create a subscription for your new topic: +5. Create a subscription for your new topic: $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic -5. Run this program: - $ python notification_polling testsubscription +6. Run this program: + $ python notification_polling my-project-id testsubscription -6. While the program is running, upload and delete some files in the testbucket +7. While the program is running, upload and delete some files in the testbucket bucket (you could use the console or gsutil) and watch as changes scroll by in the app. 
""" @@ -110,8 +114,12 @@ def callback(message): if __name__ == '__main__': parser = argparse.ArgumentParser( - description=__doc__) + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument( + 'project', + help='The ID of the project that owns the subscription') parser.add_argument('subscription', help='The ID of the Pub/Sub subscription') args = parser.parse_args() - poll_notifications(args.subscription) + poll_notifications(args.project, args.subscription) From 077282f73a34893b5e8e0c58d3cc78c3963efd5c Mon Sep 17 00:00:00 2001 From: BrandonY Date: Wed, 27 Sep 2017 12:13:31 -0700 Subject: [PATCH 037/197] Add support for overwrite attributes (#1142) * Add support for overwrite attributes, bug fixes * Lint fix for overwrite line * Switch variable to snake_case --- notification_polling.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/notification_polling.py b/notification_polling.py index 73b921de4..94d249c94 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -41,7 +41,7 @@ $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic 6. Run this program: - $ python notification_polling my-project-id testsubscription + $ python notification_polling.py my-project-id testsubscription 7. 
While the program is running, upload and delete some files in the testbucket bucket (you could use the console or gsutil) and watch as changes scroll by @@ -64,6 +64,8 @@ def summarize(message): bucket_id = attributes['bucketId'] object_id = attributes['objectId'] generation = attributes['objectGeneration'] + overwrote_generation = attributes['overwroteGeneration'] + overwritten_by_generation = attributes['overwrittenByGeneration'] description = ( '\tEvent type: {event_type}\n' '\tBucket ID: {bucket_id}\n' @@ -74,6 +76,12 @@ def summarize(message): object_id=object_id, generation=generation) + if overwrote_generation: + description += '\tOverwrote generation: %s\n' % overwrote_generation + if overwritten_by_generation: + description += '\tOverwritten by generation: %s\n' % ( + overwritten_by_generation) + payload_format = attributes['payloadFormat'] if payload_format == 'JSON_API_V1': object_metadata = json.loads(data) @@ -99,7 +107,7 @@ def poll_notifications(project, subscription_name): project, subscription_name) def callback(message): - print('Received message:\n{1}'.format(summarize(message))) + print('Received message:\n{}'.format(summarize(message))) message.ack() subscriber.subscribe(subscription_path, callback=callback) From e4c0cda61e5a3e0ba7c2ad52ca49ef6312d7e16e Mon Sep 17 00:00:00 2001 From: BrandonY Date: Wed, 27 Sep 2017 13:07:33 -0700 Subject: [PATCH 038/197] Handle case where attribute not set (#1143) --- notification_polling.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/notification_polling.py b/notification_polling.py index 94d249c94..dde3a90a2 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -64,8 +64,6 @@ def summarize(message): bucket_id = attributes['bucketId'] object_id = attributes['objectId'] generation = attributes['objectGeneration'] - overwrote_generation = attributes['overwroteGeneration'] - overwritten_by_generation = attributes['overwrittenByGeneration'] description = ( '\tEvent 
type: {event_type}\n' '\tBucket ID: {bucket_id}\n' @@ -76,11 +74,12 @@ def summarize(message): object_id=object_id, generation=generation) - if overwrote_generation: - description += '\tOverwrote generation: %s\n' % overwrote_generation - if overwritten_by_generation: + if 'overwroteGeneration' in attributes: + description += '\tOverwrote generation: %s\n' % ( + attributes['overwroteGeneration']) + if 'overwrittenByGeneration' in attributes: description += '\tOverwritten by generation: %s\n' % ( - overwritten_by_generation) + attributes['ovewrittenByGeneration']) payload_format = attributes['payloadFormat'] if payload_format == 'JSON_API_V1': From bb637d6b357707904c0d6a8dc7cd37e3c496c960 Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Thu, 12 Oct 2017 10:16:11 -0700 Subject: [PATCH 039/197] Added Link to Python Setup Guide (#1158) * Update Readme.rst to add Python setup guide As requested in b/64770713. This sample is linked in documentation https://cloud.google.com/bigtable/docs/scaling, and it would make more sense to update the guide here than in the documentation. * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update install_deps.tmpl.rst * Updated readmegen scripts and re-generated related README files * Fixed the lint error --- README.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 4d9e0a152..471df76a7 100644 --- a/README.rst +++ b/README.rst @@ -27,7 +27,10 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ -#. Install `pip`_ and `virtualenv`_ if you do not already have them. +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup #. Create a virtualenv. 
Samples are compatible with Python 2.7 and 3.4+. @@ -254,7 +257,7 @@ To run this sample: $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic 6. Run this program: - $ python notification_polling my-project-id testsubscription + $ python notification_polling.py my-project-id testsubscription 7. While the program is running, upload and delete some files in the testbucket bucket (you could use the console or gsutil) and watch as changes scroll by From b40d2fc1b0431cedf0ea83e565b7b3a679792b71 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 23 Oct 2017 14:23:30 -0700 Subject: [PATCH 040/197] Auto-update dependencies. (#1138) --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 11a282e7b..a962375f4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.4.0 -google-cloud-pubsub==0.28.3 +google-cloud-storage==1.5.0 +google-cloud-pubsub==0.28.4 From 9f1c1fa76207d44befbe0070f6b93fe3a6df4fa4 Mon Sep 17 00:00:00 2001 From: Ryan Matsumoto Date: Mon, 30 Oct 2017 09:31:45 -0700 Subject: [PATCH 041/197] storage requester pays samples (#1122) * storage requester pays samples * Added tests and fixed linting issues * google-cloud-storage version update * changed get_bucket to bucket for downloading * small change --- requester_pays.py | 105 +++++++++++++++++++++++++++++++++++++++++ requester_pays_test.py | 62 ++++++++++++++++++++++++ 2 files changed, 167 insertions(+) create mode 100644 requester_pays.py create mode 100644 requester_pays_test.py diff --git a/requester_pays.py b/requester_pays.py new file mode 100644 index 000000000..b98ba96f1 --- /dev/null +++ b/requester_pays.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python + +# Copyright 2017 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to use requester pays features on Google +Cloud Storage buckets. + +For more information, see the documentation at +https://cloud.google.com/storage/docs/using-requester-pays. +""" + +import argparse + +from google.cloud import storage + + +def get_requester_pays_status(bucket_name): + """Get a bucket's requester pays metadata""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + requester_pays_status = bucket.requester_pays + if requester_pays_status: + print('Requester Pays is enabled for {}'.format(bucket_name)) + else: + print('Requester Pays is disabled for {}'.format(bucket_name)) + + +def enable_requester_pays(bucket_name): + """Enable a bucket's requesterpays metadata""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.requester_pays = True + bucket.patch() + print('Requester Pays has been enabled for {}'.format(bucket_name)) + + +def disable_requester_pays(bucket_name): + """Disable a bucket's requesterpays metadata""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.requester_pays = False + bucket.patch() + print('Requester Pays has been disabled for {}'.format(bucket_name)) + + +def download_file_requester_pays( + bucket_name, project_id, source_blob_name, destination_file_name): + """Download file using specified project as the requester""" + storage_client = storage.Client() + user_project = project_id + bucket = storage_client.bucket(bucket_name, user_project) + blob = 
bucket.blob(source_blob_name) + blob.download_to_filename(destination_file_name) + + print('Blob {} downloaded to {} using a requester-pays request.'.format( + source_blob_name, + destination_file_name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('bucket_name', help='Your Cloud Storage bucket name.') + subparsers = parser.add_subparsers(dest='command') + + subparsers.add_parser( + 'check-status', help=get_requester_pays_status.__doc__) + + subparsers.add_parser( + 'enable', help=enable_requester_pays.__doc__) + + subparsers.add_parser( + 'disable', help=disable_requester_pays.__doc__) + + download_parser = subparsers.add_parser( + 'download', help=download_file_requester_pays.__doc__) + download_parser.add_argument('project') + download_parser.add_argument('source_blob_name') + download_parser.add_argument('destination_file_name') + + args = parser.parse_args() + + if args.command == 'check-status': + get_requester_pays_status(args.bucket_name) + elif args.command == 'enable': + enable_requester_pays(args.bucket_name) + elif args.command == 'disable': + disable_requester_pays(args.bucket_name) + elif args.command == 'download': + download_file_requester_pays( + args.bucket_name, args.project, args.source_blob_name, + args.destination_file_name) diff --git a/requester_pays_test.py b/requester_pays_test.py new file mode 100644 index 000000000..05c9a2275 --- /dev/null +++ b/requester_pays_test.py @@ -0,0 +1,62 @@ +# Copyright 2017 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import tempfile + +from google.cloud import storage +import pytest + +import requester_pays + +BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +PROJECT = os.environ['GCLOUD_PROJECT'] + + +def test_enable_requester_pays(capsys): + requester_pays.enable_requester_pays(BUCKET) + out, _ = capsys.readouterr() + assert 'Requester Pays has been enabled for {}'.format(BUCKET) in out + + +def test_disable_requester_pays(capsys): + requester_pays.disable_requester_pays(BUCKET) + out, _ = capsys.readouterr() + assert 'Requester Pays has been disabled for {}'.format(BUCKET) in out + + +def test_get_requester_pays_status(capsys): + requester_pays.get_requester_pays_status(BUCKET) + out, _ = capsys.readouterr() + assert 'Requester Pays is disabled for {}'.format(BUCKET) in out + + +@pytest.fixture +def test_blob(): + """Provides a pre-existing blob in the test bucket.""" + bucket = storage.Client().bucket(BUCKET) + blob = bucket.blob('storage_snippets_test_sigil') + blob.upload_from_string('Hello, is it me you\'re looking for?') + return blob + + +def test_download_file_requester_pays(test_blob, capsys): + with tempfile.NamedTemporaryFile() as dest_file: + requester_pays.download_file_requester_pays( + BUCKET, + PROJECT, + test_blob.name, + dest_file.name) + + assert dest_file.read() From 42fa87b61757d41a374b63564a6f281ac872c03c Mon Sep 17 00:00:00 2001 From: DPE bot Date: Wed, 1 Nov 2017 12:30:10 -0700 Subject: [PATCH 042/197] Auto-update dependencies. 
(#1186) --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index a962375f4..a3e9b8afa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.5.0 -google-cloud-pubsub==0.28.4 +google-cloud-storage==1.6.0 +google-cloud-pubsub==0.29.0 From cb0f1cd8c7dad590ab46b967841f347daa5c1076 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 28 Nov 2017 09:52:33 -0800 Subject: [PATCH 043/197] Auto-update dependencies. (#1234) * Auto-update dependencies. * Drop pytest-logcapture as it's no longer needed Change-Id: Ia8b9e8aaf248e9770db6bc4842a4532df8383893 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a3e9b8afa..0c8cdca4e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.6.0 -google-cloud-pubsub==0.29.0 +google-cloud-pubsub==0.29.1 From 992b4f61c5976658587da1cfb5cc95177c4e3320 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Thu, 30 Nov 2017 10:25:03 -0800 Subject: [PATCH 044/197] Auto-update dependencies. 
(#1239) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0c8cdca4e..027a8eae6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.6.0 -google-cloud-pubsub==0.29.1 +google-cloud-pubsub==0.29.2 From ebe578b01cc546796467587397c41167204dae03 Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Thu, 7 Dec 2017 10:34:29 -0800 Subject: [PATCH 045/197] Added "Open in Cloud Shell" buttons to README files (#1254) --- README.rst | 80 ++++++++++++++++++++++++++++++++++----------------- README.rst.in | 2 ++ 2 files changed, 56 insertions(+), 26 deletions(-) diff --git a/README.rst b/README.rst index 471df76a7..fdfde20f8 100644 --- a/README.rst +++ b/README.rst @@ -3,6 +3,10 @@ Google Cloud Storage Python Samples =============================================================================== +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/README.rst + + This directory contains samples for Google Cloud Storage. `Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. @@ -54,6 +58,10 @@ Samples Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/quickstart.py;storage/cloud-client/README.rst + + To run this sample: @@ -66,6 +74,10 @@ To run this sample: Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/snippets.py;storage/cloud-client/README.rst + + To run this sample: @@ -78,13 +90,13 @@ To run this sample: bucket_name {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} ... - + This application demonstrates how to perform basic operations on blobs (objects) in a Google Cloud Storage bucket. - + For more information, see the README.md under /storage and the documentation at https://cloud.google.com/storage/docs. - + positional arguments: bucket_name Your cloud storage bucket. {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} @@ -117,14 +129,19 @@ To run this sample: Google Cloud SDK. rename Renames a blob. copy Renames a blob. - + optional arguments: -h, --help show this help message and exit + Access Control Lists +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/acl.py;storage/cloud-client/README.rst + + To run this sample: @@ -136,13 +153,13 @@ To run this sample: usage: acl.py [-h] {print-bucket-acl,print-bucket-acl-for-user,add-bucket-owner,remove-bucket-owner,add-bucket-default-owner,remove-bucket-default-owner,print-blob-acl,print-blob-acl-for-user,add-blob-owner,remove-blob-owner} ... - + This application demonstrates how to manage access control lists (acls) in Google Cloud Storage. 
- + For more information, see the README.md under /storage and the documentation at https://cloud.google.com/storage/docs/encryption. - + positional arguments: {print-bucket-acl,print-bucket-acl-for-user,add-bucket-owner,remove-bucket-owner,add-bucket-default-owner,remove-bucket-default-owner,print-blob-acl,print-blob-acl-for-user,add-blob-owner,remove-blob-owner} print-bucket-acl Prints out a bucket's access control list. @@ -165,14 +182,19 @@ To run this sample: add-blob-owner Adds a user as an owner on the given blob. remove-blob-owner Removes a user from the access control list of the given blob in the given bucket. - + optional arguments: -h, --help show this help message and exit + Customer-Supplied Encryption +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/encryption.py;storage/cloud-client/README.rst + + To run this sample: @@ -182,19 +204,19 @@ To run this sample: $ python encryption.py usage: encryption.py [-h] {generate-encryption-key,upload,download,rotate} ... - + This application demonstrates how to upload and download encrypted blobs (objects) in Google Cloud Storage. - + Use `generate-encryption-key` to generate an example key: - + python encryption.py generate-encryption-key - + Then use the key to upload and download files encrypted with a custom key. - + For more information, see the README.md under /storage and the documentation at https://cloud.google.com/storage/docs/encryption. - + positional arguments: {generate-encryption-key,upload,download,rotate} generate-encryption-key @@ -212,14 +234,19 @@ To run this sample: same key provided when uploading the blob. rotate Performs a key rotation by re-writing an encrypted blob with a new encryption key. 
- + optional arguments: -h, --help show this help message and exit + Notification Polling +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/notification_polling.py;storage/cloud-client/README.rst + + To run this sample: @@ -229,50 +256,51 @@ To run this sample: $ python notification_polling.py usage: notification_polling.py [-h] project subscription - + This application demonstrates how to poll for GCS notifications from a Cloud Pub/Sub subscription, parse the incoming message, and acknowledge the successful processing of the message. - + This application will work with any subscription configured for pull rather than push notifications. If you do not already have notifications configured, you may consult the docs at https://cloud.google.com/storage/docs/reporting-changes or follow the steps below: - + 1. First, follow the common setup steps for these snippets, specically configuring auth and installing dependencies. See the README's "Setup" section. - + 2. Activate the Google Cloud Pub/Sub API, if you have not already done so. https://console.cloud.google.com/flows/enableapi?apiid=pubsub - + 3. Create a Google Cloud Storage bucket: $ gsutil mb gs://testbucket - + 4. Create a Cloud Pub/Sub topic and publish bucket notifications there: $ gsutil notification create -f json -t testtopic gs://testbucket - + 5. Create a subscription for your new topic: $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic - + 6. Run this program: $ python notification_polling.py my-project-id testsubscription - + 7. While the program is running, upload and delete some files in the testbucket bucket (you could use the console or gsutil) and watch as changes scroll by in the app. 
- + positional arguments: project The ID of the project that owns the subscription subscription The ID of the Pub/Sub subscription - + optional arguments: -h, --help show this help message and exit + The client library ------------------------------------------------------------------------------- diff --git a/README.rst.in b/README.rst.in index aa9690abe..bae062df6 100644 --- a/README.rst.in +++ b/README.rst.in @@ -29,3 +29,5 @@ samples: show_help: true cloud_client_library: true + +folder: storage/cloud-client \ No newline at end of file From 55cf1e27b3a5703ff99dca70dce8c79fb0d1e665 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 11 Dec 2017 09:45:02 -0800 Subject: [PATCH 046/197] Auto-update dependencies. (#1263) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 027a8eae6..4137ba91d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.6.0 -google-cloud-pubsub==0.29.2 +google-cloud-pubsub==0.29.3 From 132c57ad886bfa4f981cb9e493efabf6df669b9d Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 12 Dec 2017 09:26:42 -0800 Subject: [PATCH 047/197] Auto-update dependencies. (#1272) * Auto-update dependencies. * Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 4137ba91d..be6a97679 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.6.0 -google-cloud-pubsub==0.29.3 +google-cloud-pubsub==0.29.4 From 2dc734bf69f2a0758da84cd4091645e869900fa2 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 2 Jan 2018 14:02:47 -0800 Subject: [PATCH 048/197] Auto-update dependencies. (#1282) * Auto-update dependencies. 
* Fix storage acl sample Change-Id: I413bea899fdde4c4859e4070a9da25845b81f7cf --- acl_test.py | 4 +++- requirements.txt | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/acl_test.py b/acl_test.py index 0c9fd1a50..7796ef043 100644 --- a/acl_test.py +++ b/acl_test.py @@ -23,7 +23,9 @@ BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] # Typically we'd use a @example.com address, but GCS requires a real Google # account. -TEST_EMAIL = 'jonwayne@google.com' +TEST_EMAIL = ( + 'google-auth-system-tests' + '@python-docs-samples-tests.iam.gserviceaccount.com') @pytest.fixture diff --git a/requirements.txt b/requirements.txt index be6a97679..fe7238974 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.6.0 -google-cloud-pubsub==0.29.4 +google-cloud-pubsub==0.30.1 From a9d3af4653f01b590926357212e5449934dc4636 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Thu, 1 Feb 2018 22:20:35 -0800 Subject: [PATCH 049/197] Auto-update dependencies. (#1320) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fe7238974..11c81e62e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.6.0 +google-cloud-storage==1.7.0 google-cloud-pubsub==0.30.1 From e2b368c1376767d540115b806e3d1a0a8a036f9c Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 26 Feb 2018 09:03:37 -0800 Subject: [PATCH 050/197] Auto-update dependencies. (#1359) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 11c81e62e..93eb8b0b2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.7.0 -google-cloud-pubsub==0.30.1 +google-cloud-pubsub==0.32.0 From d20daa619276710a5365028810d99df39d2023be Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 5 Mar 2018 12:28:55 -0800 Subject: [PATCH 051/197] Auto-update dependencies. (#1377) * Auto-update dependencies. 
* Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 93eb8b0b2..e779429dc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.7.0 +google-cloud-storage==1.8.0 google-cloud-pubsub==0.32.0 From 221ef4c1c2f16fb825ca57e7d68bd7116f7a30a5 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Thu, 8 Mar 2018 13:33:57 -0800 Subject: [PATCH 052/197] Auto-update dependencies. (#1389) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e779429dc..2aa1cdc9b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.8.0 -google-cloud-pubsub==0.32.0 +google-cloud-pubsub==0.32.1 From 0e217fb22387c7d6b3bde92963817f0f31c95ee3 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 2 Apr 2018 02:51:10 -0700 Subject: [PATCH 053/197] Auto-update dependencies. --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2aa1cdc9b..a66952c92 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-storage==1.8.0 -google-cloud-pubsub==0.32.1 +google-cloud-pubsub==0.33.0 From c875b4c7fe403829c434e1bb3dd0e53f7bd76eb5 Mon Sep 17 00:00:00 2001 From: chenyumic Date: Fri, 6 Apr 2018 22:57:36 -0700 Subject: [PATCH 054/197] Regenerate the README files and fix the Open in Cloud Shell link for some samples (#1441) --- README.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index fdfde20f8..f76b0e164 100644 --- a/README.rst +++ b/README.rst @@ -12,7 +12,7 @@ This directory contains samples for Google Cloud Storage. `Google Cloud Storage` -.. _Google Cloud Storage: https://cloud.google.com/storage/docs +.. 
_Google Cloud Storage: https://cloud.google.com/storage/docs Setup ------------------------------------------------------------------------------- @@ -59,7 +59,7 @@ Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/quickstart.py;storage/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/quickstart.py,storage/cloud-client/README.rst @@ -75,7 +75,7 @@ Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/snippets.py;storage/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/snippets.py,storage/cloud-client/README.rst @@ -139,7 +139,7 @@ Access Control Lists +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/acl.py;storage/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/acl.py,storage/cloud-client/README.rst @@ -192,7 +192,7 @@ Customer-Supplied Encryption +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/encryption.py;storage/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/encryption.py,storage/cloud-client/README.rst @@ -244,7 +244,7 @@ Notification Polling +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/notification_polling.py;storage/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/notification_polling.py,storage/cloud-client/README.rst From 32a3dd471df09cb920e8197e5ca6789eb20bfd47 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Thu, 26 Apr 2018 10:26:41 -0700 Subject: [PATCH 055/197] Update READMEs to fix numbering and add git clone (#1464) --- README.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index f76b0e164..5705fd44c 100644 --- a/README.rst +++ b/README.rst @@ -31,10 +31,16 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup #. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. From ac3b25fe09652599101bd22cc3a0264894b1719b Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Wed, 30 May 2018 08:51:42 -0700 Subject: [PATCH 056/197] Fix typo. 
(#1509) Fixes https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1485 --- notification_polling.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notification_polling.py b/notification_polling.py index dde3a90a2..88868f948 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -79,7 +79,7 @@ def summarize(message): attributes['overwroteGeneration']) if 'overwrittenByGeneration' in attributes: description += '\tOverwritten by generation: %s\n' % ( - attributes['ovewrittenByGeneration']) + attributes['overwrittenByGeneration']) payload_format = attributes['payloadFormat'] if payload_format == 'JSON_API_V1': From 1884a242fdf6217cb8cec0a4f9d66242aa035661 Mon Sep 17 00:00:00 2001 From: Chris Broadfoot Date: Mon, 11 Jun 2018 10:37:23 -0700 Subject: [PATCH 057/197] Storage: add KMS samples (#1510) * Storage: add KMS samples * Add CLOUD_KMS_KEY environment variable --- README.rst | 9 +++++++-- requirements.txt | 2 +- snippets.py | 44 ++++++++++++++++++++++++++++++++++++++++++++ snippets_test.py | 12 ++++++++++++ 4 files changed, 64 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 5705fd44c..eb1e26b6e 100644 --- a/README.rst +++ b/README.rst @@ -94,7 +94,7 @@ To run this sample: usage: snippets.py [-h] bucket_name - {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} + {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,rename,copy} ... This application demonstrates how to perform basic operations on blobs @@ -105,7 +105,7 @@ To run this sample: positional arguments: bucket_name Your cloud storage bucket. 
- {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,download,delete,metadata,make-public,signed-url,rename,copy} + {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,rename,copy} create-bucket Creates a new bucket. delete-bucket Deletes a bucket. The bucket must be empty. get-bucket-labels Prints out a bucket's labels. @@ -124,6 +124,11 @@ To run this sample: However, if you specify prefix='/a' and delimiter='/', you'll get back: /a/1.txt upload Uploads a file to the bucket. + enable-default-kms-key + Sets a bucket's default KMS key. + upload-with-kms-key + Uploads a file to the bucket, encrypting it with the + given KMS key. download Downloads a blob from the bucket. delete Deletes a blob from the bucket. metadata Prints out a blob's metadata. diff --git a/requirements.txt b/requirements.txt index 2aa1cdc9b..553705c1e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.8.0 +google-cloud-storage==1.10.0 google-cloud-pubsub==0.32.1 diff --git a/snippets.py b/snippets.py index 3d490891a..8980715ba 100644 --- a/snippets.py +++ b/snippets.py @@ -43,6 +43,18 @@ def delete_bucket(bucket_name): print('Bucket {} deleted'.format(bucket.name)) +def enable_default_kms_key(bucket_name, kms_key_name): + """Sets a bucket's default KMS key.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.default_kms_key_name = kms_key_name + bucket.patch() + + print('Set default KMS key for bucket {} to {}.'.format( + bucket.name, + bucket.default_kms_key_name)) + + def get_bucket_labels(bucket_name): """Prints out a bucket's labels.""" storage_client = storage.Client() @@ -143,6 +155,20 @@ def upload_blob(bucket_name, source_file_name, destination_blob_name): destination_blob_name)) +def 
upload_blob_with_kms(bucket_name, source_file_name, destination_blob_name, + kms_key_name): + """Uploads a file to the bucket, encrypting it with the given KMS key.""" + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(destination_blob_name, kms_key_name=kms_key_name) + blob.upload_from_filename(source_file_name) + + print('File {} uploaded to {} with encryption key {}.'.format( + source_file_name, + destination_blob_name, + kms_key_name)) + + def download_blob(bucket_name, source_blob_name, destination_file_name): """Downloads a blob from the bucket.""" storage_client = storage.Client() @@ -277,6 +303,16 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): upload_parser.add_argument('source_file_name') upload_parser.add_argument('destination_blob_name') + enable_default_kms_parser = subparsers.add_parser( + 'enable-default-kms-key', help=enable_default_kms_key.__doc__) + enable_default_kms_parser.add_argument('kms_key_name') + + upload_kms_parser = subparsers.add_parser( + 'upload-with-kms-key', help=upload_blob_with_kms.__doc__) + upload_kms_parser.add_argument('source_file_name') + upload_kms_parser.add_argument('destination_blob_name') + upload_kms_parser.add_argument('kms_key_name') + download_parser = subparsers.add_parser( 'download', help=download_blob.__doc__) download_parser.add_argument('source_blob_name') @@ -310,6 +346,8 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): if args.command == 'create-bucket': create_bucket(args.bucket_name) + if args.command == 'enable-default-kms-key': + enable_default_kms_key(args.bucket_name, args.kms_key_name) elif args.command == 'delete-bucket': delete_bucket(args.bucket_name) if args.command == 'get-bucket-labels': @@ -327,6 +365,12 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): args.bucket_name, args.source_file_name, args.destination_blob_name) + elif args.command == 
'upload-with-kms-key': + upload_blob_with_kms( + args.bucket_name, + args.source_file_name, + args.destination_blob_name, + args.kms_key_name) elif args.command == 'download': download_blob( args.bucket_name, diff --git a/snippets_test.py b/snippets_test.py index bf5ecb8e1..fde16798f 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -23,6 +23,7 @@ import snippets BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +KMS_KEY = os.environ['CLOUD_KMS_KEY'] def test_get_bucket_labels(): @@ -79,6 +80,17 @@ def test_upload_blob(): 'test_upload_blob') +def test_upload_blob_with_kms(): + with tempfile.NamedTemporaryFile() as source_file: + source_file.write(b'test') + + snippets.upload_blob_with_kms( + BUCKET, + source_file.name, + 'test_upload_blob_encrypted', + KMS_KEY) + + def test_download_blob(test_blob): with tempfile.NamedTemporaryFile() as dest_file: snippets.download_blob( From 3e89f19ab6cc0f58a5cad2d24f8250b1f39c5cec Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Mon, 11 Jun 2018 14:35:15 -0700 Subject: [PATCH 058/197] [Storage] Update kms samples (#1517) * Storage: add KMS samples * Add CLOUD_KMS_KEY environment variable * Add region tags around samples * Add more testing * Fix tests and lint --- snippets.py | 4 ++++ snippets_test.py | 14 +++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/snippets.py b/snippets.py index 8980715ba..89434bd85 100644 --- a/snippets.py +++ b/snippets.py @@ -44,6 +44,7 @@ def delete_bucket(bucket_name): def enable_default_kms_key(bucket_name, kms_key_name): + # [START storage_set_bucket_default_kms_key] """Sets a bucket's default KMS key.""" storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) @@ -53,6 +54,7 @@ def enable_default_kms_key(bucket_name, kms_key_name): print('Set default KMS key for bucket {} to {}.'.format( bucket.name, bucket.default_kms_key_name)) + # [END storage_set_bucket_default_kms_key] def get_bucket_labels(bucket_name): @@ -157,6 +159,7 @@ def 
upload_blob(bucket_name, source_file_name, destination_blob_name): def upload_blob_with_kms(bucket_name, source_file_name, destination_blob_name, kms_key_name): + # [START storage_upload_with_kms_key] """Uploads a file to the bucket, encrypting it with the given KMS key.""" storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) @@ -167,6 +170,7 @@ def upload_blob_with_kms(bucket_name, source_file_name, destination_blob_name, source_file_name, destination_blob_name, kms_key_name)) + # [END storage_upload_with_kms_key] def download_blob(bucket_name, source_blob_name, destination_file_name): diff --git a/snippets_test.py b/snippets_test.py index fde16798f..dc97b084f 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -26,6 +26,16 @@ KMS_KEY = os.environ['CLOUD_KMS_KEY'] +def test_enable_default_kms_key(): + snippets.enable_default_kms_key( + bucket_name=BUCKET, + kms_key_name=KMS_KEY) + bucket = storage.Client().get_bucket(BUCKET) + assert bucket.default_kms_key_name.startswith(KMS_KEY) + bucket.default_kms_key_name = None + bucket.patch() + + def test_get_bucket_labels(): snippets.get_bucket_labels(BUCKET) @@ -83,12 +93,14 @@ def test_upload_blob(): def test_upload_blob_with_kms(): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b'test') - snippets.upload_blob_with_kms( BUCKET, source_file.name, 'test_upload_blob_encrypted', KMS_KEY) + bucket = storage.Client().bucket(BUCKET) + kms_blob = bucket.get_blob('test_upload_blob_encrypted') + assert kms_blob.kms_key_name.startswith(KMS_KEY) def test_download_blob(test_blob): From 123fbc943780e6fd597411d722679ddde6293928 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Mon, 27 Aug 2018 11:25:59 -0700 Subject: [PATCH 059/197] Remove leftover merge conflict. 
(#1657) --- requirements.txt | 5 ----- 1 file changed, 5 deletions(-) diff --git a/requirements.txt b/requirements.txt index d26f9a014..553705c1e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,2 @@ -<<<<<<< HEAD -google-cloud-storage==1.8.0 -google-cloud-pubsub==0.33.0 -======= google-cloud-storage==1.10.0 google-cloud-pubsub==0.32.1 ->>>>>>> master From 6db621be2826222137bd0f7b4c7084c1329ea179 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Wed, 29 Aug 2018 15:28:04 -0700 Subject: [PATCH 060/197] Add region tag to upload_blob snippet (#1671) --- snippets.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/snippets.py b/snippets.py index 89434bd85..1b13379ae 100644 --- a/snippets.py +++ b/snippets.py @@ -25,8 +25,11 @@ import datetime import pprint +# [START storage_upload_file] from google.cloud import storage +# [END storage_upload_file] + def create_bucket(bucket_name): """Creates a new bucket.""" @@ -144,6 +147,7 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): print(prefix) +# [START storage_upload_file] def upload_blob(bucket_name, source_file_name, destination_blob_name): """Uploads a file to the bucket.""" storage_client = storage.Client() @@ -155,6 +159,7 @@ def upload_blob(bucket_name, source_file_name, destination_blob_name): print('File {} uploaded to {}.'.format( source_file_name, destination_blob_name)) +# [END storage_upload_file] def upload_blob_with_kms(bucket_name, source_file_name, destination_blob_name, From 9dfc16e24508f85bd5444ebfc3815e33975e0770 Mon Sep 17 00:00:00 2001 From: Billy Jacobson Date: Fri, 5 Oct 2018 11:24:05 -0400 Subject: [PATCH 061/197] Bucket lock samples (#1588) --- README.rst | 49 ++++++++ README.rst.in | 3 + bucket_lock.py | 287 ++++++++++++++++++++++++++++++++++++++++++++ bucket_lock_test.py | 136 +++++++++++++++++++++ requirements.txt | 4 +- snippets.py | 7 ++ 6 files changed, 484 insertions(+), 2 deletions(-) create mode 100644 bucket_lock.py create mode 100644 
bucket_lock_test.py diff --git a/README.rst b/README.rst index eb1e26b6e..bece81967 100644 --- a/README.rst +++ b/README.rst @@ -251,6 +251,55 @@ To run this sample: +Bucket Lock ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/bucket_lock.py,storage/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python bucket_lock.py + + usage: bucket_lock.py [-h] + {set-retention-policy,remove-retention-policy,lock-retention-policy,get-retention-policy,set-temporary-hold,release-temporary-hold,set-event-based-hold,release-event-based-hold,enable-default-event-based-hold,disable-default-event-based-hold,get-default-event-based-hold} + ... + + positional arguments: + {set-retention-policy,remove-retention-policy,lock-retention-policy,get-retention-policy,set-temporary-hold,release-temporary-hold,set-event-based-hold,release-event-based-hold,enable-default-event-based-hold,disable-default-event-based-hold,get-default-event-based-hold} + set-retention-policy + Defines a retention policy on a given bucket + remove-retention-policy + Removes the retention policy on a given bucket + lock-retention-policy + Locks the retention policy on a given bucket + get-retention-policy + Gets the retention policy on a given bucket + set-temporary-hold Sets a temporary hold on a given blob + release-temporary-hold + Releases the temporary hold on a given blob + set-event-based-hold + Sets a event based hold on a given blob + release-event-based-hold + Releases the event based hold on a given blob + enable-default-event-based-hold + Enables the default event based hold on a given bucket + disable-default-event-based-hold + Disables the default event based hold on a given + bucket + 
get-default-event-based-hold + Gets the default event based hold on a given bucket + + optional arguments: + -h, --help show this help message and exit + + + Notification Polling +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ diff --git a/README.rst.in b/README.rst.in index bae062df6..1a24a0373 100644 --- a/README.rst.in +++ b/README.rst.in @@ -24,6 +24,9 @@ samples: - name: Customer-Supplied Encryption file: encryption.py show_help: true +- name: Bucket Lock + file: bucket_lock.py + show_help: true - name: Notification Polling file: notification_polling.py show_help: true diff --git a/bucket_lock.py b/bucket_lock.py new file mode 100644 index 000000000..0363a5750 --- /dev/null +++ b/bucket_lock.py @@ -0,0 +1,287 @@ +#!/usr/bin/env python + +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse + +from google.cloud import storage + + +def set_retention_policy(bucket_name, retention_period): + """Defines a retention policy on a given bucket""" + # [START storage_set_retention_policy] + # bucket_name = "my-bucket" + # retention_period = 10 + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.retention_period = retention_period + bucket.patch() + + print('Bucket {} retention period set for {} seconds'.format( + bucket.name, + bucket.retention_period)) + # [END storage_set_retention_policy] + + +def remove_retention_policy(bucket_name): + """Removes the retention policy on a given bucket""" + # [START storage_remove_retention_policy] + # bucket_name = "my-bucket" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + bucket.reload() + + if bucket.retention_policy_locked: + print( + 'Unable to remove retention period as retention policy is locked.') + return + + bucket.retention_period = None + bucket.patch() + + print('Removed bucket {} retention policy'.format(bucket.name)) + # [END storage_remove_retention_policy] + + +def lock_retention_policy(bucket_name): + """Locks the retention policy on a given bucket""" + # [START storage_lock_retention_policy] + # bucket_name = "my-bucket" + storage_client = storage.Client() + # get_bucket gets the current metageneration value for the bucket, + # required by lock_retention_policy. + bucket = storage_client.get_bucket(bucket_name) + + # Warning: Once a retention policy is locked it cannot be unlocked + # and retention period can only be increased. 
+ bucket.lock_retention_policy() + + print('Retention policy for {} is now locked'.format(bucket_name)) + print('Retention policy effective as of {}'.format( + bucket.retention_policy_effective_time)) + # [END storage_lock_retention_policy] + + +def get_retention_policy(bucket_name): + """Gets the retention policy on a given bucket""" + # [START storage_get_retention_policy] + # bucket_name = "my-bucket" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + bucket.reload() + + print('Retention Policy for {}'.format(bucket_name)) + print('Retention Period: {}'.format(bucket.retention_period)) + if bucket.retention_policy_locked: + print('Retention Policy is locked') + + if bucket.retention_policy_effective_time: + print('Effective Time: {}' + .format(bucket.retention_policy_effective_time)) + # [END storage_get_retention_policy] + + +def set_temporary_hold(bucket_name, blob_name): + """Sets a temporary hold on a given blob""" + # [START storage_set_temporary_hold] + # bucket_name = "my-bucket" + # blob_name = "my-blob" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.temporary_hold = True + blob.patch() + + print("Temporary hold was set for #{blob_name}") + # [END storage_set_temporary_hold] + + +def release_temporary_hold(bucket_name, blob_name): + """Releases the temporary hold on a given blob""" + # [START storage_release_temporary_hold] + # bucket_name = "my-bucket" + # blob_name = "my-blob" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.temporary_hold = False + blob.patch() + + print("Temporary hold was release for #{blob_name}") + # [END storage_release_temporary_hold] + + +def set_event_based_hold(bucket_name, blob_name): + """Sets a event based hold on a given blob""" + # [START storage_set_event_based_hold] + # bucket_name = "my-bucket" + # blob_name = "my-blob" + storage_client = 
storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.event_based_hold = True + blob.patch() + + print('Event based hold was set for {}'.format(blob_name)) + # [END storage_set_event_based_hold] + + +def release_event_based_hold(bucket_name, blob_name): + """Releases the event based hold on a given blob""" + # [START storage_release_event_based_hold] + # bucket_name = "my-bucket" + # blob_name = "my-blob" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.event_based_hold = False + blob.patch() + + print('Event based hold was released for {}'.format(blob_name)) + # [END storage_release_event_based_hold] + + +def enable_default_event_based_hold(bucket_name): + """Enables the default event based hold on a given bucket""" + # [START storage_enable_default_event_based_hold] + # bucket_name = "my-bucket" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.default_event_based_hold = True + bucket.patch() + + print('Default event based hold was enabled for {}'.format(bucket_name)) + # [END storage_enable_default_event_based_hold] + + +def disable_default_event_based_hold(bucket_name): + """Disables the default event based hold on a given bucket""" + # [START storage_disable_default_event_based_hold] + # bucket_name = "my-bucket" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.default_event_based_hold = False + bucket.patch() + + print("Default event based hold was disabled for {}".format(bucket_name)) + # [END storage_disable_default_event_based_hold] + + +def get_default_event_based_hold(bucket_name): + """Gets the default event based hold on a given bucket""" + # [START storage_get_default_event_based_hold] + # bucket_name = "my-bucket" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + bucket.reload() + + if 
bucket.default_event_based_hold: + print('Default event-based hold is enabled for {}'.format(bucket_name)) + else: + print('Default event-based hold is not enabled for {}' + .format(bucket_name)) + # [END storage_get_default_event_based_hold] + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + subparsers = parser.add_subparsers(dest='command') + + set_retention_policy_parser = subparsers.add_parser( + 'set-retention-policy', help=set_retention_policy.__doc__) + set_retention_policy_parser.add_argument('bucket_name') + set_retention_policy_parser.add_argument('retention_period') + + remove_retention_policy_parser = subparsers.add_parser( + 'remove-retention-policy', help=remove_retention_policy.__doc__) + remove_retention_policy_parser.add_argument('bucket_name') + + lock_retention_policy_parser = subparsers.add_parser( + 'lock-retention-policy', help=lock_retention_policy.__doc__) + lock_retention_policy_parser.add_argument('bucket_name') + + get_retention_policy_parser = subparsers.add_parser( + 'get-retention-policy', help=get_retention_policy.__doc__) + get_retention_policy_parser.add_argument('bucket_name') + + set_temporary_hold_parser = subparsers.add_parser( + 'set-temporary-hold', help=set_temporary_hold.__doc__) + set_temporary_hold_parser.add_argument('bucket_name') + set_temporary_hold_parser.add_argument('blob_name') + + release_temporary_hold_parser = subparsers.add_parser( + 'release-temporary-hold', help=release_temporary_hold.__doc__) + release_temporary_hold_parser.add_argument('bucket_name') + release_temporary_hold_parser.add_argument('blob_name') + + set_event_based_hold_parser = subparsers.add_parser( + 'set-event-based-hold', help=set_event_based_hold.__doc__) + set_event_based_hold_parser.add_argument('bucket_name') + set_event_based_hold_parser.add_argument('blob_name') + + release_event_based_hold_parser = subparsers.add_parser( + 
'release-event-based-hold', help=release_event_based_hold.__doc__) + release_event_based_hold_parser.add_argument('bucket_name') + release_event_based_hold_parser.add_argument('blob_name') + + enable_default_event_based_hold_parser = subparsers.add_parser( + 'enable-default-event-based-hold', + help=enable_default_event_based_hold.__doc__) + enable_default_event_based_hold_parser.add_argument('bucket_name') + + disable_default_event_based_hold_parser = subparsers.add_parser( + 'disable-default-event-based-hold', + help=disable_default_event_based_hold.__doc__) + disable_default_event_based_hold_parser.add_argument('bucket_name') + + get_default_event_based_hold_parser = subparsers.add_parser( + 'get-default-event-based-hold', + help=get_default_event_based_hold.__doc__) + get_default_event_based_hold_parser.add_argument('bucket_name') + + args = parser.parse_args() + + if args.command == 'set-retention-policy': + set_retention_policy(args.bucket_name, args.retention_period) + elif args.command == 'remove-retention-policy': + remove_retention_policy(args.bucket_name) + elif args.command == 'lock-retention-policy': + lock_retention_policy(args.bucket_name) + elif args.command == 'get-retention-policy': + get_retention_policy(args.bucket_name) + elif args.command == 'set-temporary-hold': + set_temporary_hold(args.bucket_name, args.blob_name) + elif args.command == 'release-temporary-hold': + release_temporary_hold(args.bucket_name, args.blob_name) + elif args.command == 'set-event-based-hold': + set_event_based_hold(args.bucket_name, args.blob_name) + elif args.command == 'release-event-based-hold': + release_event_based_hold(args.bucket_name, args.blob_name) + elif args.command == 'enable-default-event-based-hold': + enable_default_event_based_hold(args.bucket_name) + elif args.command == 'disable-default-event-based-hold': + disable_default_event_based_hold(args.bucket_name) + elif args.command == 'get-default-event-based-hold': + 
get_default_event_based_hold(args.bucket_name) diff --git a/bucket_lock_test.py b/bucket_lock_test.py new file mode 100644 index 000000000..63e1afbaa --- /dev/null +++ b/bucket_lock_test.py @@ -0,0 +1,136 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +from google.cloud import storage + +import pytest + +import bucket_lock + +BLOB_NAME = 'storage_snippets_test_sigil' +BLOB_CONTENT = 'Hello, is it me you\'re looking for?' 
+# Retention policy for 5 seconds +RETENTION_POLICY = 5 + + +@pytest.fixture() +def bucket(): + """Creates a test bucket and deletes it upon completion.""" + client = storage.Client() + bucket_name = 'bucket-lock-' + str(int(time.time())) + bucket = client.create_bucket(bucket_name) + yield bucket + bucket.delete(force=True) + + +def test_retention_policy_no_lock(bucket, capsys): + bucket_lock.set_retention_policy(bucket.name, RETENTION_POLICY) + bucket.reload() + + assert bucket.retention_period is RETENTION_POLICY + assert bucket.retention_policy_effective_time is not None + assert bucket.retention_policy_locked is None + + bucket_lock.get_retention_policy(bucket.name) + out, _ = capsys.readouterr() + assert 'Retention Policy for {}'.format(bucket.name) in out + assert 'Retention Period: 5' in out + assert 'Effective Time: ' in out + assert 'Retention Policy is locked' not in out + + blob = bucket.blob(BLOB_NAME) + blob.upload_from_string(BLOB_CONTENT) + + assert blob.retention_expiration_time is not None + + bucket_lock.remove_retention_policy(bucket.name) + bucket.reload() + assert bucket.retention_period is None + + time.sleep(RETENTION_POLICY) + + +def test_retention_policy_lock(bucket, capsys): + bucket_lock.set_retention_policy(bucket.name, RETENTION_POLICY) + bucket.reload() + assert bucket.retention_policy_locked is None + + bucket_lock.lock_retention_policy(bucket.name) + bucket.reload() + assert bucket.retention_policy_locked is True + + bucket_lock.get_retention_policy(bucket.name) + out, _ = capsys.readouterr() + assert 'Retention Policy is locked' in out + + +def test_enable_disable_bucket_default_event_based_hold(bucket, capsys): + bucket_lock.get_default_event_based_hold(bucket.name) + out, _ = capsys.readouterr() + assert 'Default event-based hold is not enabled for {}'.format( + bucket.name) in out + assert 'Default event-based hold is enabled for {}'.format( + bucket.name) not in out + + bucket_lock.enable_default_event_based_hold(bucket.name) + 
bucket.reload() + + assert bucket.default_event_based_hold is True + + bucket_lock.get_default_event_based_hold(bucket.name) + out, _ = capsys.readouterr() + assert 'Default event-based hold is enabled for {}'.format( + bucket.name) in out + + blob = bucket.blob(BLOB_NAME) + blob.upload_from_string(BLOB_CONTENT) + assert blob.event_based_hold is True + + bucket_lock.release_event_based_hold(bucket.name, blob.name) + blob.reload() + assert blob.event_based_hold is False + + bucket_lock.disable_default_event_based_hold(bucket.name) + bucket.reload() + assert bucket.default_event_based_hold is False + + +def test_enable_disable_temporary_hold(bucket): + blob = bucket.blob(BLOB_NAME) + blob.upload_from_string(BLOB_CONTENT) + assert blob.temporary_hold is None + + bucket_lock.set_temporary_hold(bucket.name, blob.name) + blob.reload() + assert blob.temporary_hold is True + + bucket_lock.release_temporary_hold(bucket.name, blob.name) + blob.reload() + assert blob.temporary_hold is False + + +def test_enable_disable_event_based_hold(bucket): + blob = bucket.blob(BLOB_NAME) + blob.upload_from_string(BLOB_CONTENT) + assert blob.event_based_hold is None + + bucket_lock.set_event_based_hold(bucket.name, blob.name) + blob.reload() + assert blob.event_based_hold is True + + bucket_lock.release_event_based_hold(bucket.name, blob.name) + blob.reload() + assert blob.event_based_hold is False diff --git a/requirements.txt b/requirements.txt index 553705c1e..beab65dbb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-storage==1.10.0 -google-cloud-pubsub==0.32.1 +google-cloud-pubsub==0.33.0 +google-cloud-storage==1.13.0 \ No newline at end of file diff --git a/snippets.py b/snippets.py index 1b13379ae..a8f83bd7f 100644 --- a/snippets.py +++ b/snippets.py @@ -227,6 +227,13 @@ def blob_metadata(bucket_name, blob_name): print('Content-encoding: {}'.format(blob.content_encoding)) print('Content-language: {}'.format(blob.content_language)) print('Metadata: 
{}'.format(blob.metadata)) + print("Temporary hold: ", + 'enabled' if blob.temporary_hold else 'disabled') + print("Event based hold: ", + 'enabled' if blob.event_based_hold else 'disabled') + if blob.retention_expiration_time: + print("retentionExpirationTime: {}" + .format(blob.retention_expiration_time)) def make_blob_public(bucket_name, blob_name): From d22155b10d36714152458e4f4a12af7728107d7e Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Fri, 5 Oct 2018 09:03:42 -0700 Subject: [PATCH 062/197] [Storage] Add spacing in sample code. (#1735) * Add spacing in sample code. * remove whitespace --- bucket_lock.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/bucket_lock.py b/bucket_lock.py index 0363a5750..497176f9f 100644 --- a/bucket_lock.py +++ b/bucket_lock.py @@ -24,6 +24,7 @@ def set_retention_policy(bucket_name, retention_period): # [START storage_set_retention_policy] # bucket_name = "my-bucket" # retention_period = 10 + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) @@ -40,6 +41,7 @@ def remove_retention_policy(bucket_name): """Removes the retention policy on a given bucket""" # [START storage_remove_retention_policy] # bucket_name = "my-bucket" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) bucket.reload() @@ -60,6 +62,7 @@ def lock_retention_policy(bucket_name): """Locks the retention policy on a given bucket""" # [START storage_lock_retention_policy] # bucket_name = "my-bucket" + storage_client = storage.Client() # get_bucket gets the current metageneration value for the bucket, # required by lock_retention_policy. 
@@ -79,6 +82,7 @@ def get_retention_policy(bucket_name): """Gets the retention policy on a given bucket""" # [START storage_get_retention_policy] # bucket_name = "my-bucket" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) bucket.reload() @@ -99,6 +103,7 @@ def set_temporary_hold(bucket_name, blob_name): # [START storage_set_temporary_hold] # bucket_name = "my-bucket" # blob_name = "my-blob" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) @@ -115,6 +120,7 @@ def release_temporary_hold(bucket_name, blob_name): # [START storage_release_temporary_hold] # bucket_name = "my-bucket" # blob_name = "my-blob" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) @@ -131,6 +137,7 @@ def set_event_based_hold(bucket_name, blob_name): # [START storage_set_event_based_hold] # bucket_name = "my-bucket" # blob_name = "my-blob" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) @@ -147,6 +154,7 @@ def release_event_based_hold(bucket_name, blob_name): # [START storage_release_event_based_hold] # bucket_name = "my-bucket" # blob_name = "my-blob" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(blob_name) @@ -162,6 +170,7 @@ def enable_default_event_based_hold(bucket_name): """Enables the default event based hold on a given bucket""" # [START storage_enable_default_event_based_hold] # bucket_name = "my-bucket" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) @@ -176,6 +185,7 @@ def disable_default_event_based_hold(bucket_name): """Disables the default event based hold on a given bucket""" # [START storage_disable_default_event_based_hold] # bucket_name = "my-bucket" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) @@ -190,6 +200,7 @@ def 
get_default_event_based_hold(bucket_name): """Gets the default event based hold on a given bucket""" # [START storage_get_default_event_based_hold] # bucket_name = "my-bucket" + storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) bucket.reload() From f8218b9db0af3a7e42045c78677942f46b1dd021 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 20 Nov 2018 15:40:29 -0800 Subject: [PATCH 063/197] Auto-update dependencies. (#1846) ACK, merging. --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index beab65dbb..262e2b8df 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==0.33.0 -google-cloud-storage==1.13.0 \ No newline at end of file +google-cloud-pubsub==0.38.0 +google-cloud-storage==1.13.0 From 9abddd9fda06617f6eddeae67d768c909369b7e9 Mon Sep 17 00:00:00 2001 From: Charles Engelke Date: Wed, 19 Dec 2018 13:23:45 -0800 Subject: [PATCH 064/197] Update requirements.txt (#1944) * Update requirements.txt * Adding some rate limiting --- acl_test.py | 5 +++++ requirements.txt | 2 +- snippets.py | 1 + snippets_test.py | 7 +++---- 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/acl_test.py b/acl_test.py index 7796ef043..e584bdea0 100644 --- a/acl_test.py +++ b/acl_test.py @@ -13,6 +13,7 @@ # limitations under the License. 
import os +import time from google.cloud import storage import google.cloud.storage.acl @@ -36,7 +37,9 @@ def test_bucket(): object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket) acl.reload() object_default_acl.reload() + time.sleep(1) # bucket ops rate limited 1 update per second yield bucket + time.sleep(1) # bucket ops rate limited 1 update per second acl.save() object_default_acl.save() @@ -49,7 +52,9 @@ def test_blob(): blob.upload_from_string('Hello, is it me you\'re looking for?') acl = google.cloud.storage.acl.ObjectACL(blob) acl.reload() + time.sleep(1) # bucket ops rate limited 1 update per second yield blob + time.sleep(1) # bucket ops rate limited 1 update per second acl.save() diff --git a/requirements.txt b/requirements.txt index 262e2b8df..2bed7f589 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==0.38.0 -google-cloud-storage==1.13.0 +google-cloud-storage==1.13.2 diff --git a/snippets.py b/snippets.py index a8f83bd7f..fcc0ca060 100644 --- a/snippets.py +++ b/snippets.py @@ -266,6 +266,7 @@ def generate_signed_url(bucket_name, blob_name): method='GET') print('The signed url for {} is {}'.format(blob.name, url)) + return url def rename_blob(bucket_name, blob_name, new_name): diff --git a/snippets_test.py b/snippets_test.py index dc97b084f..ec444b36b 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -14,6 +14,7 @@ import os import tempfile +import time from google.cloud import storage import google.cloud.exceptions @@ -30,6 +31,7 @@ def test_enable_default_kms_key(): snippets.enable_default_kms_key( bucket_name=BUCKET, kms_key_name=KMS_KEY) + time.sleep(2) # Let change propagate as needed bucket = storage.Client().get_bucket(BUCKET) assert bucket.default_kms_key_name.startswith(KMS_KEY) bucket.default_kms_key_name = None @@ -135,13 +137,10 @@ def test_make_blob_public(test_blob): def test_generate_signed_url(test_blob, capsys): - snippets.generate_signed_url( + url = 
snippets.generate_signed_url( BUCKET, test_blob.name) - out, _ = capsys.readouterr() - url = out.rsplit().pop() - r = requests.get(url) assert r.text == 'Hello, is it me you\'re looking for?' From f1c494dfa5f94b007b03a657c5e4860e1f97c6d3 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Wed, 6 Feb 2019 12:06:35 -0800 Subject: [PATCH 065/197] Auto-update dependencies. (#1980) * Auto-update dependencies. * Update requirements.txt * Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2bed7f589..0f31a7307 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==0.38.0 +google-cloud-pubsub==0.39.1 google-cloud-storage==1.13.2 From 00f34a94f70a14c28bc8ec98dc780c174ef88e93 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Wed, 6 Feb 2019 15:54:01 -0800 Subject: [PATCH 066/197] storage: bucket policy only samples (#1976) * humble beginnings * Verified integration tests and updated README.rst * Updating samples to reflect fixed surface * Use release 1.14.0 --- README.rst | 33 +++++++++++++ README.rst.in | 5 +- acl_test.py | 12 ++--- bucket_policy_only.py | 96 ++++++++++++++++++++++++++++++++++++++ bucket_policy_only_test.py | 52 +++++++++++++++++++++ requirements.txt | 2 +- 6 files changed, 192 insertions(+), 8 deletions(-) create mode 100644 bucket_policy_only.py create mode 100644 bucket_policy_only_test.py diff --git a/README.rst b/README.rst index bece81967..dc8b7fdca 100644 --- a/README.rst +++ b/README.rst @@ -300,6 +300,39 @@ To run this sample: +Bucket Policy Only ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/bucket_policy_only.py,storage/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python bucket_policy_only.py + + usage: bucket_policy_only.py [-h] + {enable-bucket-policy-only,disable-bucket-policy-only,get-bucket-policy-only} + ... + + positional arguments: + {enable-bucket-policy-only,disable-bucket-policy-only,get-bucket-policy-only} + enable-bucket-policy-only + Enable Bucket Policy Only for a bucket + disable-bucket-policy-only + Disable Bucket Policy Only for a bucket + get-bucket-policy-only + Get Bucket Policy Only for a bucket + + optional arguments: + -h, --help show this help message and exit + + + Notification Polling +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ diff --git a/README.rst.in b/README.rst.in index 1a24a0373..3b8f33af7 100644 --- a/README.rst.in +++ b/README.rst.in @@ -4,7 +4,7 @@ product: name: Google Cloud Storage short_name: Cloud Storage url: https://cloud.google.com/storage/docs - description: > + description: > `Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. 
@@ -27,6 +27,9 @@ samples: - name: Bucket Lock file: bucket_lock.py show_help: true +- name: Bucket Policy Only + file: bucket_policy_only.py + show_help: true - name: Notification Polling file: notification_polling.py show_help: true diff --git a/acl_test.py b/acl_test.py index e584bdea0..aeb1312ee 100644 --- a/acl_test.py +++ b/acl_test.py @@ -37,9 +37,9 @@ def test_bucket(): object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket) acl.reload() object_default_acl.reload() - time.sleep(1) # bucket ops rate limited 1 update per second + time.sleep(1) # bucket ops rate limited 1 update per second yield bucket - time.sleep(1) # bucket ops rate limited 1 update per second + time.sleep(1) # bucket ops rate limited 1 update per second acl.save() object_default_acl.save() @@ -51,10 +51,10 @@ def test_blob(): blob = bucket.blob('storage_acl_test_sigil') blob.upload_from_string('Hello, is it me you\'re looking for?') acl = google.cloud.storage.acl.ObjectACL(blob) - acl.reload() - time.sleep(1) # bucket ops rate limited 1 update per second - yield blob - time.sleep(1) # bucket ops rate limited 1 update per second + acl.reload() # bucket ops rate limited 1 update per second + time.sleep(1) + yield blob # bucket ops rate limited 1 update per second + time.sleep(1) acl.save() diff --git a/bucket_policy_only.py b/bucket_policy_only.py new file mode 100644 index 000000000..530574544 --- /dev/null +++ b/bucket_policy_only.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse + +from google.cloud import storage + + +def enable_bucket_policy_only(bucket_name): + """Enable Bucket Policy Only for a bucket""" + # [START storage_enable_bucket_policy_only] + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.iam_configuration.bucket_policy_only_enabled = True + bucket.patch() + + print('Bucket Policy Only was enabled for {}.'.format(bucket.name)) + # [END storage_enable_bucket_policy_only] + + +def disable_bucket_policy_only(bucket_name): + """Disable Bucket Policy Only for a bucket""" + # [START storage_disable_bucket_policy_only] + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.iam_configuration.bucket_policy_only_enabled = False + bucket.patch() + + print('Bucket Policy Only was disabled for {}.'.format(bucket.name)) + # [END storage_disable_bucket_policy_only] + + +def get_bucket_policy_only(bucket_name): + """Get Bucket Policy Only for a bucket""" + # [START storage_get_bucket_policy_only] + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + iam_configuration = bucket.iam_configuration + + if iam_configuration.bucket_policy_only_enabled: + print('Bucket Policy Only is enabled for {}.'.format(bucket.name)) + print('Bucket will be locked on {}.'.format( + iam_configuration.bucket_policy_only_locked_time)) + else: + print('Bucket Policy Only is disabled for {}.'.format(bucket.name)) + # [END storage_get_bucket_policy_only] + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + subparsers = parser.add_subparsers(dest='command') + + enable_bucket_policy_only_parser = subparsers.add_parser( + 
'enable-bucket-policy-only', help=enable_bucket_policy_only.__doc__) + enable_bucket_policy_only_parser.add_argument('bucket_name') + + disable_bucket_policy_only_parser = subparsers.add_parser( + 'disable-bucket-policy-only', help=disable_bucket_policy_only.__doc__) + disable_bucket_policy_only_parser.add_argument('bucket_name') + + get_bucket_policy_only_parser = subparsers.add_parser( + 'get-bucket-policy-only', help=get_bucket_policy_only.__doc__) + get_bucket_policy_only_parser.add_argument('bucket_name') + + args = parser.parse_args() + + if args.command == 'enable-bucket-policy-only': + enable_bucket_policy_only(args.bucket_name) + elif args.command == 'disable-bucket-policy-only': + disable_bucket_policy_only(args.bucket_name) + elif args.command == 'get-bucket-policy-only': + get_bucket_policy_only(args.bucket_name) diff --git a/bucket_policy_only_test.py b/bucket_policy_only_test.py new file mode 100644 index 000000000..64a9dad10 --- /dev/null +++ b/bucket_policy_only_test.py @@ -0,0 +1,52 @@ +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time + +from google.cloud import storage + +import pytest + +import bucket_policy_only + + +@pytest.fixture() +def bucket(): + """Creates a test bucket and deletes it upon completion.""" + client = storage.Client() + bucket_name = 'bucket-policy-only-' + str(int(time.time())) + bucket = client.create_bucket(bucket_name) + yield bucket + bucket.delete(force=True) + + +def test_get_bucket_policy_only(bucket, capsys): + bucket_policy_only.get_bucket_policy_only(bucket.name) + out, _ = capsys.readouterr() + assert 'Bucket Policy Only is disabled for {}.'.format( + bucket.name) in out + + +def test_enable_bucket_policy_only(bucket, capsys): + bucket_policy_only.enable_bucket_policy_only(bucket.name) + out, _ = capsys.readouterr() + assert 'Bucket Policy Only was enabled for {}.'.format( + bucket.name) in out + + +def test_disable_bucket_policy_only(bucket, capsys): + bucket_policy_only.disable_bucket_policy_only(bucket.name) + out, _ = capsys.readouterr() + assert 'Bucket Policy Only was disabled for {}.'.format( + bucket.name) in out diff --git a/requirements.txt b/requirements.txt index 0f31a7307..4cb3be26a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==0.39.1 -google-cloud-storage==1.13.2 +google-cloud-storage==1.14.0 From 9ea18066447138b79e724ebd284c2ea8f35b8e77 Mon Sep 17 00:00:00 2001 From: Charles Engelke Date: Wed, 1 May 2019 11:17:18 -0700 Subject: [PATCH 067/197] Add sleep to avoid bucket rate limit (#2136) --- bucket_policy_only_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bucket_policy_only_test.py b/bucket_policy_only_test.py index 64a9dad10..5ae433fa4 100644 --- a/bucket_policy_only_test.py +++ b/bucket_policy_only_test.py @@ -28,6 +28,7 @@ def bucket(): bucket_name = 'bucket-policy-only-' + str(int(time.time())) bucket = client.create_bucket(bucket_name) yield bucket + time.sleep(3) bucket.delete(force=True) From 8f15a51bf2ec1be4a47c44ada6d852b23bc93789 Mon Sep 17 00:00:00 2001 From: 
Jonathan Lui Date: Fri, 10 May 2019 11:49:57 -0700 Subject: [PATCH 068/197] feat(storage): Add snippets for v4 signed URLs (#2142) * feat(storage): Add snippets for v4 signed URLs * lint * fix .format() * add v4 command to switch statement * fix region tag * change if => elif to try to make func less complex * move main to a function --- requirements.txt | 2 +- snippets.py | 84 ++++++++++++++++++++++++++++++++++++++++++++---- snippets_test.py | 24 ++++++++++++++ 3 files changed, 103 insertions(+), 7 deletions(-) diff --git a/requirements.txt b/requirements.txt index 4cb3be26a..6cb328874 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==0.39.1 -google-cloud-storage==1.14.0 +google-cloud-storage==1.15.0 diff --git a/snippets.py b/snippets.py index fcc0ca060..8fcdd87ae 100644 --- a/snippets.py +++ b/snippets.py @@ -249,7 +249,7 @@ def make_blob_public(bucket_name, blob_name): def generate_signed_url(bucket_name, blob_name): - """Generates a signed URL for a blob. + """Generates a v2 signed URL for downloading a blob. Note that this method requires a service account key file. You can not use this if you are using Application Default Credentials from Google Compute @@ -269,6 +269,62 @@ def generate_signed_url(bucket_name, blob_name): return url +# [START storage_generate_signed_url_v4] +def generate_download_signed_url_v4(bucket_name, blob_name): + """Generates a v4 signed URL for downloading a blob. + + Note that this method requires a service account key file. You can not use + this if you are using Application Default Credentials from Google Compute + Engine or from the Google Cloud SDK. + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + url = blob.generate_signed_url( + version='v4', + # This URL is valid for 15 minutes + expiration=datetime.timedelta(minutes=15), + # Allow GET requests using this URL. 
+ method='GET') + + print('Generated GET signed URL:') + print(url) + print('You can use this URL with any user agent, for example:') + print('curl \'{}\''.format(url)) + return url +# [END storage_generate_signed_url_v4] + + +# [START storage_generate_upload_signed_url_v4] +def generate_upload_signed_url_v4(bucket_name, blob_name): + """Generates a v4 signed URL for uploading a blob using HTTP PUT. + + Note that this method requires a service account key file. You can not use + this if you are using Application Default Credentials from Google Compute + Engine or from the Google Cloud SDK. + """ + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + blob = bucket.blob(blob_name) + + url = blob.generate_signed_url( + version='v4', + # This URL is valid for 15 minutes + expiration=datetime.timedelta(minutes=15), + # Allow GET requests using this URL. + method='PUT', + content_type='application/octet-stream') + + print('Generated PUT signed URL:') + print(url) + print('You can use this URL with any user agent, for example:') + print("curl -X PUT -H 'Content-Type: application/octet-stream' " + "--upload-file my-file '{}'".format(url)) + return url +# [END storage_generate_upload_signed_url_v4] + + def rename_blob(bucket_name, blob_name, new_name): """Renames a blob.""" storage_client = storage.Client() @@ -296,7 +352,7 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): destination_bucket.name)) -if __name__ == '__main__': +def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) @@ -350,6 +406,14 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): 'signed-url', help=generate_signed_url.__doc__) signed_url_parser.add_argument('blob_name') + signed_url_download_v4_parser = subparsers.add_parser( + 'signed-url-download-v4', help=generate_download_signed_url_v4.__doc__) + signed_url_download_v4_parser.add_argument('blob_name') + + 
signed_url_upload_v4_parser = subparsers.add_parser( + 'signed-url-upload-v4', help=generate_upload_signed_url_v4.__doc__) + signed_url_upload_v4_parser.add_argument('blob_name') + rename_parser = subparsers.add_parser('rename', help=rename_blob.__doc__) rename_parser.add_argument('blob_name') rename_parser.add_argument('new_name') @@ -363,15 +427,15 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): if args.command == 'create-bucket': create_bucket(args.bucket_name) - if args.command == 'enable-default-kms-key': + elif args.command == 'enable-default-kms-key': enable_default_kms_key(args.bucket_name, args.kms_key_name) elif args.command == 'delete-bucket': delete_bucket(args.bucket_name) - if args.command == 'get-bucket-labels': + elif args.command == 'get-bucket-labels': get_bucket_labels(args.bucket_name) - if args.command == 'add-bucket-label': + elif args.command == 'add-bucket-label': add_bucket_label(args.bucket_name) - if args.command == 'remove-bucket-label': + elif args.command == 'remove-bucket-label': remove_bucket_label(args.bucket_name) elif args.command == 'list': list_blobs(args.bucket_name) @@ -401,6 +465,10 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): make_blob_public(args.bucket_name, args.blob_name) elif args.command == 'signed-url': generate_signed_url(args.bucket_name, args.blob_name) + elif args.command == 'signed-url-download-v4': + generate_download_signed_url_v4(args.bucket_name, args.blob_name) + elif args.command == 'signed-url-upload-v4': + generate_upload_signed_url_v4(args.bucket_name, args.blob_name) elif args.command == 'rename': rename_blob(args.bucket_name, args.blob_name, args.new_name) elif args.command == 'copy': @@ -409,3 +477,7 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): args.blob_name, args.new_bucket_name, args.new_blob_name) + + +if __name__ == '__main__': + main() diff --git a/snippets_test.py b/snippets_test.py index ec444b36b..d04c03611 
100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -145,6 +145,30 @@ def test_generate_signed_url(test_blob, capsys): assert r.text == 'Hello, is it me you\'re looking for?' +def test_generate_download_signed_url_v4(test_blob, capsys): + url = snippets.generate_download_signed_url_v4( + BUCKET, + test_blob.name) + + r = requests.get(url) + assert r.text == 'Hello, is it me you\'re looking for?' + + +def test_generate_upload_signed_url_v4(capsys): + blob_name = 'storage_snippets_test_upload' + content = b'Uploaded via v4 signed url' + url = snippets.generate_upload_signed_url_v4( + BUCKET, + blob_name) + + requests.put(url, data=content, headers={ + 'content-type': 'application/octet-stream'}) + + bucket = storage.Client().bucket(BUCKET) + blob = bucket.blob(blob_name) + assert blob.download_as_string() == content + + def test_rename_blob(test_blob): bucket = storage.Client().bucket(BUCKET) From 36df2ace9de818702ac0b8a52b776aa5100abf62 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Tue, 23 Jul 2019 13:46:58 -0700 Subject: [PATCH 069/197] storage: add list buckets (#2149) * Add list_buckets sample * Allow for more if conditions --- README.rst | 29 ++++--- snippets.py | 196 ++++++++++++++++++++++++++++++++--------------- snippets_test.py | 6 ++ 3 files changed, 159 insertions(+), 72 deletions(-) diff --git a/README.rst b/README.rst index dc8b7fdca..ef32d53de 100644 --- a/README.rst +++ b/README.rst @@ -93,8 +93,7 @@ To run this sample: $ python snippets.py usage: snippets.py [-h] - bucket_name - {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,rename,copy} + 
{list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} ... This application demonstrates how to perform basic operations on blobs @@ -104,8 +103,8 @@ To run this sample: at https://cloud.google.com/storage/docs. positional arguments: - bucket_name Your cloud storage bucket. - {create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,rename,copy} + {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} + list-buckets Lists all buckets. create-bucket Creates a new bucket. delete-bucket Deletes a bucket. The bucket must be empty. get-bucket-labels Prints out a bucket's labels. @@ -133,11 +132,23 @@ To run this sample: delete Deletes a blob from the bucket. metadata Prints out a blob's metadata. make-public Makes a blob publicly accessible. - signed-url Generates a signed URL for a blob. Note that this - method requires a service account key file. You can - not use this if you are using Application Default - Credentials from Google Compute Engine or from the - Google Cloud SDK. + signed-url Generates a v2 signed URL for downloading a blob. Note + that this method requires a service account key file. + You can not use this if you are using Application + Default Credentials from Google Compute Engine or from + the Google Cloud SDK. + signed-url-download-v4 + Generates a v4 signed URL for downloading a blob. Note + that this method requires a service account key file. 
+ You can not use this if you are using Application + Default Credentials from Google Compute Engine or from + the Google Cloud SDK. + signed-url-upload-v4 + Generates a v4 signed URL for uploading a blob using + HTTP PUT. Note that this method requires a service + account key file. You can not use this if you are + using Application Default Credentials from Google + Compute Engine or from the Google Cloud SDK. rename Renames a blob. copy Renames a blob. diff --git a/snippets.py b/snippets.py index 8fcdd87ae..171f1c665 100644 --- a/snippets.py +++ b/snippets.py @@ -99,6 +99,17 @@ def remove_bucket_label(bucket_name): pprint.pprint(bucket.labels) +# [START storage_list_buckets] +def list_buckets(): + """Lists all buckets.""" + storage_client = storage.Client() + buckets = storage_client.list_buckets() + + for bucket in buckets: + print(bucket.name) +# [END storage_list_buckets] + + def list_blobs(bucket_name): """Lists all the blobs in the bucket.""" storage_client = storage.Client() @@ -352,58 +363,164 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): destination_bucket.name)) +def bucket_commands(args): + if args.command == 'list-buckets': + list_buckets() + elif args.command == 'create-bucket': + create_bucket(args.bucket_name) + elif args.command == 'enable-default-kms-key': + enable_default_kms_key(args.bucket_name, args.kms_key_name) + elif args.command == 'delete-bucket': + delete_bucket(args.bucket_name) + elif args.command == 'get-bucket-labels': + get_bucket_labels(args.bucket_name) + elif args.command == 'add-bucket-label': + add_bucket_label(args.bucket_name) + elif args.command == 'remove-bucket-label': + remove_bucket_label(args.bucket_name) + + +def blob_commands(args): + if args.command == 'list': + list_blobs(args.bucket_name) + elif args.command == 'list-with-prefix': + list_blobs_with_prefix(args.bucket_name, args.prefix, args.delimiter) + elif args.command == 'upload': + upload_blob( + args.bucket_name, + 
args.source_file_name, + args.destination_blob_name) + elif args.command == 'upload-with-kms-key': + upload_blob_with_kms( + args.bucket_name, + args.source_file_name, + args.destination_blob_name, + args.kms_key_name) + elif args.command == 'download': + download_blob( + args.bucket_name, + args.source_blob_name, + args.destination_file_name) + elif args.command == 'delete': + delete_blob(args.bucket_name, args.blob_name) + elif args.command == 'metadata': + blob_metadata(args.bucket_name, args.blob_name) + elif args.command == 'make-public': + make_blob_public(args.bucket_name, args.blob_name) + elif args.command == 'signed-url': + generate_signed_url(args.bucket_name, args.blob_name) + elif args.command == 'signed-url-download-v4': + generate_download_signed_url_v4(args.bucket_name, args.blob_name) + elif args.command == 'signed-url-upload-v4': + generate_upload_signed_url_v4(args.bucket_name, args.blob_name) + elif args.command == 'rename': + rename_blob(args.bucket_name, args.blob_name, args.new_name) + elif args.command == 'copy': + copy_blob( + args.bucket_name, + args.blob_name, + args.new_bucket_name, + args.new_blob_name) + + def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('bucket_name', help='Your cloud storage bucket.') subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('create-bucket', help=create_bucket.__doc__) - subparsers.add_parser('delete-bucket', help=delete_bucket.__doc__) - subparsers.add_parser('get-bucket-labels', help=get_bucket_labels.__doc__) - subparsers.add_parser('add-bucket-label', help=add_bucket_label.__doc__) - subparsers.add_parser( + list_buckets_parser = subparsers.add_parser( + 'list-buckets', help=list_buckets.__doc__) + list_buckets_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + + create_bucket_parser = subparsers.add_parser( + 'create-bucket', help=create_bucket.__doc__) + 
create_bucket_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + + delete_create_parser = subparsers.add_parser( + 'delete-bucket', help=delete_bucket.__doc__) + delete_create_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + + get_bucket_labels_parser = subparsers.add_parser( + 'get-bucket-labels', help=get_bucket_labels.__doc__) + get_bucket_labels_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + + add_bucket_label_parser = subparsers.add_parser( + 'add-bucket-label', help=add_bucket_label.__doc__) + add_bucket_label_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + + remove_bucket_label_parser = subparsers.add_parser( 'remove-bucket-label', help=remove_bucket_label.__doc__) - subparsers.add_parser('list', help=list_blobs.__doc__) + remove_bucket_label_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + + list_blobs_parser = subparsers.add_parser( + 'list', help=list_blobs.__doc__) + list_blobs_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') list_with_prefix_parser = subparsers.add_parser( 'list-with-prefix', help=list_blobs_with_prefix.__doc__) + list_with_prefix_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') list_with_prefix_parser.add_argument('prefix') list_with_prefix_parser.add_argument('--delimiter', default=None) - upload_parser = subparsers.add_parser('upload', help=upload_blob.__doc__) + upload_parser = subparsers.add_parser( + 'upload', help=upload_blob.__doc__) + upload_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') upload_parser.add_argument('source_file_name') upload_parser.add_argument('destination_blob_name') enable_default_kms_parser = subparsers.add_parser( 'enable-default-kms-key', help=enable_default_kms_key.__doc__) + enable_default_kms_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') 
enable_default_kms_parser.add_argument('kms_key_name') upload_kms_parser = subparsers.add_parser( 'upload-with-kms-key', help=upload_blob_with_kms.__doc__) + upload_kms_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') upload_kms_parser.add_argument('source_file_name') upload_kms_parser.add_argument('destination_blob_name') upload_kms_parser.add_argument('kms_key_name') download_parser = subparsers.add_parser( 'download', help=download_blob.__doc__) + download_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') download_parser.add_argument('source_blob_name') download_parser.add_argument('destination_file_name') - delete_parser = subparsers.add_parser('delete', help=delete_blob.__doc__) + delete_parser = subparsers.add_parser( + 'delete', help=delete_blob.__doc__) + delete_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') delete_parser.add_argument('blob_name') metadata_parser = subparsers.add_parser( 'metadata', help=blob_metadata.__doc__) + metadata_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') metadata_parser.add_argument('blob_name') make_public_parser = subparsers.add_parser( 'make-public', help=make_blob_public.__doc__) + make_public_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') make_public_parser.add_argument('blob_name') signed_url_parser = subparsers.add_parser( 'signed-url', help=generate_signed_url.__doc__) + signed_url_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') signed_url_parser.add_argument('blob_name') signed_url_download_v4_parser = subparsers.add_parser( @@ -414,69 +531,22 @@ def main(): 'signed-url-upload-v4', help=generate_upload_signed_url_v4.__doc__) signed_url_upload_v4_parser.add_argument('blob_name') - rename_parser = subparsers.add_parser('rename', help=rename_blob.__doc__) + rename_parser = subparsers.add_parser( + 'rename', help=rename_blob.__doc__) + rename_parser.add_argument( + 
'bucket_name', help='Your cloud storage bucket.') rename_parser.add_argument('blob_name') rename_parser.add_argument('new_name') copy_parser = subparsers.add_parser('copy', help=rename_blob.__doc__) + copy_parser.add_argument('bucket_name', help='Your cloud storage bucket.') copy_parser.add_argument('blob_name') copy_parser.add_argument('new_bucket_name') copy_parser.add_argument('new_blob_name') args = parser.parse_args() - - if args.command == 'create-bucket': - create_bucket(args.bucket_name) - elif args.command == 'enable-default-kms-key': - enable_default_kms_key(args.bucket_name, args.kms_key_name) - elif args.command == 'delete-bucket': - delete_bucket(args.bucket_name) - elif args.command == 'get-bucket-labels': - get_bucket_labels(args.bucket_name) - elif args.command == 'add-bucket-label': - add_bucket_label(args.bucket_name) - elif args.command == 'remove-bucket-label': - remove_bucket_label(args.bucket_name) - elif args.command == 'list': - list_blobs(args.bucket_name) - elif args.command == 'list-with-prefix': - list_blobs_with_prefix(args.bucket_name, args.prefix, args.delimiter) - elif args.command == 'upload': - upload_blob( - args.bucket_name, - args.source_file_name, - args.destination_blob_name) - elif args.command == 'upload-with-kms-key': - upload_blob_with_kms( - args.bucket_name, - args.source_file_name, - args.destination_blob_name, - args.kms_key_name) - elif args.command == 'download': - download_blob( - args.bucket_name, - args.source_blob_name, - args.destination_file_name) - elif args.command == 'delete': - delete_blob(args.bucket_name, args.blob_name) - elif args.command == 'metadata': - blob_metadata(args.bucket_name, args.blob_name) - elif args.command == 'make-public': - make_blob_public(args.bucket_name, args.blob_name) - elif args.command == 'signed-url': - generate_signed_url(args.bucket_name, args.blob_name) - elif args.command == 'signed-url-download-v4': - generate_download_signed_url_v4(args.bucket_name, args.blob_name) - 
elif args.command == 'signed-url-upload-v4': - generate_upload_signed_url_v4(args.bucket_name, args.blob_name) - elif args.command == 'rename': - rename_blob(args.bucket_name, args.blob_name, args.new_name) - elif args.command == 'copy': - copy_blob( - args.bucket_name, - args.blob_name, - args.new_bucket_name, - args.new_blob_name) + bucket_commands(args) + blob_commands(args) if __name__ == '__main__': diff --git a/snippets_test.py b/snippets_test.py index d04c03611..32a767bd6 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -68,6 +68,12 @@ def test_blob(): return blob +def test_list_buckets(capsys): + snippets.list_buckets() + out, _ = capsys.readouterr() + assert BUCKET in out + + def test_list_blobs(test_blob, capsys): snippets.list_blobs(BUCKET) out, _ = capsys.readouterr() From b75282907aa94587f517fde33f7cef519c75da3a Mon Sep 17 00:00:00 2001 From: John Whitlock Date: Wed, 24 Jul 2019 14:19:13 -0500 Subject: [PATCH 070/197] Drop xfail for passing test_remove_bucket_label (#2173) The Python client was fixed in https://github.com/googleapis/google-cloud-python/issues/3711 so the test now passes. 
--- snippets_test.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/snippets_test.py b/snippets_test.py index 32a767bd6..c9c740dbf 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -48,10 +48,6 @@ def test_add_bucket_label(capsys): assert 'example' in out -@pytest.mark.xfail( - reason=( - 'https://github.com/GoogleCloudPlatform' - '/google-cloud-python/issues/3711')) def test_remove_bucket_label(capsys): snippets.add_bucket_label(BUCKET) snippets.remove_bucket_label(BUCKET) From 70ea3998969e09a6018f44a4e3cbe7142ae4de83 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Wed, 24 Jul 2019 15:11:59 -0700 Subject: [PATCH 071/197] Update string reported in snippet and update test --- snippets.py | 2 +- snippets_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/snippets.py b/snippets.py index 171f1c665..6c4ab8fcd 100644 --- a/snippets.py +++ b/snippets.py @@ -95,7 +95,7 @@ def remove_bucket_label(bucket_name): bucket.labels = labels bucket.patch() - print('Updated labels on {}.'.format(bucket.name)) + print('Removed labels on {}.'.format(bucket.name)) pprint.pprint(bucket.labels) diff --git a/snippets_test.py b/snippets_test.py index c9c740dbf..117bbd5bb 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -52,7 +52,7 @@ def test_remove_bucket_label(capsys): snippets.add_bucket_label(BUCKET) snippets.remove_bucket_label(BUCKET) out, _ = capsys.readouterr() - assert '{}' in out + assert 'Removed labels' in out @pytest.fixture From b760eb475212583fabd1326c2bb18024b30fdc5f Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Wed, 24 Jul 2019 15:22:45 -0700 Subject: [PATCH 072/197] Update list blobs to use new client.list_blobs() method. 
(#2296) * Update list blob samples * Update requirements.txt * Fix lint issues * Use latest storage client --- requirements.txt | 2 +- snippets.py | 9 +++------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/requirements.txt b/requirements.txt index 6cb328874..f50fabc18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==0.39.1 -google-cloud-storage==1.15.0 +google-cloud-storage==1.17.0 diff --git a/snippets.py b/snippets.py index 6c4ab8fcd..e118170c0 100644 --- a/snippets.py +++ b/snippets.py @@ -113,9 +113,7 @@ def list_buckets(): def list_blobs(bucket_name): """Lists all the blobs in the bucket.""" storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - blobs = bucket.list_blobs() + blobs = storage_client.list_blobs(bucket_name) for blob in blobs: print(blob.name) @@ -144,9 +142,8 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): """ storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - blobs = bucket.list_blobs(prefix=prefix, delimiter=delimiter) + blobs = storage_client.list_blobs(bucket_name, prefix=prefix, + delimiter=delimiter) print('Blobs:') for blob in blobs: From 1106886c2a6f04873395219d6998d67e42de8fd9 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Tue, 6 Aug 2019 10:55:36 -0700 Subject: [PATCH 073/197] [Storage] Add comment to clarify which package version is necessary (#2315) * Add comment to clarify which package version * Lint and add another comment to related sample --- snippets.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/snippets.py b/snippets.py index e118170c0..95aff2c50 100644 --- a/snippets.py +++ b/snippets.py @@ -113,6 +113,8 @@ def list_buckets(): def list_blobs(bucket_name): """Lists all the blobs in the bucket.""" storage_client = storage.Client() + + # Note: Client.list_blobs requires at least package version 1.17.0. 
blobs = storage_client.list_blobs(bucket_name) for blob in blobs: @@ -142,6 +144,8 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): """ storage_client = storage.Client() + + # Note: Client.list_blobs requires at least package version 1.17.0. blobs = storage_client.list_blobs(bucket_name, prefix=prefix, delimiter=delimiter) From bbf79faf9826dd217b1aacb1f50c1e0522b80829 Mon Sep 17 00:00:00 2001 From: Chris Cotter Date: Tue, 3 Sep 2019 12:34:31 -0400 Subject: [PATCH 074/197] Storage: HMAC key samples (#2372) Add samples for HMAC key functionality: list, create, get, activate, deactivate, delete. Includes tests and version bump for client library. --- hmac.py | 158 +++++++++++++++++++++++++++++++++++++++++++++++ hmac_test.py | 95 ++++++++++++++++++++++++++++ requirements.txt | 2 +- 3 files changed, 254 insertions(+), 1 deletion(-) create mode 100644 hmac.py create mode 100644 hmac_test.py diff --git a/hmac.py b/hmac.py new file mode 100644 index 000000000..0334bbcc1 --- /dev/null +++ b/hmac.py @@ -0,0 +1,158 @@ +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Samples to illustrate management of HMAC keys via the python client library. +""" + + +from google.cloud import storage + + +def list_keys(project_id): + """ + List all HMAC keys associated with the project. 
+ """ + # [START storage_list_hmac_keys] + # project_id = 'Your Google Cloud project ID' + storage_client = storage.Client(project=project_id) + hmac_keys = storage_client.list_hmac_keys(project_id=project_id) + print('HMAC Keys:') + for hmac_key in hmac_keys: + print('The HMAC key metadata is:') + print('Key ID: {}'.format(hmac_key.id)) + print('Access ID: {}'.format(hmac_key.access_id)) + print('Project ID: {}'.format(hmac_key.project)) + print('State: {}'.format(hmac_key.state)) + print('Created At: {}'.format(hmac_key.time_created)) + print('Updated At: {}'.format(hmac_key.updated)) + print('Etag: {}'.format(hmac_key.etag)) + # [END storage_list_hmac_keys] + return hmac_keys + + +def create_key(project_id, service_account_email): + """ + Create a new HMAC key using the given project and service account. + """ + # [START storage_create_hmac_key] + # project_id = 'Your Google Cloud project ID' + # service_account_email = 'Service account used to generate HMAC key' + storage_client = storage.Client(project=project_id) + hmac_key, secret = storage_client.create_hmac_key( + service_account_email=service_account_email, + project_id=project_id) + print('The base64 encoded secret is {}'.format(secret)) + print('Do not miss that secret, there is no API to recover it.') + print('The HMAC key metadata is:') + print('Key ID: {}'.format(hmac_key.id)) + print('Access ID: {}'.format(hmac_key.access_id)) + print('Project ID: {}'.format(hmac_key.project)) + print('State: {}'.format(hmac_key.state)) + print('Created At: {}'.format(hmac_key.time_created)) + print('Updated At: {}'.format(hmac_key.updated)) + print('Etag: {}'.format(hmac_key.etag)) + # [END storage_create_hmac_key] + return hmac_key + + +def get_key(access_id, project_id): + """ + Retrieve the HMACKeyMetadata with the given access id. 
+ """ + # [START storage_get_hmac_key] + # project_id = 'Your Google Cloud project ID' + # access_id = 'ID of an HMAC key' + storage_client = storage.Client(project=project_id) + hmac_key = storage_client.get_hmac_key_metadata( + access_id, + project_id=project_id) + print('The HMAC key metadata is:') + print('Key ID: {}'.format(hmac_key.id)) + print('Access ID: {}'.format(hmac_key.access_id)) + print('Project ID: {}'.format(hmac_key.project)) + print('State: {}'.format(hmac_key.state)) + print('Created At: {}'.format(hmac_key.time_created)) + print('Updated At: {}'.format(hmac_key.updated)) + print('Etag: {}'.format(hmac_key.etag)) + # [END storage_get_hmac_key] + return hmac_key + + +def activate_key(access_id, project_id): + """ + Activate the HMAC key with the given access ID. + """ + # [START storage_activate_hmac_key] + # project_id = 'Your Google Cloud project ID' + # access_id = 'ID of an inactive HMAC key' + storage_client = storage.Client(project=project_id) + hmac_key = storage_client.get_hmac_key_metadata( + access_id, + project_id=project_id) + hmac_key.state = 'ACTIVE' + hmac_key.update() + print('The HMAC key metadata is:') + print('Key ID: {}'.format(hmac_key.id)) + print('Access ID: {}'.format(hmac_key.access_id)) + print('Project ID: {}'.format(hmac_key.project)) + print('State: {}'.format(hmac_key.state)) + print('Created At: {}'.format(hmac_key.time_created)) + print('Updated At: {}'.format(hmac_key.updated)) + print('Etag: {}'.format(hmac_key.etag)) + # [END storage_activate_hmac_key] + return hmac_key + + +def deactivate_key(access_id, project_id): + """ + Deactivate the HMAC key with the given access ID. 
+ """ + # [START storage_deactivate_hmac_key] + # project_id = 'Your Google Cloud project ID' + # access_id = 'ID of an active HMAC key' + storage_client = storage.Client(project=project_id) + hmac_key = storage_client.get_hmac_key_metadata( + access_id, + project_id=project_id) + hmac_key.state = 'INACTIVE' + hmac_key.update() + print('The HMAC key is now inactive.') + print('The HMAC key metadata is:') + print('Key ID: {}'.format(hmac_key.id)) + print('Access ID: {}'.format(hmac_key.access_id)) + print('Project ID: {}'.format(hmac_key.project)) + print('State: {}'.format(hmac_key.state)) + print('Created At: {}'.format(hmac_key.time_created)) + print('Updated At: {}'.format(hmac_key.updated)) + print('Etag: {}'.format(hmac_key.etag)) + # [END storage_deactivate_hmac_key] + return hmac_key + + +def delete_key(access_id, project_id): + """ + Delete the HMAC key with the given access ID. Key must have state INACTIVE + in order to succeed. + """ + # [START storage_delete_hmac_key] + # project_id = 'Your Google Cloud project ID' + # access_id = 'ID of an HMAC key (must be in INACTIVE state)' + storage_client = storage.Client(project=project_id) + hmac_key = storage_client.get_hmac_key_metadata( + access_id, + project_id=project_id) + hmac_key.delete() + print('The key is deleted, though it may still appear in list_hmac_keys()' + ' results.') + # [END storage_delete_hmac_key] diff --git a/hmac_test.py b/hmac_test.py new file mode 100644 index 000000000..0e936467c --- /dev/null +++ b/hmac_test.py @@ -0,0 +1,95 @@ +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Tests for hmac.py. Requires GOOGLE_CLOUD_PROJECT (valid project) and +HMAC_KEY_TEST_SERVICE_ACCOUNT (valid service account email) env variables to be +set in order to run. +""" + + +import os + +from google.cloud import storage +import pytest + +import hmac + + +PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] +SERVICE_ACCOUNT_EMAIL = os.environ['HMAC_KEY_TEST_SERVICE_ACCOUNT'] +STORAGE_CLIENT = storage.Client(project=PROJECT_ID) + + +@pytest.fixture +def new_hmac_key(): + """ + Fixture to create a new HMAC key, and to guarantee all keys are deleted at + the end of each test. + """ + hmac_key, secret = STORAGE_CLIENT.create_hmac_key( + service_account_email=SERVICE_ACCOUNT_EMAIL, + project_id=PROJECT_ID) + yield hmac_key + # Re-fetch the key metadata in case state has changed during the test. 
+ hmac_key = STORAGE_CLIENT.get_hmac_key_metadata( + hmac_key.access_id, + project_id=PROJECT_ID) + if hmac_key.state == 'DELETED': + return + if not hmac_key.state == 'INACTIVE': + hmac_key.state = 'INACTIVE' + hmac_key.update() + hmac_key.delete() + + +def test_list_keys(capsys, new_hmac_key): + hmac_keys = hmac.list_keys(PROJECT_ID) + assert 'HMAC Keys:' in capsys.readouterr().out + assert hmac_keys.num_results >= 1 + + +def test_create_key(capsys): + hmac_key = hmac.create_key(PROJECT_ID, SERVICE_ACCOUNT_EMAIL) + hmac_key.state = 'INACTIVE' + hmac_key.update() + hmac_key.delete() + assert 'Key ID:' in capsys.readouterr().out + assert hmac_key.access_id + + +def test_get_key(capsys, new_hmac_key): + hmac_key = hmac.get_key(new_hmac_key.access_id, PROJECT_ID) + assert 'HMAC key metadata' in capsys.readouterr().out + assert hmac_key.access_id == new_hmac_key.access_id + + +def test_activate_key(capsys, new_hmac_key): + new_hmac_key.state = 'INACTIVE' + new_hmac_key.update() + hmac_key = hmac.activate_key(new_hmac_key.access_id, PROJECT_ID) + assert 'State: ACTIVE' in capsys.readouterr().out + assert hmac_key.state == 'ACTIVE' + + +def test_deactivate_key(capsys, new_hmac_key): + hmac_key = hmac.deactivate_key(new_hmac_key.access_id, PROJECT_ID) + assert 'State: INACTIVE' in capsys.readouterr().out + assert hmac_key.state == 'INACTIVE' + + +def test_delete_key(capsys, new_hmac_key): + new_hmac_key.state = 'INACTIVE' + new_hmac_key.update() + hmac.delete_key(new_hmac_key.access_id, PROJECT_ID) + assert 'The key is deleted' in capsys.readouterr().out diff --git a/requirements.txt b/requirements.txt index f50fabc18..a4682fd3b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==0.39.1 -google-cloud-storage==1.17.0 +google-cloud-storage==1.19.0 From dea500be0a5622b0a85fee5e644ff9d40068c3aa Mon Sep 17 00:00:00 2001 From: JesseLovelace <43148100+JesseLovelace@users.noreply.github.com> Date: Fri, 13 Sep 2019 10:35:49 -0700 Subject: 
[PATCH 075/197] Remove required argument from list buckets sample (#2394) * Remove required argument from list buckets sample * Remove required argument from list buckets sample --- snippets.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/snippets.py b/snippets.py index 95aff2c50..656c6ebb1 100644 --- a/snippets.py +++ b/snippets.py @@ -430,10 +430,9 @@ def main(): formatter_class=argparse.RawDescriptionHelpFormatter) subparsers = parser.add_subparsers(dest='command') - list_buckets_parser = subparsers.add_parser( + + subparsers.add_parser( 'list-buckets', help=list_buckets.__doc__) - list_buckets_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') create_bucket_parser = subparsers.add_parser( 'create-bucket', help=create_bucket.__doc__) From 81690d3ff1ddd33b22c0416b9932a63d8e1f742d Mon Sep 17 00:00:00 2001 From: Chris Cotter Date: Tue, 17 Sep 2019 10:54:38 -0400 Subject: [PATCH 076/197] Fixup sample for list_hmac_keys (#2404) Correct printed metadata to match canonical samples. 
--- hmac.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/hmac.py b/hmac.py index 0334bbcc1..248c6ab26 100644 --- a/hmac.py +++ b/hmac.py @@ -29,14 +29,9 @@ def list_keys(project_id): hmac_keys = storage_client.list_hmac_keys(project_id=project_id) print('HMAC Keys:') for hmac_key in hmac_keys: - print('The HMAC key metadata is:') - print('Key ID: {}'.format(hmac_key.id)) + print('Service Account Email: {}'.format( + hmac_key.service_account_email)) print('Access ID: {}'.format(hmac_key.access_id)) - print('Project ID: {}'.format(hmac_key.project)) - print('State: {}'.format(hmac_key.state)) - print('Created At: {}'.format(hmac_key.time_created)) - print('Updated At: {}'.format(hmac_key.updated)) - print('Etag: {}'.format(hmac_key.etag)) # [END storage_list_hmac_keys] return hmac_keys @@ -55,6 +50,7 @@ def create_key(project_id, service_account_email): print('The base64 encoded secret is {}'.format(secret)) print('Do not miss that secret, there is no API to recover it.') print('The HMAC key metadata is:') + print('Service Account Email: {}'.format(hmac_key.service_account_email)) print('Key ID: {}'.format(hmac_key.id)) print('Access ID: {}'.format(hmac_key.access_id)) print('Project ID: {}'.format(hmac_key.project)) @@ -78,6 +74,7 @@ def get_key(access_id, project_id): access_id, project_id=project_id) print('The HMAC key metadata is:') + print('Service Account Email: {}'.format(hmac_key.service_account_email)) print('Key ID: {}'.format(hmac_key.id)) print('Access ID: {}'.format(hmac_key.access_id)) print('Project ID: {}'.format(hmac_key.project)) @@ -103,6 +100,7 @@ def activate_key(access_id, project_id): hmac_key.state = 'ACTIVE' hmac_key.update() print('The HMAC key metadata is:') + print('Service Account Email: {}'.format(hmac_key.service_account_email)) print('Key ID: {}'.format(hmac_key.id)) print('Access ID: {}'.format(hmac_key.access_id)) print('Project ID: {}'.format(hmac_key.project)) @@ -129,6 +127,7 @@ def 
deactivate_key(access_id, project_id): hmac_key.update() print('The HMAC key is now inactive.') print('The HMAC key metadata is:') + print('Service Account Email: {}'.format(hmac_key.service_account_email)) print('Key ID: {}'.format(hmac_key.id)) print('Access ID: {}'.format(hmac_key.access_id)) print('Project ID: {}'.format(hmac_key.project)) From d25fbb6d6f760cb790fc8b8586210091ce8e9748 Mon Sep 17 00:00:00 2001 From: JesseLovelace <43148100+JesseLovelace@users.noreply.github.com> Date: Tue, 24 Sep 2019 16:12:25 -0700 Subject: [PATCH 077/197] Bucket metadata sample (#2414) * Remove required argument from list buckets sample * Bucket metadata sample * Bucket metadata sample --- README.rst | 21 ++++++++++++++-- README.rst.in | 5 +++- hmac.py => hmac_samples.py | 0 hmac_test.py => hmac_samples_test.py | 14 +++++------ requirements.txt | 2 +- snippets.py | 36 ++++++++++++++++++++++++++++ snippets_test.py | 6 +++++ 7 files changed, 73 insertions(+), 11 deletions(-) rename hmac.py => hmac_samples.py (100%) rename hmac_test.py => hmac_samples_test.py (85%) diff --git a/README.rst b/README.rst index ef32d53de..48580f661 100644 --- a/README.rst +++ b/README.rst @@ -93,7 +93,7 @@ To run this sample: $ python snippets.py usage: snippets.py [-h] - {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} + {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,bucket-metadata,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} ... This application demonstrates how to perform basic operations on blobs @@ -103,7 +103,7 @@ To run this sample: at https://cloud.google.com/storage/docs. 
positional arguments: - {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} + {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,bucket-metadata,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} list-buckets Lists all buckets. create-bucket Creates a new bucket. delete-bucket Deletes a bucket. The bucket must be empty. @@ -112,6 +112,7 @@ To run this sample: remove-bucket-label Remove a label from a bucket. list Lists all the blobs in the bucket. + bucket-metadata Prints out a bucket's metadata. list-with-prefix Lists all the blobs in the bucket that begin with the prefix. This can be used to list all blobs in a "folder", e.g. "public/". The delimiter argument can @@ -403,6 +404,22 @@ To run this sample: +Service Account HMAC Keys ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/hmac_samples.py,storage/cloud-client/README.rst + + + + +To run this sample: + +.. 
code-block:: bash + + $ python hmac_samples.py + + The client library diff --git a/README.rst.in b/README.rst.in index 3b8f33af7..346c6a0d8 100644 --- a/README.rst.in +++ b/README.rst.in @@ -33,7 +33,10 @@ samples: - name: Notification Polling file: notification_polling.py show_help: true +- name: Service Account HMAC Keys + file: hmac_samples.py + show_help: false cloud_client_library: true -folder: storage/cloud-client \ No newline at end of file +folder: storage/cloud-client diff --git a/hmac.py b/hmac_samples.py similarity index 100% rename from hmac.py rename to hmac_samples.py diff --git a/hmac_test.py b/hmac_samples_test.py similarity index 85% rename from hmac_test.py rename to hmac_samples_test.py index 0e936467c..9d67f9ef7 100644 --- a/hmac_test.py +++ b/hmac_samples_test.py @@ -23,7 +23,7 @@ from google.cloud import storage import pytest -import hmac +import hmac_samples PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] @@ -54,13 +54,13 @@ def new_hmac_key(): def test_list_keys(capsys, new_hmac_key): - hmac_keys = hmac.list_keys(PROJECT_ID) + hmac_keys = hmac_samples.list_keys(PROJECT_ID) assert 'HMAC Keys:' in capsys.readouterr().out assert hmac_keys.num_results >= 1 def test_create_key(capsys): - hmac_key = hmac.create_key(PROJECT_ID, SERVICE_ACCOUNT_EMAIL) + hmac_key = hmac_samples.create_key(PROJECT_ID, SERVICE_ACCOUNT_EMAIL) hmac_key.state = 'INACTIVE' hmac_key.update() hmac_key.delete() @@ -69,7 +69,7 @@ def test_create_key(capsys): def test_get_key(capsys, new_hmac_key): - hmac_key = hmac.get_key(new_hmac_key.access_id, PROJECT_ID) + hmac_key = hmac_samples.get_key(new_hmac_key.access_id, PROJECT_ID) assert 'HMAC key metadata' in capsys.readouterr().out assert hmac_key.access_id == new_hmac_key.access_id @@ -77,13 +77,13 @@ def test_get_key(capsys, new_hmac_key): def test_activate_key(capsys, new_hmac_key): new_hmac_key.state = 'INACTIVE' new_hmac_key.update() - hmac_key = hmac.activate_key(new_hmac_key.access_id, PROJECT_ID) + hmac_key = 
hmac_samples.activate_key(new_hmac_key.access_id, PROJECT_ID) assert 'State: ACTIVE' in capsys.readouterr().out assert hmac_key.state == 'ACTIVE' def test_deactivate_key(capsys, new_hmac_key): - hmac_key = hmac.deactivate_key(new_hmac_key.access_id, PROJECT_ID) + hmac_key = hmac_samples.deactivate_key(new_hmac_key.access_id, PROJECT_ID) assert 'State: INACTIVE' in capsys.readouterr().out assert hmac_key.state == 'INACTIVE' @@ -91,5 +91,5 @@ def test_deactivate_key(capsys, new_hmac_key): def test_delete_key(capsys, new_hmac_key): new_hmac_key.state = 'INACTIVE' new_hmac_key.update() - hmac.delete_key(new_hmac_key.access_id, PROJECT_ID) + hmac_samples.delete_key(new_hmac_key.access_id, PROJECT_ID) assert 'The key is deleted' in capsys.readouterr().out diff --git a/requirements.txt b/requirements.txt index a4682fd3b..a498968ff 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==0.39.1 -google-cloud-storage==1.19.0 +google-cloud-storage==1.19.1 diff --git a/snippets.py b/snippets.py index 656c6ebb1..a184b5323 100644 --- a/snippets.py +++ b/snippets.py @@ -248,6 +248,35 @@ def blob_metadata(bucket_name, blob_name): .format(blob.retention_expiration_time)) +def bucket_metadata(bucket_name): + """Prints out a bucket's metadata.""" + # [START storage_get_bucket_metadata] + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + print('ID: {}'.format(bucket.id)) + print('Name: {}'.format(bucket.name)) + print('Storage Class: {}'.format(bucket.storage_class)) + print('Location: {}'.format(bucket.location)) + print('Location Type: {}'.format(bucket.location_type)) + print('Cors: {}'.format(bucket.cors)) + print('Default Event Based Hold: {}' + .format(bucket.default_event_based_hold)) + print('Default KMS Key Name: {}'.format(bucket.default_kms_key_name)) + print('Metageneration: {}'.format(bucket.metageneration)) + print('Retention Effective Time: {}' + .format(bucket.retention_policy_effective_time)) + 
print('Retention Period: {}'.format(bucket.retention_period)) + print('Retention Policy Locked: {}'.format(bucket.retention_policy_locked)) + print('Requester Pays: {}'.format(bucket.requester_pays)) + print('Self Link: {}'.format(bucket.self_link)) + print('Time Created: {}'.format(bucket.time_created)) + print('Versioning Enabled: {}'.format(bucket.versioning_enabled)) + print('Labels:') + pprint.pprint(bucket.labels) + # [END storage_get_bucket_metadata] + + def make_blob_public(bucket_name, blob_name): """Makes a blob publicly accessible.""" storage_client = storage.Client() @@ -367,6 +396,8 @@ def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): def bucket_commands(args): if args.command == 'list-buckets': list_buckets() + elif args.command == 'bucket-metadata': + bucket_metadata(args.bucket_name) elif args.command == 'create-bucket': create_bucket(args.bucket_name) elif args.command == 'enable-default-kms-key': @@ -464,6 +495,11 @@ def main(): list_blobs_parser.add_argument( 'bucket_name', help='Your cloud storage bucket.') + bucket_metadata_parser = subparsers.add_parser( + 'bucket-metadata', help=bucket_metadata.__doc__) + bucket_metadata_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') + list_with_prefix_parser = subparsers.add_parser( 'list-with-prefix', help=list_blobs_with_prefix.__doc__) list_with_prefix_parser.add_argument( diff --git a/snippets_test.py b/snippets_test.py index 117bbd5bb..a03609743 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -76,6 +76,12 @@ def test_list_blobs(test_blob, capsys): assert test_blob.name in out +def test_bucket_metadata(capsys): + snippets.bucket_metadata(BUCKET) + out, _ = capsys.readouterr() + assert BUCKET in out + + def test_list_blobs_with_prefix(test_blob, capsys): snippets.list_blobs_with_prefix( BUCKET, From e195db746ad0c2aacd5e81ffbfb7beef71456970 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Mon, 7 Oct 2019 15:45:22 -0700 Subject: [PATCH 078/197] Adds 
updates for samples profiler ... vision (#2439) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a498968ff..6a97d00e8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==0.39.1 +google-cloud-pubsub==1.0.0 google-cloud-storage==1.19.1 From fd92a5de470d1cc568b23fff64ba8a95e853cd9d Mon Sep 17 00:00:00 2001 From: Jonathan Lui Date: Mon, 28 Oct 2019 13:53:17 -0700 Subject: [PATCH 079/197] fix: add bucket-name as required arg to v4 snippets (#2502) --- snippets.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/snippets.py b/snippets.py index a184b5323..ee8a53f9d 100644 --- a/snippets.py +++ b/snippets.py @@ -561,10 +561,14 @@ def main(): signed_url_download_v4_parser = subparsers.add_parser( 'signed-url-download-v4', help=generate_download_signed_url_v4.__doc__) + signed_url_download_v4_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') signed_url_download_v4_parser.add_argument('blob_name') signed_url_upload_v4_parser = subparsers.add_parser( 'signed-url-upload-v4', help=generate_upload_signed_url_v4.__doc__) + signed_url_upload_v4_parser.add_argument( + 'bucket_name', help='Your cloud storage bucket.') signed_url_upload_v4_parser.add_argument('blob_name') rename_parser = subparsers.add_parser( From f955a699832cc5f43dd33276041dff386caa5416 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Wed, 6 Nov 2019 09:39:39 -0800 Subject: [PATCH 080/197] [Storage] Support rename of BPO to UniformBucketLevelAccess (#2335) * Update BPO -> UBLA * Update BPO -> UBLA --- README.rst | 26 ++--- README.rst.in | 4 +- bucket_policy_only.py | 96 ----------------- requirements.txt | 2 +- uniform_bucket_level_access.py | 102 ++++++++++++++++++ ....py => uniform_bucket_level_access_test.py | 23 ++-- 6 files changed, 130 insertions(+), 123 deletions(-) delete mode 100644 bucket_policy_only.py create mode 100644 uniform_bucket_level_access.py 
rename bucket_policy_only_test.py => uniform_bucket_level_access_test.py (59%) diff --git a/README.rst b/README.rst index 48580f661..47670a4e5 100644 --- a/README.rst +++ b/README.rst @@ -312,11 +312,11 @@ To run this sample: -Bucket Policy Only +Uniform Bucket Level Access +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/bucket_policy_only.py,storage/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/uniform_bucket_level_access.py,storage/cloud-client/README.rst @@ -325,20 +325,20 @@ To run this sample: .. code-block:: bash - $ python bucket_policy_only.py + $ python uniform_bucket_level_access.py - usage: bucket_policy_only.py [-h] - {enable-bucket-policy-only,disable-bucket-policy-only,get-bucket-policy-only} + usage: uniform_bucket_level_access.py [-h] + {enable-uniform-bucket-level-access,disable-uniform-bucket-level-access,get-uniform-bucket-level-access} ... 
positional arguments: - {enable-bucket-policy-only,disable-bucket-policy-only,get-bucket-policy-only} - enable-bucket-policy-only - Enable Bucket Policy Only for a bucket - disable-bucket-policy-only - Disable Bucket Policy Only for a bucket - get-bucket-policy-only - Get Bucket Policy Only for a bucket + {enable-uniform-bucket-level-access,disable-uniform-bucket-level-access,get-uniform-bucket-level-access} + enable-uniform-bucket-level-access + Enable uniform bucket-level access for a bucket + disable-uniform-bucket-level-access + Disable uniform bucket-level access for a bucket + get-uniform-bucket-level-access + Get uniform bucket-level access for a bucket optional arguments: -h, --help show this help message and exit @@ -437,4 +437,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file +.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/README.rst.in b/README.rst.in index 346c6a0d8..065fe0742 100644 --- a/README.rst.in +++ b/README.rst.in @@ -27,8 +27,8 @@ samples: - name: Bucket Lock file: bucket_lock.py show_help: true -- name: Bucket Policy Only - file: bucket_policy_only.py +- name: Uniform bucket-level access + file: uniform_bucket_level_access.py show_help: true - name: Notification Polling file: notification_polling.py diff --git a/bucket_policy_only.py b/bucket_policy_only.py deleted file mode 100644 index 530574544..000000000 --- a/bucket_policy_only.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse - -from google.cloud import storage - - -def enable_bucket_policy_only(bucket_name): - """Enable Bucket Policy Only for a bucket""" - # [START storage_enable_bucket_policy_only] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - bucket.iam_configuration.bucket_policy_only_enabled = True - bucket.patch() - - print('Bucket Policy Only was enabled for {}.'.format(bucket.name)) - # [END storage_enable_bucket_policy_only] - - -def disable_bucket_policy_only(bucket_name): - """Disable Bucket Policy Only for a bucket""" - # [START storage_disable_bucket_policy_only] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - bucket.iam_configuration.bucket_policy_only_enabled = False - bucket.patch() - - print('Bucket Policy Only was disabled for {}.'.format(bucket.name)) - # [END storage_disable_bucket_policy_only] - - -def get_bucket_policy_only(bucket_name): - """Get Bucket Policy Only for a bucket""" - # [START storage_get_bucket_policy_only] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - iam_configuration = bucket.iam_configuration - - if iam_configuration.bucket_policy_only_enabled: - print('Bucket Policy Only is enabled for {}.'.format(bucket.name)) - print('Bucket will be locked on {}.'.format( - iam_configuration.bucket_policy_only_locked_time)) - else: - print('Bucket Policy Only is disabled for {}.'.format(bucket.name)) - # [END 
storage_get_bucket_policy_only] - - -if __name__ == '__main__': - - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - subparsers = parser.add_subparsers(dest='command') - - enable_bucket_policy_only_parser = subparsers.add_parser( - 'enable-bucket-policy-only', help=enable_bucket_policy_only.__doc__) - enable_bucket_policy_only_parser.add_argument('bucket_name') - - disable_bucket_policy_only_parser = subparsers.add_parser( - 'disable-bucket-policy-only', help=disable_bucket_policy_only.__doc__) - disable_bucket_policy_only_parser.add_argument('bucket_name') - - get_bucket_policy_only_parser = subparsers.add_parser( - 'get-bucket-policy-only', help=get_bucket_policy_only.__doc__) - get_bucket_policy_only_parser.add_argument('bucket_name') - - args = parser.parse_args() - - if args.command == 'enable-bucket-policy-only': - enable_bucket_policy_only(args.bucket_name) - elif args.command == 'disable-bucket-policy-only': - disable_bucket_policy_only(args.bucket_name) - elif args.command == 'get-bucket-policy-only': - get_bucket_policy_only(args.bucket_name) diff --git a/requirements.txt b/requirements.txt index 6a97d00e8..8a6eeead8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.0.0 -google-cloud-storage==1.19.1 +google-cloud-storage==1.22.0 \ No newline at end of file diff --git a/uniform_bucket_level_access.py b/uniform_bucket_level_access.py new file mode 100644 index 000000000..6ff2fb31f --- /dev/null +++ b/uniform_bucket_level_access.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse + +from google.cloud import storage + + +def enable_uniform_bucket_level_access(bucket_name): + """Enable uniform bucket-level access for a bucket""" + # [START storage_enable_uniform_bucket_level_access] + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.iam_configuration.uniform_bucket_level_access_enabled = True + bucket.patch() + + print('Uniform bucket-level access was enabled for {}.'.format( + bucket.name)) + # [END storage_enable_uniform_bucket_level_access] + + +def disable_uniform_bucket_level_access(bucket_name): + """Disable uniform bucket-level access for a bucket""" + # [START storage_uniform_bucket_level_access] + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.iam_configuration.uniform_bucket_level_access_enabled = False + bucket.patch() + + print('Uniform bucket-level access was disabled for {}.'.format( + bucket.name)) + + +def get_uniform_bucket_level_access(bucket_name): + """Get uniform bucket-level access for a bucket""" + # [START storage_get_uniform_bucket_level_access] + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + iam_configuration = bucket.iam_configuration + + if iam_configuration.uniform_bucket_level_access_enabled: + print('Uniform bucket-level access is enabled for {}.'.format( + bucket.name)) + print('Bucket will be locked on {}.'.format( + 
iam_configuration.uniform_bucket_level_locked_time)) + else: + print('Uniform bucket-level access is disabled for {}.'.format( + bucket.name)) + # [END storage_get_uniform_bucket_level_access] + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + subparsers = parser.add_subparsers(dest='command') + + enable_uniform_bucket_level_access_parser = subparsers.add_parser( + 'enable-uniform-bucket-level-access', + help=enable_uniform_bucket_level_access.__doc__) + enable_uniform_bucket_level_access_parser.add_argument('bucket_name') + + disable_uniform_bucket_level_access_parser = subparsers.add_parser( + 'disable-uniform-bucket-level-access', + help=disable_uniform_bucket_level_access.__doc__) + disable_uniform_bucket_level_access_parser.add_argument('bucket_name') + + get_uniform_bucket_level_access_parser = subparsers.add_parser( + 'get-uniform-bucket-level-access', + help=get_uniform_bucket_level_access.__doc__) + get_uniform_bucket_level_access_parser.add_argument('bucket_name') + + args = parser.parse_args() + + if args.command == 'enable-uniform-bucket-level-access': + enable_uniform_bucket_level_access(args.bucket_name) + elif args.command == 'disable-uniform-bucket-level-access': + disable_uniform_bucket_level_access(args.bucket_name) + elif args.command == 'get-uniform-bucket-level-access': + get_uniform_bucket_level_access(args.bucket_name) diff --git a/bucket_policy_only_test.py b/uniform_bucket_level_access_test.py similarity index 59% rename from bucket_policy_only_test.py rename to uniform_bucket_level_access_test.py index 5ae433fa4..e18a0d79b 100644 --- a/bucket_policy_only_test.py +++ b/uniform_bucket_level_access_test.py @@ -18,36 +18,37 @@ import pytest -import bucket_policy_only +import uniform_bucket_level_access @pytest.fixture() def bucket(): """Creates a test bucket and deletes it upon completion.""" client = storage.Client() - bucket_name = 
'bucket-policy-only-' + str(int(time.time())) + bucket_name = 'uniform-bucket-level-access-' + str(int(time.time())) bucket = client.create_bucket(bucket_name) yield bucket time.sleep(3) bucket.delete(force=True) -def test_get_bucket_policy_only(bucket, capsys): - bucket_policy_only.get_bucket_policy_only(bucket.name) +def test_get_uniform_bucket_level_access(bucket, capsys): + uniform_bucket_level_access.get_uniform_bucket_level_access(bucket.name) out, _ = capsys.readouterr() - assert 'Bucket Policy Only is disabled for {}.'.format( + assert 'Uniform bucket-level access is disabled for {}.'.format( bucket.name) in out -def test_enable_bucket_policy_only(bucket, capsys): - bucket_policy_only.enable_bucket_policy_only(bucket.name) +def test_enable_uniform_bucket_level_access(bucket, capsys): + uniform_bucket_level_access.enable_uniform_bucket_level_access(bucket.name) out, _ = capsys.readouterr() - assert 'Bucket Policy Only was enabled for {}.'.format( + assert 'Uniform bucket-level access was enabled for {}.'.format( bucket.name) in out -def test_disable_bucket_policy_only(bucket, capsys): - bucket_policy_only.disable_bucket_policy_only(bucket.name) +def test_disable_uniform_bucket_level_access(bucket, capsys): + uniform_bucket_level_access.disable_uniform_bucket_level_access( + bucket.name) out, _ = capsys.readouterr() - assert 'Bucket Policy Only was disabled for {}.'.format( + assert 'Uniform bucket-level access was disabled for {}.'.format( bucket.name) in out From eb1b6461914144b7fbc99f488906209c50ea13dc Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Wed, 6 Nov 2019 14:32:10 -0800 Subject: [PATCH 081/197] Fix region tag (#2515) --- uniform_bucket_level_access.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/uniform_bucket_level_access.py b/uniform_bucket_level_access.py index 6ff2fb31f..649728ff9 100644 --- a/uniform_bucket_level_access.py +++ b/uniform_bucket_level_access.py @@ -37,7 +37,7 @@ def 
enable_uniform_bucket_level_access(bucket_name): def disable_uniform_bucket_level_access(bucket_name): """Disable uniform bucket-level access for a bucket""" - # [START storage_uniform_bucket_level_access] + # [START storage_disable_uniform_bucket_level_access] # bucket_name = "my-bucket" storage_client = storage.Client() @@ -48,6 +48,7 @@ def disable_uniform_bucket_level_access(bucket_name): print('Uniform bucket-level access was disabled for {}.'.format( bucket.name)) + # [END storage_disable_uniform_bucket_level_access] def get_uniform_bucket_level_access(bucket_name): From 56583f9b30497aafeb0ab11142d3487cf98189dc Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Fri, 15 Nov 2019 15:04:51 -0800 Subject: [PATCH 082/197] Update documentation for prefix, delimiter search (#2537) * Update documentation for prefix, delimiter search * Remove whitespace. --- snippets.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/snippets.py b/snippets.py index ee8a53f9d..dcdca34ea 100644 --- a/snippets.py +++ b/snippets.py @@ -130,18 +130,21 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): "files" in the given "folder". Without the delimiter, the entire tree under the prefix is returned. 
For example, given these blobs: - /a/1.txt - /a/b/2.txt + a/1.txt + a/b/2.txt - If you just specify prefix = '/a', you'll get back: + If you just specify prefix = 'a', you'll get back: - /a/1.txt - /a/b/2.txt + a/1.txt + a/b/2.txt - However, if you specify prefix='/a' and delimiter='/', you'll get back: + However, if you specify prefix='a' and delimiter='/', you'll get back: - /a/1.txt + a/1.txt + Additionally, the same request will return blobs.prefixes populated with: + + a/b/ """ storage_client = storage.Client() From ed3cff23f9a16abb2a7084e9a99e593e66c55c2f Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Fri, 13 Dec 2019 13:16:59 -0800 Subject: [PATCH 083/197] [Storage] Split samples (#2602) * split bucket_lock samples and lint * split samples * blacken * fix typos * Add missing tests and lint * lint * fix typos * fix typo * typo * remove README --- README.rst | 440 ------------- README.rst.in | 42 -- acl.py | 274 -------- acl_test.py | 72 ++- bucket_lock.py | 298 --------- bucket_lock_test.py | 97 ++- encryption.py | 177 ------ encryption_test.py | 63 +- hmac_samples.py | 157 ----- hmac_samples_test.py | 65 +- iam.py | 95 --- iam_test.py | 20 +- notification_polling.py | 91 +-- notification_polling_test.py | 50 +- quickstart.py | 6 +- quickstart_test.py | 4 +- requester_pays.py | 105 --- requester_pays_test.py | 33 +- snippets.py | 596 ------------------ snippets_test.py | 152 +++-- storage_activate_hmac_key.py | 53 ++ storage_add_bucket_default_owner.py | 51 ++ storage_add_bucket_iam_member.py | 47 ++ storage_add_bucket_label.py | 43 ++ storage_add_bucket_owner.py | 52 ++ storage_add_file_owner.py | 54 ++ storage_copy_file.py | 60 ++ storage_create_bucket.py | 37 ++ storage_create_hmac_key.py | 53 ++ storage_deactivate_hmac_key.py | 54 ++ storage_delete_bucket.py | 38 ++ storage_delete_file.py | 40 ++ storage_delete_hmac_key.py | 47 ++ storage_disable_default_event_based_hold.py | 40 ++ storage_disable_requester_pays.py | 40 ++ 
...age_disable_uniform_bucket_level_access.py | 41 ++ storage_download_encrypted_file.py | 67 ++ storage_download_file.py | 49 ++ storage_download_file_requester_pays.py | 53 ++ storage_enable_default_event_based_hold.py | 40 ++ storage_enable_requester_pays.py | 39 ++ storage_enable_uniform_bucket_level_access.py | 41 ++ storage_generate_encryption_key.py | 39 ++ storage_generate_signed_url_v2.py | 52 ++ storage_generate_signed_url_v4.py | 58 ++ storage_generate_upload_signed_url_v4.py | 63 ++ storage_get_bucket_labels.py | 38 ++ storage_get_bucket_metadata.py | 60 ++ storage_get_default_event_based_hold.py | 45 ++ storage_get_hmac_key.py | 51 ++ storage_get_metadata.py | 67 ++ storage_get_requester_pays_status.py | 40 ++ storage_get_retention_policy.py | 46 ++ storage_get_uniform_bucket_level_access.py | 53 ++ storage_list_buckets.py | 35 + storage_list_files.py | 40 ++ storage_list_files_with_prefix.py | 71 +++ storage_list_hmac_keys.py | 43 ++ storage_lock_retention_policy.py | 48 ++ storage_make_public.py | 44 ++ storage_move_file.py | 43 ++ storage_print_bucket_acl.py | 36 ++ storage_print_bucket_acl_for_user.py | 41 ++ storage_print_file_acl.py | 37 ++ storage_print_file_acl_for_user.py | 45 ++ storage_release_event_based_hold.py | 43 ++ storage_release_temporary_hold.py | 43 ++ storage_remove_bucket_default_owner.py | 54 ++ storage_remove_bucket_iam_member.py | 46 ++ storage_remove_bucket_label.py | 46 ++ storage_remove_bucket_owner.py | 47 ++ storage_remove_file_owner.py | 53 ++ storage_remove_retention_policy.py | 47 ++ storage_rotate_encryption_key.py | 63 ++ storage_set_bucket_default_kms_key.py | 43 ++ storage_set_event_based_hold.py | 42 ++ storage_set_retention_policy.py | 45 ++ storage_set_temporary_hold.py | 42 ++ storage_upload_encrypted_file.py | 65 ++ storage_upload_file.py | 49 ++ storage_upload_with_kms_key.py | 52 ++ storage_view_bucket_iam_members.py | 41 ++ uniform_bucket_level_access.py | 103 --- uniform_bucket_level_access_test.py | 39 +- 
84 files changed, 3356 insertions(+), 2578 deletions(-) delete mode 100644 README.rst delete mode 100644 README.rst.in delete mode 100644 acl.py delete mode 100644 bucket_lock.py delete mode 100644 encryption.py delete mode 100644 hmac_samples.py delete mode 100644 iam.py delete mode 100644 requester_pays.py delete mode 100644 snippets.py create mode 100644 storage_activate_hmac_key.py create mode 100644 storage_add_bucket_default_owner.py create mode 100644 storage_add_bucket_iam_member.py create mode 100644 storage_add_bucket_label.py create mode 100644 storage_add_bucket_owner.py create mode 100644 storage_add_file_owner.py create mode 100644 storage_copy_file.py create mode 100644 storage_create_bucket.py create mode 100644 storage_create_hmac_key.py create mode 100644 storage_deactivate_hmac_key.py create mode 100644 storage_delete_bucket.py create mode 100644 storage_delete_file.py create mode 100644 storage_delete_hmac_key.py create mode 100644 storage_disable_default_event_based_hold.py create mode 100644 storage_disable_requester_pays.py create mode 100644 storage_disable_uniform_bucket_level_access.py create mode 100644 storage_download_encrypted_file.py create mode 100644 storage_download_file.py create mode 100644 storage_download_file_requester_pays.py create mode 100644 storage_enable_default_event_based_hold.py create mode 100644 storage_enable_requester_pays.py create mode 100644 storage_enable_uniform_bucket_level_access.py create mode 100644 storage_generate_encryption_key.py create mode 100644 storage_generate_signed_url_v2.py create mode 100644 storage_generate_signed_url_v4.py create mode 100644 storage_generate_upload_signed_url_v4.py create mode 100644 storage_get_bucket_labels.py create mode 100644 storage_get_bucket_metadata.py create mode 100644 storage_get_default_event_based_hold.py create mode 100644 storage_get_hmac_key.py create mode 100644 storage_get_metadata.py create mode 100644 storage_get_requester_pays_status.py create mode 
100644 storage_get_retention_policy.py create mode 100644 storage_get_uniform_bucket_level_access.py create mode 100644 storage_list_buckets.py create mode 100644 storage_list_files.py create mode 100644 storage_list_files_with_prefix.py create mode 100644 storage_list_hmac_keys.py create mode 100644 storage_lock_retention_policy.py create mode 100644 storage_make_public.py create mode 100644 storage_move_file.py create mode 100644 storage_print_bucket_acl.py create mode 100644 storage_print_bucket_acl_for_user.py create mode 100644 storage_print_file_acl.py create mode 100644 storage_print_file_acl_for_user.py create mode 100644 storage_release_event_based_hold.py create mode 100644 storage_release_temporary_hold.py create mode 100644 storage_remove_bucket_default_owner.py create mode 100644 storage_remove_bucket_iam_member.py create mode 100644 storage_remove_bucket_label.py create mode 100644 storage_remove_bucket_owner.py create mode 100644 storage_remove_file_owner.py create mode 100644 storage_remove_retention_policy.py create mode 100644 storage_rotate_encryption_key.py create mode 100644 storage_set_bucket_default_kms_key.py create mode 100644 storage_set_event_based_hold.py create mode 100644 storage_set_retention_policy.py create mode 100644 storage_set_temporary_hold.py create mode 100644 storage_upload_encrypted_file.py create mode 100644 storage_upload_file.py create mode 100644 storage_upload_with_kms_key.py create mode 100644 storage_view_bucket_iam_members.py delete mode 100644 uniform_bucket_level_access.py diff --git a/README.rst b/README.rst deleted file mode 100644 index 47670a4e5..000000000 --- a/README.rst +++ /dev/null @@ -1,440 +0,0 @@ -.. This file is automatically generated. Do not edit this file directly. - -Google Cloud Storage Python Samples -=============================================================================== - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/README.rst - - -This directory contains samples for Google Cloud Storage. `Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. - - - - -.. _Google Cloud Storage: https://cloud.google.com/storage/docs - -Setup -------------------------------------------------------------------------------- - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - -Samples -------------------------------------------------------------------------------- - -Quickstart -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/quickstart.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python quickstart.py - - -Snippets -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/snippets.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python snippets.py - - usage: snippets.py [-h] - {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,bucket-metadata,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} - ... - - This application demonstrates how to perform basic operations on blobs - (objects) in a Google Cloud Storage bucket. - - For more information, see the README.md under /storage and the documentation - at https://cloud.google.com/storage/docs. - - positional arguments: - {list-buckets,create-bucket,delete-bucket,get-bucket-labels,add-bucket-label,remove-bucket-label,list,bucket-metadata,list-with-prefix,upload,enable-default-kms-key,upload-with-kms-key,download,delete,metadata,make-public,signed-url,signed-url-download-v4,signed-url-upload-v4,rename,copy} - list-buckets Lists all buckets. - create-bucket Creates a new bucket. - delete-bucket Deletes a bucket. The bucket must be empty. - get-bucket-labels Prints out a bucket's labels. - add-bucket-label Add a label to a bucket. - remove-bucket-label - Remove a label from a bucket. 
- list Lists all the blobs in the bucket. - bucket-metadata Prints out a bucket's metadata. - list-with-prefix Lists all the blobs in the bucket that begin with the - prefix. This can be used to list all blobs in a - "folder", e.g. "public/". The delimiter argument can - be used to restrict the results to only the "files" in - the given "folder". Without the delimiter, the entire - tree under the prefix is returned. For example, given - these blobs: /a/1.txt /a/b/2.txt If you just specify - prefix = '/a', you'll get back: /a/1.txt /a/b/2.txt - However, if you specify prefix='/a' and delimiter='/', - you'll get back: /a/1.txt - upload Uploads a file to the bucket. - enable-default-kms-key - Sets a bucket's default KMS key. - upload-with-kms-key - Uploads a file to the bucket, encrypting it with the - given KMS key. - download Downloads a blob from the bucket. - delete Deletes a blob from the bucket. - metadata Prints out a blob's metadata. - make-public Makes a blob publicly accessible. - signed-url Generates a v2 signed URL for downloading a blob. Note - that this method requires a service account key file. - You can not use this if you are using Application - Default Credentials from Google Compute Engine or from - the Google Cloud SDK. - signed-url-download-v4 - Generates a v4 signed URL for downloading a blob. Note - that this method requires a service account key file. - You can not use this if you are using Application - Default Credentials from Google Compute Engine or from - the Google Cloud SDK. - signed-url-upload-v4 - Generates a v4 signed URL for uploading a blob using - HTTP PUT. Note that this method requires a service - account key file. You can not use this if you are - using Application Default Credentials from Google - Compute Engine or from the Google Cloud SDK. - rename Renames a blob. - copy Renames a blob. 
- - optional arguments: - -h, --help show this help message and exit - - - -Access Control Lists -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/acl.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python acl.py - - usage: acl.py [-h] - {print-bucket-acl,print-bucket-acl-for-user,add-bucket-owner,remove-bucket-owner,add-bucket-default-owner,remove-bucket-default-owner,print-blob-acl,print-blob-acl-for-user,add-blob-owner,remove-blob-owner} - ... - - This application demonstrates how to manage access control lists (acls) in - Google Cloud Storage. - - For more information, see the README.md under /storage and the documentation - at https://cloud.google.com/storage/docs/encryption. - - positional arguments: - {print-bucket-acl,print-bucket-acl-for-user,add-bucket-owner,remove-bucket-owner,add-bucket-default-owner,remove-bucket-default-owner,print-blob-acl,print-blob-acl-for-user,add-blob-owner,remove-blob-owner} - print-bucket-acl Prints out a bucket's access control list. - print-bucket-acl-for-user - Prints out a bucket's access control list. - add-bucket-owner Adds a user as an owner on the given bucket. - remove-bucket-owner - Removes a user from the access control list of the - given bucket. - add-bucket-default-owner - Adds a user as an owner in the given bucket's default - object access control list. - remove-bucket-default-owner - Removes a user from the access control list of the - given bucket's default object access control list. - print-blob-acl Prints out a blob's access control list. - print-blob-acl-for-user - Prints out a blob's access control list for a given - user. - add-blob-owner Adds a user as an owner on the given blob. 
- remove-blob-owner Removes a user from the access control list of the - given blob in the given bucket. - - optional arguments: - -h, --help show this help message and exit - - - -Customer-Supplied Encryption -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/encryption.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python encryption.py - - usage: encryption.py [-h] {generate-encryption-key,upload,download,rotate} ... - - This application demonstrates how to upload and download encrypted blobs - (objects) in Google Cloud Storage. - - Use `generate-encryption-key` to generate an example key: - - python encryption.py generate-encryption-key - - Then use the key to upload and download files encrypted with a custom key. - - For more information, see the README.md under /storage and the documentation - at https://cloud.google.com/storage/docs/encryption. - - positional arguments: - {generate-encryption-key,upload,download,rotate} - generate-encryption-key - Generates a 256 bit (32 byte) AES encryption key and - prints the base64 representation. This is included for - demonstration purposes. You should generate your own - key. Please remember that encryption keys should be - handled with a comprehensive security policy. - upload Uploads a file to a Google Cloud Storage bucket using - a custom encryption key. The file will be encrypted by - Google Cloud Storage and only retrievable using the - provided encryption key. - download Downloads a previously-encrypted blob from Google - Cloud Storage. The encryption key provided must be the - same key provided when uploading the blob. 
- rotate Performs a key rotation by re-writing an encrypted - blob with a new encryption key. - - optional arguments: - -h, --help show this help message and exit - - - -Bucket Lock -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/bucket_lock.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python bucket_lock.py - - usage: bucket_lock.py [-h] - {set-retention-policy,remove-retention-policy,lock-retention-policy,get-retention-policy,set-temporary-hold,release-temporary-hold,set-event-based-hold,release-event-based-hold,enable-default-event-based-hold,disable-default-event-based-hold,get-default-event-based-hold} - ... - - positional arguments: - {set-retention-policy,remove-retention-policy,lock-retention-policy,get-retention-policy,set-temporary-hold,release-temporary-hold,set-event-based-hold,release-event-based-hold,enable-default-event-based-hold,disable-default-event-based-hold,get-default-event-based-hold} - set-retention-policy - Defines a retention policy on a given bucket - remove-retention-policy - Removes the retention policy on a given bucket - lock-retention-policy - Locks the retention policy on a given bucket - get-retention-policy - Gets the retention policy on a given bucket - set-temporary-hold Sets a temporary hold on a given blob - release-temporary-hold - Releases the temporary hold on a given blob - set-event-based-hold - Sets a event based hold on a given blob - release-event-based-hold - Releases the event based hold on a given blob - enable-default-event-based-hold - Enables the default event based hold on a given bucket - disable-default-event-based-hold - Disables the default event based hold on a given - bucket - 
get-default-event-based-hold - Gets the default event based hold on a given bucket - - optional arguments: - -h, --help show this help message and exit - - - -Uniform Bucket Level Access -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/uniform_bucket_level_access.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python uniform_bucket_level_access.py - - usage: uniform_bucket_level_access.py [-h] - {enable-uniform-bucket-level-access,disable-uniform-bucket-level-access,get-uniform-bucket-level-access} - ... - - positional arguments: - {enable-uniform-bucket-level-access,disable-uniform-bucket-level-access,get-uniform-bucket-level-access} - enable-uniform-bucket-level-access - Enable uniform bucket-level access for a bucket - disable-uniform-bucket-level-access - Disable uniform bucket-level access for a bucket - get-uniform-bucket-level-access - Get uniform bucket-level access for a bucket - - optional arguments: - -h, --help show this help message and exit - - - -Notification Polling -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/notification_polling.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. 
code-block:: bash - - $ python notification_polling.py - - usage: notification_polling.py [-h] project subscription - - This application demonstrates how to poll for GCS notifications from a - Cloud Pub/Sub subscription, parse the incoming message, and acknowledge the - successful processing of the message. - - This application will work with any subscription configured for pull rather - than push notifications. If you do not already have notifications configured, - you may consult the docs at - https://cloud.google.com/storage/docs/reporting-changes or follow the steps - below: - - 1. First, follow the common setup steps for these snippets, specifically - configuring auth and installing dependencies. See the README's "Setup" - section. - - 2. Activate the Google Cloud Pub/Sub API, if you have not already done so. - https://console.cloud.google.com/flows/enableapi?apiid=pubsub - - 3. Create a Google Cloud Storage bucket: - $ gsutil mb gs://testbucket - - 4. Create a Cloud Pub/Sub topic and publish bucket notifications there: - $ gsutil notification create -f json -t testtopic gs://testbucket - - 5. Create a subscription for your new topic: - $ gcloud beta pubsub subscriptions create testsubscription --topic=testtopic - - 6. Run this program: - $ python notification_polling.py my-project-id testsubscription - - 7. While the program is running, upload and delete some files in the testbucket - bucket (you could use the console or gsutil) and watch as changes scroll by - in the app. - - positional arguments: - project The ID of the project that owns the subscription - subscription The ID of the Pub/Sub subscription - - optional arguments: - -h, --help show this help message and exit - - - -Service Account HMAC Keys -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=storage/cloud-client/hmac_samples.py,storage/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python hmac_samples.py - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/README.rst.in b/README.rst.in deleted file mode 100644 index 065fe0742..000000000 --- a/README.rst.in +++ /dev/null @@ -1,42 +0,0 @@ -# This file is used to generate README.rst - -product: - name: Google Cloud Storage - short_name: Cloud Storage - url: https://cloud.google.com/storage/docs - description: > - `Google Cloud Storage`_ allows world-wide storage and retrieval of any - amount of data at any time. 
- -setup: -- auth -- install_deps - -samples: -- name: Quickstart - file: quickstart.py -- name: Snippets - file: snippets.py - show_help: true -- name: Access Control Lists - file: acl.py - show_help: true -- name: Customer-Supplied Encryption - file: encryption.py - show_help: true -- name: Bucket Lock - file: bucket_lock.py - show_help: true -- name: Uniform bucket-level access - file: uniform_bucket_level_access.py - show_help: true -- name: Notification Polling - file: notification_polling.py - show_help: true -- name: Service Account HMAC Keys - file: hmac_samples.py - show_help: false - -cloud_client_library: true - -folder: storage/cloud-client diff --git a/acl.py b/acl.py deleted file mode 100644 index d742ae428..000000000 --- a/acl.py +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to manage access control lists (acls) in -Google Cloud Storage. - -For more information, see the README.md under /storage and the documentation -at https://cloud.google.com/storage/docs/encryption. 
-""" - -import argparse - -from google.cloud import storage - - -def print_bucket_acl(bucket_name): - """Prints out a bucket's access control list.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - for entry in bucket.acl: - print('{}: {}'.format(entry['role'], entry['entity'])) - - -def print_bucket_acl_for_user(bucket_name, user_email): - """Prints out a bucket's access control list for a given user.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - # Reload fetches the current ACL from Cloud Storage. - bucket.acl.reload() - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # get the roles for different types of entities. - roles = bucket.acl.user(user_email).get_roles() - - print(roles) - - -def add_bucket_owner(bucket_name, user_email): - """Adds a user as an owner on the given bucket.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - # Reload fetches the current ACL from Cloud Storage. - bucket.acl.reload() - - # You can also use `group()`, `domain()`, `all_authenticated()` and `all()` - # to grant access to different types of entities. - # You can also use `grant_read()` or `grant_write()` to grant different - # roles. - bucket.acl.user(user_email).grant_owner() - bucket.acl.save() - - print('Added user {} as an owner on bucket {}.'.format( - user_email, bucket_name)) - - -def remove_bucket_owner(bucket_name, user_email): - """Removes a user from the access control list of the given bucket.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - # Reload fetches the current ACL from Cloud Storage. - bucket.acl.reload() - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # remove access for different types of entities. 
- bucket.acl.user(user_email).revoke_read() - bucket.acl.user(user_email).revoke_write() - bucket.acl.user(user_email).revoke_owner() - bucket.acl.save() - - print('Removed user {} from bucket {}.'.format( - user_email, bucket_name)) - - -def add_bucket_default_owner(bucket_name, user_email): - """Adds a user as an owner in the given bucket's default object access - control list.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - # Reload fetches the current ACL from Cloud Storage. - bucket.acl.reload() - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # grant access to different types of entities. You can also use - # `grant_read` or `grant_write` to grant different roles. - bucket.default_object_acl.user(user_email).grant_owner() - bucket.default_object_acl.save() - - print('Added user {} as an owner in the default acl on bucket {}.'.format( - user_email, bucket_name)) - - -def remove_bucket_default_owner(bucket_name, user_email): - """Removes a user from the access control list of the given bucket's - default object access control list.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - # Reload fetches the current ACL from Cloud Storage. - bucket.acl.reload() - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # remove access for different types of entities. 
- bucket.default_object_acl.user(user_email).revoke_read() - bucket.default_object_acl.user(user_email).revoke_write() - bucket.default_object_acl.user(user_email).revoke_owner() - bucket.default_object_acl.save() - - print('Removed user {} from the default acl of bucket {}.'.format( - user_email, bucket_name)) - - -def print_blob_acl(bucket_name, blob_name): - """Prints out a blob's access control list.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - for entry in blob.acl: - print('{}: {}'.format(entry['role'], entry['entity'])) - - -def print_blob_acl_for_user(bucket_name, blob_name, user_email): - """Prints out a blob's access control list for a given user.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - # Reload fetches the current ACL from Cloud Storage. - blob.acl.reload() - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # get the roles for different types of entities. - roles = blob.acl.user(user_email).get_roles() - - print(roles) - - -def add_blob_owner(bucket_name, blob_name, user_email): - """Adds a user as an owner on the given blob.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - # Reload fetches the current ACL from Cloud Storage. - blob.acl.reload() - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # grant access to different types of entities. You can also use - # `grant_read` or `grant_write` to grant different roles. 
- blob.acl.user(user_email).grant_owner() - blob.acl.save() - - print('Added user {} as an owner on blob {} in bucket {}.'.format( - user_email, blob_name, bucket_name)) - - -def remove_blob_owner(bucket_name, blob_name, user_email): - """Removes a user from the access control list of the given blob in the - given bucket.""" - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - # You can also use `group`, `domain`, `all_authenticated` and `all` to - # remove access for different types of entities. - blob.acl.user(user_email).revoke_read() - blob.acl.user(user_email).revoke_write() - blob.acl.user(user_email).revoke_owner() - blob.acl.save() - - print('Removed user {} from blob {} in bucket {}.'.format( - user_email, blob_name, bucket_name)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - subparsers = parser.add_subparsers(dest='command') - - print_bucket_acl_parser = subparsers.add_parser( - 'print-bucket-acl', help=print_bucket_acl.__doc__) - print_bucket_acl_parser.add_argument('bucket_name') - - print_bucket_acl_for_user_parser = subparsers.add_parser( - 'print-bucket-acl-for-user', help=print_bucket_acl.__doc__) - print_bucket_acl_for_user_parser.add_argument('bucket_name') - print_bucket_acl_for_user_parser.add_argument('user_email') - - add_bucket_owner_parser = subparsers.add_parser( - 'add-bucket-owner', help=add_bucket_owner.__doc__) - add_bucket_owner_parser.add_argument('bucket_name') - add_bucket_owner_parser.add_argument('user_email') - - remove_bucket_owner_parser = subparsers.add_parser( - 'remove-bucket-owner', help=remove_bucket_owner.__doc__) - remove_bucket_owner_parser.add_argument('bucket_name') - remove_bucket_owner_parser.add_argument('user_email') - - add_bucket_default_owner_parser = subparsers.add_parser( - 'add-bucket-default-owner', help=add_bucket_default_owner.__doc__) - 
add_bucket_default_owner_parser.add_argument('bucket_name') - add_bucket_default_owner_parser.add_argument('user_email') - - remove_bucket_default_owner_parser = subparsers.add_parser( - 'remove-bucket-default-owner', - help=remove_bucket_default_owner.__doc__) - remove_bucket_default_owner_parser.add_argument('bucket_name') - remove_bucket_default_owner_parser.add_argument('user_email') - - print_blob_acl_parser = subparsers.add_parser( - 'print-blob-acl', help=print_blob_acl.__doc__) - print_blob_acl_parser.add_argument('bucket_name') - print_blob_acl_parser.add_argument('blob_name') - - print_blob_acl_for_user_parser = subparsers.add_parser( - 'print-blob-acl-for-user', help=print_blob_acl_for_user.__doc__) - print_blob_acl_for_user_parser.add_argument('bucket_name') - print_blob_acl_for_user_parser.add_argument('blob_name') - print_blob_acl_for_user_parser.add_argument('user_email') - - add_blob_owner_parser = subparsers.add_parser( - 'add-blob-owner', help=add_blob_owner.__doc__) - add_blob_owner_parser.add_argument('bucket_name') - add_blob_owner_parser.add_argument('blob_name') - add_blob_owner_parser.add_argument('user_email') - - remove_blob_owner_parser = subparsers.add_parser( - 'remove-blob-owner', help=remove_blob_owner.__doc__) - remove_blob_owner_parser.add_argument('bucket_name') - remove_blob_owner_parser.add_argument('blob_name') - remove_blob_owner_parser.add_argument('user_email') - - args = parser.parse_args() - - if args.command == 'print-bucket-acl': - print_bucket_acl(args.bucket_name) - elif args.command == 'print-bucket-acl-for-user': - print_bucket_acl_for_user(args.bucket_name, args.user_email) - elif args.command == 'add-bucket-owner': - add_bucket_owner(args.bucket_name, args.user_email) - elif args.command == 'remove-bucket-owner': - remove_bucket_owner(args.bucket_name, args.user_email) - elif args.command == 'add-bucket-default-owner': - add_bucket_default_owner(args.bucket_name, args.user_email) - elif args.command == 
'remove-bucket-default-owner': - remove_bucket_default_owner(args.bucket_name, args.user_email) - elif args.command == 'print-blob-acl': - print_blob_acl(args.bucket_name, args.blob_name) - elif args.command == 'print-blob-acl-for-user': - print_blob_acl_for_user( - args.bucket_name, args.blob_name, args.user_email) - elif args.command == 'add-blob-owner': - add_blob_owner(args.bucket_name, args.blob_name, args.user_email) - elif args.command == 'remove-blob-owner': - remove_blob_owner(args.bucket_name, args.blob_name, args.user_email) diff --git a/acl_test.py b/acl_test.py index aeb1312ee..60947c895 100644 --- a/acl_test.py +++ b/acl_test.py @@ -19,14 +19,24 @@ import google.cloud.storage.acl import pytest -import acl - -BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +import storage_remove_file_owner +import storage_remove_bucket_owner +import storage_remove_bucket_default_owner +import storage_add_file_owner +import storage_add_bucket_owner +import storage_add_bucket_default_owner +import storage_print_bucket_acl_for_user +import storage_print_bucket_acl +import storage_print_file_acl_for_user +import storage_print_file_acl + +BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] # Typically we'd use a @example.com address, but GCS requires a real Google # account. 
TEST_EMAIL = ( - 'google-auth-system-tests' - '@python-docs-samples-tests.iam.gserviceaccount.com') + "google-auth-system-tests" + "@python-docs-samples-tests.iam.gserviceaccount.com" +) @pytest.fixture @@ -48,8 +58,8 @@ def test_bucket(): def test_blob(): """Yields a blob that resets its acl after the test completes.""" bucket = storage.Client().bucket(BUCKET) - blob = bucket.blob('storage_acl_test_sigil') - blob.upload_from_string('Hello, is it me you\'re looking for?') + blob = bucket.blob("storage_acl_test_sigil") + blob.upload_from_string("Hello, is it me you're looking for?") acl = google.cloud.storage.acl.ObjectACL(blob) acl.reload() # bucket ops rate limited 1 update per second time.sleep(1) @@ -59,7 +69,7 @@ def test_blob(): def test_print_bucket_acl(capsys): - acl.print_bucket_acl(BUCKET) + storage_print_bucket_acl.print_bucket_acl(BUCKET) out, _ = capsys.readouterr() assert out @@ -68,50 +78,56 @@ def test_print_bucket_acl_for_user(test_bucket, capsys): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.print_bucket_acl_for_user(BUCKET, TEST_EMAIL) + storage_print_bucket_acl_for_user.print_bucket_acl_for_user( + BUCKET, TEST_EMAIL + ) out, _ = capsys.readouterr() - assert 'OWNER' in out + assert "OWNER" in out def test_add_bucket_owner(test_bucket): - acl.add_bucket_owner(BUCKET, TEST_EMAIL) + storage_add_bucket_owner.add_bucket_owner(BUCKET, TEST_EMAIL) test_bucket.acl.reload() - assert 'OWNER' in test_bucket.acl.user(TEST_EMAIL).get_roles() + assert "OWNER" in test_bucket.acl.user(TEST_EMAIL).get_roles() def test_remove_bucket_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_owner(BUCKET, TEST_EMAIL) + storage_remove_bucket_owner.remove_bucket_owner(BUCKET, TEST_EMAIL) test_bucket.acl.reload() - assert 'OWNER' not in test_bucket.acl.user(TEST_EMAIL).get_roles() + assert "OWNER" not in test_bucket.acl.user(TEST_EMAIL).get_roles() def 
test_add_bucket_default_owner(test_bucket): - acl.add_bucket_default_owner(BUCKET, TEST_EMAIL) + storage_add_bucket_default_owner.add_bucket_default_owner( + BUCKET, TEST_EMAIL + ) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() - assert 'OWNER' in roles + assert "OWNER" in roles def test_remove_bucket_default_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - acl.remove_bucket_default_owner(BUCKET, TEST_EMAIL) + storage_remove_bucket_default_owner.remove_bucket_default_owner( + BUCKET, TEST_EMAIL + ) test_bucket.default_object_acl.reload() roles = test_bucket.default_object_acl.user(TEST_EMAIL).get_roles() - assert 'OWNER' not in roles + assert "OWNER" not in roles def test_print_blob_acl(test_blob, capsys): - acl.print_blob_acl(BUCKET, test_blob.name) + storage_print_file_acl.print_blob_acl(BUCKET, test_blob.name) out, _ = capsys.readouterr() assert out @@ -120,26 +136,28 @@ def test_print_blob_acl_for_user(test_blob, capsys): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() - acl.print_blob_acl_for_user( - BUCKET, test_blob.name, TEST_EMAIL) + storage_print_file_acl_for_user.print_blob_acl_for_user( + BUCKET, test_blob.name, TEST_EMAIL + ) out, _ = capsys.readouterr() - assert 'OWNER' in out + assert "OWNER" in out def test_add_blob_owner(test_blob): - acl.add_blob_owner(BUCKET, test_blob.name, TEST_EMAIL) + storage_add_file_owner.add_blob_owner(BUCKET, test_blob.name, TEST_EMAIL) test_blob.acl.reload() - assert 'OWNER' in test_blob.acl.user(TEST_EMAIL).get_roles() + assert "OWNER" in test_blob.acl.user(TEST_EMAIL).get_roles() def test_remove_blob_owner(test_blob): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() - acl.remove_blob_owner( - BUCKET, test_blob.name, TEST_EMAIL) + storage_remove_file_owner.remove_blob_owner( + BUCKET, test_blob.name, TEST_EMAIL + ) test_blob.acl.reload() - assert 'OWNER' not in 
test_blob.acl.user(TEST_EMAIL).get_roles() + assert "OWNER" not in test_blob.acl.user(TEST_EMAIL).get_roles() diff --git a/bucket_lock.py b/bucket_lock.py deleted file mode 100644 index 497176f9f..000000000 --- a/bucket_lock.py +++ /dev/null @@ -1,298 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse - -from google.cloud import storage - - -def set_retention_policy(bucket_name, retention_period): - """Defines a retention policy on a given bucket""" - # [START storage_set_retention_policy] - # bucket_name = "my-bucket" - # retention_period = 10 - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - bucket.retention_period = retention_period - bucket.patch() - - print('Bucket {} retention period set for {} seconds'.format( - bucket.name, - bucket.retention_period)) - # [END storage_set_retention_policy] - - -def remove_retention_policy(bucket_name): - """Removes the retention policy on a given bucket""" - # [START storage_remove_retention_policy] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - bucket.reload() - - if bucket.retention_policy_locked: - print( - 'Unable to remove retention period as retention policy is locked.') - return - - bucket.retention_period = None - bucket.patch() - - print('Removed bucket {} retention policy'.format(bucket.name)) - # [END 
storage_remove_retention_policy] - - -def lock_retention_policy(bucket_name): - """Locks the retention policy on a given bucket""" - # [START storage_lock_retention_policy] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - # get_bucket gets the current metageneration value for the bucket, - # required by lock_retention_policy. - bucket = storage_client.get_bucket(bucket_name) - - # Warning: Once a retention policy is locked it cannot be unlocked - # and retention period can only be increased. - bucket.lock_retention_policy() - - print('Retention policy for {} is now locked'.format(bucket_name)) - print('Retention policy effective as of {}'.format( - bucket.retention_policy_effective_time)) - # [END storage_lock_retention_policy] - - -def get_retention_policy(bucket_name): - """Gets the retention policy on a given bucket""" - # [START storage_get_retention_policy] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - bucket.reload() - - print('Retention Policy for {}'.format(bucket_name)) - print('Retention Period: {}'.format(bucket.retention_period)) - if bucket.retention_policy_locked: - print('Retention Policy is locked') - - if bucket.retention_policy_effective_time: - print('Effective Time: {}' - .format(bucket.retention_policy_effective_time)) - # [END storage_get_retention_policy] - - -def set_temporary_hold(bucket_name, blob_name): - """Sets a temporary hold on a given blob""" - # [START storage_set_temporary_hold] - # bucket_name = "my-bucket" - # blob_name = "my-blob" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.temporary_hold = True - blob.patch() - - print("Temporary hold was set for #{blob_name}") - # [END storage_set_temporary_hold] - - -def release_temporary_hold(bucket_name, blob_name): - """Releases the temporary hold on a given blob""" - # [START storage_release_temporary_hold] - # 
bucket_name = "my-bucket" - # blob_name = "my-blob" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.temporary_hold = False - blob.patch() - - print("Temporary hold was release for #{blob_name}") - # [END storage_release_temporary_hold] - - -def set_event_based_hold(bucket_name, blob_name): - """Sets a event based hold on a given blob""" - # [START storage_set_event_based_hold] - # bucket_name = "my-bucket" - # blob_name = "my-blob" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.event_based_hold = True - blob.patch() - - print('Event based hold was set for {}'.format(blob_name)) - # [END storage_set_event_based_hold] - - -def release_event_based_hold(bucket_name, blob_name): - """Releases the event based hold on a given blob""" - # [START storage_release_event_based_hold] - # bucket_name = "my-bucket" - # blob_name = "my-blob" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.event_based_hold = False - blob.patch() - - print('Event based hold was released for {}'.format(blob_name)) - # [END storage_release_event_based_hold] - - -def enable_default_event_based_hold(bucket_name): - """Enables the default event based hold on a given bucket""" - # [START storage_enable_default_event_based_hold] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - bucket.default_event_based_hold = True - bucket.patch() - - print('Default event based hold was enabled for {}'.format(bucket_name)) - # [END storage_enable_default_event_based_hold] - - -def disable_default_event_based_hold(bucket_name): - """Disables the default event based hold on a given bucket""" - # [START storage_disable_default_event_based_hold] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = 
storage_client.bucket(bucket_name) - - bucket.default_event_based_hold = False - bucket.patch() - - print("Default event based hold was disabled for {}".format(bucket_name)) - # [END storage_disable_default_event_based_hold] - - -def get_default_event_based_hold(bucket_name): - """Gets the default event based hold on a given bucket""" - # [START storage_get_default_event_based_hold] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - bucket.reload() - - if bucket.default_event_based_hold: - print('Default event-based hold is enabled for {}'.format(bucket_name)) - else: - print('Default event-based hold is not enabled for {}' - .format(bucket_name)) - # [END storage_get_default_event_based_hold] - - -if __name__ == '__main__': - - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - subparsers = parser.add_subparsers(dest='command') - - set_retention_policy_parser = subparsers.add_parser( - 'set-retention-policy', help=set_retention_policy.__doc__) - set_retention_policy_parser.add_argument('bucket_name') - set_retention_policy_parser.add_argument('retention_period') - - remove_retention_policy_parser = subparsers.add_parser( - 'remove-retention-policy', help=remove_retention_policy.__doc__) - remove_retention_policy_parser.add_argument('bucket_name') - - lock_retention_policy_parser = subparsers.add_parser( - 'lock-retention-policy', help=lock_retention_policy.__doc__) - lock_retention_policy_parser.add_argument('bucket_name') - - get_retention_policy_parser = subparsers.add_parser( - 'get-retention-policy', help=get_retention_policy.__doc__) - get_retention_policy_parser.add_argument('bucket_name') - - set_temporary_hold_parser = subparsers.add_parser( - 'set-temporary-hold', help=set_temporary_hold.__doc__) - set_temporary_hold_parser.add_argument('bucket_name') - set_temporary_hold_parser.add_argument('blob_name') - - 
release_temporary_hold_parser = subparsers.add_parser( - 'release-temporary-hold', help=release_temporary_hold.__doc__) - release_temporary_hold_parser.add_argument('bucket_name') - release_temporary_hold_parser.add_argument('blob_name') - - set_event_based_hold_parser = subparsers.add_parser( - 'set-event-based-hold', help=set_event_based_hold.__doc__) - set_event_based_hold_parser.add_argument('bucket_name') - set_event_based_hold_parser.add_argument('blob_name') - - release_event_based_hold_parser = subparsers.add_parser( - 'release-event-based-hold', help=release_event_based_hold.__doc__) - release_event_based_hold_parser.add_argument('bucket_name') - release_event_based_hold_parser.add_argument('blob_name') - - enable_default_event_based_hold_parser = subparsers.add_parser( - 'enable-default-event-based-hold', - help=enable_default_event_based_hold.__doc__) - enable_default_event_based_hold_parser.add_argument('bucket_name') - - disable_default_event_based_hold_parser = subparsers.add_parser( - 'disable-default-event-based-hold', - help=disable_default_event_based_hold.__doc__) - disable_default_event_based_hold_parser.add_argument('bucket_name') - - get_default_event_based_hold_parser = subparsers.add_parser( - 'get-default-event-based-hold', - help=get_default_event_based_hold.__doc__) - get_default_event_based_hold_parser.add_argument('bucket_name') - - args = parser.parse_args() - - if args.command == 'set-retention-policy': - set_retention_policy(args.bucket_name, args.retention_period) - elif args.command == 'remove-retention-policy': - remove_retention_policy(args.bucket_name) - elif args.command == 'lock-retention-policy': - lock_retention_policy(args.bucket_name) - elif args.command == 'get-retention-policy': - get_retention_policy(args.bucket_name) - elif args.command == 'set-temporary-hold': - set_temporary_hold(args.bucket_name, args.blob_name) - elif args.command == 'release-temporary-hold': - release_temporary_hold(args.bucket_name, 
args.blob_name) - elif args.command == 'set-event-based-hold': - set_event_based_hold(args.bucket_name, args.blob_name) - elif args.command == 'release-event-based-hold': - release_event_based_hold(args.bucket_name, args.blob_name) - elif args.command == 'enable-default-event-based-hold': - enable_default_event_based_hold(args.bucket_name) - elif args.command == 'disable-default-event-based-hold': - disable_default_event_based_hold(args.bucket_name) - elif args.command == 'get-default-event-based-hold': - get_default_event_based_hold(args.bucket_name) diff --git a/bucket_lock_test.py b/bucket_lock_test.py index 63e1afbaa..89db4a7cc 100644 --- a/bucket_lock_test.py +++ b/bucket_lock_test.py @@ -18,10 +18,20 @@ import pytest -import bucket_lock - -BLOB_NAME = 'storage_snippets_test_sigil' -BLOB_CONTENT = 'Hello, is it me you\'re looking for?' +import storage_set_retention_policy +import storage_lock_retention_policy +import storage_get_retention_policy +import storage_get_default_event_based_hold +import storage_enable_default_event_based_hold +import storage_disable_default_event_based_hold +import storage_set_event_based_hold +import storage_release_event_based_hold +import storage_set_temporary_hold +import storage_release_temporary_hold +import storage_remove_retention_policy + +BLOB_NAME = "storage_snippets_test_sigil" +BLOB_CONTENT = "Hello, is it me you're looking for?" 
# Retention policy for 5 seconds RETENTION_POLICY = 5 @@ -30,33 +40,35 @@ def bucket(): """Creates a test bucket and deletes it upon completion.""" client = storage.Client() - bucket_name = 'bucket-lock-' + str(int(time.time())) + bucket_name = "bucket-lock-" + str(int(time.time())) bucket = client.create_bucket(bucket_name) yield bucket bucket.delete(force=True) def test_retention_policy_no_lock(bucket, capsys): - bucket_lock.set_retention_policy(bucket.name, RETENTION_POLICY) + storage_set_retention_policy.set_retention_policy( + bucket.name, RETENTION_POLICY + ) bucket.reload() assert bucket.retention_period is RETENTION_POLICY assert bucket.retention_policy_effective_time is not None assert bucket.retention_policy_locked is None - bucket_lock.get_retention_policy(bucket.name) + storage_get_retention_policy.get_retention_policy(bucket.name) out, _ = capsys.readouterr() - assert 'Retention Policy for {}'.format(bucket.name) in out - assert 'Retention Period: 5' in out - assert 'Effective Time: ' in out - assert 'Retention Policy is locked' not in out + assert "Retention Policy for {}".format(bucket.name) in out + assert "Retention Period: 5" in out + assert "Effective Time: " in out + assert "Retention Policy is locked" not in out blob = bucket.blob(BLOB_NAME) blob.upload_from_string(BLOB_CONTENT) assert blob.retention_expiration_time is not None - bucket_lock.remove_retention_policy(bucket.name) + storage_remove_retention_policy.remove_retention_policy(bucket.name) bucket.reload() assert bucket.retention_period is None @@ -64,46 +76,63 @@ def test_retention_policy_no_lock(bucket, capsys): def test_retention_policy_lock(bucket, capsys): - bucket_lock.set_retention_policy(bucket.name, RETENTION_POLICY) + storage_set_retention_policy.set_retention_policy( + bucket.name, RETENTION_POLICY + ) bucket.reload() assert bucket.retention_policy_locked is None - bucket_lock.lock_retention_policy(bucket.name) + storage_lock_retention_policy.lock_retention_policy(bucket.name) 
bucket.reload() assert bucket.retention_policy_locked is True - bucket_lock.get_retention_policy(bucket.name) + storage_get_retention_policy.get_retention_policy(bucket.name) out, _ = capsys.readouterr() - assert 'Retention Policy is locked' in out + assert "Retention Policy is locked" in out def test_enable_disable_bucket_default_event_based_hold(bucket, capsys): - bucket_lock.get_default_event_based_hold(bucket.name) + storage_get_default_event_based_hold.get_default_event_based_hold( + bucket.name + ) out, _ = capsys.readouterr() - assert 'Default event-based hold is not enabled for {}'.format( - bucket.name) in out - assert 'Default event-based hold is enabled for {}'.format( - bucket.name) not in out - - bucket_lock.enable_default_event_based_hold(bucket.name) + assert ( + "Default event-based hold is not enabled for {}".format(bucket.name) + in out + ) + assert ( + "Default event-based hold is enabled for {}".format(bucket.name) + not in out + ) + + storage_enable_default_event_based_hold.enable_default_event_based_hold( + bucket.name + ) bucket.reload() assert bucket.default_event_based_hold is True - bucket_lock.get_default_event_based_hold(bucket.name) + storage_get_default_event_based_hold.get_default_event_based_hold( + bucket.name + ) out, _ = capsys.readouterr() - assert 'Default event-based hold is enabled for {}'.format( - bucket.name) in out + assert ( + "Default event-based hold is enabled for {}".format(bucket.name) in out + ) blob = bucket.blob(BLOB_NAME) blob.upload_from_string(BLOB_CONTENT) assert blob.event_based_hold is True - bucket_lock.release_event_based_hold(bucket.name, blob.name) + storage_release_event_based_hold.release_event_based_hold( + bucket.name, blob.name + ) blob.reload() assert blob.event_based_hold is False - bucket_lock.disable_default_event_based_hold(bucket.name) + storage_disable_default_event_based_hold.disable_default_event_based_hold( + bucket.name + ) bucket.reload() assert bucket.default_event_based_hold is False 
@@ -113,11 +142,13 @@ def test_enable_disable_temporary_hold(bucket): blob.upload_from_string(BLOB_CONTENT) assert blob.temporary_hold is None - bucket_lock.set_temporary_hold(bucket.name, blob.name) + storage_set_temporary_hold.set_temporary_hold(bucket.name, blob.name) blob.reload() assert blob.temporary_hold is True - bucket_lock.release_temporary_hold(bucket.name, blob.name) + storage_release_temporary_hold.release_temporary_hold( + bucket.name, blob.name + ) blob.reload() assert blob.temporary_hold is False @@ -127,10 +158,12 @@ def test_enable_disable_event_based_hold(bucket): blob.upload_from_string(BLOB_CONTENT) assert blob.event_based_hold is None - bucket_lock.set_event_based_hold(bucket.name, blob.name) + storage_set_event_based_hold.set_event_based_hold(bucket.name, blob.name) blob.reload() assert blob.event_based_hold is True - bucket_lock.release_event_based_hold(bucket.name, blob.name) + storage_release_event_based_hold.release_event_based_hold( + bucket.name, blob.name + ) blob.reload() assert blob.event_based_hold is False diff --git a/encryption.py b/encryption.py deleted file mode 100644 index 04718cc9d..000000000 --- a/encryption.py +++ /dev/null @@ -1,177 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to upload and download encrypted blobs -(objects) in Google Cloud Storage. 
- -Use `generate-encryption-key` to generate an example key: - - python encryption.py generate-encryption-key - -Then use the key to upload and download files encrypted with a custom key. - -For more information, see the README.md under /storage and the documentation -at https://cloud.google.com/storage/docs/encryption. -""" - -import argparse -import base64 -import os - -from google.cloud import storage -from google.cloud.storage import Blob - - -def generate_encryption_key(): - """Generates a 256 bit (32 byte) AES encryption key and prints the - base64 representation. - - This is included for demonstration purposes. You should generate your own - key. Please remember that encryption keys should be handled with a - comprehensive security policy. - """ - key = os.urandom(32) - encoded_key = base64.b64encode(key).decode('utf-8') - print('Base 64 encoded encryption key: {}'.format(encoded_key)) - - -def upload_encrypted_blob(bucket_name, source_file_name, - destination_blob_name, base64_encryption_key): - """Uploads a file to a Google Cloud Storage bucket using a custom - encryption key. - - The file will be encrypted by Google Cloud Storage and only - retrievable using the provided encryption key. - """ - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - # Encryption key must be an AES256 key represented as a bytestring with - # 32 bytes. Since it's passed in as a base64 encoded string, it needs - # to be decoded. - encryption_key = base64.b64decode(base64_encryption_key) - blob = Blob(destination_blob_name, bucket, encryption_key=encryption_key) - - blob.upload_from_filename(source_file_name) - - print('File {} uploaded to {}.'.format( - source_file_name, - destination_blob_name)) - - -def download_encrypted_blob(bucket_name, source_blob_name, - destination_file_name, base64_encryption_key): - """Downloads a previously-encrypted blob from Google Cloud Storage. 
- - The encryption key provided must be the same key provided when uploading - the blob. - """ - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - # Encryption key must be an AES256 key represented as a bytestring with - # 32 bytes. Since it's passed in as a base64 encoded string, it needs - # to be decoded. - encryption_key = base64.b64decode(base64_encryption_key) - blob = Blob(source_blob_name, bucket, encryption_key=encryption_key) - - blob.download_to_filename(destination_file_name) - - print('Blob {} downloaded to {}.'.format( - source_blob_name, - destination_file_name)) - - -def rotate_encryption_key(bucket_name, blob_name, base64_encryption_key, - base64_new_encryption_key): - """Performs a key rotation by re-writing an encrypted blob with a new - encryption key.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - current_encryption_key = base64.b64decode(base64_encryption_key) - new_encryption_key = base64.b64decode(base64_new_encryption_key) - - # Both source_blob and destination_blob refer to the same storage object, - # but destination_blob has the new encryption key. 
- source_blob = Blob( - blob_name, bucket, encryption_key=current_encryption_key) - destination_blob = Blob( - blob_name, bucket, encryption_key=new_encryption_key) - - token = None - - while True: - token, bytes_rewritten, total_bytes = destination_blob.rewrite( - source_blob, token=token) - if token is None: - break - - print('Key rotation complete for Blob {}'.format(blob_name)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - subparsers = parser.add_subparsers(dest='command') - - subparsers.add_parser( - 'generate-encryption-key', help=generate_encryption_key.__doc__) - - upload_parser = subparsers.add_parser( - 'upload', help=upload_encrypted_blob.__doc__) - upload_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - upload_parser.add_argument('source_file_name') - upload_parser.add_argument('destination_blob_name') - upload_parser.add_argument('base64_encryption_key') - - download_parser = subparsers.add_parser( - 'download', help=download_encrypted_blob.__doc__) - download_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - download_parser.add_argument('source_blob_name') - download_parser.add_argument('destination_file_name') - download_parser.add_argument('base64_encryption_key') - - rotate_parser = subparsers.add_parser( - 'rotate', help=rotate_encryption_key.__doc__) - rotate_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - rotate_parser.add_argument('blob_name') - rotate_parser.add_argument('base64_encryption_key') - rotate_parser.add_argument('base64_new_encryption_key') - - args = parser.parse_args() - - if args.command == 'generate-encryption-key': - generate_encryption_key() - elif args.command == 'upload': - upload_encrypted_blob( - args.bucket_name, - args.source_file_name, - args.destination_blob_name, - args.base64_encryption_key) - elif args.command == 'download': - 
download_encrypted_blob( - args.bucket_name, - args.source_blob_name, - args.destination_file_name, - args.base64_encryption_key) - elif args.command == 'rotate': - rotate_encryption_key( - args.bucket_name, - args.blob_name, - args.base64_encryption_key, - args.base64_new_encryption_key) diff --git a/encryption_test.py b/encryption_test.py index 4db6e6cb0..40eab55a4 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -20,43 +20,50 @@ from google.cloud.storage import Blob import pytest -import encryption +import storage_download_encrypted_file +import storage_generate_encryption_key +import storage_upload_encrypted_file +import storage_rotate_encryption_key -BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] +BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] -TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=' +TEST_ENCRYPTION_KEY = "brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=" TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) -TEST_ENCRYPTION_KEY_2 = 'o4OD7SWCaPjfeEGhAY+YCgMdY9UW+OJ8mvfWD9lNtO4=' +TEST_ENCRYPTION_KEY_2 = "o4OD7SWCaPjfeEGhAY+YCgMdY9UW+OJ8mvfWD9lNtO4=" TEST_ENCRYPTION_KEY_2_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY_2) def test_generate_encryption_key(capsys): - encryption.generate_encryption_key() + storage_generate_encryption_key.generate_encryption_key() out, _ = capsys.readouterr() - encoded_key = out.split(':', 1).pop().strip() + encoded_key = out.split(":", 1).pop().strip() key = base64.b64decode(encoded_key) - assert len(key) == 32, 'Returned key should be 32 bytes' + assert len(key) == 32, "Returned key should be 32 bytes" def test_upload_encrypted_blob(): with tempfile.NamedTemporaryFile() as source_file: - source_file.write(b'test') + source_file.write(b"test") - encryption.upload_encrypted_blob( + storage_upload_encrypted_file.upload_encrypted_blob( BUCKET, source_file.name, - 'test_encrypted_upload_blob', - TEST_ENCRYPTION_KEY) + "test_encrypted_upload_blob", + TEST_ENCRYPTION_KEY, + ) @pytest.fixture 
def test_blob(): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(BUCKET) - blob = Blob('encryption_test_sigil', - bucket, encryption_key=TEST_ENCRYPTION_KEY_DECODED) - content = 'Hello, is it me you\'re looking for?' + blob = Blob( + "encryption_test_sigil", + bucket, + encryption_key=TEST_ENCRYPTION_KEY_DECODED, + ) + content = "Hello, is it me you're looking for?" blob.upload_from_string(content) return blob.name, content @@ -64,30 +71,24 @@ def test_blob(): def test_download_blob(test_blob): test_blob_name, test_blob_content = test_blob with tempfile.NamedTemporaryFile() as dest_file: - encryption.download_encrypted_blob( - BUCKET, - test_blob_name, - dest_file.name, - TEST_ENCRYPTION_KEY) + storage_download_encrypted_file.download_encrypted_blob( + BUCKET, test_blob_name, dest_file.name, TEST_ENCRYPTION_KEY + ) - downloaded_content = dest_file.read().decode('utf-8') + downloaded_content = dest_file.read().decode("utf-8") assert downloaded_content == test_blob_content def test_rotate_encryption_key(test_blob): test_blob_name, test_blob_content = test_blob - encryption.rotate_encryption_key( - BUCKET, - test_blob_name, - TEST_ENCRYPTION_KEY, - TEST_ENCRYPTION_KEY_2) + storage_rotate_encryption_key.rotate_encryption_key( + BUCKET, test_blob_name, TEST_ENCRYPTION_KEY, TEST_ENCRYPTION_KEY_2 + ) with tempfile.NamedTemporaryFile() as dest_file: - encryption.download_encrypted_blob( - BUCKET, - test_blob_name, - dest_file.name, - TEST_ENCRYPTION_KEY_2) + storage_download_encrypted_file.download_encrypted_blob( + BUCKET, test_blob_name, dest_file.name, TEST_ENCRYPTION_KEY_2 + ) - downloaded_content = dest_file.read().decode('utf-8') + downloaded_content = dest_file.read().decode("utf-8") assert downloaded_content == test_blob_content diff --git a/hmac_samples.py b/hmac_samples.py deleted file mode 100644 index 248c6ab26..000000000 --- a/hmac_samples.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright 2019 Google Inc. 
All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Samples to illustrate management of HMAC keys via the python client library. -""" - - -from google.cloud import storage - - -def list_keys(project_id): - """ - List all HMAC keys associated with the project. - """ - # [START storage_list_hmac_keys] - # project_id = 'Your Google Cloud project ID' - storage_client = storage.Client(project=project_id) - hmac_keys = storage_client.list_hmac_keys(project_id=project_id) - print('HMAC Keys:') - for hmac_key in hmac_keys: - print('Service Account Email: {}'.format( - hmac_key.service_account_email)) - print('Access ID: {}'.format(hmac_key.access_id)) - # [END storage_list_hmac_keys] - return hmac_keys - - -def create_key(project_id, service_account_email): - """ - Create a new HMAC key using the given project and service account. 
- """ - # [START storage_create_hmac_key] - # project_id = 'Your Google Cloud project ID' - # service_account_email = 'Service account used to generate HMAC key' - storage_client = storage.Client(project=project_id) - hmac_key, secret = storage_client.create_hmac_key( - service_account_email=service_account_email, - project_id=project_id) - print('The base64 encoded secret is {}'.format(secret)) - print('Do not miss that secret, there is no API to recover it.') - print('The HMAC key metadata is:') - print('Service Account Email: {}'.format(hmac_key.service_account_email)) - print('Key ID: {}'.format(hmac_key.id)) - print('Access ID: {}'.format(hmac_key.access_id)) - print('Project ID: {}'.format(hmac_key.project)) - print('State: {}'.format(hmac_key.state)) - print('Created At: {}'.format(hmac_key.time_created)) - print('Updated At: {}'.format(hmac_key.updated)) - print('Etag: {}'.format(hmac_key.etag)) - # [END storage_create_hmac_key] - return hmac_key - - -def get_key(access_id, project_id): - """ - Retrieve the HMACKeyMetadata with the given access id. - """ - # [START storage_get_hmac_key] - # project_id = 'Your Google Cloud project ID' - # access_id = 'ID of an HMAC key' - storage_client = storage.Client(project=project_id) - hmac_key = storage_client.get_hmac_key_metadata( - access_id, - project_id=project_id) - print('The HMAC key metadata is:') - print('Service Account Email: {}'.format(hmac_key.service_account_email)) - print('Key ID: {}'.format(hmac_key.id)) - print('Access ID: {}'.format(hmac_key.access_id)) - print('Project ID: {}'.format(hmac_key.project)) - print('State: {}'.format(hmac_key.state)) - print('Created At: {}'.format(hmac_key.time_created)) - print('Updated At: {}'.format(hmac_key.updated)) - print('Etag: {}'.format(hmac_key.etag)) - # [END storage_get_hmac_key] - return hmac_key - - -def activate_key(access_id, project_id): - """ - Activate the HMAC key with the given access ID. 
- """ - # [START storage_activate_hmac_key] - # project_id = 'Your Google Cloud project ID' - # access_id = 'ID of an inactive HMAC key' - storage_client = storage.Client(project=project_id) - hmac_key = storage_client.get_hmac_key_metadata( - access_id, - project_id=project_id) - hmac_key.state = 'ACTIVE' - hmac_key.update() - print('The HMAC key metadata is:') - print('Service Account Email: {}'.format(hmac_key.service_account_email)) - print('Key ID: {}'.format(hmac_key.id)) - print('Access ID: {}'.format(hmac_key.access_id)) - print('Project ID: {}'.format(hmac_key.project)) - print('State: {}'.format(hmac_key.state)) - print('Created At: {}'.format(hmac_key.time_created)) - print('Updated At: {}'.format(hmac_key.updated)) - print('Etag: {}'.format(hmac_key.etag)) - # [END storage_activate_hmac_key] - return hmac_key - - -def deactivate_key(access_id, project_id): - """ - Deactivate the HMAC key with the given access ID. - """ - # [START storage_deactivate_hmac_key] - # project_id = 'Your Google Cloud project ID' - # access_id = 'ID of an active HMAC key' - storage_client = storage.Client(project=project_id) - hmac_key = storage_client.get_hmac_key_metadata( - access_id, - project_id=project_id) - hmac_key.state = 'INACTIVE' - hmac_key.update() - print('The HMAC key is now inactive.') - print('The HMAC key metadata is:') - print('Service Account Email: {}'.format(hmac_key.service_account_email)) - print('Key ID: {}'.format(hmac_key.id)) - print('Access ID: {}'.format(hmac_key.access_id)) - print('Project ID: {}'.format(hmac_key.project)) - print('State: {}'.format(hmac_key.state)) - print('Created At: {}'.format(hmac_key.time_created)) - print('Updated At: {}'.format(hmac_key.updated)) - print('Etag: {}'.format(hmac_key.etag)) - # [END storage_deactivate_hmac_key] - return hmac_key - - -def delete_key(access_id, project_id): - """ - Delete the HMAC key with the given access ID. Key must have state INACTIVE - in order to succeed. 
- """ - # [START storage_delete_hmac_key] - # project_id = 'Your Google Cloud project ID' - # access_id = 'ID of an HMAC key (must be in INACTIVE state)' - storage_client = storage.Client(project=project_id) - hmac_key = storage_client.get_hmac_key_metadata( - access_id, - project_id=project_id) - hmac_key.delete() - print('The key is deleted, though it may still appear in list_hmac_keys()' - ' results.') - # [END storage_delete_hmac_key] diff --git a/hmac_samples_test.py b/hmac_samples_test.py index 9d67f9ef7..e852d31bf 100644 --- a/hmac_samples_test.py +++ b/hmac_samples_test.py @@ -23,11 +23,16 @@ from google.cloud import storage import pytest -import hmac_samples +import storage_activate_hmac_key +import storage_create_hmac_key +import storage_deactivate_hmac_key +import storage_delete_hmac_key +import storage_get_hmac_key +import storage_list_hmac_keys -PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] -SERVICE_ACCOUNT_EMAIL = os.environ['HMAC_KEY_TEST_SERVICE_ACCOUNT'] +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +SERVICE_ACCOUNT_EMAIL = os.environ["HMAC_KEY_TEST_SERVICE_ACCOUNT"] STORAGE_CLIENT = storage.Client(project=PROJECT_ID) @@ -38,58 +43,64 @@ def new_hmac_key(): the end of each test. """ hmac_key, secret = STORAGE_CLIENT.create_hmac_key( - service_account_email=SERVICE_ACCOUNT_EMAIL, - project_id=PROJECT_ID) + service_account_email=SERVICE_ACCOUNT_EMAIL, project_id=PROJECT_ID + ) yield hmac_key # Re-fetch the key metadata in case state has changed during the test. 
hmac_key = STORAGE_CLIENT.get_hmac_key_metadata( - hmac_key.access_id, - project_id=PROJECT_ID) - if hmac_key.state == 'DELETED': + hmac_key.access_id, project_id=PROJECT_ID + ) + if hmac_key.state == "DELETED": return - if not hmac_key.state == 'INACTIVE': - hmac_key.state = 'INACTIVE' + if not hmac_key.state == "INACTIVE": + hmac_key.state = "INACTIVE" hmac_key.update() hmac_key.delete() def test_list_keys(capsys, new_hmac_key): - hmac_keys = hmac_samples.list_keys(PROJECT_ID) - assert 'HMAC Keys:' in capsys.readouterr().out + hmac_keys = storage_list_hmac_keys.list_keys(PROJECT_ID) + assert "HMAC Keys:" in capsys.readouterr().out assert hmac_keys.num_results >= 1 def test_create_key(capsys): - hmac_key = hmac_samples.create_key(PROJECT_ID, SERVICE_ACCOUNT_EMAIL) - hmac_key.state = 'INACTIVE' + hmac_key = storage_create_hmac_key.create_key( + PROJECT_ID, SERVICE_ACCOUNT_EMAIL + ) + hmac_key.state = "INACTIVE" hmac_key.update() hmac_key.delete() - assert 'Key ID:' in capsys.readouterr().out + assert "Key ID:" in capsys.readouterr().out assert hmac_key.access_id def test_get_key(capsys, new_hmac_key): - hmac_key = hmac_samples.get_key(new_hmac_key.access_id, PROJECT_ID) - assert 'HMAC key metadata' in capsys.readouterr().out + hmac_key = storage_get_hmac_key.get_key(new_hmac_key.access_id, PROJECT_ID) + assert "HMAC key metadata" in capsys.readouterr().out assert hmac_key.access_id == new_hmac_key.access_id def test_activate_key(capsys, new_hmac_key): - new_hmac_key.state = 'INACTIVE' + new_hmac_key.state = "INACTIVE" new_hmac_key.update() - hmac_key = hmac_samples.activate_key(new_hmac_key.access_id, PROJECT_ID) - assert 'State: ACTIVE' in capsys.readouterr().out - assert hmac_key.state == 'ACTIVE' + hmac_key = storage_activate_hmac_key.activate_key( + new_hmac_key.access_id, PROJECT_ID + ) + assert "State: ACTIVE" in capsys.readouterr().out + assert hmac_key.state == "ACTIVE" def test_deactivate_key(capsys, new_hmac_key): - hmac_key = 
hmac_samples.deactivate_key(new_hmac_key.access_id, PROJECT_ID) - assert 'State: INACTIVE' in capsys.readouterr().out - assert hmac_key.state == 'INACTIVE' + hmac_key = storage_deactivate_hmac_key.deactivate_key( + new_hmac_key.access_id, PROJECT_ID + ) + assert "State: INACTIVE" in capsys.readouterr().out + assert hmac_key.state == "INACTIVE" def test_delete_key(capsys, new_hmac_key): - new_hmac_key.state = 'INACTIVE' + new_hmac_key.state = "INACTIVE" new_hmac_key.update() - hmac_samples.delete_key(new_hmac_key.access_id, PROJECT_ID) - assert 'The key is deleted' in capsys.readouterr().out + storage_delete_hmac_key.delete_key(new_hmac_key.access_id, PROJECT_ID) + assert "The key is deleted" in capsys.readouterr().out diff --git a/iam.py b/iam.py deleted file mode 100644 index ba20bc1dd..000000000 --- a/iam.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to get and set IAM policies on Google -Cloud Storage buckets. - -For more information, see the documentation at -https://cloud.google.com/storage/docs/access-control/using-iam-permissions. 
-""" - -import argparse - -from google.cloud import storage - - -def view_bucket_iam_members(bucket_name): - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - policy = bucket.get_iam_policy() - - for role in policy: - members = policy[role] - print('Role: {}, Members: {}'.format(role, members)) - - -def add_bucket_iam_member(bucket_name, role, member): - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - policy = bucket.get_iam_policy() - - policy[role].add(member) - - bucket.set_iam_policy(policy) - - print('Added {} with role {} to {}.'.format( - member, role, bucket_name)) - - -def remove_bucket_iam_member(bucket_name, role, member): - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - policy = bucket.get_iam_policy() - - policy[role].discard(member) - - bucket.set_iam_policy(policy) - - print('Removed {} with role {} from {}.'.format( - member, role, bucket_name)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('bucket_name', help='Your Cloud Storage bucket name.') - subparsers = parser.add_subparsers(dest='command') - - subparsers.add_parser( - 'view-bucket-iam-members', help=view_bucket_iam_members.__doc__) - - add_member_parser = subparsers.add_parser( - 'add-bucket-iam-member', help=add_bucket_iam_member.__doc__) - add_member_parser.add_argument('role') - add_member_parser.add_argument('member') - - remove_member_parser = subparsers.add_parser( - 'remove-bucket-iam-member', help=remove_bucket_iam_member.__doc__) - remove_member_parser.add_argument('role') - remove_member_parser.add_argument('member') - - args = parser.parse_args() - - if args.command == 'view-bucket-iam-members': - view_bucket_iam_members(args.bucket_name) - elif args.command == 'add-bucket-iam-member': - add_bucket_iam_member(args.bucket_name, args.role, args.member) - 
elif args.command == 'remove-bucket-iam-member': - remove_bucket_iam_member(args.bucket_name, args.role, args.member) diff --git a/iam_test.py b/iam_test.py index 0c823afa0..21ab0dba1 100644 --- a/iam_test.py +++ b/iam_test.py @@ -17,11 +17,13 @@ from google.cloud import storage import pytest -import iam +import storage_remove_bucket_iam_member +import storage_add_bucket_iam_member +import storage_view_bucket_iam_members -BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -MEMBER = 'group:dpebot@google.com' -ROLE = 'roles/storage.legacyBucketReader' +BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] +MEMBER = "group:dpebot@google.com" +ROLE = "roles/storage.legacyBucketReader" @pytest.fixture @@ -30,16 +32,16 @@ def bucket(): def test_view_bucket_iam_members(): - iam.view_bucket_iam_members(BUCKET) + storage_view_bucket_iam_members.view_bucket_iam_members(BUCKET) def test_add_bucket_iam_member(bucket): - iam.add_bucket_iam_member( - BUCKET, ROLE, MEMBER) + storage_add_bucket_iam_member.add_bucket_iam_member(BUCKET, ROLE, MEMBER) assert MEMBER in bucket.get_iam_policy()[ROLE] def test_remove_bucket_iam_member(bucket): - iam.remove_bucket_iam_member( - BUCKET, ROLE, MEMBER) + storage_remove_bucket_iam_member.remove_bucket_iam_member( + BUCKET, ROLE, MEMBER + ) assert MEMBER not in bucket.get_iam_policy()[ROLE] diff --git a/notification_polling.py b/notification_polling.py index 88868f948..e468638e1 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -57,43 +57,49 @@ def summarize(message): # [START parse_message] - data = message.data.decode('utf-8') + data = message.data.decode("utf-8") attributes = message.attributes - event_type = attributes['eventType'] - bucket_id = attributes['bucketId'] - object_id = attributes['objectId'] - generation = attributes['objectGeneration'] + event_type = attributes["eventType"] + bucket_id = attributes["bucketId"] + object_id = attributes["objectId"] + generation = attributes["objectGeneration"] description = ( - '\tEvent 
type: {event_type}\n' - '\tBucket ID: {bucket_id}\n' - '\tObject ID: {object_id}\n' - '\tGeneration: {generation}\n').format( - event_type=event_type, - bucket_id=bucket_id, - object_id=object_id, - generation=generation) - - if 'overwroteGeneration' in attributes: - description += '\tOverwrote generation: %s\n' % ( - attributes['overwroteGeneration']) - if 'overwrittenByGeneration' in attributes: - description += '\tOverwritten by generation: %s\n' % ( - attributes['overwrittenByGeneration']) - - payload_format = attributes['payloadFormat'] - if payload_format == 'JSON_API_V1': + "\tEvent type: {event_type}\n" + "\tBucket ID: {bucket_id}\n" + "\tObject ID: {object_id}\n" + "\tGeneration: {generation}\n" + ).format( + event_type=event_type, + bucket_id=bucket_id, + object_id=object_id, + generation=generation, + ) + + if "overwroteGeneration" in attributes: + description += "\tOverwrote generation: %s\n" % ( + attributes["overwroteGeneration"] + ) + if "overwrittenByGeneration" in attributes: + description += "\tOverwritten by generation: %s\n" % ( + attributes["overwrittenByGeneration"] + ) + + payload_format = attributes["payloadFormat"] + if payload_format == "JSON_API_V1": object_metadata = json.loads(data) - size = object_metadata['size'] - content_type = object_metadata['contentType'] - metageneration = object_metadata['metageneration'] + size = object_metadata["size"] + content_type = object_metadata["contentType"] + metageneration = object_metadata["metageneration"] description += ( - '\tContent type: {content_type}\n' - '\tSize: {object_size}\n' - '\tMetageneration: {metageneration}\n').format( - content_type=content_type, - object_size=size, - metageneration=metageneration) + "\tContent type: {content_type}\n" + "\tSize: {object_size}\n" + "\tMetageneration: {metageneration}\n" + ).format( + content_type=content_type, + object_size=size, + metageneration=metageneration, + ) return description # [END parse_message] @@ -103,30 +109,33 @@ def 
poll_notifications(project, subscription_name): # [BEGIN poll_notifications] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project, subscription_name + ) def callback(message): - print('Received message:\n{}'.format(summarize(message))) + print("Received message:\n{}".format(summarize(message))) message.ack() subscriber.subscribe(subscription_path, callback=callback) # The subscriber is non-blocking, so we must keep the main thread from # exiting to allow it to process messages in the background. - print('Listening for messages on {}'.format(subscription_path)) + print("Listening for messages on {}".format(subscription_path)) while True: time.sleep(60) # [END poll_notifications] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) + formatter_class=argparse.RawDescriptionHelpFormatter, + ) parser.add_argument( - 'project', - help='The ID of the project that owns the subscription') - parser.add_argument('subscription', - help='The ID of the Pub/Sub subscription') + "project", help="The ID of the project that owns the subscription" + ) + parser.add_argument( + "subscription", help="The ID of the Pub/Sub subscription" + ) args = parser.parse_args() poll_notifications(args.project, args.subscription) diff --git a/notification_polling_test.py b/notification_polling_test.py index b816bd9df..0f8329fbe 100644 --- a/notification_polling_test.py +++ b/notification_polling_test.py @@ -24,28 +24,32 @@ def test_parse_json_message(): attributes = { - 'eventType': 'OBJECT_FINALIZE', - 'bucketId': 'mybucket', - 'objectId': 'myobject', - 'objectGeneration': 1234567, - 'resource': 'projects/_/buckets/mybucket/objects/myobject#1234567', - 'notificationConfig': ('projects/_/buckets/mybucket/' - 'notificationConfigs/5'), - 'payloadFormat': 'JSON_API_V1'} - data = (b'{' - b' "size": 12345,' - b' 
"contentType": "text/html",' - b' "metageneration": 1' - b'}') + "eventType": "OBJECT_FINALIZE", + "bucketId": "mybucket", + "objectId": "myobject", + "objectGeneration": 1234567, + "resource": "projects/_/buckets/mybucket/objects/myobject#1234567", + "notificationConfig": ( + "projects/_/buckets/mybucket/" "notificationConfigs/5" + ), + "payloadFormat": "JSON_API_V1", + } + data = ( + b"{" + b' "size": 12345,' + b' "contentType": "text/html",' + b' "metageneration": 1' + b"}" + ) message = Message( - mock.Mock(data=data, attributes=attributes), - MESSAGE_ID, - mock.Mock()) + mock.Mock(data=data, attributes=attributes), MESSAGE_ID, mock.Mock() + ) assert summarize(message) == ( - '\tEvent type: OBJECT_FINALIZE\n' - '\tBucket ID: mybucket\n' - '\tObject ID: myobject\n' - '\tGeneration: 1234567\n' - '\tContent type: text/html\n' - '\tSize: 12345\n' - '\tMetageneration: 1\n') + "\tEvent type: OBJECT_FINALIZE\n" + "\tBucket ID: mybucket\n" + "\tObject ID: myobject\n" + "\tGeneration: 1234567\n" + "\tContent type: text/html\n" + "\tSize: 12345\n" + "\tMetageneration: 1\n" + ) diff --git a/quickstart.py b/quickstart.py index 9aff9b214..578e50753 100644 --- a/quickstart.py +++ b/quickstart.py @@ -24,14 +24,14 @@ def run_quickstart(): storage_client = storage.Client() # The name for the new bucket - bucket_name = 'my-new-bucket' + bucket_name = "my-new-bucket" # Creates the new bucket bucket = storage_client.create_bucket(bucket_name) - print('Bucket {} created.'.format(bucket.name)) + print("Bucket {} created.".format(bucket.name)) # [END storage_quickstart] -if __name__ == '__main__': +if __name__ == "__main__": run_quickstart() diff --git a/quickstart_test.py b/quickstart_test.py index cb0503972..f6e06ad93 100644 --- a/quickstart_test.py +++ b/quickstart_test.py @@ -17,7 +17,7 @@ import quickstart -@mock.patch('google.cloud.storage.client.Client.create_bucket') +@mock.patch("google.cloud.storage.client.Client.create_bucket") def test_quickstart(create_bucket_mock, 
capsys): # Unlike other quickstart tests, this one mocks out the creation # because buckets are expensive, globally-namespaced object. @@ -25,4 +25,4 @@ def test_quickstart(create_bucket_mock, capsys): quickstart.run_quickstart() - create_bucket_mock.assert_called_with('my-new-bucket') + create_bucket_mock.assert_called_with("my-new-bucket") diff --git a/requester_pays.py b/requester_pays.py deleted file mode 100644 index b98ba96f1..000000000 --- a/requester_pays.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to use requester pays features on Google -Cloud Storage buckets. - -For more information, see the documentation at -https://cloud.google.com/storage/docs/using-requester-pays. 
-""" - -import argparse - -from google.cloud import storage - - -def get_requester_pays_status(bucket_name): - """Get a bucket's requester pays metadata""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - requester_pays_status = bucket.requester_pays - if requester_pays_status: - print('Requester Pays is enabled for {}'.format(bucket_name)) - else: - print('Requester Pays is disabled for {}'.format(bucket_name)) - - -def enable_requester_pays(bucket_name): - """Enable a bucket's requesterpays metadata""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - bucket.requester_pays = True - bucket.patch() - print('Requester Pays has been enabled for {}'.format(bucket_name)) - - -def disable_requester_pays(bucket_name): - """Disable a bucket's requesterpays metadata""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - bucket.requester_pays = False - bucket.patch() - print('Requester Pays has been disabled for {}'.format(bucket_name)) - - -def download_file_requester_pays( - bucket_name, project_id, source_blob_name, destination_file_name): - """Download file using specified project as the requester""" - storage_client = storage.Client() - user_project = project_id - bucket = storage_client.bucket(bucket_name, user_project) - blob = bucket.blob(source_blob_name) - blob.download_to_filename(destination_file_name) - - print('Blob {} downloaded to {} using a requester-pays request.'.format( - source_blob_name, - destination_file_name)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('bucket_name', help='Your Cloud Storage bucket name.') - subparsers = parser.add_subparsers(dest='command') - - subparsers.add_parser( - 'check-status', help=get_requester_pays_status.__doc__) - - subparsers.add_parser( - 'enable', help=enable_requester_pays.__doc__) 
- - subparsers.add_parser( - 'disable', help=disable_requester_pays.__doc__) - - download_parser = subparsers.add_parser( - 'download', help=download_file_requester_pays.__doc__) - download_parser.add_argument('project') - download_parser.add_argument('source_blob_name') - download_parser.add_argument('destination_file_name') - - args = parser.parse_args() - - if args.command == 'check-status': - get_requester_pays_status(args.bucket_name) - elif args.command == 'enable': - enable_requester_pays(args.bucket_name) - elif args.command == 'disable': - disable_requester_pays(args.bucket_name) - elif args.command == 'download': - download_file_requester_pays( - args.bucket_name, args.project, args.source_blob_name, - args.destination_file_name) diff --git a/requester_pays_test.py b/requester_pays_test.py index 05c9a2275..70a4b2002 100644 --- a/requester_pays_test.py +++ b/requester_pays_test.py @@ -18,45 +18,46 @@ from google.cloud import storage import pytest -import requester_pays +import storage_disable_requester_pays +import storage_enable_requester_pays +import storage_download_file_requester_pays +import storage_get_requester_pays_status -BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -PROJECT = os.environ['GCLOUD_PROJECT'] +BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] +PROJECT = os.environ["GCLOUD_PROJECT"] def test_enable_requester_pays(capsys): - requester_pays.enable_requester_pays(BUCKET) + storage_enable_requester_pays.enable_requester_pays(BUCKET) out, _ = capsys.readouterr() - assert 'Requester Pays has been enabled for {}'.format(BUCKET) in out + assert "Requester Pays has been enabled for {}".format(BUCKET) in out def test_disable_requester_pays(capsys): - requester_pays.disable_requester_pays(BUCKET) + storage_disable_requester_pays.disable_requester_pays(BUCKET) out, _ = capsys.readouterr() - assert 'Requester Pays has been disabled for {}'.format(BUCKET) in out + assert "Requester Pays has been disabled for {}".format(BUCKET) in out def 
test_get_requester_pays_status(capsys): - requester_pays.get_requester_pays_status(BUCKET) + storage_get_requester_pays_status.get_requester_pays_status(BUCKET) out, _ = capsys.readouterr() - assert 'Requester Pays is disabled for {}'.format(BUCKET) in out + assert "Requester Pays is disabled for {}".format(BUCKET) in out @pytest.fixture def test_blob(): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(BUCKET) - blob = bucket.blob('storage_snippets_test_sigil') - blob.upload_from_string('Hello, is it me you\'re looking for?') + blob = bucket.blob("storage_snippets_test_sigil") + blob.upload_from_string("Hello, is it me you're looking for?") return blob def test_download_file_requester_pays(test_blob, capsys): with tempfile.NamedTemporaryFile() as dest_file: - requester_pays.download_file_requester_pays( - BUCKET, - PROJECT, - test_blob.name, - dest_file.name) + storage_download_file_requester_pays.download_file_requester_pays( + BUCKET, PROJECT, test_blob.name, dest_file.name + ) assert dest_file.read() diff --git a/snippets.py b/snippets.py deleted file mode 100644 index dcdca34ea..000000000 --- a/snippets.py +++ /dev/null @@ -1,596 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on blobs -(objects) in a Google Cloud Storage bucket. 
- -For more information, see the README.md under /storage and the documentation -at https://cloud.google.com/storage/docs. -""" - -import argparse -import datetime -import pprint - -# [START storage_upload_file] -from google.cloud import storage - -# [END storage_upload_file] - - -def create_bucket(bucket_name): - """Creates a new bucket.""" - storage_client = storage.Client() - bucket = storage_client.create_bucket(bucket_name) - print('Bucket {} created'.format(bucket.name)) - - -def delete_bucket(bucket_name): - """Deletes a bucket. The bucket must be empty.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - bucket.delete() - print('Bucket {} deleted'.format(bucket.name)) - - -def enable_default_kms_key(bucket_name, kms_key_name): - # [START storage_set_bucket_default_kms_key] - """Sets a bucket's default KMS key.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - bucket.default_kms_key_name = kms_key_name - bucket.patch() - - print('Set default KMS key for bucket {} to {}.'.format( - bucket.name, - bucket.default_kms_key_name)) - # [END storage_set_bucket_default_kms_key] - - -def get_bucket_labels(bucket_name): - """Prints out a bucket's labels.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - labels = bucket.labels - pprint.pprint(labels) - - -def add_bucket_label(bucket_name): - """Add a label to a bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - labels = bucket.labels - labels['example'] = 'label' - bucket.labels = labels - bucket.patch() - - print('Updated labels on {}.'.format(bucket.name)) - pprint.pprint(bucket.labels) - - -def remove_bucket_label(bucket_name): - """Remove a label from a bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - labels = bucket.labels - - if 'example' in labels: - del labels['example'] - - bucket.labels = 
labels - bucket.patch() - - print('Removed labels on {}.'.format(bucket.name)) - pprint.pprint(bucket.labels) - - -# [START storage_list_buckets] -def list_buckets(): - """Lists all buckets.""" - storage_client = storage.Client() - buckets = storage_client.list_buckets() - - for bucket in buckets: - print(bucket.name) -# [END storage_list_buckets] - - -def list_blobs(bucket_name): - """Lists all the blobs in the bucket.""" - storage_client = storage.Client() - - # Note: Client.list_blobs requires at least package version 1.17.0. - blobs = storage_client.list_blobs(bucket_name) - - for blob in blobs: - print(blob.name) - - -def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): - """Lists all the blobs in the bucket that begin with the prefix. - - This can be used to list all blobs in a "folder", e.g. "public/". - - The delimiter argument can be used to restrict the results to only the - "files" in the given "folder". Without the delimiter, the entire tree under - the prefix is returned. For example, given these blobs: - - a/1.txt - a/b/2.txt - - If you just specify prefix = 'a', you'll get back: - - a/1.txt - a/b/2.txt - - However, if you specify prefix='a' and delimiter='/', you'll get back: - - a/1.txt - - Additionally, the same request will return blobs.prefixes populated with: - - a/b/ - """ - storage_client = storage.Client() - - # Note: Client.list_blobs requires at least package version 1.17.0. 
- blobs = storage_client.list_blobs(bucket_name, prefix=prefix, - delimiter=delimiter) - - print('Blobs:') - for blob in blobs: - print(blob.name) - - if delimiter: - print('Prefixes:') - for prefix in blobs.prefixes: - print(prefix) - - -# [START storage_upload_file] -def upload_blob(bucket_name, source_file_name, destination_blob_name): - """Uploads a file to the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(destination_blob_name) - - blob.upload_from_filename(source_file_name) - - print('File {} uploaded to {}.'.format( - source_file_name, - destination_blob_name)) -# [END storage_upload_file] - - -def upload_blob_with_kms(bucket_name, source_file_name, destination_blob_name, - kms_key_name): - # [START storage_upload_with_kms_key] - """Uploads a file to the bucket, encrypting it with the given KMS key.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(destination_blob_name, kms_key_name=kms_key_name) - blob.upload_from_filename(source_file_name) - - print('File {} uploaded to {} with encryption key {}.'.format( - source_file_name, - destination_blob_name, - kms_key_name)) - # [END storage_upload_with_kms_key] - - -def download_blob(bucket_name, source_blob_name, destination_file_name): - """Downloads a blob from the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(source_blob_name) - - blob.download_to_filename(destination_file_name) - - print('Blob {} downloaded to {}.'.format( - source_blob_name, - destination_file_name)) - - -def delete_blob(bucket_name, blob_name): - """Deletes a blob from the bucket.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.delete() - - print('Blob {} deleted.'.format(blob_name)) - - -def blob_metadata(bucket_name, blob_name): - """Prints out a blob's 
metadata.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.get_blob(blob_name) - - print('Blob: {}'.format(blob.name)) - print('Bucket: {}'.format(blob.bucket.name)) - print('Storage class: {}'.format(blob.storage_class)) - print('ID: {}'.format(blob.id)) - print('Size: {} bytes'.format(blob.size)) - print('Updated: {}'.format(blob.updated)) - print('Generation: {}'.format(blob.generation)) - print('Metageneration: {}'.format(blob.metageneration)) - print('Etag: {}'.format(blob.etag)) - print('Owner: {}'.format(blob.owner)) - print('Component count: {}'.format(blob.component_count)) - print('Crc32c: {}'.format(blob.crc32c)) - print('md5_hash: {}'.format(blob.md5_hash)) - print('Cache-control: {}'.format(blob.cache_control)) - print('Content-type: {}'.format(blob.content_type)) - print('Content-disposition: {}'.format(blob.content_disposition)) - print('Content-encoding: {}'.format(blob.content_encoding)) - print('Content-language: {}'.format(blob.content_language)) - print('Metadata: {}'.format(blob.metadata)) - print("Temporary hold: ", - 'enabled' if blob.temporary_hold else 'disabled') - print("Event based hold: ", - 'enabled' if blob.event_based_hold else 'disabled') - if blob.retention_expiration_time: - print("retentionExpirationTime: {}" - .format(blob.retention_expiration_time)) - - -def bucket_metadata(bucket_name): - """Prints out a bucket's metadata.""" - # [START storage_get_bucket_metadata] - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - - print('ID: {}'.format(bucket.id)) - print('Name: {}'.format(bucket.name)) - print('Storage Class: {}'.format(bucket.storage_class)) - print('Location: {}'.format(bucket.location)) - print('Location Type: {}'.format(bucket.location_type)) - print('Cors: {}'.format(bucket.cors)) - print('Default Event Based Hold: {}' - .format(bucket.default_event_based_hold)) - print('Default KMS Key Name: 
{}'.format(bucket.default_kms_key_name)) - print('Metageneration: {}'.format(bucket.metageneration)) - print('Retention Effective Time: {}' - .format(bucket.retention_policy_effective_time)) - print('Retention Period: {}'.format(bucket.retention_period)) - print('Retention Policy Locked: {}'.format(bucket.retention_policy_locked)) - print('Requester Pays: {}'.format(bucket.requester_pays)) - print('Self Link: {}'.format(bucket.self_link)) - print('Time Created: {}'.format(bucket.time_created)) - print('Versioning Enabled: {}'.format(bucket.versioning_enabled)) - print('Labels:') - pprint.pprint(bucket.labels) - # [END storage_get_bucket_metadata] - - -def make_blob_public(bucket_name, blob_name): - """Makes a blob publicly accessible.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - blob.make_public() - - print('Blob {} is publicly accessible at {}'.format( - blob.name, blob.public_url)) - - -def generate_signed_url(bucket_name, blob_name): - """Generates a v2 signed URL for downloading a blob. - - Note that this method requires a service account key file. You can not use - this if you are using Application Default Credentials from Google Compute - Engine or from the Google Cloud SDK. - """ - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - url = blob.generate_signed_url( - # This URL is valid for 1 hour - expiration=datetime.timedelta(hours=1), - # Allow GET requests using this URL. - method='GET') - - print('The signed url for {} is {}'.format(blob.name, url)) - return url - - -# [START storage_generate_signed_url_v4] -def generate_download_signed_url_v4(bucket_name, blob_name): - """Generates a v4 signed URL for downloading a blob. - - Note that this method requires a service account key file. 
You can not use - this if you are using Application Default Credentials from Google Compute - Engine or from the Google Cloud SDK. - """ - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - url = blob.generate_signed_url( - version='v4', - # This URL is valid for 15 minutes - expiration=datetime.timedelta(minutes=15), - # Allow GET requests using this URL. - method='GET') - - print('Generated GET signed URL:') - print(url) - print('You can use this URL with any user agent, for example:') - print('curl \'{}\''.format(url)) - return url -# [END storage_generate_signed_url_v4] - - -# [START storage_generate_upload_signed_url_v4] -def generate_upload_signed_url_v4(bucket_name, blob_name): - """Generates a v4 signed URL for uploading a blob using HTTP PUT. - - Note that this method requires a service account key file. You can not use - this if you are using Application Default Credentials from Google Compute - Engine or from the Google Cloud SDK. - """ - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - url = blob.generate_signed_url( - version='v4', - # This URL is valid for 15 minutes - expiration=datetime.timedelta(minutes=15), - # Allow GET requests using this URL. 
- method='PUT', - content_type='application/octet-stream') - - print('Generated PUT signed URL:') - print(url) - print('You can use this URL with any user agent, for example:') - print("curl -X PUT -H 'Content-Type: application/octet-stream' " - "--upload-file my-file '{}'".format(url)) - return url -# [END storage_generate_upload_signed_url_v4] - - -def rename_blob(bucket_name, blob_name, new_name): - """Renames a blob.""" - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - blob = bucket.blob(blob_name) - - new_blob = bucket.rename_blob(blob, new_name) - - print('Blob {} has been renamed to {}'.format( - blob.name, new_blob.name)) - - -def copy_blob(bucket_name, blob_name, new_bucket_name, new_blob_name): - """Copies a blob from one bucket to another with a new name.""" - storage_client = storage.Client() - source_bucket = storage_client.get_bucket(bucket_name) - source_blob = source_bucket.blob(blob_name) - destination_bucket = storage_client.get_bucket(new_bucket_name) - - new_blob = source_bucket.copy_blob( - source_blob, destination_bucket, new_blob_name) - - print('Blob {} in bucket {} copied to blob {} in bucket {}.'.format( - source_blob.name, source_bucket.name, new_blob.name, - destination_bucket.name)) - - -def bucket_commands(args): - if args.command == 'list-buckets': - list_buckets() - elif args.command == 'bucket-metadata': - bucket_metadata(args.bucket_name) - elif args.command == 'create-bucket': - create_bucket(args.bucket_name) - elif args.command == 'enable-default-kms-key': - enable_default_kms_key(args.bucket_name, args.kms_key_name) - elif args.command == 'delete-bucket': - delete_bucket(args.bucket_name) - elif args.command == 'get-bucket-labels': - get_bucket_labels(args.bucket_name) - elif args.command == 'add-bucket-label': - add_bucket_label(args.bucket_name) - elif args.command == 'remove-bucket-label': - remove_bucket_label(args.bucket_name) - - -def blob_commands(args): - if args.command == 'list': 
- list_blobs(args.bucket_name) - elif args.command == 'list-with-prefix': - list_blobs_with_prefix(args.bucket_name, args.prefix, args.delimiter) - elif args.command == 'upload': - upload_blob( - args.bucket_name, - args.source_file_name, - args.destination_blob_name) - elif args.command == 'upload-with-kms-key': - upload_blob_with_kms( - args.bucket_name, - args.source_file_name, - args.destination_blob_name, - args.kms_key_name) - elif args.command == 'download': - download_blob( - args.bucket_name, - args.source_blob_name, - args.destination_file_name) - elif args.command == 'delete': - delete_blob(args.bucket_name, args.blob_name) - elif args.command == 'metadata': - blob_metadata(args.bucket_name, args.blob_name) - elif args.command == 'make-public': - make_blob_public(args.bucket_name, args.blob_name) - elif args.command == 'signed-url': - generate_signed_url(args.bucket_name, args.blob_name) - elif args.command == 'signed-url-download-v4': - generate_download_signed_url_v4(args.bucket_name, args.blob_name) - elif args.command == 'signed-url-upload-v4': - generate_upload_signed_url_v4(args.bucket_name, args.blob_name) - elif args.command == 'rename': - rename_blob(args.bucket_name, args.blob_name, args.new_name) - elif args.command == 'copy': - copy_blob( - args.bucket_name, - args.blob_name, - args.new_bucket_name, - args.new_blob_name) - - -def main(): - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - subparsers = parser.add_subparsers(dest='command') - - subparsers.add_parser( - 'list-buckets', help=list_buckets.__doc__) - - create_bucket_parser = subparsers.add_parser( - 'create-bucket', help=create_bucket.__doc__) - create_bucket_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - - delete_create_parser = subparsers.add_parser( - 'delete-bucket', help=delete_bucket.__doc__) - delete_create_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - 
- get_bucket_labels_parser = subparsers.add_parser( - 'get-bucket-labels', help=get_bucket_labels.__doc__) - get_bucket_labels_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - - add_bucket_label_parser = subparsers.add_parser( - 'add-bucket-label', help=add_bucket_label.__doc__) - add_bucket_label_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - - remove_bucket_label_parser = subparsers.add_parser( - 'remove-bucket-label', help=remove_bucket_label.__doc__) - remove_bucket_label_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - - list_blobs_parser = subparsers.add_parser( - 'list', help=list_blobs.__doc__) - list_blobs_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - - bucket_metadata_parser = subparsers.add_parser( - 'bucket-metadata', help=bucket_metadata.__doc__) - bucket_metadata_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - - list_with_prefix_parser = subparsers.add_parser( - 'list-with-prefix', help=list_blobs_with_prefix.__doc__) - list_with_prefix_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - list_with_prefix_parser.add_argument('prefix') - list_with_prefix_parser.add_argument('--delimiter', default=None) - - upload_parser = subparsers.add_parser( - 'upload', help=upload_blob.__doc__) - upload_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - upload_parser.add_argument('source_file_name') - upload_parser.add_argument('destination_blob_name') - - enable_default_kms_parser = subparsers.add_parser( - 'enable-default-kms-key', help=enable_default_kms_key.__doc__) - enable_default_kms_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - enable_default_kms_parser.add_argument('kms_key_name') - - upload_kms_parser = subparsers.add_parser( - 'upload-with-kms-key', help=upload_blob_with_kms.__doc__) - upload_kms_parser.add_argument( - 'bucket_name', help='Your 
cloud storage bucket.') - upload_kms_parser.add_argument('source_file_name') - upload_kms_parser.add_argument('destination_blob_name') - upload_kms_parser.add_argument('kms_key_name') - - download_parser = subparsers.add_parser( - 'download', help=download_blob.__doc__) - download_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - download_parser.add_argument('source_blob_name') - download_parser.add_argument('destination_file_name') - - delete_parser = subparsers.add_parser( - 'delete', help=delete_blob.__doc__) - delete_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - delete_parser.add_argument('blob_name') - - metadata_parser = subparsers.add_parser( - 'metadata', help=blob_metadata.__doc__) - metadata_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - metadata_parser.add_argument('blob_name') - - make_public_parser = subparsers.add_parser( - 'make-public', help=make_blob_public.__doc__) - make_public_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - make_public_parser.add_argument('blob_name') - - signed_url_parser = subparsers.add_parser( - 'signed-url', help=generate_signed_url.__doc__) - signed_url_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - signed_url_parser.add_argument('blob_name') - - signed_url_download_v4_parser = subparsers.add_parser( - 'signed-url-download-v4', help=generate_download_signed_url_v4.__doc__) - signed_url_download_v4_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - signed_url_download_v4_parser.add_argument('blob_name') - - signed_url_upload_v4_parser = subparsers.add_parser( - 'signed-url-upload-v4', help=generate_upload_signed_url_v4.__doc__) - signed_url_upload_v4_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - signed_url_upload_v4_parser.add_argument('blob_name') - - rename_parser = subparsers.add_parser( - 'rename', help=rename_blob.__doc__) - 
rename_parser.add_argument( - 'bucket_name', help='Your cloud storage bucket.') - rename_parser.add_argument('blob_name') - rename_parser.add_argument('new_name') - - copy_parser = subparsers.add_parser('copy', help=rename_blob.__doc__) - copy_parser.add_argument('bucket_name', help='Your cloud storage bucket.') - copy_parser.add_argument('blob_name') - copy_parser.add_argument('new_bucket_name') - copy_parser.add_argument('new_blob_name') - - args = parser.parse_args() - bucket_commands(args) - blob_commands(args) - - -if __name__ == '__main__': - main() diff --git a/snippets_test.py b/snippets_test.py index a03609743..cef211363 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -21,16 +21,34 @@ import pytest import requests -import snippets - -BUCKET = os.environ['CLOUD_STORAGE_BUCKET'] -KMS_KEY = os.environ['CLOUD_KMS_KEY'] +import storage_copy_file +import storage_add_bucket_label +import storage_delete_file +import storage_download_file +import storage_get_bucket_labels +import storage_get_bucket_metadata +import storage_get_metadata +import storage_list_buckets +import storage_list_files_with_prefix +import storage_list_files +import storage_make_public +import storage_remove_bucket_label +import storage_move_file +import storage_upload_file +import storage_upload_with_kms_key +import storage_generate_signed_url_v2 +import storage_generate_signed_url_v4 +import storage_generate_upload_signed_url_v4 +import storage_set_bucket_default_kms_key + +BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] +KMS_KEY = os.environ["CLOUD_KMS_KEY"] def test_enable_default_kms_key(): - snippets.enable_default_kms_key( - bucket_name=BUCKET, - kms_key_name=KMS_KEY) + storage_set_bucket_default_kms_key.enable_default_kms_key( + bucket_name=BUCKET, kms_key_name=KMS_KEY + ) time.sleep(2) # Let change propagate as needed bucket = storage.Client().get_bucket(BUCKET) assert bucket.default_kms_key_name.startswith(KMS_KEY) @@ -39,138 +57,133 @@ def test_enable_default_kms_key(): def 
test_get_bucket_labels(): - snippets.get_bucket_labels(BUCKET) + storage_get_bucket_labels.get_bucket_labels(BUCKET) def test_add_bucket_label(capsys): - snippets.add_bucket_label(BUCKET) + storage_add_bucket_label.add_bucket_label(BUCKET) out, _ = capsys.readouterr() - assert 'example' in out + assert "example" in out def test_remove_bucket_label(capsys): - snippets.add_bucket_label(BUCKET) - snippets.remove_bucket_label(BUCKET) + storage_add_bucket_label.add_bucket_label(BUCKET) + storage_remove_bucket_label.remove_bucket_label(BUCKET) out, _ = capsys.readouterr() - assert 'Removed labels' in out + assert "Removed labels" in out @pytest.fixture def test_blob(): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(BUCKET) - blob = bucket.blob('storage_snippets_test_sigil') - blob.upload_from_string('Hello, is it me you\'re looking for?') + blob = bucket.blob("storage_snippets_test_sigil") + blob.upload_from_string("Hello, is it me you're looking for?") return blob def test_list_buckets(capsys): - snippets.list_buckets() + storage_list_buckets.list_buckets() out, _ = capsys.readouterr() assert BUCKET in out def test_list_blobs(test_blob, capsys): - snippets.list_blobs(BUCKET) + storage_list_files.list_blobs(BUCKET) out, _ = capsys.readouterr() assert test_blob.name in out def test_bucket_metadata(capsys): - snippets.bucket_metadata(BUCKET) + storage_get_bucket_metadata.bucket_metadata(BUCKET) out, _ = capsys.readouterr() assert BUCKET in out def test_list_blobs_with_prefix(test_blob, capsys): - snippets.list_blobs_with_prefix( - BUCKET, - prefix='storage_snippets') + storage_list_files_with_prefix.list_blobs_with_prefix( + BUCKET, prefix="storage_snippets" + ) out, _ = capsys.readouterr() assert test_blob.name in out def test_upload_blob(): with tempfile.NamedTemporaryFile() as source_file: - source_file.write(b'test') + source_file.write(b"test") - snippets.upload_blob( - BUCKET, - source_file.name, - 'test_upload_blob') + 
storage_upload_file.upload_blob( + BUCKET, source_file.name, "test_upload_blob" + ) def test_upload_blob_with_kms(): with tempfile.NamedTemporaryFile() as source_file: - source_file.write(b'test') - snippets.upload_blob_with_kms( - BUCKET, - source_file.name, - 'test_upload_blob_encrypted', - KMS_KEY) + source_file.write(b"test") + storage_upload_with_kms_key.upload_blob_with_kms( + BUCKET, source_file.name, "test_upload_blob_encrypted", KMS_KEY + ) bucket = storage.Client().bucket(BUCKET) - kms_blob = bucket.get_blob('test_upload_blob_encrypted') + kms_blob = bucket.get_blob("test_upload_blob_encrypted") assert kms_blob.kms_key_name.startswith(KMS_KEY) def test_download_blob(test_blob): with tempfile.NamedTemporaryFile() as dest_file: - snippets.download_blob( - BUCKET, - test_blob.name, - dest_file.name) + storage_download_file.download_blob( + BUCKET, test_blob.name, dest_file.name + ) assert dest_file.read() def test_blob_metadata(test_blob, capsys): - snippets.blob_metadata(BUCKET, test_blob.name) + storage_get_metadata.blob_metadata(BUCKET, test_blob.name) out, _ = capsys.readouterr() assert test_blob.name in out def test_delete_blob(test_blob): - snippets.delete_blob( - BUCKET, - test_blob.name) + storage_delete_file.delete_blob(BUCKET, test_blob.name) def test_make_blob_public(test_blob): - snippets.make_blob_public( - BUCKET, - test_blob.name) + storage_make_public.make_blob_public(BUCKET, test_blob.name) r = requests.get(test_blob.public_url) - assert r.text == 'Hello, is it me you\'re looking for?' + assert r.text == "Hello, is it me you're looking for?" def test_generate_signed_url(test_blob, capsys): - url = snippets.generate_signed_url( - BUCKET, - test_blob.name) + url = storage_generate_signed_url_v2.generate_signed_url( + BUCKET, test_blob.name + ) r = requests.get(url) - assert r.text == 'Hello, is it me you\'re looking for?' + assert r.text == "Hello, is it me you're looking for?" 
def test_generate_download_signed_url_v4(test_blob, capsys): - url = snippets.generate_download_signed_url_v4( - BUCKET, - test_blob.name) + url = storage_generate_signed_url_v4.generate_download_signed_url_v4( + BUCKET, test_blob.name + ) r = requests.get(url) - assert r.text == 'Hello, is it me you\'re looking for?' + assert r.text == "Hello, is it me you're looking for?" def test_generate_upload_signed_url_v4(capsys): - blob_name = 'storage_snippets_test_upload' - content = b'Uploaded via v4 signed url' - url = snippets.generate_upload_signed_url_v4( - BUCKET, - blob_name) - - requests.put(url, data=content, headers={ - 'content-type': 'application/octet-stream'}) + blob_name = "storage_snippets_test_upload" + content = b"Uploaded via v4 signed url" + url = storage_generate_upload_signed_url_v4.generate_upload_signed_url_v4( + BUCKET, blob_name + ) + + requests.put( + url, + data=content, + headers={"content-type": "application/octet-stream"}, + ) bucket = storage.Client().bucket(BUCKET) blob = bucket.blob(blob_name) @@ -181,13 +194,15 @@ def test_rename_blob(test_blob): bucket = storage.Client().bucket(BUCKET) try: - bucket.delete_blob('test_rename_blob') + bucket.delete_blob("test_rename_blob") except google.cloud.exceptions.NotFound: pass - snippets.rename_blob(bucket.name, test_blob.name, 'test_rename_blob') + storage_move_file.rename_blob( + bucket.name, test_blob.name, "test_rename_blob" + ) - assert bucket.get_blob('test_rename_blob') is not None + assert bucket.get_blob("test_rename_blob") is not None assert bucket.get_blob(test_blob.name) is None @@ -195,12 +210,13 @@ def test_copy_blob(test_blob): bucket = storage.Client().bucket(BUCKET) try: - bucket.delete_blob('test_copy_blob') + bucket.delete_blob("test_copy_blob") except google.cloud.exceptions.NotFound: pass - snippets.copy_blob( - bucket.name, test_blob.name, bucket.name, 'test_copy_blob') + storage_copy_file.copy_blob( + bucket.name, test_blob.name, bucket.name, "test_copy_blob" + )
- assert bucket.get_blob('test_copy_blob') is not None + assert bucket.get_blob("test_copy_blob") is not None assert bucket.get_blob(test_blob.name) is not None diff --git a/storage_activate_hmac_key.py b/storage_activate_hmac_key.py new file mode 100644 index 000000000..e77cd8066 --- /dev/null +++ b/storage_activate_hmac_key.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_activate_hmac_key] +from google.cloud import storage + + +def activate_key(access_id, project_id): + """ + Activate the HMAC key with the given access ID. 
+ """ + # project_id = "Your Google Cloud project ID" + # access_id = "ID of an inactive HMAC key" + + storage_client = storage.Client(project=project_id) + + hmac_key = storage_client.get_hmac_key_metadata( + access_id, project_id=project_id + ) + hmac_key.state = "ACTIVE" + hmac_key.update() + + print("The HMAC key metadata is:") + print("Service Account Email: {}".format(hmac_key.service_account_email)) + print("Key ID: {}".format(hmac_key.id)) + print("Access ID: {}".format(hmac_key.access_id)) + print("Project ID: {}".format(hmac_key.project)) + print("State: {}".format(hmac_key.state)) + print("Created At: {}".format(hmac_key.time_created)) + print("Updated At: {}".format(hmac_key.updated)) + print("Etag: {}".format(hmac_key.etag)) + return hmac_key + + +# [END storage_activate_hmac_key] + +if __name__ == "__main__": + activate_key(access_id=sys.argv[1], project_id=sys.argv[2]) diff --git a/storage_add_bucket_default_owner.py b/storage_add_bucket_default_owner.py new file mode 100644 index 000000000..932b1328f --- /dev/null +++ b/storage_add_bucket_default_owner.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_add_bucket_default_owner] +from google.cloud import storage + + +def add_bucket_default_owner(bucket_name, user_email): + """Adds a user as an owner in the given bucket's default object access + control list.""" + # bucket_name = "your-bucket-name" + # user_email = "name@example.com" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # Reload fetches the current ACL from Cloud Storage. + bucket.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # grant access to different types of entities. You can also use + # `grant_read` or `grant_write` to grant different roles. + bucket.default_object_acl.user(user_email).grant_owner() + bucket.default_object_acl.save() + + print( + "Added user {} as an owner in the default acl on bucket {}.".format( + user_email, bucket_name + ) + ) + + +# [END storage_add_bucket_default_owner] + +if __name__ == "__main__": + add_bucket_default_owner(bucket_name=sys.argv[1], user_email=sys.argv[2]) diff --git a/storage_add_bucket_iam_member.py b/storage_add_bucket_iam_member.py new file mode 100644 index 000000000..bd26405cf --- /dev/null +++ b/storage_add_bucket_iam_member.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_add_bucket_iam_member] +from google.cloud import storage + + +def add_bucket_iam_member(bucket_name, role, member): + """Add a new member to an IAM Policy""" + # bucket_name = "your-bucket-name" + # role = "IAM role, e.g. roles/storage.objectViewer" + # member = "IAM identity, e.g. user:name@example.com" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy() + + policy[role].add(member) + + bucket.set_iam_policy(policy) + + print("Added {} with role {} to {}.".format(member, role, bucket_name)) + + +# [END storage_add_bucket_iam_member] + + +if __name__ == "__main__": + add_bucket_iam_member( + bucket_name=sys.argv[1], role=sys.argv[2], member=sys.argv[3] + ) diff --git a/storage_add_bucket_label.py b/storage_add_bucket_label.py new file mode 100644 index 000000000..a021036c6 --- /dev/null +++ b/storage_add_bucket_label.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +import sys + +# [START storage_add_bucket_label] +from google.cloud import storage +import pprint + + +def add_bucket_label(bucket_name): + """Add a label to a bucket.""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + labels = bucket.labels + labels["example"] = "label" + bucket.labels = labels + bucket.patch() + + print("Updated labels on {}.".format(bucket.name)) + pprint.pprint(bucket.labels) + + +# [END storage_add_bucket_label] + +if __name__ == "__main__": + add_bucket_label(bucket_name=sys.argv[1]) diff --git a/storage_add_bucket_owner.py b/storage_add_bucket_owner.py new file mode 100644 index 000000000..acdb60dc5 --- /dev/null +++ b/storage_add_bucket_owner.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_add_bucket_owner] +from google.cloud import storage + + +def add_bucket_owner(bucket_name, user_email): + """Adds a user as an owner on the given bucket.""" + # bucket_name = "your-bucket-name" + # user_email = "name@example.com" + + storage_client = storage.Client() + + bucket = storage_client.bucket(bucket_name) + + # Reload fetches the current ACL from Cloud Storage. + bucket.acl.reload() + + # You can also use `group()`, `domain()`, `all_authenticated()` and `all()` + # to grant access to different types of entities. 
+ # You can also use `grant_read()` or `grant_write()` to grant different + # roles. + bucket.acl.user(user_email).grant_owner() + bucket.acl.save() + + print( + "Added user {} as an owner on bucket {}.".format( + user_email, bucket_name + ) + ) + + +# [END storage_add_bucket_owner] + +if __name__ == "__main__": + add_bucket_owner(bucket_name=sys.argv[1], user_email=sys.argv[2]) diff --git a/storage_add_file_owner.py b/storage_add_file_owner.py new file mode 100644 index 000000000..9e9342590 --- /dev/null +++ b/storage_add_file_owner.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_add_file_owner] +from google.cloud import storage + + +def add_blob_owner(bucket_name, blob_name, user_email): + """Adds a user as an owner on the given blob.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # user_email = "name@example.com" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + # Reload fetches the current ACL from Cloud Storage. + blob.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # grant access to different types of entities. You can also use + # `grant_read` or `grant_write` to grant different roles. 
+ blob.acl.user(user_email).grant_owner() + blob.acl.save() + + print( + "Added user {} as an owner on blob {} in bucket {}.".format( + user_email, blob_name, bucket_name + ) + ) + + +# [END storage_add_file_owner] + +if __name__ == "__main__": + add_blob_owner( + bucket_name=sys.argv[1], blob_name=sys.argv[2], user_email=sys.argv[3], + ) diff --git a/storage_copy_file.py b/storage_copy_file.py new file mode 100644 index 000000000..5d36aa94b --- /dev/null +++ b/storage_copy_file.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_copy_file] +from google.cloud import storage + + +def copy_blob( + bucket_name, blob_name, destination_bucket_name, destination_blob_name +): + """Copies a blob from one bucket to another with a new name.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # destination_bucket_name = "destination-bucket-name" + # destination_blob_name = "destination-object-name" + + storage_client = storage.Client() + + source_bucket = storage_client.bucket(bucket_name) + source_blob = source_bucket.blob(blob_name) + destination_bucket = storage_client.bucket(destination_bucket_name) + + blob_copy = source_bucket.copy_blob( + source_blob, destination_bucket, destination_blob_name + ) + + print( + "Blob {} in bucket {} copied to blob {} in bucket {}.".format( + source_blob.name, + source_bucket.name, + blob_copy.name, + destination_bucket.name, + ) + ) + + +# [END storage_copy_file] + +if __name__ == "__main__": + copy_blob( + bucket_name=sys.argv[1], + blob_name=sys.argv[2], + destination_bucket_name=sys.argv[3], + destination_blob_name=sys.argv[4], + ) diff --git a/storage_create_bucket.py b/storage_create_bucket.py new file mode 100644 index 000000000..aaee9e234 --- /dev/null +++ b/storage_create_bucket.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_create_bucket] +from google.cloud import storage + + +def create_bucket(bucket_name): + """Creates a new bucket.""" + # bucket_name = "your-new-bucket-name" + + storage_client = storage.Client() + + bucket = storage_client.create_bucket(bucket_name) + + print("Bucket {} created".format(bucket.name)) + + +# [END storage_create_bucket] + +if __name__ == "__main__": + create_bucket(bucket_name=sys.argv[1]) diff --git a/storage_create_hmac_key.py b/storage_create_hmac_key.py new file mode 100644 index 000000000..33c4a98c2 --- /dev/null +++ b/storage_create_hmac_key.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_create_hmac_key] +from google.cloud import storage + + +def create_key(project_id, service_account_email): + """ + Create a new HMAC key using the given project and service account. 
+ """ + # project_id = 'Your Google Cloud project ID' + # service_account_email = 'Service account used to generate HMAC key' + + storage_client = storage.Client(project=project_id) + + hmac_key, secret = storage_client.create_hmac_key( + service_account_email=service_account_email, project_id=project_id + ) + + print("The base64 encoded secret is {}".format(secret)) + print("Do not miss that secret, there is no API to recover it.") + print("The HMAC key metadata is:") + print("Service Account Email: {}".format(hmac_key.service_account_email)) + print("Key ID: {}".format(hmac_key.id)) + print("Access ID: {}".format(hmac_key.access_id)) + print("Project ID: {}".format(hmac_key.project)) + print("State: {}".format(hmac_key.state)) + print("Created At: {}".format(hmac_key.time_created)) + print("Updated At: {}".format(hmac_key.updated)) + print("Etag: {}".format(hmac_key.etag)) + return hmac_key + + +# [END storage_create_hmac_key] + +if __name__ == "__main__": + create_key(project_id=sys.argv[1], service_account_email=sys.argv[2]) diff --git a/storage_deactivate_hmac_key.py b/storage_deactivate_hmac_key.py new file mode 100644 index 000000000..389efb998 --- /dev/null +++ b/storage_deactivate_hmac_key.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_deactivate_hmac_key] +from google.cloud import storage + + +def deactivate_key(access_id, project_id): + """ + Deactivate the HMAC key with the given access ID. + """ + # project_id = "Your Google Cloud project ID" + # access_id = "ID of an active HMAC key" + + storage_client = storage.Client(project=project_id) + + hmac_key = storage_client.get_hmac_key_metadata( + access_id, project_id=project_id + ) + hmac_key.state = "INACTIVE" + hmac_key.update() + + print("The HMAC key is now inactive.") + print("The HMAC key metadata is:") + print("Service Account Email: {}".format(hmac_key.service_account_email)) + print("Key ID: {}".format(hmac_key.id)) + print("Access ID: {}".format(hmac_key.access_id)) + print("Project ID: {}".format(hmac_key.project)) + print("State: {}".format(hmac_key.state)) + print("Created At: {}".format(hmac_key.time_created)) + print("Updated At: {}".format(hmac_key.updated)) + print("Etag: {}".format(hmac_key.etag)) + return hmac_key + + +# [END storage_deactivate_hmac_key] + +if __name__ == "__main__": + deactivate_key(access_id=sys.argv[1], project_id=sys.argv[2]) diff --git a/storage_delete_bucket.py b/storage_delete_bucket.py new file mode 100644 index 000000000..b3e264c74 --- /dev/null +++ b/storage_delete_bucket.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_delete_bucket] +from google.cloud import storage + + +def delete_bucket(bucket_name): + """Deletes a bucket. The bucket must be empty.""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.delete() + + print("Bucket {} deleted".format(bucket.name)) + + +# [END storage_delete_bucket] + +if __name__ == "__main__": + delete_bucket(bucket_name=sys.argv[1]) diff --git a/storage_delete_file.py b/storage_delete_file.py new file mode 100644 index 000000000..1105f3725 --- /dev/null +++ b/storage_delete_file.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_delete_file] +from google.cloud import storage + + +def delete_blob(bucket_name, blob_name): + """Deletes a blob from the bucket.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + + storage_client = storage.Client() + + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + blob.delete() + + print("Blob {} deleted.".format(blob_name)) + + +# [END storage_delete_file] + +if __name__ == "__main__": + delete_blob(bucket_name=sys.argv[1], blob_name=sys.argv[2]) diff --git a/storage_delete_hmac_key.py b/storage_delete_hmac_key.py new file mode 100644 index 000000000..403dc193b --- /dev/null +++ b/storage_delete_hmac_key.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_delete_hmac_key] +from google.cloud import storage + + +def delete_key(access_id, project_id): + """ + Delete the HMAC key with the given access ID. Key must have state INACTIVE + in order to succeed. + """ + # project_id = "Your Google Cloud project ID" + # access_id = "ID of an HMAC key (must be in INACTIVE state)" + + storage_client = storage.Client(project=project_id) + + hmac_key = storage_client.get_hmac_key_metadata( + access_id, project_id=project_id + ) + hmac_key.delete() + + print( + "The key is deleted, though it may still appear in list_hmac_keys()" + " results." 
+ ) + + +# [END storage_delete_hmac_key] + +if __name__ == "__main__": + delete_key(access_id=sys.argv[1], project_id=sys.argv[2]) diff --git a/storage_disable_default_event_based_hold.py b/storage_disable_default_event_based_hold.py new file mode 100644 index 000000000..dff3ed3c1 --- /dev/null +++ b/storage_disable_default_event_based_hold.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_disable_default_event_based_hold] +from google.cloud import storage + + +def disable_default_event_based_hold(bucket_name): + """Disables the default event based hold on a given bucket""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.default_event_based_hold = False + bucket.patch() + + print("Default event based hold was disabled for {}".format(bucket_name)) + + +# [END storage_disable_default_event_based_hold] + + +if __name__ == "__main__": + disable_default_event_based_hold(bucket_name=sys.argv[1]) diff --git a/storage_disable_requester_pays.py b/storage_disable_requester_pays.py new file mode 100644 index 000000000..c49cc28ea --- /dev/null +++ b/storage_disable_requester_pays.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_disable_requester_pays] +from google.cloud import storage + + +def disable_requester_pays(bucket_name): + """Disable a bucket's requesterpays metadata""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.requester_pays = False + bucket.patch() + + print("Requester Pays has been disabled for {}".format(bucket_name)) + + +# [END storage_disable_requester_pays] + + +if __name__ == "__main__": + disable_requester_pays(bucket_name=sys.argv[1]) diff --git a/storage_disable_uniform_bucket_level_access.py b/storage_disable_uniform_bucket_level_access.py new file mode 100644 index 000000000..4f4691611 --- /dev/null +++ b/storage_disable_uniform_bucket_level_access.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_disable_uniform_bucket_level_access] +from google.cloud import storage + + +def disable_uniform_bucket_level_access(bucket_name): + """Disable uniform bucket-level access for a bucket""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + bucket.iam_configuration.uniform_bucket_level_access_enabled = False + bucket.patch() + + print( + "Uniform bucket-level access was disabled for {}.".format(bucket.name) + ) + + +# [END storage_disable_uniform_bucket_level_access] + +if __name__ == "__main__": + disable_uniform_bucket_level_access(bucket_name=sys.argv[1]) diff --git a/storage_download_encrypted_file.py b/storage_download_encrypted_file.py new file mode 100644 index 000000000..c0b3ad560 --- /dev/null +++ b/storage_download_encrypted_file.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_download_encrypted_file] +from google.cloud import storage +import base64 + + +def download_encrypted_blob( + bucket_name, + source_blob_name, + destination_file_name, + base64_encryption_key, +): + """Downloads a previously-encrypted blob from Google Cloud Storage. + + The encryption key provided must be the same key provided when uploading + the blob. 
+ """ + # bucket_name = "your-bucket-name" + # source_blob_name = "storage-object-name" + # destination_file_name = "local/path/to/file" + # base64_encryption_key = "base64-encoded-encryption-key" + + storage_client = storage.Client() + + bucket = storage_client.bucket(bucket_name) + + # Encryption key must be an AES256 key represented as a bytestring with + # 32 bytes. Since it's passed in as a base64 encoded string, it needs + # to be decoded. + encryption_key = base64.b64decode(base64_encryption_key) + blob = bucket.blob(source_blob_name, encryption_key=encryption_key) + + blob.download_to_filename(destination_file_name) + + print( + "Blob {} downloaded to {}.".format( + source_blob_name, destination_file_name + ) + ) + + +# [END storage_download_encrypted_file] + +if __name__ == "__main__": + download_encrypted_blob( + bucket_name=sys.argv[1], + source_blob_name=sys.argv[2], + destination_file_name=sys.argv[3], + base64_encryption_key=sys.argv[4], + ) diff --git a/storage_download_file.py b/storage_download_file.py new file mode 100644 index 000000000..ffbe885a8 --- /dev/null +++ b/storage_download_file.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_download_file] +from google.cloud import storage + + +def download_blob(bucket_name, source_blob_name, destination_file_name): + """Downloads a blob from the bucket.""" + # bucket_name = "your-bucket-name" + # source_blob_name = "storage-object-name" + # destination_file_name = "local/path/to/file" + + storage_client = storage.Client() + + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(source_blob_name) + blob.download_to_filename(destination_file_name) + + print( + "Blob {} downloaded to {}.".format( + source_blob_name, destination_file_name + ) + ) + + +# [END storage_download_file] + +if __name__ == "__main__": + download_blob( + bucket_name=sys.argv[1], + source_blob_name=sys.argv[2], + destination_file_name=sys.argv[3], + ) diff --git a/storage_download_file_requester_pays.py b/storage_download_file_requester_pays.py new file mode 100644 index 000000000..babbafda7 --- /dev/null +++ b/storage_download_file_requester_pays.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import sys

# [START storage_download_file_requester_pays]
from google.cloud import storage


def download_file_requester_pays(
    bucket_name, project_id, source_blob_name, destination_file_name
):
    """Download an object, billing the request to the given project.

    Args:
        bucket_name: name of the (requester-pays) bucket.
        project_id: project that is charged for the request.
        source_blob_name: name of the object to download.
        destination_file_name: local path the contents are written to.
    """
    # bucket_name = "your-bucket-name"
    # project_id = "your-project-id"
    # source_blob_name = "source-blob-name"
    # destination_file_name = "local-destination-file-name"

    client = storage.Client()

    # user_project identifies the project billed for this request.
    paying_bucket = client.bucket(bucket_name, user_project=project_id)
    paying_bucket.blob(source_blob_name).download_to_filename(
        destination_file_name
    )

    print(
        "Blob {} downloaded to {} using a requester-pays request.".format(
            source_blob_name, destination_file_name
        )
    )


# [END storage_download_file_requester_pays]

if __name__ == "__main__":
    download_file_requester_pays(
        bucket_name=sys.argv[1],
        project_id=sys.argv[2],
        source_blob_name=sys.argv[3],
        destination_file_name=sys.argv[4],
    )
import sys

# [START storage_enable_default_event_based_hold]
from google.cloud import storage


def enable_default_event_based_hold(bucket_name):
    """Turn on the default event-based hold for a bucket.

    Args:
        bucket_name: name of the bucket to update, e.g. "my-bucket".
    """
    client = storage.Client()

    # bucket() builds a local reference; patch() sends only the changed
    # field to the service.
    target = client.bucket(bucket_name)
    target.default_event_based_hold = True
    target.patch()

    print("Default event based hold was enabled for {}".format(bucket_name))


# [END storage_enable_default_event_based_hold]


if __name__ == "__main__":
    enable_default_event_based_hold(bucket_name=sys.argv[1])
import sys

# [START storage_enable_requester_pays]
from google.cloud import storage


def enable_requester_pays(bucket_name):
    """Turn on the requester-pays flag for a bucket.

    Args:
        bucket_name: name of the bucket to update, e.g. "my-bucket".
    """
    client = storage.Client()

    # get_bucket performs a GET first, so the patch is issued against the
    # bucket's current metadata.
    target = client.get_bucket(bucket_name)
    target.requester_pays = True
    target.patch()

    print("Requester Pays has been enabled for {}".format(bucket_name))


# [END storage_enable_requester_pays]

if __name__ == "__main__":
    enable_requester_pays(bucket_name=sys.argv[1])
# [START storage_enable_uniform_bucket_level_access]
from google.cloud import storage


def enable_uniform_bucket_level_access(bucket_name):
    """Enable uniform bucket-level access for a bucket.

    Args:
        bucket_name: name of the bucket to update, e.g. "my-bucket".
    """
    storage_client = storage.Client()
    bucket = storage_client.get_bucket(bucket_name)

    # Flip the IAM configuration flag and persist it with a patch request.
    bucket.iam_configuration.uniform_bucket_level_access_enabled = True
    bucket.patch()

    print(
        "Uniform bucket-level access was enabled for {}.".format(bucket.name)
    )


# [END storage_enable_uniform_bucket_level_access]


# [START storage_generate_encryption_key]
import base64
import os


def generate_encryption_key():
    """Generate a 256-bit (32-byte) AES key and print it base64-encoded.

    This is included for demonstration purposes. You should generate your
    own key. Please remember that encryption keys should be handled with a
    comprehensive security policy.

    Returns:
        The base64-encoded key as a str. Returning the value (instead of
        only printing it) keeps this sample consistent with the other
        generate_* samples, which return their result for programmatic use.
    """
    key = os.urandom(32)
    encoded_key = base64.b64encode(key).decode("utf-8")

    print("Base 64 encoded encryption key: {}".format(encoded_key))
    return encoded_key


# [END storage_generate_encryption_key]


# [START storage_generate_signed_url_v2]
from google.cloud import storage
import datetime


def generate_signed_url(bucket_name, blob_name):
    """Generate a v2 signed URL for downloading a blob.

    Note that this method requires a service account key file. You can not
    use this if you are using Application Default Credentials from Google
    Compute Engine or from the Google Cloud SDK.

    Args:
        bucket_name: name of the bucket containing the blob.
        blob_name: name of the blob the URL grants access to.

    Returns:
        The signed URL string.
    """
    # bucket_name = 'your-bucket-name'
    # blob_name = 'your-object-name'

    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(blob_name)

    url = blob.generate_signed_url(
        # This URL is valid for 1 hour
        expiration=datetime.timedelta(hours=1),
        # Allow GET requests using this URL.
        method="GET",
    )

    print("The signed url for {} is {}".format(blob.name, url))
    return url


# [END storage_generate_signed_url_v2]


# [START storage_generate_signed_url_v4]
from google.cloud import storage
import datetime


def generate_download_signed_url_v4(bucket_name, blob_name):
    """Generate a v4 signed URL for downloading a blob via HTTP GET.

    Note that this method requires a service account key file. You can not
    use this if you are using Application Default Credentials from Google
    Compute Engine or from the Google Cloud SDK.

    Args:
        bucket_name: name of the bucket containing the blob.
        blob_name: name of the blob the URL grants access to.

    Returns:
        The signed URL string.
    """
    # bucket_name = 'your-bucket-name'
    # blob_name = 'your-object-name'

    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(blob_name)

    url = blob.generate_signed_url(
        version="v4",
        # This URL is valid for 15 minutes
        expiration=datetime.timedelta(minutes=15),
        # Allow GET requests using this URL.
        method="GET",
    )

    print("Generated GET signed URL:")
    print(url)
    print("You can use this URL with any user agent, for example:")
    print("curl '{}'".format(url))
    return url


# [END storage_generate_signed_url_v4]


# [START storage_generate_upload_signed_url_v4]
from google.cloud import storage
import datetime


def generate_upload_signed_url_v4(bucket_name, blob_name):
    """Generate a v4 signed URL for uploading a blob via HTTP PUT.

    Note that this method requires a service account key file. You can not
    use this if you are using Application Default Credentials from Google
    Compute Engine or from the Google Cloud SDK.

    Args:
        bucket_name: name of the bucket the upload targets.
        blob_name: name the uploaded object will be given.

    Returns:
        The signed URL string. Uploads must send
        Content-Type: application/octet-stream, as shown in the printed
        curl example, because that content type is part of the signature.
    """
    # bucket_name = 'your-bucket-name'
    # blob_name = 'your-object-name'

    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(blob_name)

    url = blob.generate_signed_url(
        version="v4",
        # This URL is valid for 15 minutes
        expiration=datetime.timedelta(minutes=15),
        # Allow PUT requests using this URL. (The original comment said
        # GET, which contradicted method="PUT" below.)
        method="PUT",
        content_type="application/octet-stream",
    )

    print("Generated PUT signed URL:")
    print(url)
    print("You can use this URL with any user agent, for example:")
    print(
        "curl -X PUT -H 'Content-Type: application/octet-stream' "
        "--upload-file my-file '{}'".format(url)
    )
    return url


# [END storage_generate_upload_signed_url_v4]
import sys

# [START storage_get_bucket_labels]
from google.cloud import storage
import pprint


def get_bucket_labels(bucket_name):
    """Pretty-print the labels attached to a bucket.

    Args:
        bucket_name: name of the bucket to inspect, e.g. 'your-bucket-name'.
    """
    client = storage.Client()

    # Labels are exposed as a plain dict on the fetched bucket resource.
    pprint.pprint(client.get_bucket(bucket_name).labels)


# [END storage_get_bucket_labels]

if __name__ == "__main__":
    get_bucket_labels(bucket_name=sys.argv[1])
import sys

# [START storage_get_bucket_metadata]
from google.cloud import storage
import pprint


def bucket_metadata(bucket_name):
    """Prints out a bucket's metadata.

    Fetches the bucket resource with a GET request and prints the most
    commonly inspected fields, one per line, followed by the labels dict.
    Output order is part of the sample's observable behavior.

    Args:
        bucket_name: name of the bucket to inspect.
    """
    # bucket_name = 'your-bucket-name'

    storage_client = storage.Client()
    # get_bucket issues the API call; all attributes below are read from
    # the returned resource without further requests.
    bucket = storage_client.get_bucket(bucket_name)

    print("ID: {}".format(bucket.id))
    print("Name: {}".format(bucket.name))
    print("Storage Class: {}".format(bucket.storage_class))
    print("Location: {}".format(bucket.location))
    print("Location Type: {}".format(bucket.location_type))
    print("Cors: {}".format(bucket.cors))
    print(
        "Default Event Based Hold: {}".format(bucket.default_event_based_hold)
    )
    print("Default KMS Key Name: {}".format(bucket.default_kms_key_name))
    print("Metageneration: {}".format(bucket.metageneration))
    print(
        "Retention Effective Time: {}".format(
            bucket.retention_policy_effective_time
        )
    )
    print("Retention Period: {}".format(bucket.retention_period))
    print("Retention Policy Locked: {}".format(bucket.retention_policy_locked))
    print("Requester Pays: {}".format(bucket.requester_pays))
    print("Self Link: {}".format(bucket.self_link))
    print("Time Created: {}".format(bucket.time_created))
    print("Versioning Enabled: {}".format(bucket.versioning_enabled))
    # Labels are a dict; pprint gives stable, readable formatting.
    print("Labels:")
    pprint.pprint(bucket.labels)


# [END storage_get_bucket_metadata]

if __name__ == "__main__":
    bucket_metadata(bucket_name=sys.argv[1])
import sys

# [START storage_get_default_event_based_hold]
from google.cloud import storage


def get_default_event_based_hold(bucket_name):
    """Report whether the default event-based hold is set on a bucket.

    Args:
        bucket_name: name of the bucket to inspect, e.g. "my-bucket".
    """
    client = storage.Client()
    held = client.get_bucket(bucket_name).default_event_based_hold

    if held:
        print("Default event-based hold is enabled for {}".format(bucket_name))
    else:
        print(
            "Default event-based hold is not enabled for {}".format(
                bucket_name
            )
        )


# [END storage_get_default_event_based_hold]


if __name__ == "__main__":
    get_default_event_based_hold(bucket_name=sys.argv[1])
import sys

# [START storage_get_hmac_key]
from google.cloud import storage


def get_key(access_id, project_id):
    """Retrieve and print the metadata of one HMAC key.

    Args:
        access_id: ID of the HMAC key to look up.
        project_id: Google Cloud project the key belongs to.

    Returns:
        The HMACKeyMetadata object for the key.
    """
    # project_id = "Your Google Cloud project ID"
    # access_id = "ID of an HMAC key"

    client = storage.Client(project=project_id)
    metadata = client.get_hmac_key_metadata(access_id, project_id=project_id)

    print("The HMAC key metadata is:")
    print("Service Account Email: {}".format(metadata.service_account_email))
    print("Key ID: {}".format(metadata.id))
    print("Access ID: {}".format(metadata.access_id))
    print("Project ID: {}".format(metadata.project))
    print("State: {}".format(metadata.state))
    print("Created At: {}".format(metadata.time_created))
    print("Updated At: {}".format(metadata.updated))
    print("Etag: {}".format(metadata.etag))
    return metadata


# [END storage_get_hmac_key]

if __name__ == "__main__":
    get_key(access_id=sys.argv[1], project_id=sys.argv[2])
import sys

# [START storage_get_metadata]
from google.cloud import storage


def blob_metadata(bucket_name, blob_name):
    """Prints out a blob's metadata.

    Fetches the blob resource with a GET request and prints its commonly
    inspected fields one per line; output order is part of the sample's
    observable behavior.

    Args:
        bucket_name: name of the bucket containing the blob.
        blob_name: name of the blob to inspect.
    """
    # bucket_name = 'your-bucket-name'
    # blob_name = 'your-object-name'

    storage_client = storage.Client()
    bucket = storage_client.bucket(bucket_name)
    # NOTE(review): get_blob returns None when the object does not exist,
    # in which case blob.name below raises AttributeError — consider a
    # guard. Left as-is to preserve sample behavior.
    blob = bucket.get_blob(blob_name)

    print("Blob: {}".format(blob.name))
    print("Bucket: {}".format(blob.bucket.name))
    print("Storage class: {}".format(blob.storage_class))
    print("ID: {}".format(blob.id))
    print("Size: {} bytes".format(blob.size))
    print("Updated: {}".format(blob.updated))
    print("Generation: {}".format(blob.generation))
    print("Metageneration: {}".format(blob.metageneration))
    print("Etag: {}".format(blob.etag))
    print("Owner: {}".format(blob.owner))
    print("Component count: {}".format(blob.component_count))
    print("Crc32c: {}".format(blob.crc32c))
    print("md5_hash: {}".format(blob.md5_hash))
    print("Cache-control: {}".format(blob.cache_control))
    print("Content-type: {}".format(blob.content_type))
    print("Content-disposition: {}".format(blob.content_disposition))
    print("Content-encoding: {}".format(blob.content_encoding))
    print("Content-language: {}".format(blob.content_language))
    print("Metadata: {}".format(blob.metadata))
    print("Temporary hold: ", "enabled" if blob.temporary_hold else "disabled")
    print(
        "Event based hold: ",
        "enabled" if blob.event_based_hold else "disabled",
    )
    # Only present when a retention policy applies to the object.
    if blob.retention_expiration_time:
        print(
            "retentionExpirationTime: {}".format(
                blob.retention_expiration_time
            )
        )


# [END storage_get_metadata]

if __name__ == "__main__":
    blob_metadata(bucket_name=sys.argv[1], blob_name=sys.argv[2])
import sys

# [START storage_get_requester_pays_status]
from google.cloud import storage


def get_requester_pays_status(bucket_name):
    """Report whether the requester-pays flag is set on a bucket.

    Args:
        bucket_name: name of the bucket to inspect, e.g. "my-bucket".
    """
    client = storage.Client()
    enabled = client.get_bucket(bucket_name).requester_pays

    if enabled:
        print("Requester Pays is enabled for {}".format(bucket_name))
    else:
        print("Requester Pays is disabled for {}".format(bucket_name))


# [END storage_get_requester_pays_status]

if __name__ == "__main__":
    get_requester_pays_status(bucket_name=sys.argv[1])
import sys

# [START storage_get_retention_policy]
from google.cloud import storage


def get_retention_policy(bucket_name):
    """Print the retention policy configured on a bucket.

    Args:
        bucket_name: name of the bucket to inspect, e.g. "my-bucket".
    """
    client = storage.Client()

    # bucket() does not contact the API; reload() fetches the current
    # metadata so the retention fields below are fresh.
    target = client.bucket(bucket_name)
    target.reload()

    print("Retention Policy for {}".format(bucket_name))
    print("Retention Period: {}".format(target.retention_period))

    if target.retention_policy_locked:
        print("Retention Policy is locked")

    if target.retention_policy_effective_time:
        print(
            "Effective Time: {}".format(target.retention_policy_effective_time)
        )


# [END storage_get_retention_policy]


if __name__ == "__main__":
    get_retention_policy(bucket_name=sys.argv[1])
import sys

# [START storage_get_uniform_bucket_level_access]
from google.cloud import storage


def get_uniform_bucket_level_access(bucket_name):
    """Report whether uniform bucket-level access is enabled on a bucket.

    Args:
        bucket_name: name of the bucket to inspect, e.g. "my-bucket".
    """
    client = storage.Client()
    bucket = client.get_bucket(bucket_name)
    iam_config = bucket.iam_configuration

    # Guard clause: nothing else to report when the feature is off.
    if not iam_config.uniform_bucket_level_access_enabled:
        print(
            "Uniform bucket-level access is disabled for {}.".format(
                bucket.name
            )
        )
        return

    print(
        "Uniform bucket-level access is enabled for {}.".format(
            bucket.name
        )
    )
    print(
        "Bucket will be locked on {}.".format(
            iam_config.uniform_bucket_level_locked_time
        )
    )


# [END storage_get_uniform_bucket_level_access]

if __name__ == "__main__":
    get_uniform_bucket_level_access(bucket_name=sys.argv[1])
# [START storage_list_buckets]
from google.cloud import storage


def list_buckets():
    """Print the name of every bucket in the default project."""
    client = storage.Client()

    # list_buckets returns an iterator; consume it directly.
    for found_bucket in client.list_buckets():
        print(found_bucket.name)


# [END storage_list_buckets]


# [START storage_list_files]
from google.cloud import storage


def list_blobs(bucket_name):
    """Print the name of every blob in the given bucket.

    Args:
        bucket_name: name of the bucket to list, e.g. "your-bucket-name".
    """
    client = storage.Client()

    # Client.list_blobs requires google-cloud-storage >= 1.17.0.
    for found_blob in client.list_blobs(bucket_name):
        print(found_blob.name)


# [END storage_list_files]
# [START storage_list_files_with_prefix]
from google.cloud import storage


def list_blobs_with_prefix(bucket_name, prefix, delimiter=None):
    """List and print blobs whose names begin with ``prefix``.

    This emulates listing a "folder": with prefix='a' every blob under
    a (recursively) is returned. Supplying delimiter='/' additionally
    restricts the listing to the immediate "files" in that folder, and
    the sub-"folders" are reported via the listing's ``prefixes``
    collection. For example, given blobs a/1.txt and a/b/2.txt:

        prefix='a'                 -> a/1.txt, a/b/2.txt
        prefix='a', delimiter='/'  -> a/1.txt, plus prefix a/b/

    Args:
        bucket_name: name of the bucket to list.
        prefix: only blobs whose names start with this string are listed.
        delimiter: optional separator used to group the remainder of the
            names, typically "/".
    """
    client = storage.Client()

    # Client.list_blobs requires google-cloud-storage >= 1.17.0.
    listing = client.list_blobs(
        bucket_name, prefix=prefix, delimiter=delimiter
    )

    print("Blobs:")
    for item in listing:
        print(item.name)

    if delimiter:
        # NOTE(review): prefixes appears to be populated by the iteration
        # above — confirm before reordering these loops. Loop variable
        # renamed so it no longer shadows the `prefix` parameter.
        print("Prefixes:")
        for found_prefix in listing.prefixes:
            print(found_prefix)


# [END storage_list_files_with_prefix]


# [START storage_list_hmac_keys]
from google.cloud import storage


def list_keys(project_id):
    """Print the service-account email and access ID of each HMAC key in
    the project.

    Args:
        project_id: Google Cloud project to list keys for.

    Returns:
        The iterator of HMACKeyMetadata objects (already consumed).
    """
    # project_id = "Your Google Cloud project ID"

    client = storage.Client(project=project_id)
    keys = client.list_hmac_keys(project_id=project_id)

    print("HMAC Keys:")
    for key in keys:
        print(
            "Service Account Email: {}".format(key.service_account_email)
        )
        print("Access ID: {}".format(key.access_id))
    return keys


# [END storage_list_hmac_keys]


# [START storage_lock_retention_policy]
from google.cloud import storage


def lock_retention_policy(bucket_name):
    """Lock the retention policy on a bucket.

    Warning: once a retention policy is locked it cannot be unlocked, and
    its retention period can only be increased.

    Args:
        bucket_name: name of the bucket whose policy is locked.
    """
    client = storage.Client()

    # get_bucket fetches the current metageneration value, which
    # lock_retention_policy requires.
    bucket = client.get_bucket(bucket_name)
    bucket.lock_retention_policy()

    print("Retention policy for {} is now locked".format(bucket_name))
    print(
        "Retention policy effective as of {}".format(
            bucket.retention_policy_effective_time
        )
    )


# [END storage_lock_retention_policy]


# [START storage_make_public]
from google.cloud import storage


def make_blob_public(bucket_name, blob_name):
    """Grant public read access to a single blob.

    Args:
        bucket_name: name of the bucket containing the blob.
        blob_name: name of the blob to expose.
    """
    client = storage.Client()

    target = client.bucket(bucket_name).blob(blob_name)
    target.make_public()

    print(
        "Blob {} is publicly accessible at {}".format(
            target.name, target.public_url
        )
    )


# [END storage_make_public]
+# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_move_file] +from google.cloud import storage + + +def rename_blob(bucket_name, blob_name, new_name): + """Renames a blob.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # new_name = "new-object-name" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + new_blob = bucket.rename_blob(blob, new_name) + + print("Blob {} has been renamed to {}".format(blob.name, new_blob.name)) + + +# [END storage_move_file] + +if __name__ == "__main__": + rename_blob( + bucket_name=sys.argv[1], blob_name=sys.argv[2], new_name=sys.argv[3] + ) diff --git a/storage_print_bucket_acl.py b/storage_print_bucket_acl.py new file mode 100644 index 000000000..0804f7a9a --- /dev/null +++ b/storage_print_bucket_acl.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_print_bucket_acl] +from google.cloud import storage + + +def print_bucket_acl(bucket_name): + """Prints out a bucket's access control list.""" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + for entry in bucket.acl: + print("{}: {}".format(entry["role"], entry["entity"])) + + +# [END storage_print_bucket_acl] + +if __name__ == "__main__": + print_bucket_acl(bucket_name=sys.argv[1]) diff --git a/storage_print_bucket_acl_for_user.py b/storage_print_bucket_acl_for_user.py new file mode 100644 index 000000000..fa786d03a --- /dev/null +++ b/storage_print_bucket_acl_for_user.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_print_bucket_acl_for_user] +from google.cloud import storage + + +def print_bucket_acl_for_user(bucket_name, user_email): + """Prints out a bucket's access control list for a given user.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # Reload fetches the current ACL from Cloud Storage. + bucket.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # get the roles for different types of entities. 
+ roles = bucket.acl.user(user_email).get_roles() + + print(roles) + + +# [END storage_print_bucket_acl_for_user] + +if __name__ == "__main__": + print_bucket_acl_for_user(bucket_name=sys.argv[1], user_email=sys.argv[2]) diff --git a/storage_print_file_acl.py b/storage_print_file_acl.py new file mode 100644 index 000000000..f34a5283b --- /dev/null +++ b/storage_print_file_acl.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_print_file_acl] +from google.cloud import storage + + +def print_blob_acl(bucket_name, blob_name): + """Prints out a blob's access control list.""" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + for entry in blob.acl: + print("{}: {}".format(entry["role"], entry["entity"])) + + +# [END storage_print_file_acl] + +if __name__ == "__main__": + print_blob_acl(bucket_name=sys.argv[1], blob_name=sys.argv[2]) diff --git a/storage_print_file_acl_for_user.py b/storage_print_file_acl_for_user.py new file mode 100644 index 000000000..e399b9160 --- /dev/null +++ b/storage_print_file_acl_for_user.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_print_file_acl_for_user] +from google.cloud import storage + + +def print_blob_acl_for_user(bucket_name, blob_name, user_email): + """Prints out a blob's access control list for a given user.""" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + # Reload fetches the current ACL from Cloud Storage. + blob.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # get the roles for different types of entities. + roles = blob.acl.user(user_email).get_roles() + + print(roles) + + +# [END storage_print_file_acl_for_user] + +if __name__ == "__main__": + print_blob_acl_for_user( + bucket_name=sys.argv[1], blob_name=sys.argv[2], user_email=sys.argv[3], + ) diff --git a/storage_release_event_based_hold.py b/storage_release_event_based_hold.py new file mode 100644 index 000000000..8c3c11b6f --- /dev/null +++ b/storage_release_event_based_hold.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_release_event_based_hold] +from google.cloud import storage + + +def release_event_based_hold(bucket_name, blob_name): + """Releases the event based hold on a given blob""" + + # bucket_name = "my-bucket" + # blob_name = "my-blob" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.event_based_hold = False + blob.patch() + + print("Event based hold was released for {}".format(blob_name)) + + +# [END storage_release_event_based_hold] + + +if __name__ == "__main__": + release_event_based_hold(bucket_name=sys.argv[1], blob_name=sys.argv[2]) diff --git a/storage_release_temporary_hold.py b/storage_release_temporary_hold.py new file mode 100644 index 000000000..02a6ca96c --- /dev/null +++ b/storage_release_temporary_hold.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import sys
+
+# [START storage_release_temporary_hold]
+from google.cloud import storage
+
+
+def release_temporary_hold(bucket_name, blob_name):
+    """Releases the temporary hold on a given blob"""
+
+    # bucket_name = "my-bucket"
+    # blob_name = "my-blob"
+
+    storage_client = storage.Client()
+    bucket = storage_client.bucket(bucket_name)
+    blob = bucket.blob(blob_name)
+
+    blob.temporary_hold = False
+    blob.patch()
+
+    print("Temporary hold was released for {}".format(blob_name))
+
+
+# [END storage_release_temporary_hold]
+
+
+if __name__ == "__main__":
+    release_temporary_hold(bucket_name=sys.argv[1], blob_name=sys.argv[2])
diff --git a/storage_remove_bucket_default_owner.py b/storage_remove_bucket_default_owner.py
new file mode 100644
index 000000000..beaf6be84
--- /dev/null
+++ b/storage_remove_bucket_default_owner.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+# Copyright 2019 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+# [START storage_remove_bucket_default_owner]
+from google.cloud import storage
+
+
+def remove_bucket_default_owner(bucket_name, user_email):
+    """Removes a user from the access control list of the given bucket's
+    default object access control list."""
+    # bucket_name = "your-bucket-name"
+    # user_email = "name@example.com"
+
+    storage_client = storage.Client()
+    bucket = storage_client.bucket(bucket_name)
+
+    # Reload fetches the current ACL from Cloud Storage.
+ bucket.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # remove access for different types of entities. + bucket.default_object_acl.user(user_email).revoke_read() + bucket.default_object_acl.user(user_email).revoke_write() + bucket.default_object_acl.user(user_email).revoke_owner() + bucket.default_object_acl.save() + + print( + "Removed user {} from the default acl of bucket {}.".format( + user_email, bucket_name + ) + ) + + +# [END storage_remove_bucket_default_owner] + +if __name__ == "__main__": + remove_bucket_default_owner( + bucket_name=sys.argv[1], user_email=sys.argv[2] + ) diff --git a/storage_remove_bucket_iam_member.py b/storage_remove_bucket_iam_member.py new file mode 100644 index 000000000..f37f351df --- /dev/null +++ b/storage_remove_bucket_iam_member.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_remove_bucket_iam_member] +from google.cloud import storage + + +def remove_bucket_iam_member(bucket_name, role, member): + """Remove member from bucket IAM Policy""" + # bucket_name = "your-bucket-name" + # role = "IAM role, e.g. roles/storage.objectViewer" + # member = "IAM identity, e.g. 
user: name@example.com" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy() + + policy[role].discard(member) + + bucket.set_iam_policy(policy) + + print("Removed {} with role {} from {}.".format(member, role, bucket_name)) + + +# [END storage_remove_bucket_iam_member] + +if __name__ == "__main__": + remove_bucket_iam_member( + bucket_name=sys.argv[1], role=sys.argv[2], member=sys.argv[3] + ) diff --git a/storage_remove_bucket_label.py b/storage_remove_bucket_label.py new file mode 100644 index 000000000..d39c03f24 --- /dev/null +++ b/storage_remove_bucket_label.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_remove_bucket_label] +from google.cloud import storage +import pprint + + +def remove_bucket_label(bucket_name): + """Remove a label from a bucket.""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + labels = bucket.labels + + if "example" in labels: + del labels["example"] + + bucket.labels = labels + bucket.patch() + + print("Removed labels on {}.".format(bucket.name)) + pprint.pprint(bucket.labels) + + +# [END storage_remove_bucket_label] + +if __name__ == "__main__": + remove_bucket_label(bucket_name=sys.argv[1]) diff --git a/storage_remove_bucket_owner.py b/storage_remove_bucket_owner.py new file mode 100644 index 000000000..f54e7a7cc --- /dev/null +++ b/storage_remove_bucket_owner.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_remove_bucket_owner] +from google.cloud import storage + + +def remove_bucket_owner(bucket_name, user_email): + """Removes a user from the access control list of the given bucket.""" + # bucket_name = "your-bucket-name" + # user_email = "name@example.com" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # Reload fetches the current ACL from Cloud Storage. 
+ bucket.acl.reload() + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # remove access for different types of entities. + bucket.acl.user(user_email).revoke_read() + bucket.acl.user(user_email).revoke_write() + bucket.acl.user(user_email).revoke_owner() + bucket.acl.save() + + print("Removed user {} from bucket {}.".format(user_email, bucket_name)) + + +# [END storage_remove_bucket_owner] + +if __name__ == "__main__": + remove_bucket_owner(bucket_name=sys.argv[1], user_email=sys.argv[2]) diff --git a/storage_remove_file_owner.py b/storage_remove_file_owner.py new file mode 100644 index 000000000..9db83cce0 --- /dev/null +++ b/storage_remove_file_owner.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_remove_file_owner] +from google.cloud import storage + + +def remove_blob_owner(bucket_name, blob_name, user_email): + """Removes a user from the access control list of the given blob in the + given bucket.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # user_email = "name@example.com" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + # You can also use `group`, `domain`, `all_authenticated` and `all` to + # remove access for different types of entities. 
+ blob.acl.user(user_email).revoke_read() + blob.acl.user(user_email).revoke_write() + blob.acl.user(user_email).revoke_owner() + blob.acl.save() + + print( + "Removed user {} from blob {} in bucket {}.".format( + user_email, blob_name, bucket_name + ) + ) + + +# [END storage_remove_file_owner] + +if __name__ == "__main__": + remove_blob_owner( + bucket_name=sys.argv[1], blob_name=sys.argv[2], user_email=sys.argv[3], + ) diff --git a/storage_remove_retention_policy.py b/storage_remove_retention_policy.py new file mode 100644 index 000000000..cb8ee548c --- /dev/null +++ b/storage_remove_retention_policy.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_remove_retention_policy] +from google.cloud import storage + + +def remove_retention_policy(bucket_name): + """Removes the retention policy on a given bucket""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + bucket.reload() + + if bucket.retention_policy_locked: + print( + "Unable to remove retention period as retention policy is locked." 
+ ) + return + + bucket.retention_period = None + bucket.patch() + + print("Removed bucket {} retention policy".format(bucket.name)) + + +# [END storage_remove_retention_policy] + + +if __name__ == "__main__": + remove_retention_policy(bucket_name=sys.argv[1]) diff --git a/storage_rotate_encryption_key.py b/storage_rotate_encryption_key.py new file mode 100644 index 000000000..f66c8f8a0 --- /dev/null +++ b/storage_rotate_encryption_key.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +# Copyright 2019 Google, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_rotate_encryption_key] +from google.cloud import storage +import base64 + + +def rotate_encryption_key( + bucket_name, blob_name, base64_encryption_key, base64_new_encryption_key +): + """Performs a key rotation by re-writing an encrypted blob with a new + encryption key.""" + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + current_encryption_key = base64.b64decode(base64_encryption_key) + new_encryption_key = base64.b64decode(base64_new_encryption_key) + + # Both source_blob and destination_blob refer to the same storage object, + # but destination_blob has the new encryption key. 
+ source_blob = bucket.blob( + blob_name, encryption_key=current_encryption_key + ) + destination_blob = bucket.blob( + blob_name, encryption_key=new_encryption_key + ) + + token = None + + while True: + token, bytes_rewritten, total_bytes = destination_blob.rewrite( + source_blob, token=token + ) + if token is None: + break + + print("Key rotation complete for Blob {}".format(blob_name)) + + +# [END storage_rotate_encryption_key] + +if __name__ == "__main__": + rotate_encryption_key( + bucket_name=sys.argv[1], + blob_name=sys.argv[2], + base64_encryption_key=sys.argv[3], + base64_new_encryption_key=sys.argv[4], + ) diff --git a/storage_set_bucket_default_kms_key.py b/storage_set_bucket_default_kms_key.py new file mode 100644 index 000000000..7ba4718b2 --- /dev/null +++ b/storage_set_bucket_default_kms_key.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_bucket_default_kms_key] +from google.cloud import storage + + +def enable_default_kms_key(bucket_name, kms_key_name): + """Sets a bucket's default KMS key.""" + # bucket_name = "your-bucket-name" + # kms_key_name = "projects/PROJ/locations/LOC/keyRings/RING/cryptoKey/KEY" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.default_kms_key_name = kms_key_name + bucket.patch() + + print( + "Set default KMS key for bucket {} to {}.".format( + bucket.name, bucket.default_kms_key_name + ) + ) + + +# [END storage_set_bucket_default_kms_key] + +if __name__ == "__main__": + enable_default_kms_key(bucket_name=sys.argv[1], kms_key_name=sys.argv[2]) diff --git a/storage_set_event_based_hold.py b/storage_set_event_based_hold.py new file mode 100644 index 000000000..52a89b88e --- /dev/null +++ b/storage_set_event_based_hold.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_event_based_hold] +from google.cloud import storage + + +def set_event_based_hold(bucket_name, blob_name): + """Sets a event based hold on a given blob""" + # bucket_name = "my-bucket" + # blob_name = "my-blob" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + blob.event_based_hold = True + blob.patch() + + print("Event based hold was set for {}".format(blob_name)) + + +# [END storage_set_event_based_hold] + + +if __name__ == "__main__": + set_event_based_hold(bucket_name=sys.argv[1], blob_name=sys.argv[2]) diff --git a/storage_set_retention_policy.py b/storage_set_retention_policy.py new file mode 100644 index 000000000..2b3602491 --- /dev/null +++ b/storage_set_retention_policy.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_retention_policy] +from google.cloud import storage + + +def set_retention_policy(bucket_name, retention_period): + """Defines a retention policy on a given bucket""" + # bucket_name = "my-bucket" + # retention_period = 10 + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + bucket.retention_period = retention_period + bucket.patch() + + print( + "Bucket {} retention period set for {} seconds".format( + bucket.name, bucket.retention_period + ) + ) + + +# [END storage_set_retention_policy] + + +if __name__ == "__main__": + set_retention_policy(bucket_name=sys.argv[1], retention_period=sys.argv[2]) diff --git a/storage_set_temporary_hold.py b/storage_set_temporary_hold.py new file mode 100644 index 000000000..edeb3c578 --- /dev/null +++ b/storage_set_temporary_hold.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import sys
+
+# [START storage_set_temporary_hold]
+from google.cloud import storage
+
+
+def set_temporary_hold(bucket_name, blob_name):
+    """Sets a temporary hold on a given blob"""
+    # bucket_name = "my-bucket"
+    # blob_name = "my-blob"
+
+    storage_client = storage.Client()
+    bucket = storage_client.bucket(bucket_name)
+    blob = bucket.blob(blob_name)
+
+    blob.temporary_hold = True
+    blob.patch()
+
+    print("Temporary hold was set for {}".format(blob_name))
+
+
+# [END storage_set_temporary_hold]
+
+
+if __name__ == "__main__":
+    set_temporary_hold(bucket_name=sys.argv[1], blob_name=sys.argv[2])
diff --git a/storage_upload_encrypted_file.py b/storage_upload_encrypted_file.py
new file mode 100644
index 000000000..e016da759
--- /dev/null
+++ b/storage_upload_encrypted_file.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# Copyright 2019 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+# [START storage_upload_encrypted_file]
+from google.cloud import storage
+import base64
+
+
+def upload_encrypted_blob(
+    bucket_name,
+    source_file_name,
+    destination_blob_name,
+    base64_encryption_key,
+):
+    """Uploads a file to a Google Cloud Storage bucket using a custom
+    encryption key.
+
+    The file will be encrypted by Google Cloud Storage and only
+    retrievable using the provided encryption key.
+ """ + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + # Encryption key must be an AES256 key represented as a bytestring with + # 32 bytes. Since it's passed in as a base64 encoded string, it needs + # to be decoded. + encryption_key = base64.b64decode(base64_encryption_key) + blob = bucket.blob( + destination_blob_name, encryption_key=encryption_key + ) + + blob.upload_from_filename(source_file_name) + + print( + "File {} uploaded to {}.".format( + source_file_name, destination_blob_name + ) + ) + + +# [END storage_upload_encrypted_file] + +if __name__ == "__main__": + upload_encrypted_blob( + bucket_name=sys.argv[1], + source_file_name=sys.argv[2], + destination_blob_name=sys.argv[3], + base64_encryption_key=sys.argv[4], + ) diff --git a/storage_upload_file.py b/storage_upload_file.py new file mode 100644 index 000000000..c067f7063 --- /dev/null +++ b/storage_upload_file.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_upload_file] +from google.cloud import storage + + +def upload_blob(bucket_name, source_file_name, destination_blob_name): + """Uploads a file to the bucket.""" + # bucket_name = "your-bucket-name" + # source_file_name = "local/path/to/file" + # destination_blob_name = "storage-object-name" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(destination_blob_name) + + blob.upload_from_filename(source_file_name) + + print( + "File {} uploaded to {}.".format( + source_file_name, destination_blob_name + ) + ) + + +# [END storage_upload_file] + +if __name__ == "__main__": + upload_blob( + bucket_name=sys.argv[1], + source_file_name=sys.argv[2], + destination_blob_name=sys.argv[3], + ) diff --git a/storage_upload_with_kms_key.py b/storage_upload_with_kms_key.py new file mode 100644 index 000000000..e83c10aea --- /dev/null +++ b/storage_upload_with_kms_key.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_upload_with_kms_key] +from google.cloud import storage + + +def upload_blob_with_kms( + bucket_name, source_file_name, destination_blob_name, kms_key_name +): + """Uploads a file to the bucket, encrypting it with the given KMS key.""" + # bucket_name = "your-bucket-name" + # source_file_name = "local/path/to/file" + # destination_blob_name = "storage-object-name" + # kms_key_name = "projects/PROJ/locations/LOC/keyRings/RING/cryptoKey/KEY" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(destination_blob_name, kms_key_name=kms_key_name) + blob.upload_from_filename(source_file_name) + + print( + "File {} uploaded to {} with encryption key {}.".format( + source_file_name, destination_blob_name, kms_key_name + ) + ) + + +# [END storage_upload_with_kms_key] + +if __name__ == "__main__": + upload_blob_with_kms( + bucket_name=sys.argv[1], + source_file_name=sys.argv[2], + destination_blob_name=sys.argv[3], + kms_key_name=sys.argv[4], + ) diff --git a/storage_view_bucket_iam_members.py b/storage_view_bucket_iam_members.py new file mode 100644 index 000000000..1ef511ce0 --- /dev/null +++ b/storage_view_bucket_iam_members.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_view_bucket_iam_members] +from google.cloud import storage + + +def view_bucket_iam_members(bucket_name): + """View IAM Policy for a bucket""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy() + + for role in policy: + members = policy[role] + print("Role: {}, Members: {}".format(role, members)) + + +# [END storage_view_bucket_iam_members] + + +if __name__ == "__main__": + view_bucket_iam_members(bucket_name=sys.argv[1]) diff --git a/uniform_bucket_level_access.py b/uniform_bucket_level_access.py deleted file mode 100644 index 649728ff9..000000000 --- a/uniform_bucket_level_access.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse - -from google.cloud import storage - - -def enable_uniform_bucket_level_access(bucket_name): - """Enable uniform bucket-level access for a bucket""" - # [START storage_enable_uniform_bucket_level_access] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - bucket.iam_configuration.uniform_bucket_level_access_enabled = True - bucket.patch() - - print('Uniform bucket-level access was enabled for {}.'.format( - bucket.name)) - # [END storage_enable_uniform_bucket_level_access] - - -def disable_uniform_bucket_level_access(bucket_name): - """Disable uniform bucket-level access for a bucket""" - # [START storage_disable_uniform_bucket_level_access] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - - bucket.iam_configuration.uniform_bucket_level_access_enabled = False - bucket.patch() - - print('Uniform bucket-level access was disabled for {}.'.format( - bucket.name)) - # [END storage_disable_uniform_bucket_level_access] - - -def get_uniform_bucket_level_access(bucket_name): - """Get uniform bucket-level access for a bucket""" - # [START storage_get_uniform_bucket_level_access] - # bucket_name = "my-bucket" - - storage_client = storage.Client() - bucket = storage_client.get_bucket(bucket_name) - iam_configuration = bucket.iam_configuration - - if iam_configuration.uniform_bucket_level_access_enabled: - print('Uniform bucket-level access is enabled for {}.'.format( - bucket.name)) - print('Bucket will be locked on {}.'.format( - iam_configuration.uniform_bucket_level_locked_time)) - else: - print('Uniform bucket-level access is disabled for {}.'.format( - bucket.name)) - # [END storage_get_uniform_bucket_level_access] - - -if __name__ == '__main__': - - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - subparsers = parser.add_subparsers(dest='command') - - 
enable_uniform_bucket_level_access_parser = subparsers.add_parser( - 'enable-uniform-bucket-level-access', - help=enable_uniform_bucket_level_access.__doc__) - enable_uniform_bucket_level_access_parser.add_argument('bucket_name') - - disable_uniform_bucket_level_access_parser = subparsers.add_parser( - 'disable-uniform-bucket-level-access', - help=disable_uniform_bucket_level_access.__doc__) - disable_uniform_bucket_level_access_parser.add_argument('bucket_name') - - get_uniform_bucket_level_access_parser = subparsers.add_parser( - 'get-uniform-bucket-level-access', - help=get_uniform_bucket_level_access.__doc__) - get_uniform_bucket_level_access_parser.add_argument('bucket_name') - - args = parser.parse_args() - - if args.command == 'enable-uniform-bucket-level-access': - enable_uniform_bucket_level_access(args.bucket_name) - elif args.command == 'disable-uniform-bucket-level-access': - disable_uniform_bucket_level_access(args.bucket_name) - elif args.command == 'get-uniform-bucket-level-access': - get_uniform_bucket_level_access(args.bucket_name) diff --git a/uniform_bucket_level_access_test.py b/uniform_bucket_level_access_test.py index e18a0d79b..30ac50c7a 100644 --- a/uniform_bucket_level_access_test.py +++ b/uniform_bucket_level_access_test.py @@ -18,14 +18,16 @@ import pytest -import uniform_bucket_level_access +import storage_get_uniform_bucket_level_access +import storage_disable_uniform_bucket_level_access +import storage_enable_uniform_bucket_level_access @pytest.fixture() def bucket(): """Creates a test bucket and deletes it upon completion.""" client = storage.Client() - bucket_name = 'uniform-bucket-level-access-' + str(int(time.time())) + bucket_name = "uniform-bucket-level-access-" + str(int(time.time())) bucket = client.create_bucket(bucket_name) yield bucket time.sleep(3) @@ -33,22 +35,35 @@ def bucket(): def test_get_uniform_bucket_level_access(bucket, capsys): - uniform_bucket_level_access.get_uniform_bucket_level_access(bucket.name) + 
storage_get_uniform_bucket_level_access.get_uniform_bucket_level_access( + bucket.name + ) out, _ = capsys.readouterr() - assert 'Uniform bucket-level access is disabled for {}.'.format( - bucket.name) in out + assert ( + "Uniform bucket-level access is disabled for {}.".format(bucket.name) + in out + ) def test_enable_uniform_bucket_level_access(bucket, capsys): - uniform_bucket_level_access.enable_uniform_bucket_level_access(bucket.name) + short_name = storage_enable_uniform_bucket_level_access + short_name.enable_uniform_bucket_level_access( + bucket.name + ) out, _ = capsys.readouterr() - assert 'Uniform bucket-level access was enabled for {}.'.format( - bucket.name) in out + assert ( + "Uniform bucket-level access was enabled for {}.".format(bucket.name) + in out + ) def test_disable_uniform_bucket_level_access(bucket, capsys): - uniform_bucket_level_access.disable_uniform_bucket_level_access( - bucket.name) + short_name = storage_disable_uniform_bucket_level_access + short_name.disable_uniform_bucket_level_access( + bucket.name + ) out, _ = capsys.readouterr() - assert 'Uniform bucket-level access was disabled for {}.'.format( - bucket.name) in out + assert ( + "Uniform bucket-level access was disabled for {}.".format(bucket.name) + in out + ) From f28ca5baeeeeb302a65ca0ce3fc83b37550d8657 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Fri, 20 Dec 2019 17:41:38 -0800 Subject: [PATCH 084/197] Auto-update dependencies. (#2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. 
* revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 8a6eeead8..5c5078f1c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.0.0 -google-cloud-storage==1.22.0 \ No newline at end of file +google-cloud-pubsub==1.1.0 +google-cloud-storage==1.23.0 From a128459b6331255156025343eb9329f537b6ca52 Mon Sep 17 00:00:00 2001 From: Jonathan Lui Date: Tue, 21 Jan 2020 13:36:53 -0800 Subject: [PATCH 085/197] samples(storage): IAM conditions samples (#2730) * docs(storage): use policy.bindings in Storage/IAM samples * update view Bucket IAM to use policy.bindings * update remove Bucket IAM to use policy.bindings * blacken * add IAM condition sample 
* add conditional iam binding sample * bump storage requirement to 1.25.0 * fix tests * remove unused imports --- iam_test.py | 62 ++++++++++++--- requirements.txt | 2 +- storage_add_bucket_conditional_iam_binding.py | 78 +++++++++++++++++++ storage_add_bucket_iam_member.py | 8 +- storage_remove_bucket_iam_member.py | 7 +- storage_view_bucket_iam_members.py | 7 +- 6 files changed, 141 insertions(+), 23 deletions(-) create mode 100644 storage_add_bucket_conditional_iam_binding.py diff --git a/iam_test.py b/iam_test.py index 21ab0dba1..fa8b1d1b6 100644 --- a/iam_test.py +++ b/iam_test.py @@ -12,36 +12,76 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os - from google.cloud import storage import pytest +import re +import time import storage_remove_bucket_iam_member import storage_add_bucket_iam_member +import storage_add_bucket_conditional_iam_binding import storage_view_bucket_iam_members -BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] MEMBER = "group:dpebot@google.com" ROLE = "roles/storage.legacyBucketReader" +CONDITION_TITLE = "match-prefix" +CONDITION_DESCRIPTION = "Applies to objects matching a prefix" +CONDITION_EXPRESSION = "resource.name.startsWith(\"projects/_/buckets/bucket-name/objects/prefix-a-\")" + @pytest.fixture def bucket(): - yield storage.Client().bucket(BUCKET) + bucket_name = "test-iam-" + str(int(time.time())) + bucket = storage.Client().create_bucket(bucket_name) + bucket.iam_configuration.uniform_bucket_level_access_enabled = True + bucket.patch() + yield bucket + time.sleep(3) + bucket.delete(force=True) -def test_view_bucket_iam_members(): - storage_view_bucket_iam_members.view_bucket_iam_members(BUCKET) +def test_view_bucket_iam_members(capsys, bucket): + storage_view_bucket_iam_members.view_bucket_iam_members(bucket.name) + assert re.match("Role: .*, Members: .*", capsys.readouterr().out) def test_add_bucket_iam_member(bucket): - 
storage_add_bucket_iam_member.add_bucket_iam_member(BUCKET, ROLE, MEMBER) - assert MEMBER in bucket.get_iam_policy()[ROLE] + storage_add_bucket_iam_member.add_bucket_iam_member(bucket.name, ROLE, MEMBER) + policy = bucket.get_iam_policy(requested_policy_version=3) + assert any( + binding["role"] == ROLE and MEMBER in binding["members"] + for binding in policy.bindings + ) + + +def test_add_bucket_conditional_iam_binding(bucket): + storage_add_bucket_conditional_iam_binding.add_bucket_conditional_iam_binding( + bucket.name, + ROLE, + CONDITION_TITLE, + CONDITION_DESCRIPTION, + CONDITION_EXPRESSION, + {MEMBER} + ) + policy = bucket.get_iam_policy(requested_policy_version=3) + assert any( + binding["role"] == ROLE and + binding["members"] == {MEMBER} and + binding["condition"] == { + "title": CONDITION_TITLE, + "description": CONDITION_DESCRIPTION, + "expression": CONDITION_EXPRESSION + } + for binding in policy.bindings + ) def test_remove_bucket_iam_member(bucket): - storage_remove_bucket_iam_member.remove_bucket_iam_member( - BUCKET, ROLE, MEMBER + storage_remove_bucket_iam_member.remove_bucket_iam_member(bucket.name, ROLE, MEMBER) + + policy = bucket.get_iam_policy(requested_policy_version=3) + assert not any( + binding["role"] == ROLE and MEMBER in binding["members"] + for binding in policy.bindings ) - assert MEMBER not in bucket.get_iam_policy()[ROLE] diff --git a/requirements.txt b/requirements.txt index 5c5078f1c..ab438508a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.1.0 -google-cloud-storage==1.23.0 +google-cloud-storage==1.25.0 diff --git a/storage_add_bucket_conditional_iam_binding.py b/storage_add_bucket_conditional_iam_binding.py new file mode 100644 index 000000000..ddc0fc028 --- /dev/null +++ b/storage_add_bucket_conditional_iam_binding.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. 
All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_add_bucket_conditional_iam_binding] +from google.cloud import storage + + +def add_bucket_conditional_iam_binding( + bucket_name, role, title, description, expression, members +): + """Add a conditional IAM binding to a bucket's IAM policy.""" + # bucket_name = "your-bucket-name" + # role = "IAM role, e.g. roles/storage.objectViewer" + # members = {"IAM identity, e.g. user: name@example.com}" + # title = "Condition title." + # description = "Condition description." + # expression = "Condition expression." + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy(requested_policy_version=3) + + # Set the policy's version to 3 to use condition in bindings. 
+ policy.version = 3 + + policy.bindings.append( + { + "role": role, + "members": members, + "condition": { + "title": title, + "description": description, + "expression": expression, + }, + } + ) + + bucket.set_iam_policy(policy) + + print("Added the following member(s) with role {} to {}:".format(role, bucket_name)) + + for member in members: + print(" {}".format(member)) + + print("with condition:") + print(" Title: {}".format(title)) + print(" Description: {}".format(description)) + print(" Expression: {}".format(expression)) + + +# [END storage_add_bucket_conditional_iam_binding] + + +if __name__ == "__main__": + add_bucket_conditional_iam_binding( + bucket_name=sys.argv[1], + role=sys.argv[2], + title=sys.argv[3], + description=sys.argv[4], + expression=sys.argv[5], + members=set(sys.argv[6::]), + ) diff --git a/storage_add_bucket_iam_member.py b/storage_add_bucket_iam_member.py index bd26405cf..19d7ab7c0 100644 --- a/storage_add_bucket_iam_member.py +++ b/storage_add_bucket_iam_member.py @@ -29,9 +29,9 @@ def add_bucket_iam_member(bucket_name, role, member): storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) - policy = bucket.get_iam_policy() + policy = bucket.get_iam_policy(requested_policy_version=3) - policy[role].add(member) + policy.bindings.append({"role": role, "members": {member}}) bucket.set_iam_policy(policy) @@ -42,6 +42,4 @@ def add_bucket_iam_member(bucket_name, role, member): if __name__ == "__main__": - add_bucket_iam_member( - bucket_name=sys.argv[1], role=sys.argv[2], member=sys.argv[3] - ) + add_bucket_iam_member(bucket_name=sys.argv[1], role=sys.argv[2], member=sys.argv[3]) diff --git a/storage_remove_bucket_iam_member.py b/storage_remove_bucket_iam_member.py index f37f351df..ef75a1a15 100644 --- a/storage_remove_bucket_iam_member.py +++ b/storage_remove_bucket_iam_member.py @@ -29,9 +29,12 @@ def remove_bucket_iam_member(bucket_name, role, member): storage_client = storage.Client() bucket = 
storage_client.bucket(bucket_name) - policy = bucket.get_iam_policy() + policy = bucket.get_iam_policy(requested_policy_version=3) - policy[role].discard(member) + for binding in policy.bindings: + print(binding) + if binding["role"] == role and binding.get("condition") is None: + binding["members"].discard(member) bucket.set_iam_policy(policy) diff --git a/storage_view_bucket_iam_members.py b/storage_view_bucket_iam_members.py index 1ef511ce0..5272f0ddb 100644 --- a/storage_view_bucket_iam_members.py +++ b/storage_view_bucket_iam_members.py @@ -27,11 +27,10 @@ def view_bucket_iam_members(bucket_name): storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) - policy = bucket.get_iam_policy() + policy = bucket.get_iam_policy(requested_policy_version=3) - for role in policy: - members = policy[role] - print("Role: {}, Members: {}".format(role, members)) + for binding in policy.bindings: + print("Role: {}, Members: {}".format(binding["role"], binding["members"])) # [END storage_view_bucket_iam_members] From 6f1483afdcf43441aed80b4e40723357d8642d7a Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 5 Mar 2020 09:22:20 -0800 Subject: [PATCH 086/197] fix: Use unique resources for storage snippets. (#3029) * fix: use unique buckets and blobs for acl tests * fix: use unique buckets and blobs for snippets tests * fix: reuse test_bucket within module to avoid exhausting quota * fix: Due to retention policy, don't reuse fixture for bucket lock * fix: randomize blob names to disperse file edits --- acl_test.py | 64 ++++++++++++++------------------ bucket_lock_test.py | 13 ++++--- snippets_test.py | 90 +++++++++++++++++++++++++-------------------- 3 files changed, 87 insertions(+), 80 deletions(-) diff --git a/acl_test.py b/acl_test.py index 60947c895..157f399ca 100644 --- a/acl_test.py +++ b/acl_test.py @@ -12,11 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import os -import time +import uuid from google.cloud import storage -import google.cloud.storage.acl import pytest import storage_remove_file_owner @@ -30,7 +28,6 @@ import storage_print_file_acl_for_user import storage_print_file_acl -BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] # Typically we'd use a @example.com address, but GCS requires a real Google # account. TEST_EMAIL = ( @@ -39,37 +36,29 @@ ) -@pytest.fixture +@pytest.fixture(scope="module") def test_bucket(): - """Yields a bucket that resets its acl after the test completes.""" - bucket = storage.Client().bucket(BUCKET) - acl = google.cloud.storage.acl.BucketACL(bucket) - object_default_acl = google.cloud.storage.acl.DefaultObjectACL(bucket) - acl.reload() - object_default_acl.reload() - time.sleep(1) # bucket ops rate limited 1 update per second + """Yields a bucket that is deleted after the test completes.""" + bucket = None + while bucket is None or bucket.exists(): + bucket_name = "acl-test-{}".format(uuid.uuid4()) + bucket = storage.Client().bucket(bucket_name) + bucket.create() yield bucket - time.sleep(1) # bucket ops rate limited 1 update per second - acl.save() - object_default_acl.save() + bucket.delete(force=True) @pytest.fixture -def test_blob(): - """Yields a blob that resets its acl after the test completes.""" - bucket = storage.Client().bucket(BUCKET) - blob = bucket.blob("storage_acl_test_sigil") +def test_blob(test_bucket): + """Yields a blob that is deleted after the test completes.""" + bucket = test_bucket + blob = bucket.blob("storage_acl_test_sigil-{}".format(uuid.uuid4())) blob.upload_from_string("Hello, is it me you're looking for?") - acl = google.cloud.storage.acl.ObjectACL(blob) - acl.reload() # bucket ops rate limited 1 update per second - time.sleep(1) - yield blob # bucket ops rate limited 1 update per second - time.sleep(1) - acl.save() + yield blob -def test_print_bucket_acl(capsys): - storage_print_bucket_acl.print_bucket_acl(BUCKET) +def 
test_print_bucket_acl(test_bucket, capsys): + storage_print_bucket_acl.print_bucket_acl(test_bucket.name) out, _ = capsys.readouterr() assert out @@ -79,7 +68,7 @@ def test_print_bucket_acl_for_user(test_bucket, capsys): test_bucket.acl.save() storage_print_bucket_acl_for_user.print_bucket_acl_for_user( - BUCKET, TEST_EMAIL + test_bucket.name, TEST_EMAIL ) out, _ = capsys.readouterr() @@ -87,7 +76,7 @@ def test_print_bucket_acl_for_user(test_bucket, capsys): def test_add_bucket_owner(test_bucket): - storage_add_bucket_owner.add_bucket_owner(BUCKET, TEST_EMAIL) + storage_add_bucket_owner.add_bucket_owner(test_bucket.name, TEST_EMAIL) test_bucket.acl.reload() assert "OWNER" in test_bucket.acl.user(TEST_EMAIL).get_roles() @@ -97,7 +86,8 @@ def test_remove_bucket_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() - storage_remove_bucket_owner.remove_bucket_owner(BUCKET, TEST_EMAIL) + storage_remove_bucket_owner.remove_bucket_owner( + test_bucket.name, TEST_EMAIL) test_bucket.acl.reload() assert "OWNER" not in test_bucket.acl.user(TEST_EMAIL).get_roles() @@ -105,7 +95,7 @@ def test_remove_bucket_owner(test_bucket): def test_add_bucket_default_owner(test_bucket): storage_add_bucket_default_owner.add_bucket_default_owner( - BUCKET, TEST_EMAIL + test_bucket.name, TEST_EMAIL ) test_bucket.default_object_acl.reload() @@ -118,7 +108,7 @@ def test_remove_bucket_default_owner(test_bucket): test_bucket.acl.save() storage_remove_bucket_default_owner.remove_bucket_default_owner( - BUCKET, TEST_EMAIL + test_bucket.name, TEST_EMAIL ) test_bucket.default_object_acl.reload() @@ -127,7 +117,8 @@ def test_remove_bucket_default_owner(test_bucket): def test_print_blob_acl(test_blob, capsys): - storage_print_file_acl.print_blob_acl(BUCKET, test_blob.name) + storage_print_file_acl.print_blob_acl( + test_blob.bucket.name, test_blob.name) out, _ = capsys.readouterr() assert out @@ -137,7 +128,7 @@ def test_print_blob_acl_for_user(test_blob, capsys): 
test_blob.acl.save() storage_print_file_acl_for_user.print_blob_acl_for_user( - BUCKET, test_blob.name, TEST_EMAIL + test_blob.bucket.name, test_blob.name, TEST_EMAIL ) out, _ = capsys.readouterr() @@ -145,7 +136,8 @@ def test_print_blob_acl_for_user(test_blob, capsys): def test_add_blob_owner(test_blob): - storage_add_file_owner.add_blob_owner(BUCKET, test_blob.name, TEST_EMAIL) + storage_add_file_owner.add_blob_owner( + test_blob.bucket.name, test_blob.name, TEST_EMAIL) test_blob.acl.reload() assert "OWNER" in test_blob.acl.user(TEST_EMAIL).get_roles() @@ -156,7 +148,7 @@ def test_remove_blob_owner(test_blob): test_blob.acl.save() storage_remove_file_owner.remove_blob_owner( - BUCKET, test_blob.name, TEST_EMAIL + test_blob.bucket.name, test_blob.name, TEST_EMAIL ) test_blob.acl.reload() diff --git a/bucket_lock_test.py b/bucket_lock_test.py index 89db4a7cc..98a95835c 100644 --- a/bucket_lock_test.py +++ b/bucket_lock_test.py @@ -13,6 +13,7 @@ # limitations under the License. import time +import uuid from google.cloud import storage @@ -36,12 +37,14 @@ RETENTION_POLICY = 5 -@pytest.fixture() +@pytest.fixture def bucket(): - """Creates a test bucket and deletes it upon completion.""" - client = storage.Client() - bucket_name = "bucket-lock-" + str(int(time.time())) - bucket = client.create_bucket(bucket_name) + """Yields a bucket that is deleted after the test completes.""" + bucket = None + while bucket is None or bucket.exists(): + bucket_name = "bucket-lock-{}".format(uuid.uuid4()) + bucket = storage.Client().bucket(bucket_name) + bucket.create() yield bucket bucket.delete(force=True) diff --git a/snippets_test.py b/snippets_test.py index cef211363..42e73944d 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -15,6 +15,7 @@ import os import tempfile import time +import uuid from google.cloud import storage import google.cloud.exceptions @@ -41,89 +42,100 @@ import storage_generate_upload_signed_url_v4 import storage_set_bucket_default_kms_key -BUCKET = 
os.environ["CLOUD_STORAGE_BUCKET"] KMS_KEY = os.environ["CLOUD_KMS_KEY"] -def test_enable_default_kms_key(): +def test_enable_default_kms_key(test_bucket): storage_set_bucket_default_kms_key.enable_default_kms_key( - bucket_name=BUCKET, kms_key_name=KMS_KEY + bucket_name=test_bucket.name, kms_key_name=KMS_KEY ) time.sleep(2) # Let change propagate as needed - bucket = storage.Client().get_bucket(BUCKET) + bucket = storage.Client().get_bucket(test_bucket.name) assert bucket.default_kms_key_name.startswith(KMS_KEY) bucket.default_kms_key_name = None bucket.patch() -def test_get_bucket_labels(): - storage_get_bucket_labels.get_bucket_labels(BUCKET) +def test_get_bucket_labels(test_bucket): + storage_get_bucket_labels.get_bucket_labels(test_bucket.name) -def test_add_bucket_label(capsys): - storage_add_bucket_label.add_bucket_label(BUCKET) +def test_add_bucket_label(test_bucket, capsys): + storage_add_bucket_label.add_bucket_label(test_bucket.name) out, _ = capsys.readouterr() assert "example" in out -def test_remove_bucket_label(capsys): - storage_add_bucket_label.add_bucket_label(BUCKET) - storage_remove_bucket_label.remove_bucket_label(BUCKET) +def test_remove_bucket_label(test_bucket, capsys): + storage_add_bucket_label.add_bucket_label(test_bucket.name) + storage_remove_bucket_label.remove_bucket_label(test_bucket.name) out, _ = capsys.readouterr() assert "Removed labels" in out +@pytest.fixture(scope="module") +def test_bucket(): + """Yields a bucket that is deleted after the test completes.""" + bucket = None + while bucket is None or bucket.exists(): + bucket_name = "storage-snippets-test-{}".format(uuid.uuid4()) + bucket = storage.Client().bucket(bucket_name) + bucket.create() + yield bucket + bucket.delete(force=True) + + @pytest.fixture -def test_blob(): - """Provides a pre-existing blob in the test bucket.""" - bucket = storage.Client().bucket(BUCKET) - blob = bucket.blob("storage_snippets_test_sigil") +def test_blob(test_bucket): + """Yields a blob that is 
deleted after the test completes.""" + bucket = test_bucket + blob = bucket.blob("storage_snippets_test_sigil-{}".format(uuid.uuid4())) blob.upload_from_string("Hello, is it me you're looking for?") - return blob + yield blob -def test_list_buckets(capsys): +def test_list_buckets(test_bucket, capsys): storage_list_buckets.list_buckets() out, _ = capsys.readouterr() - assert BUCKET in out + assert test_bucket.name in out def test_list_blobs(test_blob, capsys): - storage_list_files.list_blobs(BUCKET) + storage_list_files.list_blobs(test_blob.bucket.name) out, _ = capsys.readouterr() assert test_blob.name in out -def test_bucket_metadata(capsys): - storage_get_bucket_metadata.bucket_metadata(BUCKET) +def test_bucket_metadata(test_bucket, capsys): + storage_get_bucket_metadata.bucket_metadata(test_bucket.name) out, _ = capsys.readouterr() - assert BUCKET in out + assert test_bucket.name in out def test_list_blobs_with_prefix(test_blob, capsys): storage_list_files_with_prefix.list_blobs_with_prefix( - BUCKET, prefix="storage_snippets" + test_blob.bucket.name, prefix="storage_snippets" ) out, _ = capsys.readouterr() assert test_blob.name in out -def test_upload_blob(): +def test_upload_blob(test_bucket): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b"test") storage_upload_file.upload_blob( - BUCKET, source_file.name, "test_upload_blob" + test_bucket.name, source_file.name, "test_upload_blob" ) -def test_upload_blob_with_kms(): +def test_upload_blob_with_kms(test_bucket): with tempfile.NamedTemporaryFile() as source_file: source_file.write(b"test") storage_upload_with_kms_key.upload_blob_with_kms( - BUCKET, source_file.name, "test_upload_blob_encrypted", KMS_KEY + test_bucket.name, source_file.name, "test_upload_blob_encrypted", KMS_KEY ) - bucket = storage.Client().bucket(BUCKET) + bucket = storage.Client().bucket(test_bucket.name) kms_blob = bucket.get_blob("test_upload_blob_encrypted") assert kms_blob.kms_key_name.startswith(KMS_KEY) @@ -131,24 
+143,24 @@ def test_upload_blob_with_kms(): def test_download_blob(test_blob): with tempfile.NamedTemporaryFile() as dest_file: storage_download_file.download_blob( - BUCKET, test_blob.name, dest_file.name + test_blob.bucket.name, test_blob.name, dest_file.name ) assert dest_file.read() def test_blob_metadata(test_blob, capsys): - storage_get_metadata.blob_metadata(BUCKET, test_blob.name) + storage_get_metadata.blob_metadata(test_blob.bucket.name, test_blob.name) out, _ = capsys.readouterr() assert test_blob.name in out def test_delete_blob(test_blob): - storage_delete_file.delete_blob(BUCKET, test_blob.name) + storage_delete_file.delete_blob(test_blob.bucket.name, test_blob.name) def test_make_blob_public(test_blob): - storage_make_public.make_blob_public(BUCKET, test_blob.name) + storage_make_public.make_blob_public(test_blob.bucket.name, test_blob.name) r = requests.get(test_blob.public_url) assert r.text == "Hello, is it me you're looking for?" @@ -156,7 +168,7 @@ def test_make_blob_public(test_blob): def test_generate_signed_url(test_blob, capsys): url = storage_generate_signed_url_v2.generate_signed_url( - BUCKET, test_blob.name + test_blob.bucket.name, test_blob.name ) r = requests.get(url) @@ -165,18 +177,18 @@ def test_generate_signed_url(test_blob, capsys): def test_generate_download_signed_url_v4(test_blob, capsys): url = storage_generate_signed_url_v4.generate_download_signed_url_v4( - BUCKET, test_blob.name + test_blob.bucket.name, test_blob.name ) r = requests.get(url) assert r.text == "Hello, is it me you're looking for?" 
-def test_generate_upload_signed_url_v4(capsys): +def test_generate_upload_signed_url_v4(test_bucket, capsys): blob_name = "storage_snippets_test_upload" content = b"Uploaded via v4 signed url" url = storage_generate_upload_signed_url_v4.generate_upload_signed_url_v4( - BUCKET, blob_name + test_bucket.name, blob_name ) requests.put( @@ -185,13 +197,13 @@ def test_generate_upload_signed_url_v4(capsys): headers={"content-type": "application/octet-stream"}, ) - bucket = storage.Client().bucket(BUCKET) + bucket = storage.Client().bucket(test_bucket.name) blob = bucket.blob(blob_name) assert blob.download_as_string() == content def test_rename_blob(test_blob): - bucket = storage.Client().bucket(BUCKET) + bucket = storage.Client().bucket(test_blob.bucket.name) try: bucket.delete_blob("test_rename_blob") @@ -207,7 +219,7 @@ def test_rename_blob(test_blob): def test_copy_blob(test_blob): - bucket = storage.Client().bucket(BUCKET) + bucket = storage.Client().bucket(test_blob.bucket.name) try: bucket.delete_blob("test_copy_blob") From a6d478353220f3a061213704fb0f0dd6efdfcf60 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 5 Mar 2020 12:56:40 -0800 Subject: [PATCH 087/197] fix: Reuse HMAC key as we have a limit of 5 (#3037) * fix: Reuse HMAC key as we have a limit of 5 --- hmac_samples_test.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/hmac_samples_test.py b/hmac_samples_test.py index e852d31bf..9712ed159 100644 --- a/hmac_samples_test.py +++ b/hmac_samples_test.py @@ -21,6 +21,7 @@ import os from google.cloud import storage +import google.api_core.exceptions import pytest import storage_activate_hmac_key @@ -36,11 +37,13 @@ STORAGE_CLIENT = storage.Client(project=PROJECT_ID) -@pytest.fixture +@pytest.fixture(scope="module") def new_hmac_key(): """ Fixture to create a new HMAC key, and to guarantee all keys are deleted at - the end of each test. + the end of the module. 
+ + NOTE: Due to the module scope, test order in this file is significant """ hmac_key, secret = STORAGE_CLIENT.create_hmac_key( service_account_email=SERVICE_ACCOUNT_EMAIL, project_id=PROJECT_ID @@ -100,7 +103,13 @@ def test_deactivate_key(capsys, new_hmac_key): def test_delete_key(capsys, new_hmac_key): - new_hmac_key.state = "INACTIVE" - new_hmac_key.update() + # Due to reuse of the HMAC key for each test function, the previous + # test has deactivated the key already. + try: + new_hmac_key.state = "INACTIVE" + new_hmac_key.update() + except google.api_core.exceptions.BadRequest: + pass + storage_delete_hmac_key.delete_key(new_hmac_key.access_id, PROJECT_ID) assert "The key is deleted" in capsys.readouterr().out From 3e3c55a2b330c6159294736498e2bc1b6be180e1 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 5 Mar 2020 13:37:27 -0800 Subject: [PATCH 088/197] fix: harden storage test fixtures (#3039) * fix: improve UBLA test fixtures * fix: improve IAM test fixtures --- iam_test.py | 8 ++++++-- uniform_bucket_level_access_test.py | 10 ++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/iam_test.py b/iam_test.py index fa8b1d1b6..66acbdc80 100644 --- a/iam_test.py +++ b/iam_test.py @@ -16,6 +16,7 @@ import pytest import re import time +import uuid import storage_remove_bucket_iam_member import storage_add_bucket_iam_member @@ -32,8 +33,11 @@ @pytest.fixture def bucket(): - bucket_name = "test-iam-" + str(int(time.time())) - bucket = storage.Client().create_bucket(bucket_name) + bucket = None + while bucket is None or bucket.exists(): + bucket_name = "test-iam-{}".format(uuid.uuid4()) + bucket = storage.Client().bucket(bucket_name) + bucket.create() bucket.iam_configuration.uniform_bucket_level_access_enabled = True bucket.patch() yield bucket diff --git a/uniform_bucket_level_access_test.py b/uniform_bucket_level_access_test.py index 30ac50c7a..7b5fdbf94 100644 --- a/uniform_bucket_level_access_test.py +++ 
b/uniform_bucket_level_access_test.py @@ -25,10 +25,12 @@ @pytest.fixture() def bucket(): - """Creates a test bucket and deletes it upon completion.""" - client = storage.Client() - bucket_name = "uniform-bucket-level-access-" + str(int(time.time())) - bucket = client.create_bucket(bucket_name) + """Yields a bucket that is deleted after the test completes.""" + bucket = None + while bucket is None or bucket.exists(): + bucket_name = "uniform-bucket-level-access-{}".format(int(time.time())) + bucket = storage.Client().bucket(bucket_name) + bucket.create() yield bucket time.sleep(3) bucket.delete(force=True) From e14b9cdf761d25f2e6a07e1311b88a20b7e8effd Mon Sep 17 00:00:00 2001 From: Jake Stambaugh Date: Fri, 6 Mar 2020 16:29:25 -0500 Subject: [PATCH 089/197] storage: Fix docs for signed URL generation (#3008) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Christopher Wilcox --- storage_generate_upload_signed_url_v4.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storage_generate_upload_signed_url_v4.py b/storage_generate_upload_signed_url_v4.py index 4dcbc2a32..8dd4a8658 100644 --- a/storage_generate_upload_signed_url_v4.py +++ b/storage_generate_upload_signed_url_v4.py @@ -39,7 +39,7 @@ def generate_upload_signed_url_v4(bucket_name, blob_name): version="v4", # This URL is valid for 15 minutes expiration=datetime.timedelta(minutes=15), - # Allow GET requests using this URL. + # Allow PUT requests using this URL. method="PUT", content_type="application/octet-stream", ) From a39b28feabdf55607542552e4b97f56cf64936b4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 9 Mar 2020 18:29:59 +0100 Subject: [PATCH 090/197] chore(deps): update dependency google-cloud-storage to v1.26.0 (#3046) * chore(deps): update dependency google-cloud-storage to v1.26.0 * chore(deps): specify dependencies by python version * chore: up other deps to try to remove errors Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Co-authored-by: Leah Cole --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ab438508a..ede7da0e8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.1.0 -google-cloud-storage==1.25.0 +google-cloud-storage==1.26.0 From ac7d9d219bc0cc351b3ce8d90e1733f8cea06bf3 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Thu, 19 Mar 2020 16:46:25 -0700 Subject: [PATCH 091/197] feat: add remove conditional binding sample (#3107) * feat: add remove conditional binding sample * fix iam test fixture * fix silly mistake of removing all bindings * fix ubla test * address feedback * revert changes to tests --- iam_test.py | 41 +++++++++--- ...e_remove_bucket_conditional_iam_binding.py | 67 +++++++++++++++++++ 2 files changed, 98 insertions(+), 10 deletions(-) create mode 100644 storage_remove_bucket_conditional_iam_binding.py diff --git a/iam_test.py b/iam_test.py index 66acbdc80..ec130da0f 100644 --- a/iam_test.py +++ b/iam_test.py @@ -22,24 +22,27 @@ import storage_add_bucket_iam_member import storage_add_bucket_conditional_iam_binding import storage_view_bucket_iam_members +import storage_remove_bucket_conditional_iam_binding MEMBER = "group:dpebot@google.com" ROLE = "roles/storage.legacyBucketReader" CONDITION_TITLE = "match-prefix" CONDITION_DESCRIPTION = "Applies to objects matching a prefix" -CONDITION_EXPRESSION = "resource.name.startsWith(\"projects/_/buckets/bucket-name/objects/prefix-a-\")" +CONDITION_EXPRESSION = ( + 'resource.name.startsWith("projects/_/buckets/bucket-name/objects/prefix-a-")' +) @pytest.fixture def bucket(): bucket = None while bucket is None or bucket.exists(): + storage_client = storage.Client() bucket_name = "test-iam-{}".format(uuid.uuid4()) - bucket = storage.Client().bucket(bucket_name) - bucket.create() - bucket.iam_configuration.uniform_bucket_level_access_enabled = True - 
bucket.patch() + bucket = storage_client.bucket(bucket_name) + bucket.iam_configuration.uniform_bucket_level_access_enabled = True + storage_client.create_bucket(bucket) yield bucket time.sleep(3) bucket.delete(force=True) @@ -66,16 +69,17 @@ def test_add_bucket_conditional_iam_binding(bucket): CONDITION_TITLE, CONDITION_DESCRIPTION, CONDITION_EXPRESSION, - {MEMBER} + {MEMBER}, ) policy = bucket.get_iam_policy(requested_policy_version=3) assert any( - binding["role"] == ROLE and - binding["members"] == {MEMBER} and - binding["condition"] == { + binding["role"] == ROLE + and binding["members"] == {MEMBER} + and binding["condition"] + == { "title": CONDITION_TITLE, "description": CONDITION_DESCRIPTION, - "expression": CONDITION_EXPRESSION + "expression": CONDITION_EXPRESSION, } for binding in policy.bindings ) @@ -89,3 +93,20 @@ def test_remove_bucket_iam_member(bucket): binding["role"] == ROLE and MEMBER in binding["members"] for binding in policy.bindings ) + + +def test_remove_bucket_conditional_iam_binding(bucket): + storage_remove_bucket_conditional_iam_binding.remove_bucket_conditional_iam_binding( + bucket.name, ROLE, CONDITION_TITLE, CONDITION_DESCRIPTION, CONDITION_EXPRESSION + ) + + policy = bucket.get_iam_policy(requested_policy_version=3) + condition = { + "title": CONDITION_TITLE, + "description": CONDITION_DESCRIPTION, + "expression": CONDITION_EXPRESSION, + } + assert not any( + (binding["role"] == ROLE and binding.get("condition") == condition) + for binding in policy.bindings + ) diff --git a/storage_remove_bucket_conditional_iam_binding.py b/storage_remove_bucket_conditional_iam_binding.py new file mode 100644 index 000000000..242544d8e --- /dev/null +++ b/storage_remove_bucket_conditional_iam_binding.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_remove_bucket_conditional_iam_binding] +from google.cloud import storage + + +def remove_bucket_conditional_iam_binding( + bucket_name, role, title, description, expression +): + """Remove a conditional IAM binding from a bucket's IAM policy.""" + # bucket_name = "your-bucket-name" + # role = "IAM role, e.g. roles/storage.objectViewer" + # title = "Condition title." + # description = "Condition description." + # expression = "Condition expression." + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy(requested_policy_version=3) + + # Set the policy's version to 3 to use condition in bindings. + policy.version = 3 + + condition = { + "title": title, + "description": description, + "expression": expression, + } + policy.bindings = [ + binding + for binding in policy.bindings + if not (binding["role"] == role and binding.get("condition") == condition) + ] + + bucket.set_iam_policy(policy) + + print("Conditional Binding was removed.") + + +# [END storage_remove_bucket_conditional_iam_binding] + + +if __name__ == "__main__": + remove_bucket_conditional_iam_binding( + bucket_name=sys.argv[1], + role=sys.argv[2], + title=sys.argv[3], + description=sys.argv[4], + expression=sys.argv[5], + ) From a49107ef051b7539e8d705dd69a394095ca9a64f Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Wed, 1 Apr 2020 19:11:50 -0700 Subject: [PATCH 092/197] Simplify noxfile setup. 
(#2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. Co-authored-by: Renovate Bot --- requirements-test.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 requirements-test.txt diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 000000000..41c4d5110 --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1,2 @@ +pytest==5.3.2 +mock==3.0.5 From 806536a9e5faf0b59ca85ba119fcdb37a5a6b247 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Fri, 3 Apr 2020 14:29:47 -0700 Subject: [PATCH 093/197] [storage] feat: add post policy sample (#3231) * feat: add post policy sample * use 1.27.0 * fix * simplify iterator Co-authored-by: Jonathan Lui --- requirements.txt | 2 +- snippets_test.py | 17 +++++++ storage_generate_signed_post_policy_v4.py | 62 +++++++++++++++++++++++ 3 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 storage_generate_signed_post_policy_v4.py diff --git a/requirements.txt b/requirements.txt index ede7da0e8..05d4d995b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ 
google-cloud-pubsub==1.1.0 -google-cloud-storage==1.26.0 +google-cloud-storage==1.27.0 diff --git a/snippets_test.py b/snippets_test.py index 42e73944d..31cec8383 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -40,6 +40,7 @@ import storage_generate_signed_url_v2 import storage_generate_signed_url_v4 import storage_generate_upload_signed_url_v4 +import storage_generate_signed_post_policy_v4 import storage_set_bucket_default_kms_key KMS_KEY = os.environ["CLOUD_KMS_KEY"] @@ -202,6 +203,22 @@ def test_generate_upload_signed_url_v4(test_bucket, capsys): assert blob.download_as_string() == content +def test_generate_signed_policy_v4(test_bucket, capsys): + blob_name = "storage_snippets_test_form" + short_name = storage_generate_signed_post_policy_v4 + form = short_name.generate_signed_post_policy_v4( + test_bucket.name, blob_name + ) + assert "name='key' value='{}'".format(blob_name) in form + assert "name='x-goog-signature'" in form + assert "name='x-goog-date'" in form + assert "name='x-goog-credential'" in form + assert "name='x-goog-algorithm' value='GOOG4-RSA-SHA256'" in form + assert "name='policy'" in form + assert "name='x-goog-meta-test' value='data'" in form + assert "type='file' name='file'/>" in form + + def test_rename_blob(test_blob): bucket = storage.Client().bucket(test_blob.bucket.name) diff --git a/storage_generate_signed_post_policy_v4.py b/storage_generate_signed_post_policy_v4.py new file mode 100644 index 000000000..8d7e2e937 --- /dev/null +++ b/storage_generate_signed_post_policy_v4.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_generate_signed_post_policy_v4] +from google.cloud import storage +import datetime + + +def generate_signed_post_policy_v4(bucket_name, blob_name): + """Generates a v4 POST Policy and prints an HTML form.""" + # bucket_name = 'your-bucket-name' + # blob_name = 'your-object-name' + + storage_client = storage.Client() + + policy = storage_client.generate_signed_post_policy_v4( + bucket_name, + blob_name, + expiration=datetime.timedelta(minutes=10), + fields={ + 'x-goog-meta-test': 'data' + } + ) + + # Create an HTML form with the provided policy + header = "
\n" + form = header.format(policy["url"]) + + # Include all fields returned in the HTML form as they're required + for key, value in policy["fields"].items(): + form += " \n".format(key, value) + + form += "
\n" + form += "
\n" + form += "
" + + print(form) + + return form + + +# [END storage_generate_signed_post_policy_v4] + +if __name__ == "__main__": + generate_signed_post_policy_v4( + bucket_name=sys.argv[1], blob_name=sys.argv[2] + ) From 886117a4b192830f5fa40d0bc3077b772a996770 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 24 Apr 2020 12:28:38 -0700 Subject: [PATCH 094/197] Update dependency google-cloud-pubsub to v1.4.2 in Storage and Pub/Sub (#3343) --- notification_polling_test.py | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notification_polling_test.py b/notification_polling_test.py index 0f8329fbe..692f2d6ce 100644 --- a/notification_polling_test.py +++ b/notification_polling_test.py @@ -42,7 +42,7 @@ def test_parse_json_message(): b"}" ) message = Message( - mock.Mock(data=data, attributes=attributes), MESSAGE_ID, mock.Mock() + mock.Mock(data=data, attributes=attributes), MESSAGE_ID, delivery_attempt=0, request_queue=mock.Mock() ) assert summarize(message) == ( "\tEvent type: OBJECT_FINALIZE\n" diff --git a/requirements.txt b/requirements.txt index 05d4d995b..24a4d8ae7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.1.0 +google-cloud-pubsub==1.4.2 google-cloud-storage==1.27.0 From de19d534115eeb9e028569dbf85dd580af9849dc Mon Sep 17 00:00:00 2001 From: Antonio Matarrese Date: Wed, 29 Apr 2020 04:32:18 +0200 Subject: [PATCH 095/197] Remove name attribute from the input (#3569) If name='submit' is specified for the input type='submit' the endpoint returns the following error: InvalidPolicyDocument The content of the form does not meet the conditions specified in the policy document.
Policy did not reference these fields: submit
Co-authored-by: Takashi Matsuo --- storage_generate_signed_post_policy_v4.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storage_generate_signed_post_policy_v4.py b/storage_generate_signed_post_policy_v4.py index 8d7e2e937..8f5757450 100644 --- a/storage_generate_signed_post_policy_v4.py +++ b/storage_generate_signed_post_policy_v4.py @@ -46,7 +46,7 @@ def generate_signed_post_policy_v4(bucket_name, blob_name): form += " \n".format(key, value) form += "
\n" - form += "
\n" + form += "
\n" form += "" print(form) From b2f7edd0a4b9488e1e33bc0e760907eb9916a59b Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Tue, 28 Apr 2020 19:38:45 -0700 Subject: [PATCH 096/197] [storage] fix: use unique blob name (#3568) * [storage] fix: use unique blob name fixes #3567 * add some comments --- encryption_test.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/encryption_test.py b/encryption_test.py index 40eab55a4..3f85867a4 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -15,7 +15,9 @@ import base64 import os import tempfile +import uuid +from google.api_core.exceptions import NotFound from google.cloud import storage from google.cloud.storage import Blob import pytest @@ -54,18 +56,36 @@ def test_upload_encrypted_blob(): ) -@pytest.fixture +@pytest.fixture(scope="module") def test_blob(): """Provides a pre-existing blob in the test bucket.""" bucket = storage.Client().bucket(BUCKET) + blob_name = "test_blob_{}".format(uuid.uuid4().hex) blob = Blob( - "encryption_test_sigil", + blob_name, bucket, encryption_key=TEST_ENCRYPTION_KEY_DECODED, ) content = "Hello, is it me you're looking for?" blob.upload_from_string(content) - return blob.name, content + + yield blob.name, content + + # To delete an encrypted blob, you have to provide the same key + # used for the blob. When you provide a wrong key, you'll get + # NotFound. + try: + # Clean up for the case that the rotation didn't occur. + blob.delete() + except NotFound as e: + # For the case that the rotation succeeded. 
+ print("Ignoring 404, detail: {}".format(e)) + blob = Blob( + blob_name, + bucket, + encryption_key=TEST_ENCRYPTION_KEY_2_DECODED + ) + blob.delete() def test_download_blob(test_blob): From f4df95c707c297a9f349067b628c7a12b460ba12 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 29 Apr 2020 07:26:36 +0200 Subject: [PATCH 097/197] chore(deps): update dependency google-cloud-storage to v1.28.0 (#3260) Co-authored-by: Takashi Matsuo --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 24a4d8ae7..232a4848a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.4.2 -google-cloud-storage==1.27.0 +google-cloud-storage==1.28.0 From d9014d1f7a18c7f19b05e714e7974dfda5fe5263 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Thu, 30 Apr 2020 15:30:44 -0700 Subject: [PATCH 098/197] [storage] fix: use a different bucket for requester_pays_test (#3655) * [storage] fix: use a different bucket for requester_pays_test fixes #3654 * rename to README.md, added the envvar to the template * add REQUESTER_PAYS_TEST_BUCKET env var * just use REQUESTER_PAYS_TEST_BUCKET --- README.md | 10 ++++++++++ requester_pays_test.py | 4 +++- 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 000000000..3d7e3664f --- /dev/null +++ b/README.md @@ -0,0 +1,10 @@ + +For requester_pays_test.py, we need to use a different Storage bucket. + +The test looks for an environment variable `REQUESTER_PAYS_TEST_BUCKET`. + +Also, the service account for the test needs to have `Billing Project +Manager` role in order to make changes on buckets with requester pays +enabled. + +We added that role to the test service account. 
diff --git a/requester_pays_test.py b/requester_pays_test.py index 70a4b2002..f034ad028 100644 --- a/requester_pays_test.py +++ b/requester_pays_test.py @@ -23,7 +23,9 @@ import storage_download_file_requester_pays import storage_get_requester_pays_status -BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] + +# We use a different bucket from other tests. +BUCKET = os.environ["REQUESTER_PAYS_TEST_BUCKET"] PROJECT = os.environ["GCLOUD_PROJECT"] From fff3b999d1362a0c3e94dc47f2d3522e915bcc80 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Tue, 5 May 2020 01:05:08 +0530 Subject: [PATCH 099/197] docs(storage): add samples for lifer cycle and versioning (#3578) * docs(storage): add samples for lifer cycle and versioning * docs(storage): nits * docs(storage): lint fix Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- snippets_test.py | 38 ++++++++++++++++ ...age_disable_bucket_lifecycle_management.py | 41 +++++++++++++++++ storage_disable_versioning.py | 40 +++++++++++++++++ storage_enable_bucket_lifecycle_management.py | 45 +++++++++++++++++++ storage_enable_versioning.py | 40 +++++++++++++++++ storage_set_metadata.py | 40 +++++++++++++++++ 6 files changed, 244 insertions(+) create mode 100644 storage_disable_bucket_lifecycle_management.py create mode 100644 storage_disable_versioning.py create mode 100644 storage_enable_bucket_lifecycle_management.py create mode 100644 storage_enable_versioning.py create mode 100644 storage_set_metadata.py diff --git a/snippets_test.py b/snippets_test.py index 31cec8383..443f07669 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -29,6 +29,7 @@ import storage_get_bucket_labels import storage_get_bucket_metadata import storage_get_metadata +import storage_set_metadata import storage_list_buckets import storage_list_files_with_prefix import storage_list_files @@ -42,6 +43,10 @@ import storage_generate_upload_signed_url_v4 import 
storage_generate_signed_post_policy_v4 import storage_set_bucket_default_kms_key +import storage_enable_versioning +import storage_disable_versioning +import storage_enable_bucket_lifecycle_management +import storage_disable_bucket_lifecycle_management KMS_KEY = os.environ["CLOUD_KMS_KEY"] @@ -156,6 +161,12 @@ def test_blob_metadata(test_blob, capsys): assert test_blob.name in out +def test_set_blob_metadata(test_blob, capsys): + storage_set_metadata.set_blob_metadata(test_blob.bucket.name, test_blob.name) + out, _ = capsys.readouterr() + assert test_blob.name in out + + def test_delete_blob(test_blob): storage_delete_file.delete_blob(test_blob.bucket.name, test_blob.name) @@ -249,3 +260,30 @@ def test_copy_blob(test_blob): assert bucket.get_blob("test_copy_blob") is not None assert bucket.get_blob(test_blob.name) is not None + + +def test_versioning(test_bucket, capsys): + bucket = storage_enable_versioning.enable_versioning(test_bucket) + out, _ = capsys.readouterr() + assert "Versioning was enabled for bucket" in out + assert bucket.versioning_enabled is True + + bucket = storage_disable_versioning.disable_versioning(test_bucket) + out, _ = capsys.readouterr() + assert "Versioning was disabled for bucket" in out + assert bucket.versioning_enabled is False + + +def test_bucket_lifecycle_management(test_bucket, capsys): + bucket = storage_enable_bucket_lifecycle_management.\ + enable_bucket_lifecycle_management(test_bucket) + out, _ = capsys.readouterr() + assert "[]" in out + assert "Lifecycle management is enable" in out + assert len(list(bucket.lifecycle_rules)) > 0 + + bucket = storage_disable_bucket_lifecycle_management.\ + disable_bucket_lifecycle_management(test_bucket) + out, _ = capsys.readouterr() + assert "[]" in out + assert len(list(bucket.lifecycle_rules)) == 0 diff --git a/storage_disable_bucket_lifecycle_management.py b/storage_disable_bucket_lifecycle_management.py new file mode 100644 index 000000000..9ef6971fb --- /dev/null +++ 
b/storage_disable_bucket_lifecycle_management.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_disable_bucket_lifecycle_management] +from google.cloud import storage + + +def disable_bucket_lifecycle_management(bucket_name): + """Disable lifecycle management for a bucket""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.clear_lifecyle_rules() + bucket.patch() + rules = bucket.lifecycle_rules + + print("Lifecycle management is disable for bucket {} and the rules are {}".format(bucket_name, list(rules))) + return bucket + + +# [END storage_disable_bucket_lifecycle_management] + +if __name__ == "__main__": + disable_bucket_lifecycle_management(bucket_name=sys.argv[1]) diff --git a/storage_disable_versioning.py b/storage_disable_versioning.py new file mode 100644 index 000000000..bba4f7c07 --- /dev/null +++ b/storage_disable_versioning.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_disable_versioning] +from google.cloud import storage + + +def disable_versioning(bucket_name): + """Disable versioning for this bucket.""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.versioning_enabled = False + bucket.patch() + + print("Versioning was disabled for bucket {}".format(bucket)) + return bucket + + +# [END storage_enable_versioning] + +if __name__ == "__main__": + disable_versioning(bucket_name=sys.argv[1]) diff --git a/storage_enable_bucket_lifecycle_management.py b/storage_enable_bucket_lifecycle_management.py new file mode 100644 index 000000000..61c7d7b20 --- /dev/null +++ b/storage_enable_bucket_lifecycle_management.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_enable_bucket_lifecycle_management] +from google.cloud import storage + + +def enable_bucket_lifecycle_management(bucket_name): + """Enable lifecycle management for a bucket""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + rules = bucket.lifecycle_rules + + print("Lifecycle management rules for bucket {} are {}".format(bucket_name, list(rules))) + bucket.add_lifecycle_delete_rule(age=2) + bucket.patch() + + rules = bucket.lifecycle_rules + print("Lifecycle management is enable for bucket {} and the rules are {}".format(bucket_name, list(rules))) + + return bucket + + +# [END storage_enable_bucket_lifecycle_management] + +if __name__ == "__main__": + enable_bucket_lifecycle_management(bucket_name=sys.argv[1]) diff --git a/storage_enable_versioning.py b/storage_enable_versioning.py new file mode 100644 index 000000000..89693e426 --- /dev/null +++ b/storage_enable_versioning.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_enable_versioning] +from google.cloud import storage + + +def enable_versioning(bucket_name): + """Enable versioning for this bucket.""" + # bucket_name = "my-bucket" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.versioning_enabled = True + bucket.patch() + + print("Versioning was enabled for bucket {}".format(bucket.name)) + return bucket + + +# [END storage_enable_versioning] + +if __name__ == "__main__": + enable_versioning(bucket_name=sys.argv[1]) diff --git a/storage_set_metadata.py b/storage_set_metadata.py new file mode 100644 index 000000000..d8c77fa5f --- /dev/null +++ b/storage_set_metadata.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_metadata] +from google.cloud import storage + + +def set_blob_metadata(bucket_name, blob_name): + """Set a blob's metadata.""" + # bucket_name = 'your-bucket-name' + # blob_name = 'your-object-name' + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.get_blob(blob_name) + metadata = {'color': 'Red', 'name': 'Test'} + blob.metadata = metadata + + print("The metadata for the blob {} is {}".format(blob.name, blob.metadata)) + + +# [END storage_get_metadata] + +if __name__ == "__main__": + set_blob_metadata(bucket_name=sys.argv[1], blob_name=sys.argv[2]) From 2d338b5eb3da95ab35caac92601b7707be839c1d Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Tue, 12 May 2020 18:22:36 -0700 Subject: [PATCH 100/197] chore: some lint fixes (#3750) --- acl_test.py | 14 ++++++------ bucket_lock_test.py | 16 +++++++------- encryption_test.py | 3 ++- hmac_samples_test.py | 2 +- iam_test.py | 12 ++++++----- requester_pays_test.py | 2 +- snippets_test.py | 26 +++++++++++------------ storage_add_bucket_label.py | 7 ++++-- storage_download_encrypted_file.py | 6 ++++-- storage_generate_signed_post_policy_v4.py | 7 ++++-- storage_generate_signed_url_v2.py | 6 ++++-- storage_generate_signed_url_v4.py | 7 ++++-- storage_generate_upload_signed_url_v4.py | 7 ++++-- storage_get_bucket_labels.py | 7 ++++-- storage_get_bucket_metadata.py | 7 ++++-- storage_remove_bucket_label.py | 7 ++++-- storage_rotate_encryption_key.py | 7 ++++-- storage_upload_encrypted_file.py | 7 ++++-- uniform_bucket_level_access_test.py | 3 +-- 19 files changed, 93 insertions(+), 60 deletions(-) diff --git a/acl_test.py b/acl_test.py index 157f399ca..469d364ce 100644 --- a/acl_test.py +++ b/acl_test.py @@ -17,16 +17,16 @@ from google.cloud import storage import pytest -import storage_remove_file_owner -import storage_remove_bucket_owner -import storage_remove_bucket_default_owner -import storage_add_file_owner -import 
storage_add_bucket_owner import storage_add_bucket_default_owner -import storage_print_bucket_acl_for_user +import storage_add_bucket_owner +import storage_add_file_owner import storage_print_bucket_acl -import storage_print_file_acl_for_user +import storage_print_bucket_acl_for_user import storage_print_file_acl +import storage_print_file_acl_for_user +import storage_remove_bucket_default_owner +import storage_remove_bucket_owner +import storage_remove_file_owner # Typically we'd use a @example.com address, but GCS requires a real Google # account. diff --git a/bucket_lock_test.py b/bucket_lock_test.py index 98a95835c..99c5e1e56 100644 --- a/bucket_lock_test.py +++ b/bucket_lock_test.py @@ -16,20 +16,20 @@ import uuid from google.cloud import storage - import pytest -import storage_set_retention_policy -import storage_lock_retention_policy -import storage_get_retention_policy -import storage_get_default_event_based_hold -import storage_enable_default_event_based_hold import storage_disable_default_event_based_hold -import storage_set_event_based_hold +import storage_enable_default_event_based_hold +import storage_get_default_event_based_hold +import storage_get_retention_policy +import storage_lock_retention_policy import storage_release_event_based_hold -import storage_set_temporary_hold import storage_release_temporary_hold import storage_remove_retention_policy +import storage_set_event_based_hold +import storage_set_retention_policy +import storage_set_temporary_hold + BLOB_NAME = "storage_snippets_test_sigil" BLOB_CONTENT = "Hello, is it me you're looking for?" 
diff --git a/encryption_test.py b/encryption_test.py index 3f85867a4..5f24bb7eb 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -24,8 +24,9 @@ import storage_download_encrypted_file import storage_generate_encryption_key -import storage_upload_encrypted_file import storage_rotate_encryption_key +import storage_upload_encrypted_file + BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] diff --git a/hmac_samples_test.py b/hmac_samples_test.py index 9712ed159..f9fcb7509 100644 --- a/hmac_samples_test.py +++ b/hmac_samples_test.py @@ -20,8 +20,8 @@ import os -from google.cloud import storage import google.api_core.exceptions +from google.cloud import storage import pytest import storage_activate_hmac_key diff --git a/iam_test.py b/iam_test.py index ec130da0f..fe7aa7202 100644 --- a/iam_test.py +++ b/iam_test.py @@ -12,17 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud import storage -import pytest import re import time import uuid -import storage_remove_bucket_iam_member -import storage_add_bucket_iam_member +from google.cloud import storage +import pytest + import storage_add_bucket_conditional_iam_binding -import storage_view_bucket_iam_members +import storage_add_bucket_iam_member import storage_remove_bucket_conditional_iam_binding +import storage_remove_bucket_iam_member +import storage_view_bucket_iam_members + MEMBER = "group:dpebot@google.com" ROLE = "roles/storage.legacyBucketReader" diff --git a/requester_pays_test.py b/requester_pays_test.py index f034ad028..7511be739 100644 --- a/requester_pays_test.py +++ b/requester_pays_test.py @@ -19,8 +19,8 @@ import pytest import storage_disable_requester_pays -import storage_enable_requester_pays import storage_download_file_requester_pays +import storage_enable_requester_pays import storage_get_requester_pays_status diff --git a/snippets_test.py b/snippets_test.py index 443f07669..678b6a6b0 100644 --- a/snippets_test.py +++ 
b/snippets_test.py @@ -22,31 +22,31 @@ import pytest import requests -import storage_copy_file import storage_add_bucket_label +import storage_copy_file import storage_delete_file +import storage_disable_bucket_lifecycle_management +import storage_disable_versioning import storage_download_file +import storage_enable_bucket_lifecycle_management +import storage_enable_versioning +import storage_generate_signed_post_policy_v4 +import storage_generate_signed_url_v2 +import storage_generate_signed_url_v4 +import storage_generate_upload_signed_url_v4 import storage_get_bucket_labels import storage_get_bucket_metadata import storage_get_metadata -import storage_set_metadata import storage_list_buckets -import storage_list_files_with_prefix import storage_list_files +import storage_list_files_with_prefix import storage_make_public -import storage_remove_bucket_label import storage_move_file +import storage_remove_bucket_label +import storage_set_bucket_default_kms_key +import storage_set_metadata import storage_upload_file import storage_upload_with_kms_key -import storage_generate_signed_url_v2 -import storage_generate_signed_url_v4 -import storage_generate_upload_signed_url_v4 -import storage_generate_signed_post_policy_v4 -import storage_set_bucket_default_kms_key -import storage_enable_versioning -import storage_disable_versioning -import storage_enable_bucket_lifecycle_management -import storage_disable_bucket_lifecycle_management KMS_KEY = os.environ["CLOUD_KMS_KEY"] diff --git a/storage_add_bucket_label.py b/storage_add_bucket_label.py index a021036c6..8ae8fe1f4 100644 --- a/storage_add_bucket_label.py +++ b/storage_add_bucket_label.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys # [START storage_add_bucket_label] -from google.cloud import storage import pprint +# [END storage_add_bucket_label] +import sys +# [START storage_add_bucket_label] + +from google.cloud import storage def add_bucket_label(bucket_name): diff --git a/storage_download_encrypted_file.py b/storage_download_encrypted_file.py index c0b3ad560..ac7071fbe 100644 --- a/storage_download_encrypted_file.py +++ b/storage_download_encrypted_file.py @@ -14,11 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +# [START storage_download_encrypted_file] +import base64 +# [END storage_download_encrypted_file] import sys - # [START storage_download_encrypted_file] + from google.cloud import storage -import base64 def download_encrypted_blob( diff --git a/storage_generate_signed_post_policy_v4.py b/storage_generate_signed_post_policy_v4.py index 8f5757450..8217714e2 100644 --- a/storage_generate_signed_post_policy_v4.py +++ b/storage_generate_signed_post_policy_v4.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START storage_generate_signed_post_policy_v4] -from google.cloud import storage import datetime +# [END storage_generate_signed_post_policy_v4] +import sys +# [START storage_generate_signed_post_policy_v4] + +from google.cloud import storage def generate_signed_post_policy_v4(bucket_name, blob_name): diff --git a/storage_generate_signed_url_v2.py b/storage_generate_signed_url_v2.py index d594e91ad..abea3dd54 100644 --- a/storage_generate_signed_url_v2.py +++ b/storage_generate_signed_url_v2.py @@ -14,11 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# [START storage_generate_signed_url_v2] +import datetime +# [END storage_generate_signed_url_v2] import sys - # [START storage_generate_signed_url_v2] + from google.cloud import storage -import datetime def generate_signed_url(bucket_name, blob_name): diff --git a/storage_generate_signed_url_v4.py b/storage_generate_signed_url_v4.py index a6647e73b..2a45b23e9 100644 --- a/storage_generate_signed_url_v4.py +++ b/storage_generate_signed_url_v4.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START storage_generate_signed_url_v4] -from google.cloud import storage import datetime +# [END storage_generate_signed_url_v4] +import sys +# [START storage_generate_signed_url_v4] + +from google.cloud import storage def generate_download_signed_url_v4(bucket_name, blob_name): diff --git a/storage_generate_upload_signed_url_v4.py b/storage_generate_upload_signed_url_v4.py index 8dd4a8658..dc1da8864 100644 --- a/storage_generate_upload_signed_url_v4.py +++ b/storage_generate_upload_signed_url_v4.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START storage_generate_upload_signed_url_v4] -from google.cloud import storage import datetime +# [END storage_generate_upload_signed_url_v4] +import sys +# [START storage_generate_upload_signed_url_v4] + +from google.cloud import storage def generate_upload_signed_url_v4(bucket_name, blob_name): diff --git a/storage_get_bucket_labels.py b/storage_get_bucket_labels.py index 531fae22e..b3bcd6208 100644 --- a/storage_get_bucket_labels.py +++ b/storage_get_bucket_labels.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys # [START storage_get_bucket_labels] -from google.cloud import storage import pprint +# [END storage_get_bucket_labels] +import sys +# [START storage_get_bucket_labels] + +from google.cloud import storage def get_bucket_labels(bucket_name): diff --git a/storage_get_bucket_metadata.py b/storage_get_bucket_metadata.py index de724fc0a..0b17ce716 100644 --- a/storage_get_bucket_metadata.py +++ b/storage_get_bucket_metadata.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START storage_get_bucket_metadata] -from google.cloud import storage import pprint +# [END storage_get_bucket_metadata] +import sys +# [START storage_get_bucket_metadata] + +from google.cloud import storage def bucket_metadata(bucket_name): diff --git a/storage_remove_bucket_label.py b/storage_remove_bucket_label.py index d39c03f24..58bbfef2d 100644 --- a/storage_remove_bucket_label.py +++ b/storage_remove_bucket_label.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START storage_remove_bucket_label] -from google.cloud import storage import pprint +# [END storage_remove_bucket_label] +import sys +# [START storage_remove_bucket_label] + +from google.cloud import storage def remove_bucket_label(bucket_name): diff --git a/storage_rotate_encryption_key.py b/storage_rotate_encryption_key.py index f66c8f8a0..663ee4796 100644 --- a/storage_rotate_encryption_key.py +++ b/storage_rotate_encryption_key.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys # [START storage_rotate_encryption_key] -from google.cloud import storage import base64 +# [END storage_rotate_encryption_key] +import sys +# [START storage_rotate_encryption_key] + +from google.cloud import storage def rotate_encryption_key( diff --git a/storage_upload_encrypted_file.py b/storage_upload_encrypted_file.py index e016da759..e7d02c67b 100644 --- a/storage_upload_encrypted_file.py +++ b/storage_upload_encrypted_file.py @@ -14,11 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START storage_upload_encrypted_file] -from google.cloud import storage import base64 +# [END storage_upload_encrypted_file] +import sys +# [START storage_upload_encrypted_file] + +from google.cloud import storage def upload_encrypted_blob( diff --git a/uniform_bucket_level_access_test.py b/uniform_bucket_level_access_test.py index 7b5fdbf94..9d1e8e0b4 100644 --- a/uniform_bucket_level_access_test.py +++ b/uniform_bucket_level_access_test.py @@ -15,12 +15,11 @@ import time from google.cloud import storage - import pytest -import storage_get_uniform_bucket_level_access import storage_disable_uniform_bucket_level_access import storage_enable_uniform_bucket_level_access +import storage_get_uniform_bucket_level_access @pytest.fixture() From ef9c89d30721be527b9421a6c18b9abdad6a3d3a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 May 2020 07:31:59 +0200 Subject: [PATCH 101/197] chore(deps): update dependency google-cloud-pubsub to v1.4.3 (#3725) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Takashi Matsuo --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 232a4848a..9aa4e8ddb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.4.2 +google-cloud-pubsub==1.4.3 google-cloud-storage==1.28.0 From 
80d2a25c30832e3fa1506b7165100ff6c92c68d4 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Fri, 15 May 2020 22:40:23 +0530 Subject: [PATCH 102/197] docs(storage): add samples (#3687) --- encryption_test.py | 12 +- iam_test.py | 15 ++- snippets_test.py | 121 ++++++++++++++++-- storage_bucket_delete_default_kms_key.py | 40 ++++++ storage_compose_file.py | 48 +++++++ storage_create_bucket_class_location.py | 44 +++++++ ...age_define_bucket_website_configuration.py | 50 ++++++++ storage_download_public_file.py | 49 +++++++ storage_get_service_account.py | 37 ++++++ storage_object_csek_to_cmek.py | 59 +++++++++ storage_object_get_kms_key.py | 42 ++++++ storage_set_bucket_public_iam.py | 45 +++++++ 12 files changed, 547 insertions(+), 15 deletions(-) create mode 100644 storage_bucket_delete_default_kms_key.py create mode 100644 storage_compose_file.py create mode 100644 storage_create_bucket_class_location.py create mode 100644 storage_define_bucket_website_configuration.py create mode 100644 storage_download_public_file.py create mode 100644 storage_get_service_account.py create mode 100644 storage_object_csek_to_cmek.py create mode 100644 storage_object_get_kms_key.py create mode 100644 storage_set_bucket_public_iam.py diff --git a/encryption_test.py b/encryption_test.py index 5f24bb7eb..6c2377e0f 100644 --- a/encryption_test.py +++ b/encryption_test.py @@ -24,11 +24,12 @@ import storage_download_encrypted_file import storage_generate_encryption_key +import storage_object_csek_to_cmek import storage_rotate_encryption_key import storage_upload_encrypted_file - BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] +KMS_KEY = os.environ["CLOUD_KMS_KEY"] TEST_ENCRYPTION_KEY = "brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0=" TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY) @@ -113,3 +114,12 @@ def test_rotate_encryption_key(test_blob): downloaded_content = dest_file.read().decode("utf-8") assert downloaded_content 
== test_blob_content + + +def test_object_csek_to_cmek(test_blob): + test_blob_name, test_blob_content = test_blob + cmek_blob = storage_object_csek_to_cmek.object_csek_to_cmek( + BUCKET, test_blob_name, TEST_ENCRYPTION_KEY_2, KMS_KEY + ) + + assert cmek_blob.download_as_string(), test_blob_content diff --git a/iam_test.py b/iam_test.py index fe7aa7202..5186eb190 100644 --- a/iam_test.py +++ b/iam_test.py @@ -23,9 +23,9 @@ import storage_add_bucket_iam_member import storage_remove_bucket_conditional_iam_binding import storage_remove_bucket_iam_member +import storage_set_bucket_public_iam import storage_view_bucket_iam_members - MEMBER = "group:dpebot@google.com" ROLE = "roles/storage.legacyBucketReader" @@ -112,3 +112,16 @@ def test_remove_bucket_conditional_iam_binding(bucket): (binding["role"] == ROLE and binding.get("condition") == condition) for binding in policy.bindings ) + + +def test_set_bucket_public_iam(bucket): + role = "roles/storage.objectViewer" + member = "allUsers" + storage_set_bucket_public_iam.set_bucket_public_iam( + bucket.name, role, member + ) + policy = bucket.get_iam_policy(requested_policy_version=3) + assert any( + binding["role"] == role and member in binding["members"] + for binding in policy.bindings + ) diff --git a/snippets_test.py b/snippets_test.py index 678b6a6b0..3b2570d4c 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -23,11 +23,16 @@ import requests import storage_add_bucket_label +import storage_bucket_delete_default_kms_key +import storage_compose_file import storage_copy_file +import storage_create_bucket_class_location +import storage_define_bucket_website_configuration import storage_delete_file import storage_disable_bucket_lifecycle_management import storage_disable_versioning import storage_download_file +import storage_download_public_file import storage_enable_bucket_lifecycle_management import storage_enable_versioning import storage_generate_signed_post_policy_v4 @@ -37,11 +42,13 @@ import 
storage_get_bucket_labels import storage_get_bucket_metadata import storage_get_metadata +import storage_get_service_account import storage_list_buckets import storage_list_files import storage_list_files_with_prefix import storage_make_public import storage_move_file +import storage_object_get_kms_key import storage_remove_bucket_label import storage_set_bucket_default_kms_key import storage_set_metadata @@ -100,6 +107,17 @@ def test_blob(test_bucket): yield blob +@pytest.fixture +def test_bucket_create(): + """Yields a bucket object that is deleted after the test completes.""" + bucket = None + while bucket is None or bucket.exists(): + bucket_name = "storage-snippets-test-{}".format(uuid.uuid4()) + bucket = storage.Client().bucket(bucket_name) + yield bucket + bucket.delete(force=True) + + def test_list_buckets(test_bucket, capsys): storage_list_buckets.list_buckets() out, _ = capsys.readouterr() @@ -204,9 +222,7 @@ def test_generate_upload_signed_url_v4(test_bucket, capsys): ) requests.put( - url, - data=content, - headers={"content-type": "application/octet-stream"}, + url, data=content, headers={"content-type": "application/octet-stream"}, ) bucket = storage.Client().bucket(test_bucket.name) @@ -217,9 +233,7 @@ def test_generate_upload_signed_url_v4(test_bucket, capsys): def test_generate_signed_policy_v4(test_bucket, capsys): blob_name = "storage_snippets_test_form" short_name = storage_generate_signed_post_policy_v4 - form = short_name.generate_signed_post_policy_v4( - test_bucket.name, blob_name - ) + form = short_name.generate_signed_post_policy_v4(test_bucket.name, blob_name) assert "name='key' value='{}'".format(blob_name) in form assert "name='x-goog-signature'" in form assert "name='x-goog-date'" in form @@ -238,9 +252,7 @@ def test_rename_blob(test_blob): except google.cloud.exceptions.exceptions.NotFound: pass - storage_move_file.rename_blob( - bucket.name, test_blob.name, "test_rename_blob" - ) + storage_move_file.rename_blob(bucket.name, 
test_blob.name, "test_rename_blob") assert bucket.get_blob("test_rename_blob") is not None assert bucket.get_blob(test_blob.name) is None @@ -275,15 +287,98 @@ def test_versioning(test_bucket, capsys): def test_bucket_lifecycle_management(test_bucket, capsys): - bucket = storage_enable_bucket_lifecycle_management.\ - enable_bucket_lifecycle_management(test_bucket) + bucket = storage_enable_bucket_lifecycle_management.enable_bucket_lifecycle_management( + test_bucket + ) out, _ = capsys.readouterr() assert "[]" in out assert "Lifecycle management is enable" in out assert len(list(bucket.lifecycle_rules)) > 0 - bucket = storage_disable_bucket_lifecycle_management.\ - disable_bucket_lifecycle_management(test_bucket) + bucket = storage_disable_bucket_lifecycle_management.disable_bucket_lifecycle_management( + test_bucket + ) out, _ = capsys.readouterr() assert "[]" in out assert len(list(bucket.lifecycle_rules)) == 0 + + +def test_create_bucket_class_location(test_bucket_create): + bucket = storage_create_bucket_class_location.create_bucket_class_location( + test_bucket_create.name + ) + + assert bucket.location == "US" + assert bucket.storage_class == "COLDLINE" + + +def test_bucket_delete_default_kms_key(test_bucket, capsys): + test_bucket.default_kms_key_name = KMS_KEY + test_bucket.patch() + + assert test_bucket.default_kms_key_name == KMS_KEY + + bucket = storage_bucket_delete_default_kms_key.bucket_delete_default_kms_key( + test_bucket.name + ) + + out, _ = capsys.readouterr() + assert bucket.default_kms_key_name is None + assert bucket.name in out + + +def test_get_service_account(capsys): + storage_get_service_account.get_service_account() + + out, _ = capsys.readouterr() + + assert "@gs-project-accounts.iam.gserviceaccount.com" in out + + +def test_download_public_file(test_blob): + storage_make_public.make_blob_public(test_blob.bucket.name, test_blob.name) + with tempfile.NamedTemporaryFile() as dest_file: + storage_download_public_file.download_public_file( 
+ test_blob.bucket.name, test_blob.name, dest_file.name + ) + + assert dest_file.read() == b"Hello, is it me you're looking for?" + + +def test_define_bucket_website_configuration(test_bucket): + bucket = storage_define_bucket_website_configuration.define_bucket_website_configuration( + test_bucket.name, "index.html", "404.html" + ) + + website_val = {"mainPageSuffix": "index.html", "notFoundPage": "404.html"} + + assert bucket._properties["website"] == website_val + + +def test_object_get_kms_key(test_bucket): + with tempfile.NamedTemporaryFile() as source_file: + storage_upload_with_kms_key.upload_blob_with_kms( + test_bucket.name, source_file.name, "test_upload_blob_encrypted", KMS_KEY + ) + kms_key = storage_object_get_kms_key.object_get_kms_key( + test_bucket.name, "test_upload_blob_encrypted" + ) + + assert kms_key.startswith(KMS_KEY) + + +def test_storage_compose_file(test_bucket): + source_files = ["test_upload_blob_1", "test_upload_blob_2"] + blob_list = [] + for source in source_files: + blob = test_bucket.blob(source) + blob.upload_from_string(source) + blob_list.append(blob) + + with tempfile.NamedTemporaryFile() as dest_file: + destination = storage_compose_file.compose_file( + test_bucket.name, blob_list, dest_file.name + ) + composed = destination.download_as_string() + + assert composed.decode("utf-8") == source_files[0] + source_files[1] diff --git a/storage_bucket_delete_default_kms_key.py b/storage_bucket_delete_default_kms_key.py new file mode 100644 index 000000000..3df23767d --- /dev/null +++ b/storage_bucket_delete_default_kms_key.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_bucket_delete_default_kms_key] +from google.cloud import storage + + +def bucket_delete_default_kms_key(bucket_name): + """Delete a default KMS key of bucket""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.default_kms_key_name = None + bucket.patch() + + print("Default KMS key was removed from {}".format(bucket.name)) + return bucket + + +# [END storage_bucket_delete_default_kms_key] + +if __name__ == "__main__": + bucket_delete_default_kms_key(bucket_name=sys.argv[1]) diff --git a/storage_compose_file.py b/storage_compose_file.py new file mode 100644 index 000000000..234bf3f68 --- /dev/null +++ b/storage_compose_file.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_compose_file] +from google.cloud import storage + + +def compose_file(bucket_name, sources, destination_blob_name): + """Concatenate source blobs into destination blob.""" + # bucket_name = "your-bucket-name" + # sources = [blob_1, blob_2] + # destination_blob_name = "destination-object-name" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + destination = bucket.blob(destination_blob_name) + destination.content_type = "text/plain" + destination.compose(sources) + + print( + "Composed new object {} in the bucket {}".format( + destination_blob_name, bucket.name + ) + ) + return destination + + +# [END storage_compose_file] + +if __name__ == "__main__": + compose_file( + bucket_name=sys.argv[1], sources=sys.argv[2], destination_blob_name=sys.argv[3], + ) diff --git a/storage_create_bucket_class_location.py b/storage_create_bucket_class_location.py new file mode 100644 index 000000000..64c2652d7 --- /dev/null +++ b/storage_create_bucket_class_location.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_create_bucket_class_location] +from google.cloud import storage + + +def create_bucket_class_location(bucket_name): + """Create a new bucket in specific location with storage class""" + # bucket_name = "your-new-bucket-name" + + storage_client = storage.Client() + + bucket = storage_client.bucket(bucket_name) + bucket.storage_class = "COLDLINE" + new_bucket = storage_client.create_bucket(bucket, location="us") + + print( + "Created bucket {} in {} with storage class {}".format( + new_bucket.name, new_bucket.location, new_bucket.storage_class + ) + ) + return new_bucket + + +# [END storage_create_bucket_class_location] + +if __name__ == "__main__": + create_bucket_class_location(bucket_name=sys.argv[1]) diff --git a/storage_define_bucket_website_configuration.py b/storage_define_bucket_website_configuration.py new file mode 100644 index 000000000..ce6c7e66c --- /dev/null +++ b/storage_define_bucket_website_configuration.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_define_bucket_website_configuration] +from google.cloud import storage + + +def define_bucket_website_configuration(bucket_name, main_page_suffix, not_found_page): + """Configure website-related properties of bucket""" + # bucket_name = "your-bucket-name" + # main_page_suffix = "index.html" + # not_found_page = "404.html" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.configure_website(main_page_suffix, not_found_page) + bucket.patch() + + print( + "Static website bucket {} is set up to use {} as the index page and {} as the 404 page".format( + bucket.name, main_page_suffix, not_found_page + ) + ) + return bucket + + +# [END storage_define_bucket_website_configuration] + +if __name__ == "__main__": + define_bucket_website_configuration( + bucket_name=sys.argv[1], + main_page_suffix=sys.argv[2], + not_found_page=sys.argv[3], + ) diff --git a/storage_download_public_file.py b/storage_download_public_file.py new file mode 100644 index 000000000..8fbb68405 --- /dev/null +++ b/storage_download_public_file.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_download_public_file] +from google.cloud import storage + + +def download_public_file(bucket_name, source_blob_name, destination_file_name): + """Downloads a public blob from the bucket.""" + # bucket_name = "your-bucket-name" + # source_blob_name = "storage-object-name" + # destination_file_name = "local/path/to/file" + + storage_client = storage.Client.create_anonymous_client() + + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(source_blob_name) + blob.download_to_filename(destination_file_name) + + print( + "Downloaded public blob {} from bucket {} to {}.".format( + source_blob_name, bucket.name, destination_file_name + ) + ) + + +# [END storage_download_public_file] + +if __name__ == "__main__": + download_public_file( + bucket_name=sys.argv[1], + source_blob_name=sys.argv[2], + destination_file_name=sys.argv[3], + ) diff --git a/storage_get_service_account.py b/storage_get_service_account.py new file mode 100644 index 000000000..58ababb91 --- /dev/null +++ b/storage_get_service_account.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +# [START storage_get_service_account] +from google.cloud import storage + + +def get_service_account(): + """Get the service account email""" + storage_client = storage.Client() + + email = storage_client.get_service_account_email() + print( + "The GCS service account for project {} is: {} ".format( + storage_client.project, email + ) + ) + + +# [END storage_get_service_account] + +if __name__ == "__main__": + get_service_account() diff --git a/storage_object_csek_to_cmek.py b/storage_object_csek_to_cmek.py new file mode 100644 index 000000000..9d4d710bf --- /dev/null +++ b/storage_object_csek_to_cmek.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +import sys + +# [START storage_object_csek_to_cmek] +from google.cloud import storage + + +def object_csek_to_cmek(bucket_name, blob_name, encryption_key, kms_key_name): + """Change a blob's customer-supplied encryption key to KMS key""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # encryption_key = "TIbv/fjexq+VmtXzAlc63J4z5kFmWJ6NdAPQulQBT7g=" + # kms_key_name = "projects/PROJ/locations/LOC/keyRings/RING/cryptoKey/KEY" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + current_encryption_key = base64.b64decode(encryption_key) + source_blob = bucket.blob(blob_name, encryption_key=current_encryption_key) + + destination_blob = bucket.blob(blob_name, kms_key_name=kms_key_name) + token, rewritten, total = destination_blob.rewrite(source_blob) + + while token is not None: + token, rewritten, total = destination_blob.rewrite(source_blob, token=token) + + print( + "Blob {} in bucket {} is now managed by the KMS key {} instead of a customer-supplied encryption key".format( + blob_name, bucket_name, kms_key_name + ) + ) + return destination_blob + + +# [END storage_object_csek_to_cmek] + +if __name__ == "__main__": + object_csek_to_cmek( + bucket_name=sys.argv[1], + blob_name=sys.argv[2], + encryption_key=sys.argv[3], + kms_key_name=sys.argv[4], + ) diff --git a/storage_object_get_kms_key.py b/storage_object_get_kms_key.py new file mode 100644 index 000000000..dddfc9151 --- /dev/null +++ b/storage_object_get_kms_key.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_object_get_kms_key] +from google.cloud import storage + + +def object_get_kms_key(bucket_name, blob_name): + """Retrieve the KMS key of a blob""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + + storage_client = storage.Client() + + bucket = storage_client.bucket(bucket_name) + blob = bucket.get_blob(blob_name) + + kms_key = blob.kms_key_name + + print("The KMS key of a blob is {}".format(blob.kms_key_name)) + return kms_key + + +# [END storage_object_get_kms_key] + +if __name__ == "__main__": + object_get_kms_key(bucket_name=sys.argv[1], blob_name=sys.argv[2]) diff --git a/storage_set_bucket_public_iam.py b/storage_set_bucket_public_iam.py new file mode 100644 index 000000000..426318a36 --- /dev/null +++ b/storage_set_bucket_public_iam.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_bucket_public_iam] +from google.cloud import storage + + +def set_bucket_public_iam(bucket_name, role, member): + """Set a public IAM Policy to bucket""" + # bucket_name = "your-bucket-name" + # role = "IAM role, e.g. roles/storage.objectViewer" + # member = "IAM identity, e.g. allUsers" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + + policy = bucket.get_iam_policy(requested_policy_version=3) + policy.bindings.append({"role": role, "members": {member}}) + + bucket.set_iam_policy(policy) + + print("Bucket {} is now publicly readable".format(bucket.name)) + + +# [END storage_set_bucket_public_iam] + +if __name__ == "__main__": + set_bucket_public_iam( + bucket_name=sys.argv[1], role=sys.argv[2], member=sys.argv[3], + ) From 6a6581944fe89523b233ac2660368762aebb3987 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 19 May 2020 04:18:01 +0200 Subject: [PATCH 103/197] chore(deps): update dependency google-cloud-storage to v1.28.1 (#3785) * chore(deps): update dependency google-cloud-storage to v1.28.1 * [asset] testing: use uuid instead of time Co-authored-by: Takashi Matsuo --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9aa4e8ddb..b2d1ae09c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.4.3 -google-cloud-storage==1.28.0 +google-cloud-storage==1.28.1 From faf59e8ee1d6e7200ab6aa0c59491f6a4e52031e Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Wed, 20 May 2020 08:52:05 +0530 Subject: [PATCH 104/197] docs(storage): add samples for file archive generation and cors configuration (#3794) --- snippets_test.py | 71 ++++++++++++++++++++++ storage_change_default_storage_class.py | 41 +++++++++++++ storage_change_file_storage_class.py | 48 +++++++++++++++ storage_copy_file_archived_generation.py | 63 
+++++++++++++++++++ storage_cors_configuration.py | 48 +++++++++++++++ storage_delete_file_archived_generation.py | 48 +++++++++++++++ storage_list_file_archived_generations.py | 39 ++++++++++++ storage_remove_cors_configuration.py | 39 ++++++++++++ 8 files changed, 397 insertions(+) create mode 100644 storage_change_default_storage_class.py create mode 100644 storage_change_file_storage_class.py create mode 100644 storage_copy_file_archived_generation.py create mode 100644 storage_cors_configuration.py create mode 100644 storage_delete_file_archived_generation.py create mode 100644 storage_list_file_archived_generations.py create mode 100644 storage_remove_cors_configuration.py diff --git a/snippets_test.py b/snippets_test.py index 3b2570d4c..e6c3d1c1d 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -24,11 +24,16 @@ import storage_add_bucket_label import storage_bucket_delete_default_kms_key +import storage_change_default_storage_class +import storage_change_file_storage_class import storage_compose_file import storage_copy_file +import storage_copy_file_archived_generation +import storage_cors_configuration import storage_create_bucket_class_location import storage_define_bucket_website_configuration import storage_delete_file +import storage_delete_file_archived_generation import storage_disable_bucket_lifecycle_management import storage_disable_versioning import storage_download_file @@ -44,12 +49,14 @@ import storage_get_metadata import storage_get_service_account import storage_list_buckets +import storage_list_file_archived_generations import storage_list_files import storage_list_files_with_prefix import storage_make_public import storage_move_file import storage_object_get_kms_key import storage_remove_bucket_label +import storage_remove_cors_configuration import storage_set_bucket_default_kms_key import storage_set_metadata import storage_upload_file @@ -382,3 +389,67 @@ def test_storage_compose_file(test_bucket): composed = 
destination.download_as_string() assert composed.decode("utf-8") == source_files[0] + source_files[1] + + +def test_cors_configuration(test_bucket, capsys): + bucket = storage_cors_configuration.cors_configuration(test_bucket) + out, _ = capsys.readouterr() + assert "Set CORS policies for bucket" in out + assert len(bucket.cors) > 0 + + bucket = storage_remove_cors_configuration.remove_cors_configuration(test_bucket) + out, _ = capsys.readouterr() + assert "Remove CORS policies for bucket" in out + assert len(bucket.cors) == 0 + + +def test_delete_blobs_archived_generation(test_blob, capsys): + storage_delete_file_archived_generation.delete_file_archived_generation( + test_blob.bucket.name, test_blob.name, test_blob.generation + ) + out, _ = capsys.readouterr() + assert "blob " + test_blob.name + " was deleted" in out + blob = test_blob.bucket.get_blob(test_blob.name, generation=test_blob.generation) + assert blob is None + + +def test_change_default_storage_class(test_bucket, capsys): + bucket = storage_change_default_storage_class.change_default_storage_class( + test_bucket + ) + out, _ = capsys.readouterr() + assert "Default storage class for bucket" in out + assert bucket.storage_class == 'COLDLINE' + + +def test_change_file_storage_class(test_blob, capsys): + blob = storage_change_file_storage_class.change_file_storage_class( + test_blob.bucket.name, test_blob.name + ) + out, _ = capsys.readouterr() + assert "Blob {} in bucket {}". 
format(blob.name, blob.bucket.name) in out + assert blob.storage_class == 'NEARLINE' + + +def test_copy_file_archived_generation(test_blob): + bucket = storage.Client().bucket(test_blob.bucket.name) + + try: + bucket.delete_blob("test_copy_blob") + except google.cloud.exceptions.NotFound: + pass + + storage_copy_file_archived_generation.copy_file_archived_generation( + bucket.name, test_blob.name, bucket.name, "test_copy_blob", test_blob.generation + ) + + assert bucket.get_blob("test_copy_blob") is not None + assert bucket.get_blob(test_blob.name) is not None + + +def test_list_blobs_archived_generation(test_blob, capsys): + storage_list_file_archived_generations.list_file_archived_generations( + test_blob.bucket.name + ) + out, _ = capsys.readouterr() + assert str(test_blob.generation) in out diff --git a/storage_change_default_storage_class.py b/storage_change_default_storage_class.py new file mode 100644 index 000000000..8a72719ba --- /dev/null +++ b/storage_change_default_storage_class.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_change_default_storage_class] +from google.cloud import storage +from google.cloud.storage import constants + + +def change_default_storage_class(bucket_name): + """Change the default storage class of the bucket""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.storage_class = constants.COLDLINE_STORAGE_CLASS + bucket.patch() + + print("Default storage class for bucket {} has been set to {}".format(bucket_name, bucket.storage_class)) + return bucket + + +# [END storage_change_default_storage_class] + +if __name__ == "__main__": + change_default_storage_class(bucket_name=sys.argv[1]) diff --git a/storage_change_file_storage_class.py b/storage_change_file_storage_class.py new file mode 100644 index 000000000..d0d9b45e8 --- /dev/null +++ b/storage_change_file_storage_class.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import sys
+
+# [START storage_change_file_storage_class]
+from google.cloud import storage
+from google.cloud.storage import constants
+
+
+def change_file_storage_class(bucket_name, blob_name):
+    """Change the default storage class of the blob"""
+    # bucket_name = "your-bucket-name"
+    # blob_name = "your-object-name"
+
+    storage_client = storage.Client()
+
+    bucket = storage_client.get_bucket(bucket_name)
+    blob = bucket.get_blob(blob_name)
+    blob.update_storage_class(constants.NEARLINE_STORAGE_CLASS)
+
+    print(
+        "Blob {} in bucket {} had its storage class set to {}".format(
+            blob_name,
+            bucket_name,
+            blob.storage_class
+        )
+    )
+    return blob
+
+
+# [END storage_change_file_storage_class]
+
+if __name__ == "__main__":
+    change_file_storage_class(bucket_name=sys.argv[1], blob_name=sys.argv[2])
diff --git a/storage_copy_file_archived_generation.py b/storage_copy_file_archived_generation.py
new file mode 100644
index 000000000..988ebcbeb
--- /dev/null
+++ b/storage_copy_file_archived_generation.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+# Copyright 2020 Google LLC. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +import sys + +# [START storage_copy_file_archived_generation] +from google.cloud import storage + + +def copy_file_archived_generation( + bucket_name, blob_name, destination_bucket_name, destination_blob_name, generation +): + """Copies a blob from one bucket to another with a new name with the same generation.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # destination_bucket_name = "destination-bucket-name" + # destination_blob_name = "destination-object-name" + # generation = 1579287380533984 + + storage_client = storage.Client() + + source_bucket = storage_client.bucket(bucket_name) + source_blob = source_bucket.blob(blob_name) + destination_bucket = storage_client.bucket(destination_bucket_name) + + blob_copy = source_bucket.copy_blob( + source_blob, destination_bucket, destination_blob_name, source_generation=generation + ) + + print( + "Generation {} of the blob {} in bucket {} copied to blob {} in bucket {}.".format( + source_blob.generation, + source_blob.name, + source_bucket.name, + blob_copy.name, + destination_bucket.name, + ) + ) + + +# [END storage_copy_file_archived_generation] + +if __name__ == "__main__": + copy_file_archived_generation( + bucket_name=sys.argv[1], + blob_name=sys.argv[2], + destination_bucket_name=sys.argv[3], + destination_blob_name=sys.argv[4], + generation=sys.argv[5] + ) diff --git a/storage_cors_configuration.py b/storage_cors_configuration.py new file mode 100644 index 000000000..3d2595a9d --- /dev/null +++ b/storage_cors_configuration.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_cors_configuration] +from google.cloud import storage + + +def cors_configuration(bucket_name): + """Set a bucket's CORS policies configuration.""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.cors = [ + { + "origin": ["*"], + "responseHeader": [ + "Content-Type", + "x-goog-resumable"], + "method": ['PUT', 'POST'], + "maxAgeSeconds": 3600 + } + ] + bucket.patch() + + print("Set CORS policies for bucket {} is {}".format(bucket.name, bucket.cors)) + return bucket + + +# [END storage_cors_configuration] + +if __name__ == "__main__": + cors_configuration(bucket_name=sys.argv[1]) diff --git a/storage_delete_file_archived_generation.py b/storage_delete_file_archived_generation.py new file mode 100644 index 000000000..4e4909001 --- /dev/null +++ b/storage_delete_file_archived_generation.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_delete_file_archived_generation] +from google.cloud import storage + + +def delete_file_archived_generation(bucket_name, blob_name, generation): + """Delete a blob in the bucket with the given generation.""" + # bucket_name = "your-bucket-name" + # blob_name = "your-object-name" + # generation = 1579287380533984 + + storage_client = storage.Client() + + bucket = storage_client.get_bucket(bucket_name) + bucket.delete_blob(blob_name, generation=generation) + print( + "Generation {} of blob {} was deleted from {}".format( + generation, blob_name, bucket_name + ) + ) + + +# [END storage_delete_file_archived_generation] + + +if __name__ == "__main__": + delete_file_archived_generation( + bucket_name=sys.argv[1], + blob_name=sys.argv[2], + generation=sys.argv[3] + ) diff --git a/storage_list_file_archived_generations.py b/storage_list_file_archived_generations.py new file mode 100644 index 000000000..a496ed6ad --- /dev/null +++ b/storage_list_file_archived_generations.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_list_file_archived_generations] +from google.cloud import storage + + +def list_file_archived_generations(bucket_name): + """Lists all the blobs in the bucket with generation.""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + + blobs = storage_client.list_blobs(bucket_name) + + for blob in blobs: + print("{},{}".format(blob.name, blob.generation)) + + +# [END storage_list_file_archived_generations] + + +if __name__ == "__main__": + list_file_archived_generations(bucket_name=sys.argv[1]) diff --git a/storage_remove_cors_configuration.py b/storage_remove_cors_configuration.py new file mode 100644 index 000000000..48ee74338 --- /dev/null +++ b/storage_remove_cors_configuration.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_remove_cors_configuration] +from google.cloud import storage + + +def remove_cors_configuration(bucket_name): + """Remove a bucket's CORS policies configuration.""" + # bucket_name = "your-bucket-name" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + bucket.cors = [] + bucket.patch() + + print("Remove CORS policies for bucket {}.".format(bucket.name)) + return bucket + + +# [END storage_remove_cors_configuration] + +if __name__ == "__main__": + remove_cors_configuration(bucket_name=sys.argv[1]) From 1b1ef866b26d965389d8206371ef4612d179305d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 21 May 2020 04:50:04 +0200 Subject: [PATCH 105/197] chore(deps): update dependency google-cloud-pubsub to v1.5.0 (#3781) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b2d1ae09c..9f79b893a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.4.3 +google-cloud-pubsub==1.5.0 google-cloud-storage==1.28.1 From be94be04f697b5e7a51bc072a61cf6b8243347d6 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Tue, 9 Jun 2020 14:34:27 -0700 Subject: [PATCH 106/197] Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. (#4022) --- requester_pays_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requester_pays_test.py b/requester_pays_test.py index 7511be739..9f85c6bdb 100644 --- a/requester_pays_test.py +++ b/requester_pays_test.py @@ -26,7 +26,7 @@ # We use a different bucket from other tests. 
BUCKET = os.environ["REQUESTER_PAYS_TEST_BUCKET"] -PROJECT = os.environ["GCLOUD_PROJECT"] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] def test_enable_requester_pays(capsys): From 3e00322084c06ef8b2a4020454d683f4e1aa4613 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Thu, 11 Jun 2020 12:19:40 -0700 Subject: [PATCH 107/197] [storage] testing: use multiple projects (#4048) * [storage] testing: use multiple projects We still need to use the old project for some tests. fixes #4033 fixes #4029 * remove print * use uuid instead of time.time() * lint fix --- acl_test.py | 8 +++ hmac_samples_test.py | 10 +++- iam_test.py | 36 ++++++++++--- noxfile_config.py | 84 +++++++++++++++++++++++++++++ snippets_test.py | 41 +++++++++++--- uniform_bucket_level_access_test.py | 10 +++- 6 files changed, 173 insertions(+), 16 deletions(-) create mode 100644 noxfile_config.py diff --git a/acl_test.py b/acl_test.py index 469d364ce..eda2c7df1 100644 --- a/acl_test.py +++ b/acl_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import uuid from google.cloud import storage @@ -39,6 +40,11 @@ @pytest.fixture(scope="module") def test_bucket(): """Yields a bucket that is deleted after the test completes.""" + + # The new projects have uniform bucket-level access and our tests don't + # pass with those buckets. We need to use the old main project for now. + original_value = os.environ['GOOGLE_CLOUD_PROJECT'] + os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT'] bucket = None while bucket is None or bucket.exists(): bucket_name = "acl-test-{}".format(uuid.uuid4()) @@ -46,6 +52,8 @@ def test_bucket(): bucket.create() yield bucket bucket.delete(force=True) + # Set the value back. 
+ os.environ['GOOGLE_CLOUD_PROJECT'] = original_value @pytest.fixture diff --git a/hmac_samples_test.py b/hmac_samples_test.py index f9fcb7509..60eba2401 100644 --- a/hmac_samples_test.py +++ b/hmac_samples_test.py @@ -31,8 +31,14 @@ import storage_get_hmac_key import storage_list_hmac_keys - -PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +# We are reaching maximum number of HMAC keys on the service account. +# We change the service account based on the value of +# RUN_TESTS_SESSION in noxfile_config.py. +# The reason we can not use multiple project is that our new projects +# are enforced to have +# 'constraints/iam.disableServiceAccountKeyCreation' policy. + +PROJECT_ID = os.environ["MAIN_GOOGLE_CLOUD_PROJECT"] SERVICE_ACCOUNT_EMAIL = os.environ["HMAC_KEY_TEST_SERVICE_ACCOUNT"] STORAGE_CLIENT = storage.Client(project=PROJECT_ID) diff --git a/iam_test.py b/iam_test.py index 5186eb190..04d72c1a0 100644 --- a/iam_test.py +++ b/iam_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import re import time import uuid @@ -36,7 +37,7 @@ ) -@pytest.fixture +@pytest.fixture(scope="module") def bucket(): bucket = None while bucket is None or bucket.exists(): @@ -50,6 +51,26 @@ def bucket(): bucket.delete(force=True) +@pytest.fixture(scope="function") +def public_bucket(): + # The new projects don't allow to make a bucket available to public, so + # we need to use the old main project for now. 
+ original_value = os.environ['GOOGLE_CLOUD_PROJECT'] + os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT'] + bucket = None + while bucket is None or bucket.exists(): + storage_client = storage.Client() + bucket_name = "test-iam-{}".format(uuid.uuid4()) + bucket = storage_client.bucket(bucket_name) + bucket.iam_configuration.uniform_bucket_level_access_enabled = True + storage_client.create_bucket(bucket) + yield bucket + time.sleep(3) + bucket.delete(force=True) + # Set the value back. + os.environ['GOOGLE_CLOUD_PROJECT'] = original_value + + def test_view_bucket_iam_members(capsys, bucket): storage_view_bucket_iam_members.view_bucket_iam_members(bucket.name) assert re.match("Role: .*, Members: .*", capsys.readouterr().out) @@ -87,10 +108,11 @@ def test_add_bucket_conditional_iam_binding(bucket): ) -def test_remove_bucket_iam_member(bucket): - storage_remove_bucket_iam_member.remove_bucket_iam_member(bucket.name, ROLE, MEMBER) +def test_remove_bucket_iam_member(public_bucket): + storage_remove_bucket_iam_member.remove_bucket_iam_member( + public_bucket.name, ROLE, MEMBER) - policy = bucket.get_iam_policy(requested_policy_version=3) + policy = public_bucket.get_iam_policy(requested_policy_version=3) assert not any( binding["role"] == ROLE and MEMBER in binding["members"] for binding in policy.bindings @@ -114,13 +136,13 @@ def test_remove_bucket_conditional_iam_binding(bucket): ) -def test_set_bucket_public_iam(bucket): +def test_set_bucket_public_iam(public_bucket): role = "roles/storage.objectViewer" member = "allUsers" storage_set_bucket_public_iam.set_bucket_public_iam( - bucket.name, role, member + public_bucket.name, role, member ) - policy = bucket.get_iam_policy(requested_policy_version=3) + policy = public_bucket.get_iam_policy(requested_policy_version=3) assert any( binding["role"] == role and member in binding["members"] for binding in policy.bindings diff --git a/noxfile_config.py b/noxfile_config.py new file mode 100644 index 
000000000..edcea1530 --- /dev/null +++ b/noxfile_config.py @@ -0,0 +1,84 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be inported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +import os + + +# We are reaching maximum number of HMAC keys on the service account. +# We change the service account based on the value of +# RUN_TESTS_SESSION. The reason we can not use multiple project is +# that our new projects are enforced to have +# 'constraints/iam.disableServiceAccountKeyCreation' policy. +def get_service_account_email(): + session = os.environ.get('RUN_TESTS_SESSION') + if session == 'py-3.6': + return ('py36-storage-test@' + 'python-docs-samples-tests.iam.gserviceaccount.com') + if session == 'py-3.7': + return ('py37-storage-test@' + 'python-docs-samples-tests.iam.gserviceaccount.com') + if session == 'py-3.8': + return ('py38-storage-test@' + 'python-docs-samples-tests.iam.gserviceaccount.com') + return os.environ['HMAC_KEY_TEST_SERVICE_ACCOUNT'] + + +# We change the value of CLOUD_KMS_KEY based on the value of +# RUN_TESTS_SESSION. 
+def get_cloud_kms_key(): + session = os.environ.get('RUN_TESTS_SESSION') + if session == 'py-3.6': + return ('projects/python-docs-samples-tests-py36/locations/us/' + 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') + if session == 'py-3.7': + return ('projects/python-docs-samples-tests-py37/locations/us/' + 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') + if session == 'py-3.8': + return ('projects/python-docs-samples-tests-py38/locations/us/' + 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') + return os.environ['CLOUD_KMS_KEY'] + + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': { + 'HMAC_KEY_TEST_SERVICE_ACCOUNT': get_service_account_email(), + 'CLOUD_KMS_KEY': get_cloud_kms_key(), + # Some tests can not use multiple projects because of several reasons: + # 1. The new projects is enforced to have the + # 'constraints/iam.disableServiceAccountKeyCreation' policy. + # 2. The new projects buckets need to have universal permission model. + # For those tests, we'll use the original project. 
+ 'MAIN_GOOGLE_CLOUD_PROJECT': 'python-docs-samples-tests' + }, +} diff --git a/snippets_test.py b/snippets_test.py index e6c3d1c1d..edcf77939 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -105,6 +105,24 @@ def test_bucket(): bucket.delete(force=True) +@pytest.fixture(scope="function") +def test_public_bucket(): + # The new projects don't allow to make a bucket available to public, so + # for some tests we need to use the old main project for now. + original_value = os.environ['GOOGLE_CLOUD_PROJECT'] + os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT'] + bucket = None + while bucket is None or bucket.exists(): + storage_client = storage.Client() + bucket_name = "storage-snippets-test-{}".format(uuid.uuid4()) + bucket = storage_client.bucket(bucket_name) + storage_client.create_bucket(bucket) + yield bucket + bucket.delete(force=True) + # Set the value back. + os.environ['GOOGLE_CLOUD_PROJECT'] = original_value + + @pytest.fixture def test_blob(test_bucket): """Yields a blob that is deleted after the test completes.""" @@ -114,6 +132,15 @@ def test_blob(test_bucket): yield blob +@pytest.fixture(scope="function") +def test_public_blob(test_public_bucket): + """Yields a blob that is deleted after the test completes.""" + bucket = test_public_bucket + blob = bucket.blob("storage_snippets_test_sigil-{}".format(uuid.uuid4())) + blob.upload_from_string("Hello, is it me you're looking for?") + yield blob + + @pytest.fixture def test_bucket_create(): """Yields a bucket object that is deleted after the test completes.""" @@ -196,10 +223,11 @@ def test_delete_blob(test_blob): storage_delete_file.delete_blob(test_blob.bucket.name, test_blob.name) -def test_make_blob_public(test_blob): - storage_make_public.make_blob_public(test_blob.bucket.name, test_blob.name) +def test_make_blob_public(test_public_blob): + storage_make_public.make_blob_public( + test_public_blob.bucket.name, test_public_blob.name) - r = requests.get(test_blob.public_url) + 
r = requests.get(test_public_blob.public_url) assert r.text == "Hello, is it me you're looking for?" @@ -342,11 +370,12 @@ def test_get_service_account(capsys): assert "@gs-project-accounts.iam.gserviceaccount.com" in out -def test_download_public_file(test_blob): - storage_make_public.make_blob_public(test_blob.bucket.name, test_blob.name) +def test_download_public_file(test_public_blob): + storage_make_public.make_blob_public( + test_public_blob.bucket.name, test_public_blob.name) with tempfile.NamedTemporaryFile() as dest_file: storage_download_public_file.download_public_file( - test_blob.bucket.name, test_blob.name, dest_file.name + test_public_blob.bucket.name, test_public_blob.name, dest_file.name ) assert dest_file.read() == b"Hello, is it me you're looking for?" diff --git a/uniform_bucket_level_access_test.py b/uniform_bucket_level_access_test.py index 9d1e8e0b4..0d97641eb 100644 --- a/uniform_bucket_level_access_test.py +++ b/uniform_bucket_level_access_test.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import time +import uuid from google.cloud import storage import pytest @@ -25,14 +27,20 @@ @pytest.fixture() def bucket(): """Yields a bucket that is deleted after the test completes.""" + # The new projects enforces uniform bucket level access, so + # we need to use the old main project for now. + original_value = os.environ['GOOGLE_CLOUD_PROJECT'] + os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT'] bucket = None while bucket is None or bucket.exists(): - bucket_name = "uniform-bucket-level-access-{}".format(int(time.time())) + bucket_name = "uniform-bucket-level-access-{}".format(uuid.uuid4().hex) bucket = storage.Client().bucket(bucket_name) bucket.create() yield bucket time.sleep(3) bucket.delete(force=True) + # Set the value back. 
+ os.environ['GOOGLE_CLOUD_PROJECT'] = original_value def test_get_uniform_bucket_level_access(bucket, capsys): From d927604aa5b316d726e33509f7e20ba98ab44ee9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 20 Jun 2020 01:03:47 +0200 Subject: [PATCH 108/197] chore(deps): update dependency google-cloud-storage to v1.29.0 (#4040) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9f79b893a..b4b349361 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.5.0 -google-cloud-storage==1.28.1 +google-cloud-storage==1.29.0 From 01bf3f60bd82c1ef90b7779b1fc1cee8cd48dd10 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 20 Jun 2020 06:08:08 +0200 Subject: [PATCH 109/197] Update dependency google-cloud-pubsub to v1.6.0 (#4039) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | minor | `==1.5.0` -> `==1.6.0` | --- ### Release Notes
googleapis/python-pubsub ### [`v1.6.0`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​160-httpswwwgithubcomgoogleapispython-pubsubcomparev150v160-2020-06-09) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v1.5.0...v1.6.0) ##### Features - Add flow control for message publishing ([#​96](https://www.github.com/googleapis/python-pubsub/issues/96)) ([06085c4](https://www.github.com/googleapis/python-pubsub/commit/06085c4083b9dccdd50383257799904510bbf3a0)) ##### Bug Fixes - Fix PubSub incompatibility with api-core 1.17.0+ ([#​103](https://www.github.com/googleapis/python-pubsub/issues/103)) ([c02060f](https://www.github.com/googleapis/python-pubsub/commit/c02060fbbe6e2ca4664bee08d2de10665d41dc0b)) ##### Documentation - Clarify that Schedulers shouldn't be used with multiple SubscriberClients ([#​100](https://togithub.com/googleapis/python-pubsub/pull/100)) ([cf9e87c](https://togithub.com/googleapis/python-pubsub/commit/cf9e87c80c0771f3fa6ef784a8d76cb760ad37ef)) - Fix update subscription/snapshot/topic samples ([#​113](https://togithub.com/googleapis/python-pubsub/pull/113)) ([e62c38b](https://togithub.com/googleapis/python-pubsub/commit/e62c38bb33de2434e32f866979de769382dea34a)) ##### Internal / Testing Changes - Re-generated service implementaton using synth: removed experimental notes from the RetryPolicy and filtering features in anticipation of GA, added DetachSubscription (experimental) ([#​114](https://togithub.com/googleapis/python-pubsub/pull/114)) ([0132a46](https://togithub.com/googleapis/python-pubsub/commit/0132a4680e0727ce45d5e27d98ffc9f3541a0962)) - Incorporate will_accept() checks into publish() ([#​108](https://togithub.com/googleapis/python-pubsub/pull/108)) ([6c7677e](https://togithub.com/googleapis/python-pubsub/commit/6c7677ecb259672bbb9b6f7646919e602c698570))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b4b349361..12d2e64e0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.5.0 +google-cloud-pubsub==1.6.0 google-cloud-storage==1.29.0 From 00702c391dc21e82e535c36b6876f2219e0c214b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 6 Jul 2020 22:52:02 +0200 Subject: [PATCH 110/197] chore(deps): update dependency google-cloud-pubsub to v1.6.1 (#4242) Co-authored-by: gcf-merge-on-green[bot] <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 12d2e64e0..a796527a9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.6.0 +google-cloud-pubsub==1.6.1 google-cloud-storage==1.29.0 From 6bcb58df69904687f2a974b2b3285d561a9f83c5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 00:46:30 +0200 Subject: [PATCH 111/197] chore(deps): update dependency pytest to v5.4.3 (#4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/requirements-test.txt b/requirements-test.txt index 41c4d5110..1a71e3e32 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==5.3.2 +pytest==5.4.3 mock==3.0.5 From c7edd26ef6e44d0663bd8c3b5b0ad1cac83d838b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 07:10:34 +0200 Subject: [PATCH 112/197] chore(deps): update dependency mock to v4 (#4287) * chore(deps): update dependency mock to v4 * specify mock version for appengine python 2 Co-authored-by: Leah Cole --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 1a71e3e32..676ff949e 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ pytest==5.4.3 -mock==3.0.5 +mock==4.0.2 From f83c9b638102cd5a2ca25ea55b749820e462c0d9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 14 Jul 2020 19:20:03 +0200 Subject: [PATCH 113/197] chore(deps): update dependency google-cloud-pubsub to v1.7.0 (#4290) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | minor | `==1.6.1` -> `==1.7.0` | --- ### Release Notes
googleapis/python-pubsub ### [`v1.7.0`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​170-httpswwwgithubcomgoogleapispython-pubsubcomparev161v170-2020-07-13) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v1.6.1...v1.7.0) ##### New Features - Add support for server-side flow control. ([#​143](https://togithub.com/googleapis/python-pubsub/pull/143)) ([04e261c](https://www.github.com/googleapis/python-pubsub/commit/04e261c602a2919cc75b3efa3dab099fb2cf704c)) ##### Dependencies - Update samples dependency `google-cloud-pubsub` to `v1.6.1`. ([#​144](https://togithub.com/googleapis/python-pubsub/pull/144)) ([1cb6746](https://togithub.com/googleapis/python-pubsub/commit/1cb6746b00ebb23dbf1663bae301b32c3fc65a88)) ##### Documentation - Add pubsub/cloud-client samples from the common samples repo (with commit history). ([#​151](https://togithub.com/googleapis/python-pubsub/pull/151)) - Add flow control section to publish overview. ([#​129](https://togithub.com/googleapis/python-pubsub/pull/129)) ([acc19eb](https://www.github.com/googleapis/python-pubsub/commit/acc19eb048eef067d9818ef3e310b165d9c6307e)) - Add a link to Pub/Sub filtering language public documentation to `pubsub.proto`. ([#​121](https://togithub.com/googleapis/python-pubsub/pull/121)) ([8802d81](https://www.github.com/googleapis/python-pubsub/commit/8802d8126247f22e26057e68a42f5b5a82dcbf0d))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a796527a9..ae6e04ec9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.6.1 +google-cloud-pubsub==1.7.0 google-cloud-storage==1.29.0 From a8a789ab69b0532af82cb375a9e6928e98191efc Mon Sep 17 00:00:00 2001 From: Ace Nassri Date: Mon, 20 Jul 2020 13:28:03 -0700 Subject: [PATCH 114/197] Fix mismatched storage region tags (#4194) --- notification_polling.py | 2 +- storage_change_file_storage_class.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/notification_polling.py b/notification_polling.py index e468638e1..27680dd74 100644 --- a/notification_polling.py +++ b/notification_polling.py @@ -106,7 +106,7 @@ def summarize(message): def poll_notifications(project, subscription_name): """Polls a Cloud Pub/Sub subscription for new GCS events for display.""" - # [BEGIN poll_notifications] + # [START poll_notifications] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name diff --git a/storage_change_file_storage_class.py b/storage_change_file_storage_class.py index d0d9b45e8..1769b8c3f 100644 --- a/storage_change_file_storage_class.py +++ 
b/storage_change_file_storage_class.py @@ -40,9 +40,8 @@ def change_file_storage_class(bucket_name, blob_name): ) ) return blob +# [END storage_change_file_storage_class] -# [START storage_change_file_storage_class] - if __name__ == "__main__": change_file_storage_class(bucket_name=sys.argv[1], blob_name=sys.argv[2]) From 5d85bc4a04c078ac74895eabe42afd48915f3ff2 Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Wed, 29 Jul 2020 18:11:30 +0000 Subject: [PATCH 115/197] Update dependency google-cloud-storage to v1.30.0 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ae6e04ec9..a3aff6bdb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.29.0 +google-cloud-storage==1.30.0 From 6f4edb35ee5ffa2b640971f4ea3e6fb727dc9259 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 1 Aug 2020 21:51:00 +0200 Subject: [PATCH 116/197] Update dependency pytest to v6 (#4390) --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 676ff949e..2466e2508 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==5.4.3 +pytest==6.0.1 mock==4.0.2 From f6b9dccc8b451d42176a1afeeb24b6a45f2d40fe Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 27 Aug 2020 07:21:50 +0200 Subject: [PATCH 117/197] chore(deps): update dependency google-cloud-storage to v1.31.0 (#4564) Co-authored-by: Takashi Matsuo --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a3aff6bdb..4267e911f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.30.0 +google-cloud-storage==1.31.0 From a5c72fca2cd7a72d6c52c2340fe73ce8aa46d794 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Fri, 28 Aug 2020 
17:49:30 +0000 Subject: [PATCH 118/197] chore: fix some more unmatched region tags (#4585) fixes #4549 Co-authored-by: Dina Graves Portman --- storage_disable_versioning.py | 2 +- storage_set_metadata.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/storage_disable_versioning.py b/storage_disable_versioning.py index bba4f7c07..98832ba68 100644 --- a/storage_disable_versioning.py +++ b/storage_disable_versioning.py @@ -34,7 +34,7 @@ def disable_versioning(bucket_name): return bucket -# [END storage_enable_versioning] +# [END storage_disable_versioning] if __name__ == "__main__": disable_versioning(bucket_name=sys.argv[1]) diff --git a/storage_set_metadata.py b/storage_set_metadata.py index d8c77fa5f..198477929 100644 --- a/storage_set_metadata.py +++ b/storage_set_metadata.py @@ -34,7 +34,7 @@ def set_blob_metadata(bucket_name, blob_name): print("The metadata for the blob {} is {}".format(blob.name, blob.metadata)) -# [END storage_get_metadata] +# [END storage_set_metadata] if __name__ == "__main__": set_blob_metadata(bucket_name=sys.argv[1], blob_name=sys.argv[2]) From 948cd7335f173135d3ecde133039228b6dc41506 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Wed, 2 Sep 2020 12:26:37 -0600 Subject: [PATCH 119/197] Update storage_get_metadata.py (#4615) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- storage_get_metadata.py | 1 + 1 file changed, 1 insertion(+) diff --git a/storage_get_metadata.py b/storage_get_metadata.py index 5166998c0..d9440ff44 100644 --- a/storage_get_metadata.py +++ b/storage_get_metadata.py @@ -48,6 +48,7 @@ def blob_metadata(bucket_name, blob_name): print("Content-encoding: {}".format(blob.content_encoding)) print("Content-language: {}".format(blob.content_language)) print("Metadata: {}".format(blob.metadata)) + print("Custom Time: {}".format(blob.custom_time)) print("Temporary hold: ", "enabled" if blob.temporary_hold else "disabled") print( "Event based hold: ", From d2fa080741cd335ea6423eb68d43081b80e45087 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 23 Sep 2020 16:59:33 +0200 Subject: [PATCH 120/197] chore(deps): update dependency google-cloud-storage to v1.31.1 (#4714) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 4267e911f..de9d705ad 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.31.0 +google-cloud-storage==1.31.1 From e1f90e7eeffbc70ca7f293c13aacc514456d1b87 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 24 Sep 2020 17:14:03 +0200 Subject: [PATCH 121/197] chore(deps): update dependency google-cloud-storage to v1.31.2 (#4750) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | patch | `==1.31.1` -> `==1.31.2` | --- ### Release Notes
googleapis/python-storage ### [`v1.31.2`](https://togithub.com/googleapis/python-storage/blob/master/CHANGELOG.md#​1312-httpswwwgithubcomgoogleapispython-storagecomparev1311v1312-2020-09-23) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v1.31.1...v1.31.2)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index de9d705ad..db3b82841 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.31.1 +google-cloud-storage==1.31.2 From 19ebef4b7d60bbe2c354f41f8e10dae5909855a4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Oct 2020 22:11:23 +0200 Subject: [PATCH 122/197] chore(deps): update dependency pytest to v6.1.1 (#4761) --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 2466e2508..bad267dae 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==6.0.1 +pytest==6.1.1 mock==4.0.2 From 30dbe367b80094efb4c58fb2fc25d2f10c4ff10d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 21 Oct 2020 18:58:15 +0200 Subject: [PATCH 123/197] chore(deps): update dependency google-cloud-storage to v1.32.0 (#4871) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index db3b82841..821a0b5a7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.31.2 +google-cloud-storage==1.32.0 From 
c2d76c464c9050d9b399eee49ee8ba02cbcf6dcd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 29 Oct 2020 23:07:00 +0100 Subject: [PATCH 124/197] chore(deps): update dependency pytest to v6.1.2 (#4921) Co-authored-by: Charles Engelke --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index bad267dae..039981b3e 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==6.1.1 +pytest==6.1.2 mock==4.0.2 From 400fccf0733bfe248a238724e74241a5df0ed830 Mon Sep 17 00:00:00 2001 From: Shivaji Dutta Date: Thu, 29 Oct 2020 15:18:34 -0700 Subject: [PATCH 125/197] change pprint to print. (#4856) * change pprint to print. Line 57 had pprint.pprint.. changing it to print. * Update storage_get_bucket_metadata.py Removing pprint import Co-authored-by: Dina Graves Portman Co-authored-by: Charles Engelke --- storage_get_bucket_metadata.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/storage_get_bucket_metadata.py b/storage_get_bucket_metadata.py index 0b17ce716..2813d564b 100644 --- a/storage_get_bucket_metadata.py +++ b/storage_get_bucket_metadata.py @@ -15,9 +15,6 @@ # limitations under the License. 
-# [START storage_get_bucket_metadata] -import pprint -# [END storage_get_bucket_metadata] import sys # [START storage_get_bucket_metadata] @@ -54,7 +51,7 @@ def bucket_metadata(bucket_name): print("Time Created: {}".format(bucket.time_created)) print("Versioning Enabled: {}".format(bucket.versioning_enabled)) print("Labels:") - pprint.pprint(bucket.labels) + print(bucket.labels) # [END storage_get_bucket_metadata] From 1deb741ea5c10a6c895de57369faff696f093d20 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 19 Nov 2020 20:08:08 +0100 Subject: [PATCH 126/197] chore(deps): update dependency google-cloud-storage to v1.33.0 (#4990) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 821a0b5a7..5d61c8a46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.32.0 +google-cloud-storage==1.33.0 From 9c44c386da89f6369be44619433f81b53d6c87e9 Mon Sep 17 00:00:00 2001 From: Sarah Spikes Date: Mon, 30 Nov 2020 16:05:21 -0800 Subject: [PATCH 127/197] Add patch call (#5013) I believe a call to `blob.patch()` is necessary to actually save the metadata back to GCS. 
--- storage_set_metadata.py | 1 + 1 file changed, 1 insertion(+) diff --git a/storage_set_metadata.py b/storage_set_metadata.py index 198477929..07529ac68 100644 --- a/storage_set_metadata.py +++ b/storage_set_metadata.py @@ -30,6 +30,7 @@ def set_blob_metadata(bucket_name, blob_name): blob = bucket.get_blob(blob_name) metadata = {'color': 'Red', 'name': 'Test'} blob.metadata = metadata + blob.patch() print("The metadata for the blob {} is {}".format(blob.name, blob.metadata)) From 147937ec502551dc89ac025c69e895293338f229 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 8 Dec 2020 17:27:14 -0800 Subject: [PATCH 128/197] fix: add a comment to draw attention to using get_blob, not blob (#5052) * fix: add a comment to draw attention to using get_blob, not blob * docs: further elaboration * docs: add clarifying doc string to download file * Update storage_download_file.py * Update storage_download_file.py --- storage_download_file.py | 5 +++++ storage_get_metadata.py | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/storage_download_file.py b/storage_download_file.py index ffbe885a8..33395122e 100644 --- a/storage_download_file.py +++ b/storage_download_file.py @@ -29,6 +29,11 @@ def download_blob(bucket_name, source_blob_name, destination_file_name): storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) + + # Construct a client side representation of a blob. + # Note `Bucket.blob` differs from `Bucket.get_blob` as it doesn't retrieve + # any content from Google Cloud Storage. As we don't need additional data, + # using `Bucket.blob` is preferred here. 
blob = bucket.blob(source_blob_name) blob.download_to_filename(destination_file_name) diff --git a/storage_get_metadata.py b/storage_get_metadata.py index d9440ff44..c5ef0b4cc 100644 --- a/storage_get_metadata.py +++ b/storage_get_metadata.py @@ -27,6 +27,10 @@ def blob_metadata(bucket_name, blob_name): storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) + + # Retrieve a blob, and its metadata, from Google Cloud Storage. + # Note that `get_blob` differs from `Bucket.blob`, which does not + # make an HTTP request. blob = bucket.get_blob(blob_name) print("Blob: {}".format(blob.name)) From 92d9955f24fe3dda02a6b6b6959d32ee0e45eb23 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 10 Dec 2020 16:39:56 +0100 Subject: [PATCH 129/197] chore(deps): update dependency mock to v4.0.3 (#5062) --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 039981b3e..e1ff03d6a 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.1.2 -mock==4.0.2 +mock==4.0.3 From 39163568d4466289107149b47f1245a88a38256f Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 10 Dec 2020 12:08:24 -0800 Subject: [PATCH 130/197] fix(storage): Update comment, prefix should include delimiter (#5064) --- storage_list_files_with_prefix.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/storage_list_files_with_prefix.py b/storage_list_files_with_prefix.py index cf7c7394b..353fa3a72 100644 --- a/storage_list_files_with_prefix.py +++ b/storage_list_files_with_prefix.py @@ -32,12 +32,12 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): a/1.txt a/b/2.txt - If you just specify prefix = 'a', you'll get back: + If you specify prefix ='a/', without a delimiter, you'll get back: a/1.txt a/b/2.txt - However, if you specify prefix='a' and delimiter='/', you'll get back: + However, if you specify prefix='a/' and 
delimiter='/', you'll get back: a/1.txt From 5df5392b2953b7d87b232a50f6c6d15d153dbba3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 15 Dec 2020 18:18:17 +0100 Subject: [PATCH 131/197] chore(deps): update dependency google-cloud-storage to v1.35.0 (#5074) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5d61c8a46..3f059dfd4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==1.7.0 -google-cloud-storage==1.33.0 +google-cloud-storage==1.35.0 From 56ef6f6596d1165de85a5d7d5d0e6859a29885c1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 21 Dec 2020 21:22:02 +0100 Subject: [PATCH 132/197] chore(deps): update dependency pytest to v6.2.1 (#5076) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest)) | minor | `==6.1.2` -> `==6.2.1` | --- ### Release Notes
pytest-dev/pytest ### [`v6.2.1`](https://togithub.com/pytest-dev/pytest/releases/6.2.1) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.0...6.2.1) # pytest 6.2.1 (2020-12-15) ## Bug Fixes - [#​7678](https://togithub.com/pytest-dev/pytest/issues/7678): Fixed bug where `ImportPathMismatchError` would be raised for files compiled in the host and loaded later from an UNC mounted path (Windows). - [#​8132](https://togithub.com/pytest-dev/pytest/issues/8132): Fixed regression in `approx`: in 6.2.0 `approx` no longer raises `TypeError` when dealing with non-numeric types, falling back to normal comparison. Before 6.2.0, array types like tf.DeviceArray fell through to the scalar case, and happened to compare correctly to a scalar if they had only one element. After 6.2.0, these types began failing, because they inherited neither from standard Python number hierarchy nor from `numpy.ndarray`. `approx` now converts arguments to `numpy.ndarray` if they expose the array protocol and are not scalars. This treats array-like objects like numpy arrays, regardless of size. ### [`v6.2.0`](https://togithub.com/pytest-dev/pytest/releases/6.2.0) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.2...6.2.0) # pytest 6.2.0 (2020-12-12) ## Breaking Changes - [#​7808](https://togithub.com/pytest-dev/pytest/issues/7808): pytest now supports python3.6+ only. 
## Deprecations - [#​7469](https://togithub.com/pytest-dev/pytest/issues/7469): Directly constructing/calling the following classes/functions is now deprecated: - `_pytest.cacheprovider.Cache` - `_pytest.cacheprovider.Cache.for_config()` - `_pytest.cacheprovider.Cache.clear_cache()` - `_pytest.cacheprovider.Cache.cache_dir_from_config()` - `_pytest.capture.CaptureFixture` - `_pytest.fixtures.FixtureRequest` - `_pytest.fixtures.SubRequest` - `_pytest.logging.LogCaptureFixture` - `_pytest.pytester.Pytester` - `_pytest.pytester.Testdir` - `_pytest.recwarn.WarningsRecorder` - `_pytest.recwarn.WarningsChecker` - `_pytest.tmpdir.TempPathFactory` - `_pytest.tmpdir.TempdirFactory` These have always been considered private, but now issue a deprecation warning, which may become a hard error in pytest 7.0.0. - [#​7530](https://togithub.com/pytest-dev/pytest/issues/7530): The `--strict` command-line option has been deprecated, use `--strict-markers` instead. We have plans to maybe in the future to reintroduce `--strict` and make it an encompassing flag for all strictness related options (`--strict-markers` and `--strict-config` at the moment, more might be introduced in the future). - [#​7988](https://togithub.com/pytest-dev/pytest/issues/7988): The `@pytest.yield_fixture` decorator/function is now deprecated. Use pytest.fixture instead. `yield_fixture` has been an alias for `fixture` for a very long time, so can be search/replaced safely. ## Features - [#​5299](https://togithub.com/pytest-dev/pytest/issues/5299): pytest now warns about unraisable exceptions and unhandled thread exceptions that occur in tests on Python>=3.8. See unraisable for more information. - [#​7425](https://togithub.com/pytest-dev/pytest/issues/7425): New pytester fixture, which is identical to testdir but its methods return pathlib.Path when appropriate instead of `py.path.local`. 
This is part of the movement to use pathlib.Path objects internally, in order to remove the dependency to `py` in the future. Internally, the old Testdir <\_pytest.pytester.Testdir> is now a thin wrapper around Pytester <\_pytest.pytester.Pytester>, preserving the old interface. - [#​7695](https://togithub.com/pytest-dev/pytest/issues/7695): A new hook was added, pytest_markeval_namespace which should return a dictionary. This dictionary will be used to augment the "global" variables available to evaluate skipif/xfail/xpass markers. Pseudo example `conftest.py`: ```{.sourceCode .python} def pytest_markeval_namespace(): return {"color": "red"} ``` `test_func.py`: ```{.sourceCode .python} @​pytest.mark.skipif("color == 'blue'", reason="Color is not red") def test_func(): assert False ``` - [#​8006](https://togithub.com/pytest-dev/pytest/issues/8006): It is now possible to construct a ~pytest.MonkeyPatch object directly as `pytest.MonkeyPatch()`, in cases when the monkeypatch fixture cannot be used. Previously some users imported it from the private \_pytest.monkeypatch.MonkeyPatch namespace. Additionally, MonkeyPatch.context <pytest.MonkeyPatch.context> is now a classmethod, and can be used as `with MonkeyPatch.context() as mp: ...`. This is the recommended way to use `MonkeyPatch` directly, since unlike the `monkeypatch` fixture, an instance created directly is not `undo()`-ed automatically. ## Improvements - [#​1265](https://togithub.com/pytest-dev/pytest/issues/1265): Added an `__str__` implementation to the ~pytest.pytester.LineMatcher class which is returned from `pytester.run_pytest().stdout` and similar. It returns the entire output, like the existing `str()` method. - [#​2044](https://togithub.com/pytest-dev/pytest/issues/2044): Verbose mode now shows the reason that a test was skipped in the test's terminal line after the "SKIPPED", "XFAIL" or "XPASS". 
- [#​7469](https://togithub.com/pytest-dev/pytest/issues/7469) The types of builtin pytest fixtures are now exported so they may be used in type annotations of test functions. The newly-exported types are: - `pytest.FixtureRequest` for the request fixture. - `pytest.Cache` for the cache fixture. - `pytest.CaptureFixture[str]` for the capfd and capsys fixtures. - `pytest.CaptureFixture[bytes]` for the capfdbinary and capsysbinary fixtures. - `pytest.LogCaptureFixture` for the caplog fixture. - `pytest.Pytester` for the pytester fixture. - `pytest.Testdir` for the testdir fixture. - `pytest.TempdirFactory` for the tmpdir_factory fixture. - `pytest.TempPathFactory` for the tmp_path_factory fixture. - `pytest.MonkeyPatch` for the monkeypatch fixture. - `pytest.WarningsRecorder` for the recwarn fixture. Constructing them is not supported (except for MonkeyPatch); they are only meant for use in type annotations. Doing so will emit a deprecation warning, and may become a hard-error in pytest 7.0. Subclassing them is also not supported. This is not currently enforced at runtime, but is detected by type-checkers such as mypy. - [#​7527](https://togithub.com/pytest-dev/pytest/issues/7527): When a comparison between namedtuple <collections.namedtuple> instances of the same type fails, pytest now shows the differing field names (possibly nested) instead of their indexes. - [#​7615](https://togithub.com/pytest-dev/pytest/issues/7615): Node.warn <\_pytest.nodes.Node.warn> now permits any subclass of Warning, not just PytestWarning <pytest.PytestWarning>. - [#​7701](https://togithub.com/pytest-dev/pytest/issues/7701): Improved reporting when using `--collect-only`. It will now show the number of collected tests in the summary stats. - [#​7710](https://togithub.com/pytest-dev/pytest/issues/7710): Use strict equality comparison for non-numeric types in pytest.approx instead of raising TypeError. 
This was the undocumented behavior before 3.7, but is now officially a supported feature. - [#​7938](https://togithub.com/pytest-dev/pytest/issues/7938): New `--sw-skip` argument which is a shorthand for `--stepwise-skip`. - [#​8023](https://togithub.com/pytest-dev/pytest/issues/8023): Added `'node_modules'` to default value for norecursedirs. - [#​8032](https://togithub.com/pytest-dev/pytest/issues/8032): doClassCleanups <unittest.TestCase.doClassCleanups> (introduced in unittest in Python 3.8) is now called appropriately. ## Bug Fixes - [#​4824](https://togithub.com/pytest-dev/pytest/issues/4824): Fixed quadratic behavior and improved performance of collection of items using autouse fixtures and xunit fixtures. - [#​7758](https://togithub.com/pytest-dev/pytest/issues/7758): Fixed an issue where some files in packages are getting lost from `--lf` even though they contain tests that failed. Regressed in pytest 5.4.0. - [#​7911](https://togithub.com/pytest-dev/pytest/issues/7911): Directories created by tmp_path and tmpdir are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites. - [#​7913](https://togithub.com/pytest-dev/pytest/issues/7913): Fixed a crash or hang in pytester.spawn <\_pytest.pytester.Pytester.spawn> when the readline module is involved. - [#​7951](https://togithub.com/pytest-dev/pytest/issues/7951): Fixed handling of recursive symlinks when collecting tests. - [#​7981](https://togithub.com/pytest-dev/pytest/issues/7981): Fixed symlinked directories not being followed during collection. Regressed in pytest 6.1.0. - [#​8016](https://togithub.com/pytest-dev/pytest/issues/8016): Fixed only one doctest being collected when using `pytest --doctest-modules path/to/an/__init__.py`. ## Improved Documentation - [#​7429](https://togithub.com/pytest-dev/pytest/issues/7429): Add more information and use cases about skipping doctests. 
- [#​7780](https://togithub.com/pytest-dev/pytest/issues/7780): Classes which should not be inherited from are now marked `final class` in the API reference. - [#​7872](https://togithub.com/pytest-dev/pytest/issues/7872): `_pytest.config.argparsing.Parser.addini()` accepts explicit `None` and `"string"`. - [#​7878](https://togithub.com/pytest-dev/pytest/issues/7878): In pull request section, ask to commit after editing changelog and authors file. ## Trivial/Internal Changes - [#​7802](https://togithub.com/pytest-dev/pytest/issues/7802): The `attrs` dependency requirement is now >=19.2.0 instead of >=17.4.0. - [#​8014](https://togithub.com/pytest-dev/pytest/issues/8014): .pyc files created by pytest's assertion rewriting now conform to the newer PEP-552 format on Python>=3.7. (These files are internal and only interpreted by pytest itself.)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index e1ff03d6a..8650bde16 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==6.1.2 +pytest==6.2.1 mock==4.0.3 From 8191ef8f23a8be7a3b015875a2caf6fc78a18cb3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 5 Jan 2021 23:33:46 +0100 Subject: [PATCH 133/197] chore(deps): update dependency google-cloud-pubsub to v2.2.0 (#4673) * chore(deps): update dependency google-cloud-pubsub to v2.2.0 * run pubsub script on healthcare/api-client/v1/dicom * iot pubsub fixes, fix lint * revert some changes pubsub script made * try using return_value for mock * undo previous change * try adding publish_time in mock * move publish_time param * make publish_time a float * make publish_time a datetime * try using object instead of datetime * another attempt * undo the black stuff that messed up lint Co-authored-by: Leah Cole Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- notification_polling_test.py | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notification_polling_test.py b/notification_polling_test.py index 692f2d6ce..dfb241b84 100644 --- a/notification_polling_test.py +++ b/notification_polling_test.py @@ -42,7 +42,7 @@ def test_parse_json_message(): b"}" ) message = Message( - mock.Mock(data=data, attributes=attributes), MESSAGE_ID, delivery_attempt=0, request_queue=mock.Mock() + mock.Mock(data=data, attributes=attributes, publish_time=mock.Mock(seconds=0.0, nanos=0.0)), MESSAGE_ID, delivery_attempt=0, request_queue=mock.Mock() ) assert summarize(message) == ( "\tEvent type: OBJECT_FINALIZE\n" diff --git a/requirements.txt b/requirements.txt index 3f059dfd4..8f965f66d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==1.7.0 +google-cloud-pubsub==2.2.0 google-cloud-storage==1.35.0 From acadcf7e413ad38397d9709cb537e09f3fad32a8 Mon Sep 17 00:00:00 2001 From: Ace Nassri Date: Fri, 15 Jan 2021 19:55:57 -0800 Subject: [PATCH 134/197] chore(Dockerfile): add Python 3.9 (#4968) * chore(Dockerfile): add Python 3.9 * Add py3.9 kokoro dir * fix typo * Add GPG keys * Add 3.9 to noxfiles * Update composer dep to avoid deprecation spam * fix(storage): add py-3.9 specific key * update psycopg2-binary, only run test in py-3.9 build * add libmemcached-dev to the Dockerfile * disable appengine standard test in py-3.9 build * disable py-3.9 build for appengine cloud_debugger * skip py-3.9 build for composer/workflows * skip tests with pyarrow for py-3.9 build * avoid ReferenceError in iot builds * skip some tests due to pip error * add a temporary statement for debugging * fix lint * use correct constant * disable 2.7 builds * disable builds due to pip conflict The conflict is between google-cloud-monitoring==2.0.0 and opencensus-ext-stackdriver. 
* remove temporary debugging statement * really skip py-3.9 build for pubsub/streaming-analytics * copyright year fix * fix(storage): explicitly use the test project for the test bucket * fix(storage): use correct cloud project * fix: disable py-3.9 builds - appengine/standard_python3/bigquery - data-science-onramp/data-ingestion * disable py-3.9 build - dataflow/encryption-keys - dataflow/flex-templates/streaming_beam * disable type hint checks Co-authored-by: Takashi Matsuo --- noxfile_config.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/noxfile_config.py b/noxfile_config.py index edcea1530..a0e0cc9bb 100644 --- a/noxfile_config.py +++ b/noxfile_config.py @@ -55,6 +55,9 @@ def get_cloud_kms_key(): if session == 'py-3.8': return ('projects/python-docs-samples-tests-py38/locations/us/' 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') + if session == 'py-3.9': + return ('projects/python-docs-samples-tests-py39/locations/us/' + 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') return os.environ['CLOUD_KMS_KEY'] From 63ca12737c58eac311251327fb0cccbcab2367a5 Mon Sep 17 00:00:00 2001 From: Aldo D'Aquino Date: Thu, 4 Feb 2021 04:18:02 +0100 Subject: [PATCH 135/197] fix(storage): list all versions (#5325) ## Description Add the `versions=True` variable to the `list_file_archived_generations function` to actually list all the versions instead of the last one only. Fixes the incongruency between python and the other languages in the [Listing noncurrent object versions code samples](https://cloud.google.com/storage/docs/using-object-versioning#list). 
## Checklist - [x] I have followed [Sample Guidelines from AUTHORING_GUIDE.MD](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md) - [x] README is updated to include [all relevant information](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md#readme-file) - [x] **Tests** pass: `nox -s py-3.6` (see [Test Environment Setup](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md#test-environment-setup)) - [x] **Lint** pass: `nox -s lint` (see [Test Environment Setup](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md#test-environment-setup)) - [ ] These samples need a new **API enabled** in testing projects to pass (let us know which ones) - [ ] These samples need a new/updated **env vars** in testing projects set to pass (let us know which ones) - [x] Please **merge** this PR for me once it is approved. - [ ] This sample adds a new sample directory, and I updated the [CODEOWNERS file](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/.github/CODEOWNERS) with the codeowners for this sample --- storage_list_file_archived_generations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/storage_list_file_archived_generations.py b/storage_list_file_archived_generations.py index a496ed6ad..dc2f5eaf5 100644 --- a/storage_list_file_archived_generations.py +++ b/storage_list_file_archived_generations.py @@ -26,7 +26,7 @@ def list_file_archived_generations(bucket_name): storage_client = storage.Client() - blobs = storage_client.list_blobs(bucket_name) + blobs = storage_client.list_blobs(bucket_name, versions=True) for blob in blobs: print("{},{}".format(blob.name, blob.generation)) From 5e3776acb51071fb5973481db488f95764093d14 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Fri, 5 Feb 2021 13:00:02 -0800 Subject: [PATCH 136/197] docs: address sample feedback issues (#5329) ## 
Description Fixes #5180, captures work from #5181 authored by @keegan2149, thank you! ## Checklist - [x] I have followed [Sample Guidelines from AUTHORING_GUIDE.MD](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md) - [x] README is updated to include [all relevant information](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md#readme-file) - [x] **Tests** pass: `nox -s py-3.6` (see [Test Environment Setup](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md#test-environment-setup)) - [x] **Lint** pass: `nox -s lint` (see [Test Environment Setup](https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md#test-environment-setup)) - [x] Please **merge** this PR for me once it is approved. --- snippets_test.py | 4 +--- storage_change_file_storage_class.py | 3 +-- storage_compose_file.py | 17 ++++++++++++----- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/snippets_test.py b/snippets_test.py index edcf77939..286ae284b 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -405,15 +405,13 @@ def test_object_get_kms_key(test_bucket): def test_storage_compose_file(test_bucket): source_files = ["test_upload_blob_1", "test_upload_blob_2"] - blob_list = [] for source in source_files: blob = test_bucket.blob(source) blob.upload_from_string(source) - blob_list.append(blob) with tempfile.NamedTemporaryFile() as dest_file: destination = storage_compose_file.compose_file( - test_bucket.name, blob_list, dest_file.name + test_bucket.name, source_files[0], source_files[1], dest_file.name ) composed = destination.download_as_string() diff --git a/storage_change_file_storage_class.py b/storage_change_file_storage_class.py index 1769b8c3f..d5dda56a7 100644 --- a/storage_change_file_storage_class.py +++ b/storage_change_file_storage_class.py @@ -18,7 +18,6 @@ # [START storage_change_file_storage_class] from google.cloud 
import storage -from google.cloud.storage import constants def change_file_storage_class(bucket_name, blob_name): @@ -30,7 +29,7 @@ def change_file_storage_class(bucket_name, blob_name): bucket = storage_client.get_bucket(bucket_name) blob = bucket.get_blob(blob_name) - blob.update_storage_class(constants.NEARLINE_STORAGE_CLASS) + blob.update_storage_class("NEARLINE") print( "Blob {} in bucket {} had its storage class set to {}".format( diff --git a/storage_compose_file.py b/storage_compose_file.py index 234bf3f68..2c1443f22 100644 --- a/storage_compose_file.py +++ b/storage_compose_file.py @@ -20,21 +20,25 @@ from google.cloud import storage -def compose_file(bucket_name, sources, destination_blob_name): +def compose_file(bucket_name, first_blob_name, second_blob_name, destination_blob_name): """Concatenate source blobs into destination blob.""" # bucket_name = "your-bucket-name" - # sources = [blob_1, blob_2] + # first_blob_name = "first-object-name" + # second_blob_name = "second-blob-name" # destination_blob_name = "destination-object-name" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) destination = bucket.blob(destination_blob_name) destination.content_type = "text/plain" + + # sources is a list of Blob instances, up to the max of 32 instances per request + sources = [bucket.get_blob(first_blob_name), bucket.get_blob(second_blob_name)] destination.compose(sources) print( - "Composed new object {} in the bucket {}".format( - destination_blob_name, bucket.name + "New composite object {} in the bucket {} was created by combining {} and {}".format( + destination_blob_name, bucket_name, first_blob_name, second_blob_name ) ) return destination @@ -44,5 +48,8 @@ def compose_file(bucket_name, sources, destination_blob_name): if __name__ == "__main__": compose_file( - bucket_name=sys.argv[1], sources=sys.argv[2], destination_blob_name=sys.argv[3], + bucket_name=sys.argv[1], + first_blob_name=sys.argv[2], + second_blob_name=sys.argv[3], + 
destination_blob_name=sys.argv[4], ) From 30b4c0c3606a7d46cb2c21e8bb97627e78d72e0b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Feb 2021 18:56:05 +0100 Subject: [PATCH 137/197] chore(deps): update dependency google-cloud-pubsub to v2.3.0 (#5347) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.2.0` -> `==2.3.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.3.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.3.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.3.0/compatibility-slim/2.2.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.3.0/confidence-slim/2.2.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.3.0`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​230-httpswwwgithubcomgoogleapispython-pubsubcomparev220v230-2021-02-08) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.2.0...v2.3.0) ##### Features - surface SchemaServiceClient in google.cloud.pubsub ([#​281](https://www.github.com/googleapis/python-pubsub/issues/281)) ([8751bcc](https://www.github.com/googleapis/python-pubsub/commit/8751bcc5eb782df55769b48253629a3bde3d4661)) ##### Bug Fixes - client version missing from the user agent header ([#​275](https://www.github.com/googleapis/python-pubsub/issues/275)) ([b112f4f](https://www.github.com/googleapis/python-pubsub/commit/b112f4fcbf6f2bce8dcf37871bdc540b11f54fe3)) - Don't open the google.cloud package by adding pubsub.py ([#​269](https://www.github.com/googleapis/python-pubsub/issues/269)) ([542d79d](https://www.github.com/googleapis/python-pubsub/commit/542d79d7c5fb7403016150ba477485756cd4097b)) - flaky samples tests ([#​263](https://www.github.com/googleapis/python-pubsub/issues/263)) ([3d6a29d](https://www.github.com/googleapis/python-pubsub/commit/3d6a29de07cc09be663c90a3333f4cd33633994f)) - Modify synth.py to update grpc transport options ([#​266](https://www.github.com/googleapis/python-pubsub/issues/266)) ([41dcd30](https://www.github.com/googleapis/python-pubsub/commit/41dcd30636168f3dd1248f1d99170d531fc9bcb8)) - pass anonymous credentials for emulator ([#​250](https://www.github.com/googleapis/python-pubsub/issues/250)) ([8eed8e1](https://www.github.com/googleapis/python-pubsub/commit/8eed8e16019510dc8b20fb6b009d61a7ac532d26)) - remove grpc send/recieve limits ([#​259](https://www.github.com/googleapis/python-pubsub/issues/259)) ([fd2840c](https://www.github.com/googleapis/python-pubsub/commit/fd2840c10f92b03da7f4b40ac69c602220757c0a))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8f965f66d..b539a8c35 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.2.0 +google-cloud-pubsub==2.3.0 google-cloud-storage==1.35.0 From e6cf484900e29f5c5bc421be5dd81f6a242e3eb6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Feb 2021 20:10:49 +0100 Subject: [PATCH 138/197] chore(deps): update dependency google-cloud-storage to v1.35.1 (#5321) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b539a8c35..ac3026346 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.3.0 -google-cloud-storage==1.35.0 +google-cloud-storage==1.35.1 From 173579b388c39231878946cd7188a7b055504de3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 23 Feb 2021 00:58:50 +0100 Subject: [PATCH 139/197] chore(deps): update dependency google-cloud-pubsub to v2.4.0 (#5399) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ac3026346..0231ebcfb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.3.0 +google-cloud-pubsub==2.4.0 
google-cloud-storage==1.35.1 From a75684d1caee1254d3f36ed4233ae5393dccb7bf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 24 Feb 2021 21:33:33 +0100 Subject: [PATCH 140/197] chore(deps): update dependency google-cloud-storage to v1.36.1 (#5353) * chore(deps): update dependency google-cloud-storage to v1.36.1 * moving media transcoder separately Co-authored-by: Leah Cole Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0231ebcfb..a3d39398e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.4.0 -google-cloud-storage==1.35.1 +google-cloud-storage==1.36.1 From 179c9f5f2eb5600173c13258d625afedefba2a64 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Mar 2021 19:19:50 +0100 Subject: [PATCH 141/197] chore(deps): update dependency google-cloud-storage to v1.36.2 (#5520) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a3d39398e..be91b2747 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.4.0 -google-cloud-storage==1.36.1 +google-cloud-storage==1.36.2 From 22838657adcafdd8875f75d9ea6facadf2ec015d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 26 Mar 2021 21:28:01 +0100 Subject: [PATCH 142/197] chore(deps): update dependency google-cloud-storage to v1.37.0 (#5580) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==1.36.2` -> `==1.37.0` | 
[![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.37.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.37.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.37.0/compatibility-slim/1.36.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.37.0/confidence-slim/1.36.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-storage ### [`v1.37.0`](https://togithub.com/googleapis/python-storage/blob/master/CHANGELOG.md#​1370-httpswwwgithubcomgoogleapispython-storagecomparev1362v1370-2021-03-24) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v1.36.2...v1.37.0) ##### Features - add blob.open() for file-like I/O ([#​385](https://www.github.com/googleapis/python-storage/issues/385)) ([440a0a4](https://www.github.com/googleapis/python-storage/commit/440a0a4ffe00b1f7c562b0e9c1e47dbadeca33e1)), closes [#​29](https://www.github.com/googleapis/python-storage/issues/29) ##### Bug Fixes - update user_project usage and documentation in bucket/client class methods ([#​396](https://www.github.com/googleapis/python-storage/issues/396)) ([1a2734b](https://www.github.com/googleapis/python-storage/commit/1a2734ba6d316ce51e4e141571331e86196462b9)) ##### [1.36.2](https://www.github.com/googleapis/python-storage/compare/v1.36.1...v1.36.2) (2021-03-09) ##### Bug Fixes - update batch connection to request api endpoint info from client ([#​392](https://www.github.com/googleapis/python-storage/issues/392)) ([91fc6d9](https://www.github.com/googleapis/python-storage/commit/91fc6d9870a36308b15a827ed6a691e5b4669b62)) ##### [1.36.1](https://www.github.com/googleapis/python-storage/compare/v1.36.0...v1.36.1) (2021-02-19) ##### Bug Fixes - allow metadata keys to be cleared ([#​383](https://www.github.com/googleapis/python-storage/issues/383)) ([79d27da](https://www.github.com/googleapis/python-storage/commit/79d27da9fe842e44a9091076ea0ef52c5ef5ff72)), closes [#​381](https://www.github.com/googleapis/python-storage/issues/381) - allow signed url version v4 without signed credentials ([#​356](https://www.github.com/googleapis/python-storage/issues/356)) ([3e69bf9](https://www.github.com/googleapis/python-storage/commit/3e69bf92496616c5de28094dd42260b35c3bf982)) - correctly encode bytes for V2 signature ([#​382](https://www.github.com/googleapis/python-storage/issues/382)) 
([f44212b](https://www.github.com/googleapis/python-storage/commit/f44212b7b91a67ca661898400fe632f9fb3ec8f6))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index be91b2747..213ab5ca0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.4.0 -google-cloud-storage==1.36.2 +google-cloud-storage==1.37.0 From ada6fca9b347095d5c40ba41142754d90ad63aa3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 2 Apr 2021 07:32:05 +0200 Subject: [PATCH 143/197] chore(deps): update dependency google-cloud-pubsub to v2.4.1 (#5610) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.4.0` -> `==2.4.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.4.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.4.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.4.1/compatibility-slim/2.4.0)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.4.1/confidence-slim/2.4.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.4.1`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​241-httpswwwgithubcomgoogleapispython-pubsubcomparev240v241-2021-03-30) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.4.0...2.4.1) ##### Bug Fixes - Move `await_msg_callbacks` flag to `subscribe()` method, fixing a regression in Pub/Sub Lite client. ([#​320](https://www.github.com/googleapis/python-pubsub/issues/320)) ([d40d027](https://www.github.com/googleapis/python-pubsub/commit/d40d02713c8c189937ae5c21d099b88a3131a59f)) - SSL error when using the client with the emulator. ([#​297](https://www.github.com/googleapis/python-pubsub/issues/297)) ([83db672](https://www.github.com/googleapis/python-pubsub/commit/83db67239d3521457138699109f766d574a0a2c4)) ##### Implementation Changes - (samples) Bump the max_time to 10 minutes for a flaky test. ([#​311](https://www.github.com/googleapis/python-pubsub/issues/311)) ([e2678d4](https://www.github.com/googleapis/python-pubsub/commit/e2678d47c08e6b03782d2d744a4e630b933fdd51)), closes [#​291](https://www.github.com/googleapis/python-pubsub/issues/291) - (samples) Mark delivery attempts test as flaky. ([#​326](https://www.github.com/googleapis/python-pubsub/issues/326)) ([5a97ef1](https://www.github.com/googleapis/python-pubsub/commit/5a97ef1bb7512fe814a8f72a43b3e9698434cd8d)) - (samples) Mitigate flakiness in subscriber_tests. ([#​304](https://www.github.com/googleapis/python-pubsub/issues/304)) ([271a385](https://www.github.com/googleapis/python-pubsub/commit/271a3856d835967f18f6becdae5ad53d585d0ccf)) - (samples) Retry `InternalServerError` in dead letter policy test. 
([#​329](https://www.github.com/googleapis/python-pubsub/issues/329)) ([34c9b11](https://www.github.com/googleapis/python-pubsub/commit/34c9b11ae697c280f32642c3101b7f7da971f589)), closes [#​321](https://www.github.com/googleapis/python-pubsub/issues/321) ##### Documentation - Remove EXPERIMENTAL tag for ordering keys in `types.py`. ([#​323](https://www.github.com/googleapis/python-pubsub/issues/323)) ([659cd7a](https://www.github.com/googleapis/python-pubsub/commit/659cd7ae2784245d4217fbc722dac04bd3222d32)) - Remove EXPERIMENTAL tag from `Schema` service (via synth). ([#​307](https://www.github.com/googleapis/python-pubsub/issues/307)) ([ad85202](https://www.github.com/googleapis/python-pubsub/commit/ad852028836520db779c5cc33689ffd7e5458a7d))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 213ab5ca0..c4a07b64e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.4.0 +google-cloud-pubsub==2.4.1 google-cloud-storage==1.37.0 From 510cf4095d1d3169385d579410fdfc450a8f808b Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Fri, 16 Apr 2021 13:46:42 -0400 Subject: [PATCH 144/197] docs: update description of parameters in storage_upload_file (#5707) * following Java's example https://github.com/googleapis/google-cloud-java/blob/b36db6a957bcfb7b6ccdb77fb12b4cc7fa22b807/google-cloud-examples/src/main/java/com/google/cloud/examples/storage/objects/UploadObject.java#L33-L40 --- storage_upload_file.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/storage_upload_file.py b/storage_upload_file.py index c067f7063..fb02c3632 100644 --- a/storage_upload_file.py +++ b/storage_upload_file.py @@ -22,8 +22,11 @@ def upload_blob(bucket_name, source_file_name, destination_blob_name): """Uploads a file to the bucket.""" + # The ID of your GCS bucket # bucket_name = "your-bucket-name" + # The path to your file to upload # source_file_name = "local/path/to/file" + # The ID of your GCS object # destination_blob_name = "storage-object-name" storage_client = storage.Client() From 
3ff97706bb581f342afed3110fae207a677102d1 Mon Sep 17 00:00:00 2001 From: BenWhitehead Date: Fri, 16 Apr 2021 15:06:04 -0400 Subject: [PATCH 145/197] samples(storage): update storage_set_bucket_public_iam to explicitly set role and member (#5708) --- iam_test.py | 9 +++------ storage_set_bucket_public_iam.py | 10 +++++----- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/iam_test.py b/iam_test.py index 04d72c1a0..eb7638de5 100644 --- a/iam_test.py +++ b/iam_test.py @@ -137,13 +137,10 @@ def test_remove_bucket_conditional_iam_binding(bucket): def test_set_bucket_public_iam(public_bucket): - role = "roles/storage.objectViewer" - member = "allUsers" - storage_set_bucket_public_iam.set_bucket_public_iam( - public_bucket.name, role, member - ) + storage_set_bucket_public_iam.set_bucket_public_iam(public_bucket.name) policy = public_bucket.get_iam_policy(requested_policy_version=3) assert any( - binding["role"] == role and member in binding["members"] + binding["role"] == "roles/storage.objectViewer" + and "allUsers" in binding["members"] for binding in policy.bindings ) diff --git a/storage_set_bucket_public_iam.py b/storage_set_bucket_public_iam.py index 426318a36..c43b3eee5 100644 --- a/storage_set_bucket_public_iam.py +++ b/storage_set_bucket_public_iam.py @@ -20,17 +20,17 @@ from google.cloud import storage -def set_bucket_public_iam(bucket_name, role, member): +def set_bucket_public_iam(bucket_name): """Set a public IAM Policy to bucket""" # bucket_name = "your-bucket-name" - # role = "IAM role, e.g. roles/storage.objectViewer" - # member = "IAM identity, e.g. 
allUsers" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) policy = bucket.get_iam_policy(requested_policy_version=3) - policy.bindings.append({"role": role, "members": {member}}) + policy.bindings.append( + {"role": "roles/storage.objectViewer", "members": {"allUsers"}} + ) bucket.set_iam_policy(policy) @@ -41,5 +41,5 @@ def set_bucket_public_iam(bucket_name, role, member): if __name__ == "__main__": set_bucket_public_iam( - bucket_name=sys.argv[1], role=sys.argv[2], member=sys.argv[3], + bucket_name=sys.argv[1], ) From 1715fa6dc9213c1a497b5337324a4b83b49ea6d7 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 15:01:17 -0400 Subject: [PATCH 146/197] chore: fix typo on noxfile (#5739) * chore: add noxfile config * chore: fix typo on noxfile * Remove "chore: add noxfile config" This reverts commit 61972125cbbf110941da1227afed53f169bad3a6. * chore: fix the base noxfile_config --- noxfile_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile_config.py b/noxfile_config.py index a0e0cc9bb..ebd479f7e 100644 --- a/noxfile_config.py +++ b/noxfile_config.py @@ -14,7 +14,7 @@ # Default TEST_CONFIG_OVERRIDE for python repos. -# You can copy this file into your directory, then it will be inported from +# You can copy this file into your directory, then it will be imported from # the noxfile.py. 
# The source of truth: From 5eaf0e8854ee1a499323a808c55eb4042d4ff067 Mon Sep 17 00:00:00 2001 From: cojenco <59401799+cojenco@users.noreply.github.com> Date: Mon, 26 Apr 2021 14:12:10 -0700 Subject: [PATCH 147/197] fix(storage): retry flaky test (#5744) Fixes #5684 --- acl_test.py | 3 +++ requirements-test.txt | 1 + requirements.txt | 1 + 3 files changed, 5 insertions(+) diff --git a/acl_test.py b/acl_test.py index eda2c7df1..c715d8742 100644 --- a/acl_test.py +++ b/acl_test.py @@ -15,7 +15,9 @@ import os import uuid +import backoff from google.cloud import storage +from googleapiclient.errors import HttpError import pytest import storage_add_bucket_default_owner @@ -131,6 +133,7 @@ def test_print_blob_acl(test_blob, capsys): assert out +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_print_blob_acl_for_user(test_blob, capsys): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() diff --git a/requirements-test.txt b/requirements-test.txt index 8650bde16..c10a1de8b 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,2 +1,3 @@ pytest==6.2.1 mock==4.0.3 +backoff==1.10.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index c4a07b64e..96d669115 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ google-cloud-pubsub==2.4.1 google-cloud-storage==1.37.0 +google-api-python-client==2.2.0 \ No newline at end of file From bd2f6380469a63f250c5e7fcdc0ec53a3b278f5d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Apr 2021 19:44:45 +0200 Subject: [PATCH 148/197] chore(deps): update dependency google-api-python-client to v2.3.0 (#5689) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 96d669115..54989f860 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.4.1 google-cloud-storage==1.37.0 -google-api-python-client==2.2.0 \ No newline at end of file 
+google-api-python-client==2.3.0 \ No newline at end of file From 37efbc8bfc07b653307c24d623580990674c68a1 Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Wed, 28 Apr 2021 11:34:02 -0700 Subject: [PATCH 149/197] Update storage_list_files_with_prefix.py (#5747) --- storage_list_files_with_prefix.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/storage_list_files_with_prefix.py b/storage_list_files_with_prefix.py index 353fa3a72..f79413fb6 100644 --- a/storage_list_files_with_prefix.py +++ b/storage_list_files_with_prefix.py @@ -37,11 +37,13 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): a/1.txt a/b/2.txt - However, if you specify prefix='a/' and delimiter='/', you'll get back: + However, if you specify prefix='a/' and delimiter='/', you'll get back + only the file directly under 'a/': a/1.txt - Additionally, the same request will return blobs.prefixes populated with: + As part of the response, you'll also get back a blobs.prefixes entity + that lists the "subfolders" under `a/`: a/b/ """ @@ -49,9 +51,7 @@ def list_blobs_with_prefix(bucket_name, prefix, delimiter=None): storage_client = storage.Client() # Note: Client.list_blobs requires at least package version 1.17.0. 
- blobs = storage_client.list_blobs( - bucket_name, prefix=prefix, delimiter=delimiter - ) + blobs = storage_client.list_blobs(bucket_name, prefix=prefix, delimiter=delimiter) print("Blobs:") for blob in blobs: From 396bfe902f25e4142535eb52d2cc7765f86000a7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 29 Apr 2021 00:42:16 +0200 Subject: [PATCH 150/197] chore(deps): update dependency google-cloud-storage to v1.38.0 (#5640) Test failures are unrelated --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 54989f860..83b8c44f0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.4.1 -google-cloud-storage==1.37.0 +google-cloud-storage==1.38.0 google-api-python-client==2.3.0 \ No newline at end of file From dad104c8e044f09cd3208a66dbc617dfc8ecd75a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 8 May 2021 06:15:59 +0200 Subject: [PATCH 151/197] chore(deps): update dependency pytest to v6.2.4 (#5787) Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index c10a1de8b..46cf0f855 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,3 +1,3 @@ -pytest==6.2.1 +pytest==6.2.4 mock==4.0.3 backoff==1.10.0 \ No newline at end of file From 618b574aed4aaf16e364495e2c1d34459dd2b2bb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 12 May 2021 03:27:27 +0200 Subject: [PATCH 152/197] chore(deps): update dependency google-cloud-pubsub to v2.4.2 (#5810) Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 83b8c44f0..1b836fa31 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.4.1 
+google-cloud-pubsub==2.4.2 google-cloud-storage==1.38.0 google-api-python-client==2.3.0 \ No newline at end of file From 676d926e6e46687494e26a4cf478d12e71fcb89f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 May 2021 18:25:19 +0200 Subject: [PATCH 153/197] chore(deps): update dependency google-cloud-pubsub to v2.5.0 (#5845) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1b836fa31..a8d38d862 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.4.2 +google-cloud-pubsub==2.5.0 google-cloud-storage==1.38.0 google-api-python-client==2.3.0 \ No newline at end of file From 735dfad5e0a7046976ab9ccfd3bdb5d7f1972579 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 May 2021 19:46:17 +0200 Subject: [PATCH 154/197] chore(deps): update dependency google-api-python-client to v2.4.0 (#5820) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a8d38d862..b99933b9d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.5.0 google-cloud-storage==1.38.0 -google-api-python-client==2.3.0 \ No newline at end of file +google-api-python-client==2.4.0 \ No newline at end of file From 8a0558a89578a34fa1ca360bf915b80c7409ad46 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 21 May 2021 07:19:22 +0200 Subject: [PATCH 155/197] chore(deps): update dependency google-api-python-client to v2.5.0 (#5857) Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b99933b9d..02335ef85 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.5.0 google-cloud-storage==1.38.0 -google-api-python-client==2.4.0 \ No newline at end of file 
+google-api-python-client==2.5.0 \ No newline at end of file From 36da4d0ea41817d1d3c8cff43d72dc3294a69955 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 27 May 2021 18:18:38 +0200 Subject: [PATCH 156/197] chore(deps): update dependency google-api-python-client to v2.6.0 (#5890) Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 02335ef85..2e51405db 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.5.0 google-cloud-storage==1.38.0 -google-api-python-client==2.5.0 \ No newline at end of file +google-api-python-client==2.6.0 \ No newline at end of file From 0307fc0831d2af74469353554a1611cc3119893c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 2 Jun 2021 20:12:36 +0200 Subject: [PATCH 157/197] chore(deps): update dependency google-api-python-client to v2.7.0 (#6062) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2e51405db..a8dbb9817 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.5.0 google-cloud-storage==1.38.0 -google-api-python-client==2.6.0 \ No newline at end of file +google-api-python-client==2.7.0 \ No newline at end of file From 79c76c3cc363b29b73fcce85947f9ecffbc37bac Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 25 Jun 2021 22:36:28 +0200 Subject: [PATCH 158/197] chore(deps): update dependency google-cloud-pubsub to v2.6.0 (#6233) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a8dbb9817..e66429dd9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.5.0 +google-cloud-pubsub==2.6.0 google-cloud-storage==1.38.0 google-api-python-client==2.7.0 \ No newline at end of file From 
4e210ca3b7bb1ec4385ab5c855315a4c41190019 Mon Sep 17 00:00:00 2001 From: Sameena Shaffeeullah Date: Wed, 30 Jun 2021 16:07:20 -0700 Subject: [PATCH 159/197] public access prevention samples & tests (#4971) * public access prevention samples & tests * linted files * responded to PR comments * updated docstring * updated docstring * refactored fixture code * renamed samples * updated location for constants * updated location for constants * updated samples to conform to sample guidelines * added license * updated headers * Updating requirements * used f strings * linted files * f string suggestions from code review Co-authored-by: Dina Graves Portman Co-authored-by: Dina Graves Portman --- conftest.py | 41 ++++++++++++++ public_access_prevention_test.py | 53 +++++++++++++++++++ requirements.txt | 2 +- storage_get_bucket_metadata.py | 41 +++++++------- storage_get_public_access_prevention.py | 40 ++++++++++++++ ...e_set_public_access_prevention_enforced.py | 43 +++++++++++++++ ...et_public_access_prevention_unspecified.py | 43 +++++++++++++++ uniform_bucket_level_access_test.py | 26 --------- 8 files changed, 240 insertions(+), 49 deletions(-) create mode 100644 conftest.py create mode 100644 public_access_prevention_test.py create mode 100644 storage_get_public_access_prevention.py create mode 100644 storage_set_public_access_prevention_enforced.py create mode 100644 storage_set_public_access_prevention_unspecified.py diff --git a/conftest.py b/conftest.py new file mode 100644 index 000000000..b0db57561 --- /dev/null +++ b/conftest.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import time +import uuid + +from google.cloud import storage +import pytest + + +@pytest.fixture(scope="function") +def bucket(): + """Yields a bucket that is deleted after the test completes.""" + # The new projects enforces uniform bucket level access, so + # we need to use the old main project for now. + original_value = os.environ['GOOGLE_CLOUD_PROJECT'] + os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT'] + bucket = None + while bucket is None or bucket.exists(): + bucket_name = f"uniform-bucket-level-access-{uuid.uuid4().hex}" + bucket = storage.Client().bucket(bucket_name) + bucket.create() + yield bucket + time.sleep(3) + bucket.delete(force=True) + # Set the value back. + os.environ['GOOGLE_CLOUD_PROJECT'] = original_value diff --git a/public_access_prevention_test.py b/public_access_prevention_test.py new file mode 100644 index 000000000..613e7c1ad --- /dev/null +++ b/public_access_prevention_test.py @@ -0,0 +1,53 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import storage_get_public_access_prevention +import storage_set_public_access_prevention_enforced +import storage_set_public_access_prevention_unspecified + + +def test_get_public_access_prevention(bucket, capsys): + short_name = storage_get_public_access_prevention + short_name.get_public_access_prevention( + bucket.name + ) + out, _ = capsys.readouterr() + assert ( + f"Public access prevention is unspecified for {bucket.name}." + in out + ) + + +def test_set_public_access_prevention_enforced(bucket, capsys): + short_name = storage_set_public_access_prevention_enforced + short_name.set_public_access_prevention_enforced( + bucket.name + ) + out, _ = capsys.readouterr() + assert ( + f"Public access prevention is set to enforced for {bucket.name}." + in out + ) + + +def test_set_public_access_prevention_unspecified(bucket, capsys): + short_name = storage_set_public_access_prevention_unspecified + short_name.set_public_access_prevention_unspecified( + bucket.name + ) + out, _ = capsys.readouterr() + assert ( + f"Public access prevention is 'unspecified' for {bucket.name}." 
+ in out + ) diff --git a/requirements.txt b/requirements.txt index e66429dd9..108394cf1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.6.0 -google-cloud-storage==1.38.0 +google-cloud-storage==1.40.0 google-api-python-client==2.7.0 \ No newline at end of file diff --git a/storage_get_bucket_metadata.py b/storage_get_bucket_metadata.py index 2813d564b..87cd5eddc 100644 --- a/storage_get_bucket_metadata.py +++ b/storage_get_bucket_metadata.py @@ -16,6 +16,7 @@ import sys + # [START storage_get_bucket_metadata] from google.cloud import storage @@ -28,30 +29,26 @@ def bucket_metadata(bucket_name): storage_client = storage.Client() bucket = storage_client.get_bucket(bucket_name) - print("ID: {}".format(bucket.id)) - print("Name: {}".format(bucket.name)) - print("Storage Class: {}".format(bucket.storage_class)) - print("Location: {}".format(bucket.location)) - print("Location Type: {}".format(bucket.location_type)) - print("Cors: {}".format(bucket.cors)) - print( - "Default Event Based Hold: {}".format(bucket.default_event_based_hold) - ) - print("Default KMS Key Name: {}".format(bucket.default_kms_key_name)) - print("Metageneration: {}".format(bucket.metageneration)) + print(f"ID: {bucket.id}") + print(f"Name: {bucket.name}") + print(f"Storage Class: {bucket.storage_class}") + print(f"Location: {bucket.location}") + print(f"Location Type: {bucket.location_type}") + print(f"Cors: {bucket.cors}") + print(f"Default Event Based Hold: {bucket.default_event_based_hold}") + print(f"Default KMS Key Name: {bucket.default_kms_key_name}") + print(f"Metageneration: {bucket.metageneration}") print( - "Retention Effective Time: {}".format( - bucket.retention_policy_effective_time - ) + f"Public Access Prevention: {bucket.iam_configuration.public_access_prevention}" ) - print("Retention Period: {}".format(bucket.retention_period)) - print("Retention Policy Locked: {}".format(bucket.retention_policy_locked)) - print("Requester Pays: 
{}".format(bucket.requester_pays)) - print("Self Link: {}".format(bucket.self_link)) - print("Time Created: {}".format(bucket.time_created)) - print("Versioning Enabled: {}".format(bucket.versioning_enabled)) - print("Labels:") - print(bucket.labels) + print(f"Retention Effective Time: {bucket.retention_policy_effective_time}") + print(f"Retention Period: {bucket.retention_period}") + print(f"Retention Policy Locked: {bucket.retention_policy_locked}") + print(f"Requester Pays: {bucket.requester_pays}") + print(f"Self Link: {bucket.self_link}") + print(f"Time Created: {bucket.time_created}") + print(f"Versioning Enabled: {bucket.versioning_enabled}") + print(f"Labels: {bucket.labels}") # [END storage_get_bucket_metadata] diff --git a/storage_get_public_access_prevention.py b/storage_get_public_access_prevention.py new file mode 100644 index 000000000..151cf4ca8 --- /dev/null +++ b/storage_get_public_access_prevention.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_get_public_access_prevention] +from google.cloud import storage + + +def get_public_access_prevention(bucket_name): + """Gets the public access prevention setting (either 'unspecified' or 'enforced') for a bucket.""" + # The ID of your GCS bucket + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + iam_configuration = bucket.iam_configuration + + print( + f"Public access prevention is {iam_configuration.public_access_prevention} for {bucket.name}." + ) + + +# [END storage_get_public_access_prevention] + +if __name__ == "__main__": + get_public_access_prevention(bucket_name=sys.argv[1]) diff --git a/storage_set_public_access_prevention_enforced.py b/storage_set_public_access_prevention_enforced.py new file mode 100644 index 000000000..59ce5ce56 --- /dev/null +++ b/storage_set_public_access_prevention_enforced.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_public_access_prevention_enforced] +from google.cloud import storage +from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_ENFORCED + + +def set_public_access_prevention_enforced(bucket_name): + """Enforce public access prevention for a bucket.""" + # The ID of your GCS bucket + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + bucket.iam_configuration.public_access_prevention = ( + PUBLIC_ACCESS_PREVENTION_ENFORCED + ) + bucket.patch() + + print(f"Public access prevention is set to enforced for {bucket.name}.") + + +# [END storage_set_public_access_prevention_enforced] + +if __name__ == "__main__": + set_public_access_prevention_enforced(bucket_name=sys.argv[1]) diff --git a/storage_set_public_access_prevention_unspecified.py b/storage_set_public_access_prevention_unspecified.py new file mode 100644 index 000000000..ae2c4701c --- /dev/null +++ b/storage_set_public_access_prevention_unspecified.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START storage_set_public_access_prevention_unspecified] +from google.cloud import storage +from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_UNSPECIFIED + + +def set_public_access_prevention_unspecified(bucket_name): + """Sets the public access prevention status to unspecified, so that the bucket inherits its setting from its parent project.""" + # The ID of your GCS bucket + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + bucket.iam_configuration.public_access_prevention = ( + PUBLIC_ACCESS_PREVENTION_UNSPECIFIED + ) + bucket.patch() + + print(f"Public access prevention is 'unspecified' for {bucket.name}.") + + +# [END storage_set_public_access_prevention_unspecified] + +if __name__ == "__main__": + set_public_access_prevention_unspecified(bucket_name=sys.argv[1]) diff --git a/uniform_bucket_level_access_test.py b/uniform_bucket_level_access_test.py index 0d97641eb..b43fa016f 100644 --- a/uniform_bucket_level_access_test.py +++ b/uniform_bucket_level_access_test.py @@ -12,37 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os -import time -import uuid - -from google.cloud import storage -import pytest - import storage_disable_uniform_bucket_level_access import storage_enable_uniform_bucket_level_access import storage_get_uniform_bucket_level_access -@pytest.fixture() -def bucket(): - """Yields a bucket that is deleted after the test completes.""" - # The new projects enforces uniform bucket level access, so - # we need to use the old main project for now. 
- original_value = os.environ['GOOGLE_CLOUD_PROJECT'] - os.environ['GOOGLE_CLOUD_PROJECT'] = os.environ['MAIN_GOOGLE_CLOUD_PROJECT'] - bucket = None - while bucket is None or bucket.exists(): - bucket_name = "uniform-bucket-level-access-{}".format(uuid.uuid4().hex) - bucket = storage.Client().bucket(bucket_name) - bucket.create() - yield bucket - time.sleep(3) - bucket.delete(force=True) - # Set the value back. - os.environ['GOOGLE_CLOUD_PROJECT'] = original_value - - def test_get_uniform_bucket_level_access(bucket, capsys): storage_get_uniform_bucket_level_access.get_uniform_bucket_level_access( bucket.name From 742d0ecedb33ec96b01c60b05e752e55ecb25d5a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 1 Jul 2021 20:22:35 +0200 Subject: [PATCH 160/197] chore(deps): update dependency google-api-python-client to v2.11.0 (#6101) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 108394cf1..107418cc8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.6.0 google-cloud-storage==1.40.0 -google-api-python-client==2.7.0 \ No newline at end of file +google-api-python-client==2.11.0 \ No newline at end of file From d925aaaf4aeacf28a38ae4c0c30e1fa83b15d121 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 12 Jul 2021 19:11:55 +0200 Subject: [PATCH 161/197] chore(deps): update dependency google-cloud-pubsub to v2.6.1 (#6284) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 107418cc8..1c4814046 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.6.0 +google-cloud-pubsub==2.6.1 google-cloud-storage==1.40.0 google-api-python-client==2.11.0 \ No newline at end of file From ce6c7b40a12b58b04df2a70387057bd0dafbe3c3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 16 Jul 2021 00:19:53 +0200 Subject: [PATCH 
162/197] chore(deps): update dependency backoff to v1.11.0 (#6285) Co-authored-by: Dina Graves Portman --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 46cf0f855..2917e77e0 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,3 +1,3 @@ pytest==6.2.4 mock==4.0.3 -backoff==1.10.0 \ No newline at end of file +backoff==1.11.0 \ No newline at end of file From 421cd6abc562930a1205cfe4b6b2f736ec9c6a65 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Jul 2021 20:16:43 +0200 Subject: [PATCH 163/197] chore(deps): update dependency google-cloud-storage to v1.41.0 (#6197) * chore(deps): update dependency google-cloud-storage to v1.41.0 * revert dataflow flex templates * revert all dataflow changes * correct my mistake with dataflow stuff * restore dataflow file Co-authored-by: Leah Cole --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1c4814046..7f4cf3164 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.6.1 -google-cloud-storage==1.40.0 +google-cloud-storage==1.41.0 google-api-python-client==2.11.0 \ No newline at end of file From c6c10693f3973f64b3dff3a71b252ff19de32678 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Jul 2021 22:32:55 +0200 Subject: [PATCH 164/197] chore(deps): update dependency google-api-python-client to v2.12.0 (#6269) * chore(deps): update dependency google-api-python-client to v2.12.0 * revert dataflow * revert dataflow Co-authored-by: Leah Cole --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 7f4cf3164..c5617fb28 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.6.1 google-cloud-storage==1.41.0 -google-api-python-client==2.11.0 \ No newline at end of file 
+google-api-python-client==2.12.0 From 458cc6145667a8b52c1061a63a73e36b065432e3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 29 Jul 2021 16:59:13 +0200 Subject: [PATCH 165/197] chore(deps): update dependency google-cloud-pubsub to v2.7.0 (#6486) Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c5617fb28..8b4a182de 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.6.1 +google-cloud-pubsub==2.7.0 google-cloud-storage==1.41.0 google-api-python-client==2.12.0 From e070c9052bb4a0a79f546a5194f52f6420773ca5 Mon Sep 17 00:00:00 2001 From: cojenco Date: Thu, 5 Aug 2021 09:56:43 -0700 Subject: [PATCH 166/197] fix(storage): update service account email for acl tests (#6529) * fix: update test email for acl tests. previous email was deleted in the project * update to service account without project editor owner permissions * update test email to avoid creating new service accounts --- acl_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/acl_test.py b/acl_test.py index c715d8742..0fcc21c32 100644 --- a/acl_test.py +++ b/acl_test.py @@ -32,9 +32,9 @@ import storage_remove_file_owner # Typically we'd use a @example.com address, but GCS requires a real Google -# account. +# account. Retrieve a service account email with storage admin permissions. 
TEST_EMAIL = ( - "google-auth-system-tests" + "py38-storage-test" "@python-docs-samples-tests.iam.gserviceaccount.com" ) From 8762b1358f582a20d57b52d8286824bf6a264c35 Mon Sep 17 00:00:00 2001 From: cojenco Date: Thu, 12 Aug 2021 09:15:08 -0700 Subject: [PATCH 167/197] docs(storage): update description in storage_download_file (#6553) --- storage_download_file.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/storage_download_file.py b/storage_download_file.py index 33395122e..f8a1c93c8 100644 --- a/storage_download_file.py +++ b/storage_download_file.py @@ -22,8 +22,13 @@ def download_blob(bucket_name, source_blob_name, destination_file_name): """Downloads a blob from the bucket.""" + # The ID of your GCS bucket # bucket_name = "your-bucket-name" + + # The ID of your GCS object # source_blob_name = "storage-object-name" + + # The path to which the file should be downloaded # destination_file_name = "local/path/to/file" storage_client = storage.Client() @@ -38,8 +43,8 @@ def download_blob(bucket_name, source_blob_name, destination_file_name): blob.download_to_filename(destination_file_name) print( - "Blob {} downloaded to {}.".format( - source_blob_name, destination_file_name + "Downloaded storage object {} from bucket {} to local file {}.".format( + source_blob_name, bucket_name, destination_file_name ) ) From bd89e1d5353bc92865a17ea6d144be22a3275c77 Mon Sep 17 00:00:00 2001 From: Aaron Gabriel Neyer Date: Fri, 13 Aug 2021 14:37:22 -0600 Subject: [PATCH 168/197] Add storage move_blob sample and fix confusion with rename (#6554) * Add storage move_blob sample and fix confusion with rename * fix license heading issues * Add descriptive comments to parameters * Update storage/cloud-client/storage_move_file.py * Apply suggestions from code review Add print statement in except clause Co-authored-by: cojenco Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- snippets_test.py | 22 +++++++++++++++++++-- storage_move_file.py | 40 ++++++++++++++++++++++++++++---------- storage_rename_file.py | 44 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 94 insertions(+), 12 deletions(-) create mode 100644 storage_rename_file.py diff --git a/snippets_test.py b/snippets_test.py index 286ae284b..f4d7987fe 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -57,6 +57,7 @@ import storage_object_get_kms_key import storage_remove_bucket_label import storage_remove_cors_configuration +import storage_rename_file import storage_set_bucket_default_kms_key import storage_set_metadata import storage_upload_file @@ -285,14 +286,31 @@ def test_rename_blob(test_blob): try: bucket.delete_blob("test_rename_blob") except google.cloud.exceptions.exceptions.NotFound: - pass + print("test_rename_blob not found in bucket {}".format(bucket.name)) - storage_move_file.rename_blob(bucket.name, test_blob.name, "test_rename_blob") + storage_rename_file.rename_blob(bucket.name, test_blob.name, "test_rename_blob") assert bucket.get_blob("test_rename_blob") is not None assert bucket.get_blob(test_blob.name) is None +def test_move_blob(test_bucket_create, test_blob): + bucket = test_blob.bucket + storage.Client().create_bucket(test_bucket_create) + + try: + test_bucket_create.delete_blob("test_move_blob") + except google.cloud.exceptions.NotFound: + print("test_move_blob not found in bucket {}".format(test_bucket_create.name)) + + storage_move_file.move_blob( + bucket.name, test_blob.name, test_bucket_create.name, "test_move_blob" + ) + + assert test_bucket_create.get_blob("test_move_blob") is not None + assert bucket.get_blob(test_blob.name) is None + + def test_copy_blob(test_blob): bucket = storage.Client().bucket(test_blob.bucket.name) diff --git a/storage_move_file.py b/storage_move_file.py index 9c075e230..a881a38ba 100644 --- a/storage_move_file.py +++ b/storage_move_file.py @@ -1,6 +1,6 
@@ #!/usr/bin/env python -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. @@ -20,24 +20,44 @@ from google.cloud import storage -def rename_blob(bucket_name, blob_name, new_name): - """Renames a blob.""" +def move_blob(bucket_name, blob_name, destination_bucket_name, destination_blob_name): + """Moves a blob from one bucket to another with a new name.""" + # The ID of your GCS bucket # bucket_name = "your-bucket-name" + # The ID of your GCS object # blob_name = "your-object-name" - # new_name = "new-object-name" + # The ID of the bucket to move the object to + # destination_bucket_name = "destination-bucket-name" + # The ID of your new GCS object (optional) + # destination_blob_name = "destination-object-name" storage_client = storage.Client() - bucket = storage_client.bucket(bucket_name) - blob = bucket.blob(blob_name) - new_blob = bucket.rename_blob(blob, new_name) + source_bucket = storage_client.bucket(bucket_name) + source_blob = source_bucket.blob(blob_name) + destination_bucket = storage_client.bucket(destination_bucket_name) - print("Blob {} has been renamed to {}".format(blob.name, new_blob.name)) + blob_copy = source_bucket.copy_blob( + source_blob, destination_bucket, destination_blob_name + ) + source_bucket.delete_blob(blob_name) + + print( + "Blob {} in bucket {} moved to blob {} in bucket {}.".format( + source_blob.name, + source_bucket.name, + blob_copy.name, + destination_bucket.name, + ) + ) # [END storage_move_file] if __name__ == "__main__": - rename_blob( - bucket_name=sys.argv[1], blob_name=sys.argv[2], new_name=sys.argv[3] + move_blob( + bucket_name=sys.argv[1], + blob_name=sys.argv[2], + destination_bucket_name=sys.argv[3], + destination_blob_name=sys.argv[4], ) diff --git a/storage_rename_file.py b/storage_rename_file.py new file mode 100644 index 
000000000..b47e18621 --- /dev/null +++ b/storage_rename_file.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START storage_rename_file] +from google.cloud import storage + + +def rename_blob(bucket_name, blob_name, new_name): + """Renames a blob.""" + # The ID of your GCS bucket + # bucket_name = "your-bucket-name" + # The ID of the GCS object to rename + # blob_name = "your-object-name" + # The new ID of the GCS object + # new_name = "new-object-name" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + new_blob = bucket.rename_blob(blob, new_name) + + print("Blob {} has been renamed to {}".format(blob.name, new_blob.name)) + + +# [END storage_rename_file] + +if __name__ == "__main__": + rename_blob(bucket_name=sys.argv[1], blob_name=sys.argv[2], new_name=sys.argv[3]) From 9f3fe3df59252ec1dfb3e100bb198b7d762d0753 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 17 Aug 2021 01:34:47 +0200 Subject: [PATCH 169/197] chore(deps): update dependency backoff to v1.11.1 (#6571) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-test.txt b/requirements-test.txt index 2917e77e0..2b550f467 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,3 +1,3 @@ pytest==6.2.4 mock==4.0.3 -backoff==1.11.0 \ No newline at end of file +backoff==1.11.1 \ No newline at end of file From ef138d636213405b45981c668456c1b933d678d7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 17 Aug 2021 17:44:23 +0200 Subject: [PATCH 170/197] chore(deps): update dependency google-api-python-client to v2.15.0 (#6574) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-python-client](https://togithub.com/googleapis/google-api-python-client) | `==2.12.0` -> `==2.15.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/compatibility-slim/2.12.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/confidence-slim/2.12.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-api-python-client](https://togithub.com/googleapis/google-api-python-client) | `==2.11.0` -> `==2.15.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![adoption](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/compatibility-slim/2.11.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/confidence-slim/2.11.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-api-python-client](https://togithub.com/googleapis/google-api-python-client) | `==2.1.0` -> `==2.15.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/compatibility-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.15.0/confidence-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-api-python-client ### [`v2.15.0`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2150-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2141v2150-2021-07-27) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.14.1...v2.15.0) ##### Features - **alertcenter:** update the api https://github.com/googleapis/google-api-python-client/commit/70810a52c85c6d0d6f00d7afb41c8608261eaebc ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **chat:** update the api https://github.com/googleapis/google-api-python-client/commit/a577cd0b71951176bbf849c1f7f139127205da54 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **cloudbuild:** update the api https://github.com/googleapis/google-api-python-client/commit/9066056a8b106d441fb7686fe84359484d0d58bc ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **content:** update the api https://github.com/googleapis/google-api-python-client/commit/b123349da33c11c0172a8efb3fadef685a30e6e1 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **displayvideo:** update the api https://github.com/googleapis/google-api-python-client/commit/c525d726ee6cffdd4bc7afd69080d5e52bae83a0 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **dns:** update the api https://github.com/googleapis/google-api-python-client/commit/13436ccd2b835fda5cb86952ac4ea991ee8651d8 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **eventarc:** update the api https://github.com/googleapis/google-api-python-client/commit/6be3394a64a5eb509f68ef779680fd36837708ee 
([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **file:** update the api https://github.com/googleapis/google-api-python-client/commit/817a0e636771445a988ef479bd52740f754b901a ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **monitoring:** update the api https://github.com/googleapis/google-api-python-client/commit/bd32149f308467f0f659119587afc77dcec65b14 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **people:** update the api https://github.com/googleapis/google-api-python-client/commit/aa6b47df40c5289f33aef6fb6aa007df2d038e20 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **retail:** update the api https://github.com/googleapis/google-api-python-client/commit/d39f06e2d77034bc837604a41dd52c577f158bf2 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **securitycenter:** update the api https://github.com/googleapis/google-api-python-client/commit/999fab5178208639c9eef289f9f441052ed832fc ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **speech:** update the api https://github.com/googleapis/google-api-python-client/commit/3b2c0fa62b2a0c86bba1e97f1b18f93250dbd551 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) - **sqladmin:** update the api https://github.com/googleapis/google-api-python-client/commit/cef24d829ab5be71563a2b668b8f6cf5dda2c8e4 ([a36e3b1](https://www.github.com/googleapis/google-api-python-client/commit/a36e3b130d609dfdc5b3ac0a70ff1b014c4bc75f)) ##### Documentation - update license to be Apache-2.0 compliant 
([#​1461](https://www.togithub.com/googleapis/google-api-python-client/issues/1461)) ([882844c](https://www.github.com/googleapis/google-api-python-client/commit/882844c7b6a15493d0fb8693cd5e9159e3a12535)) ##### [2.14.1](https://www.github.com/googleapis/google-api-python-client/compare/v2.14.0...v2.14.1) (2021-07-25) ##### Bug Fixes - drop six dependency ([#​1452](https://www.togithub.com/googleapis/google-api-python-client/issues/1452)) ([9f7b410](https://www.github.com/googleapis/google-api-python-client/commit/9f7b4109b370e89c29db6c58c6bd2e09002c8d42)) ### [`v2.14.1`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2141-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2140v2141-2021-07-25) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.14.0...v2.14.1) ### [`v2.14.0`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2140-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2130v2140-2021-07-20) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.13.0...v2.14.0) ##### Features - **analyticsadmin:** update the api https://github.com/googleapis/google-api-python-client/commit/a2e2d768e5412072ef11891ae7fb9145e2c4693d ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **androiddeviceprovisioning:** update the api https://github.com/googleapis/google-api-python-client/commit/83151f4ebd2992a53f815133304d8cb2c72d50c5 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **chat:** update the api https://github.com/googleapis/google-api-python-client/commit/8e39e1ef5482735fbaaed3be74ee472cf44cd941 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **cloudasset:** update the api 
https://github.com/googleapis/google-api-python-client/commit/ebd9b97ec74f0f257ccb4833f747f88d02075926 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **cloudfunctions:** update the api https://github.com/googleapis/google-api-python-client/commit/06332af99b1b1a9894bf4f553e014936225761de ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **cloudsearch:** update the api https://github.com/googleapis/google-api-python-client/commit/4aab6137bb350cb841a6b48fd37df67a209ba031 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **content:** update the api https://github.com/googleapis/google-api-python-client/commit/c65f297a775687fbfcbae827f892fc996a3d1ab1 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **datacatalog:** update the api https://github.com/googleapis/google-api-python-client/commit/af28eef0b37a5d0bb3a299f9fd9740b63f9e23bd ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **dns:** update the api https://github.com/googleapis/google-api-python-client/commit/e2ba913fc51f78ce4b9fb6f9de97f61bd35cd356 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **documentai:** update the api https://github.com/googleapis/google-api-python-client/commit/d1b9df7ee0a041d4cf632a77a626764c37e72889 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **file:** update the api https://github.com/googleapis/google-api-python-client/commit/0cd6277980d02363e3d609901d12d62b594adc92 
([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **firebaseappcheck:** update the api https://github.com/googleapis/google-api-python-client/commit/f8c39017aa392c0930ab79cdf7f828fe1e97e313 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **firebasestorage:** update the api https://github.com/googleapis/google-api-python-client/commit/66b6961871fea5b1a41a5b8359d7f76d6e390386 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **gameservices:** update the api https://github.com/googleapis/google-api-python-client/commit/31fd4dc22bd1e615caeafc22482caad65bbd55e9 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **gkehub:** update the api https://github.com/googleapis/google-api-python-client/commit/58ae34d8dfb4a7827b4f56e99fd48dedc64b4364 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **ml:** update the api https://github.com/googleapis/google-api-python-client/commit/15e0de32f2ea94d6ed3e0c18cd6e59cc239b37e7 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **monitoring:** update the api https://github.com/googleapis/google-api-python-client/commit/2b52d9ff5341caec20577538c0c4eaf83a896651 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **notebooks:** update the api https://github.com/googleapis/google-api-python-client/commit/c4698a84e526ab47710d2bde22827b337f2f480c ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **people:** update the api 
https://github.com/googleapis/google-api-python-client/commit/a646e56d40f2c7df40f48d42442c1941fc1c6674 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **recommender:** update the api https://github.com/googleapis/google-api-python-client/commit/ef997b0293c0e075208c7af15fa4e9bd6f29e883 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **secretmanager:** update the api https://github.com/googleapis/google-api-python-client/commit/489541e760eae9745724eb8cad74007903dd4f5b ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **spanner:** update the api https://github.com/googleapis/google-api-python-client/commit/acdb8fccfbb9f243f06dfff68d61cee2e58c9e45 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) - **testing:** update the api https://github.com/googleapis/google-api-python-client/commit/e2bde192a3e20ebd00995185cd92b47a086be8d9 ([0770807](https://www.github.com/googleapis/google-api-python-client/commit/0770807d690618cb51196d2c1ef812a8e0c03115)) ##### Bug Fixes - **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#​1449](https://www.togithub.com/googleapis/google-api-python-client/issues/1449)) ([96d2bb0](https://www.github.com/googleapis/google-api-python-client/commit/96d2bb04f99d57f0fff2b81e8f4e792782deb712)) ### [`v2.13.0`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2130-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2120v2130-2021-07-13) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.12.0...v2.13.0) ##### Features - **analyticsadmin:** update the api https://github.com/googleapis/google-api-python-client/commit/96675a8d9158ec13353fe241f858201fc51b784d 
([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **composer:** update the api https://github.com/googleapis/google-api-python-client/commit/add2fbdc3afb6696537eb087bc1d79df9194a37a ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **container:** update the api https://github.com/googleapis/google-api-python-client/commit/f8fae98db6d1943411b1a6c0f5a65dea336569f6 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **content:** update the api https://github.com/googleapis/google-api-python-client/commit/0814e009a4a11800db5b4afd7b6260e504c98047 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **datacatalog:** update the api https://github.com/googleapis/google-api-python-client/commit/99706059e58bb3d616253a1af2cd162b5a0b0279 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **dataflow:** update the api https://github.com/googleapis/google-api-python-client/commit/d5f09ef30392532bcfdd82901148bdd3ac6eec01 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **docs:** update the api https://github.com/googleapis/google-api-python-client/commit/dc66f4cafba86baff6149b2f6e59ae1888006911 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **file:** update the api https://github.com/googleapis/google-api-python-client/commit/523fc5c900f53489d56400deb650f6586c9681a0 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **firebasehosting:** update the api 
https://github.com/googleapis/google-api-python-client/commit/c83ac386b65f82e7ba29851d56b496b09a29cf98 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **healthcare:** update the api https://github.com/googleapis/google-api-python-client/commit/a407471b14349b8c08018196041568f2a35f8d4f ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **ideahub:** update the api https://github.com/googleapis/google-api-python-client/commit/c6b0d83940f238b1330896240492e8db397dcd15 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **managedidentities:** update the api https://github.com/googleapis/google-api-python-client/commit/863b333da7848029fd1614fd48b46cfbe12afcd5 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **memcache:** update the api https://github.com/googleapis/google-api-python-client/commit/17dc001e4649f54944066ce153e3c552c850a146 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **metastore:** update the api https://github.com/googleapis/google-api-python-client/commit/f3a76c9359babc48cc0b76ce7e3be0711ba028ae ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **slides:** update the api https://github.com/googleapis/google-api-python-client/commit/314d61b9ef8c5c30f9756462504dc0df92284cb2 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **sqladmin:** update the api https://github.com/googleapis/google-api-python-client/commit/62784e0b1b5752b480afe1ddd77dcf412bb35dbb 
([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **tpu:** update the api https://github.com/googleapis/google-api-python-client/commit/16bf712cca4a393d96e4135de3d02e5005051b6d ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) - **youtube:** update the api https://github.com/googleapis/google-api-python-client/commit/ec21dff96d9538ad6c7f9b318eca88178533aa95 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) ##### Bug Fixes - **keep:** update the api https://github.com/googleapis/google-api-python-client/commit/08fee732e96d3220e624c8fca7b8a9b0c0bcb146 ([1a4514d](https://www.github.com/googleapis/google-api-python-client/commit/1a4514d2862f81fc97e424cd550c286cda0fc859)) ##### Documentation - add recommendation to use v2.x and static discovery artifacts ([#​1434](https://www.togithub.com/googleapis/google-api-python-client/issues/1434)) ([ca7328c](https://www.github.com/googleapis/google-api-python-client/commit/ca7328cb5340ea282a3d98782926a0b6881a33ed))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8b4a182de..e696c1c86 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.7.0 google-cloud-storage==1.41.0 -google-api-python-client==2.12.0 +google-api-python-client==2.15.0 From 1a01d0c8da253784abff4111aac504c3954aaa54 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 17 Aug 2021 18:58:23 +0200 Subject: [PATCH 171/197] chore(deps): update dependency google-cloud-storage to v1.42.0 (#6576) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==1.41.0` -> `==1.42.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/compatibility-slim/1.41.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/confidence-slim/1.41.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==1.38.0` -> `==1.42.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/compatibility-slim/1.38.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.0/confidence-slim/1.38.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-storage ### [`v1.42.0`](https://togithub.com/googleapis/python-storage/blob/master/CHANGELOG.md#​1420-httpswwwgithubcomgoogleapispython-storagecomparev1411v1420-2021-08-05) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v1.41.1...v1.42.0) ##### Features - add 'page_size' parameter to 'Bucket.list_blobs, list_buckets ([#​520](https://www.togithub.com/googleapis/python-storage/issues/520)) ([c5f4ad8](https://www.github.com/googleapis/python-storage/commit/c5f4ad8fddd1849a4229b0126c4c022bccb90128)) ##### Bug Fixes - **deps:** add explicit ranges for 'google-api-core' and 'google-auth' ([#​530](https://www.togithub.com/googleapis/python-storage/issues/530)) ([310f207](https://www.github.com/googleapis/python-storage/commit/310f207411da0382af310172344f19c644c14e6a)) - downloading no longer marks metadata fields as 'changed' ([#​523](https://www.togithub.com/googleapis/python-storage/issues/523)) ([160d1ec](https://www.github.com/googleapis/python-storage/commit/160d1ecb41f1f269b25cb68b2d2f7daf418bf01c)) - make 'requests.exceptions.ChunkedEncodingError retryable by default ([#​526](https://www.togithub.com/googleapis/python-storage/issues/526)) ([4abb403](https://www.github.com/googleapis/python-storage/commit/4abb40310eca7ec45afc4bc5e4dfafbe083e74d2)) ##### Documentation - update supported / removed Python versions in README ([#​519](https://www.togithub.com/googleapis/python-storage/issues/519)) ([1f1b138](https://www.github.com/googleapis/python-storage/commit/1f1b138865fb171535ee0cf768aff1987ed58914)) ##### [1.41.1](https://www.github.com/googleapis/python-storage/compare/v1.41.0...v1.41.1) (2021-07-20) ##### Bug Fixes - **deps:** pin `{api,cloud}-core`, `auth` to allow 2.x versions on Python 3 ([#​512](https://www.togithub.com/googleapis/python-storage/issues/512)) ([4d7500e](https://www.github.com/googleapis/python-storage/commit/4d7500e39c51efd817b8363b69c88be040f3edb8)) - remove trailing commas from error message 
constants ([#​505](https://www.togithub.com/googleapis/python-storage/issues/505)) ([d4a86ce](https://www.github.com/googleapis/python-storage/commit/d4a86ceb7a7c5e00ba7bae37c7078d52478040ff)), closes [#​501](https://www.togithub.com/googleapis/python-storage/issues/501) ##### Documentation - replace usage of deprecated function `download_as_string` in docs ([#​508](https://www.togithub.com/googleapis/python-storage/issues/508)) ([8dfa4d4](https://www.github.com/googleapis/python-storage/commit/8dfa4d429dce94b671dc3e3755e52ab82733f61a)) ### [`v1.41.1`](https://togithub.com/googleapis/python-storage/blob/master/CHANGELOG.md#​1411-httpswwwgithubcomgoogleapispython-storagecomparev1410v1411-2021-07-20) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v1.41.0...v1.41.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Never, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e696c1c86..fcea3517c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.7.0 -google-cloud-storage==1.41.0 +google-cloud-storage==1.42.0 google-api-python-client==2.15.0 From f546502ea50290ed56b819b1488e9ca4d7068f0d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 17 Aug 2021 22:19:50 +0200 Subject: [PATCH 172/197] chore(deps): update dependency google-api-python-client to v2.17.0 (#6586) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fcea3517c..a604f1b7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.7.0 google-cloud-storage==1.42.0 -google-api-python-client==2.15.0 +google-api-python-client==2.17.0 From 9d1326caad9805b4d8f796e83595ebf4c8dce9e8 Mon Sep 17 00:00:00 2001 From: pallabiwrites <87546424+pallabiwrites@users.noreply.github.com> Date: Wed, 18 Aug 2021 23:14:25 +0530 Subject: [PATCH 173/197] chore: Review the language fixes. (#6591) * Update main.py Fixed formatting changes. * Update storage_create_hmac_key.py * Update storage_add_bucket_iam_member.py Fixed minor language issues. 
--- storage_add_bucket_iam_member.py | 4 ++-- storage_create_hmac_key.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/storage_add_bucket_iam_member.py b/storage_add_bucket_iam_member.py index 19d7ab7c0..727f18483 100644 --- a/storage_add_bucket_iam_member.py +++ b/storage_add_bucket_iam_member.py @@ -23,8 +23,8 @@ def add_bucket_iam_member(bucket_name, role, member): """Add a new member to an IAM Policy""" # bucket_name = "your-bucket-name" - # role = "IAM role, e.g. roles/storage.objectViewer" - # member = "IAM identity, e.g. user: name@example.com" + # role = "IAM role, e.g., roles/storage.objectViewer" + # member = "IAM identity, e.g., user: name@example.com" storage_client = storage.Client() bucket = storage_client.bucket(bucket_name) diff --git a/storage_create_hmac_key.py b/storage_create_hmac_key.py index 33c4a98c2..27a418c39 100644 --- a/storage_create_hmac_key.py +++ b/storage_create_hmac_key.py @@ -25,7 +25,7 @@ def create_key(project_id, service_account_email): Create a new HMAC key using the given project and service account. 
""" # project_id = 'Your Google Cloud project ID' - # service_account_email = 'Service account used to generate HMAC key' + # service_account_email = 'Service account used to generate the HMAC key' storage_client = storage.Client(project=project_id) From 9ffb02c9f1024af6d66136af6a1c0719204f5c4f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 20 Aug 2021 07:08:23 +0200 Subject: [PATCH 174/197] chore(deps): update dependency google-cloud-pubsub to v2.7.1 (#6598) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.7.0` -> `==2.7.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/compatibility-slim/2.7.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/confidence-slim/2.7.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.7.1`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​271-httpswwwgithubcomgoogleapispython-pubsubcomparev270v271-2021-08-13) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Never, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a604f1b7c..033d85c4a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.7.0 +google-cloud-pubsub==2.7.1 google-cloud-storage==1.42.0 google-api-python-client==2.17.0 From 0bd3c15502d27d515321c997661179fa64f927e6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 24 Aug 2021 16:18:36 +0200 Subject: [PATCH 175/197] chore(deps): update dependency google-api-python-client to v2.18.0 (#6606) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 033d85c4a..e10a86839 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.7.1 google-cloud-storage==1.42.0 -google-api-python-client==2.17.0 +google-api-python-client==2.18.0 From 5417de9def64d0edaa1124e59642ef1439fb687e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 31 Aug 2021 19:34:04 +0200 Subject: [PATCH 176/197] chore(deps): update dependency google-api-python-client to v2.19.0 (#6642) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e10a86839..54ae492af 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.7.1 
google-cloud-storage==1.42.0 -google-api-python-client==2.18.0 +google-api-python-client==2.19.0 From 0685d49eb20d9a486668f1d4a8736c4992a63ce4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 6 Sep 2021 20:00:53 +0200 Subject: [PATCH 177/197] chore(deps): update dependency google-cloud-pubsub to v2.8.0 (#6664) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 54ae492af..3446c06df 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-pubsub==2.7.1 +google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.0 google-api-python-client==2.19.0 From 40fa6dc93a55c2c06a2713feb055d277772aa65f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 6 Sep 2021 22:12:24 +0200 Subject: [PATCH 178/197] chore(deps): update dependency google-api-python-client to v2.19.1 (#6656) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-python-client](https://togithub.com/googleapis/google-api-python-client) | `==2.19.0` -> `==2.19.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.19.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.19.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.19.1/compatibility-slim/2.19.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.19.1/confidence-slim/2.19.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-api-python-client ### [`v2.19.1`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2191-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2190v2191-2021-09-02) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.19.0...v2.19.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3446c06df..e40bfc833 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.0 -google-api-python-client==2.19.0 +google-api-python-client==2.19.1 From 20a809643da32fff5f3560587a1bd39af41c88d8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 9 Sep 2021 16:41:42 +0200 Subject: [PATCH 179/197] chore(deps): update dependency google-cloud-storage to v1.42.1 (#6677) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e40bfc833..2057bcf7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 -google-cloud-storage==1.42.0 +google-cloud-storage==1.42.1 google-api-python-client==2.19.1 From 83033731ef2d158dc2ce19bc58ea5b9e9fc58f1c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Sep 2021 16:53:28 +0200 Subject: [PATCH 180/197] chore(deps): update dependency google-api-python-client to v2.20.0 (#6675) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2057bcf7c..a8f7e3957 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ 
google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.1 -google-api-python-client==2.19.1 +google-api-python-client==2.20.0 From 556555c5cc6adfb52b8d31dbe76f0a4516187412 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 17 Sep 2021 01:40:08 +0200 Subject: [PATCH 181/197] chore(deps): update dependency google-cloud-storage to v1.42.2 (#6700) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==1.42.1` -> `==1.42.2` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.2/compatibility-slim/1.42.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/1.42.2/confidence-slim/1.42.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-storage ### [`v1.42.2`](https://togithub.com/googleapis/python-storage/blob/master/CHANGELOG.md#​1422-httpswwwgithubcomgoogleapispython-storagecomparev1421v1422-2021-09-16) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v1.42.1...v1.42.2)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a8f7e3957..fe893618f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 -google-cloud-storage==1.42.1 +google-cloud-storage==1.42.2 google-api-python-client==2.20.0 From 4d6b185712b12b4ac8434c4cd84061c2be4ad839 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 20 Sep 2021 18:50:28 +0200 Subject: [PATCH 182/197] chore(deps): update dependency google-api-python-client to v2.21.0 (#6689) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-python-client](https://togithub.com/googleapis/google-api-python-client) | `==2.20.0` -> `==2.21.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.21.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.21.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.21.0/compatibility-slim/2.20.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.21.0/confidence-slim/2.20.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-api-python-client ### [`v2.21.0`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2210-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2200v2210-2021-09-14) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.20.0...v2.21.0) ##### Features - **apigee:** update the api https://github.com/googleapis/google-api-python-client/commit/0e4fed7f1e08a616cbc81243c24391bc20ce5edb ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **bigquery:** update the api https://github.com/googleapis/google-api-python-client/commit/04e112ce89d6ddb3aeaae889c2de36070d6c2814 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **bigtableadmin:** update the api https://github.com/googleapis/google-api-python-client/commit/6b77931c3c9aba59d5b326c570a2080252c8beb1 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **cloudprofiler:** update the api https://github.com/googleapis/google-api-python-client/commit/3009ee3c238ae1fa51c529e9f187ec26693138a4 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **container:** update the api https://github.com/googleapis/google-api-python-client/commit/e5d01ecee51da0c7a2543b833a1395a94c27bef6 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **dataproc:** update the api https://github.com/googleapis/google-api-python-client/commit/fec73562a93b5a532bce6c91f0d30ec4fbd54ddb ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **displayvideo:** update the api 
https://github.com/googleapis/google-api-python-client/commit/22caa4f2f8ecb0f0ad6cfac547f9deb76fdcbebb ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **documentai:** update the api https://github.com/googleapis/google-api-python-client/commit/444836b9afe8d3eb8d52a1431652bfda1ad3288b ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **healthcare:** update the api https://github.com/googleapis/google-api-python-client/commit/2f3173aa4b4d154c909eea853a0c4c306834e0ab ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **ideahub:** update the api https://github.com/googleapis/google-api-python-client/commit/8ebf9d2bd419561d5eacb78823aa1fc519fe2710 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **memcache:** update the api https://github.com/googleapis/google-api-python-client/commit/393dce7a3e584ad6be58c832ec826fe3b44e353b ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **mybusinesslodging:** update the api https://github.com/googleapis/google-api-python-client/commit/c51a0d15e634c2ab1c7762533f33d59f10b01875 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **speech:** update the api https://github.com/googleapis/google-api-python-client/commit/bf6e86f6ee8c3985e1ce6f0475ef4f8685b52060 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **streetviewpublish:** update the api https://github.com/googleapis/google-api-python-client/commit/c8cf30cd67f5588d7cbe60631e42f0a49ea6c307 
([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea)) - **youtube:** update the api https://github.com/googleapis/google-api-python-client/commit/855cbfea1f6d46af07c4b80ab26fc30ca46370b7 ([e5e87b1](https://www.github.com/googleapis/google-api-python-client/commit/e5e87b1ca5fb6e81f6d83d970c3e4f683ecdcdea))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fe893618f..6f69b0557 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.2 -google-api-python-client==2.20.0 +google-api-python-client==2.21.0 \ No newline at end of file From 2882b2ac30bd4f6759b218e3576763bc08e3c599 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 21 Sep 2021 18:47:58 +0200 Subject: [PATCH 183/197] chore(deps): update dependency google-api-python-client to v2.22.0 (#6716) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6f69b0557..e762b290c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.2 -google-api-python-client==2.21.0 \ No newline at end of file +google-api-python-client==2.22.0 \ No newline at end of file From 7c13d0f9843378db5cc1526c5f4cb7c95f1e74ac Mon Sep 17 00:00:00 2001 From: cojenco Date: Mon, 27 Sep 2021 16:22:41 -0700 Subject: [PATCH 184/197] fix(storage): skip test temporarily before pap changes release (#6750) --- public_access_prevention_test.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/public_access_prevention_test.py 
b/public_access_prevention_test.py index 613e7c1ad..5b19b43c0 100644 --- a/public_access_prevention_test.py +++ b/public_access_prevention_test.py @@ -12,11 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import storage_get_public_access_prevention import storage_set_public_access_prevention_enforced import storage_set_public_access_prevention_unspecified +@pytest.mark.skip(reason="Unspecified PAP is changing to inherited") def test_get_public_access_prevention(bucket, capsys): short_name = storage_get_public_access_prevention short_name.get_public_access_prevention( From a844fcaaa7e917c4594d22e38f407b7d4ce291c3 Mon Sep 17 00:00:00 2001 From: cojenco Date: Tue, 28 Sep 2021 08:16:55 -0700 Subject: [PATCH 185/197] fix(storage): add time for bucket patch changes to propagate (#6752) --- bucket_lock_test.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bucket_lock_test.py b/bucket_lock_test.py index 99c5e1e56..67d4ec685 100644 --- a/bucket_lock_test.py +++ b/bucket_lock_test.py @@ -123,6 +123,10 @@ def test_enable_disable_bucket_default_event_based_hold(bucket, capsys): "Default event-based hold is enabled for {}".format(bucket.name) in out ) + # Changes to the bucket will be readable immediately after writing, + # but configuration changes may take time to propagate. + time.sleep(10) + blob = bucket.blob(BLOB_NAME) blob.upload_from_string(BLOB_CONTENT) assert blob.event_based_hold is True From 2b2e9d9b16b872db8df90014dea8a144e64fb1fa Mon Sep 17 00:00:00 2001 From: cojenco Date: Tue, 28 Sep 2021 13:48:47 -0700 Subject: [PATCH 186/197] fix(storage): add backoff to retry flaky test (#6745) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- acl_test.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/acl_test.py b/acl_test.py index 0fcc21c32..fd2088ad6 100644 --- a/acl_test.py +++ b/acl_test.py @@ -85,6 +85,7 @@ def test_print_bucket_acl_for_user(test_bucket, capsys): assert "OWNER" in out +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_add_bucket_owner(test_bucket): storage_add_bucket_owner.add_bucket_owner(test_bucket.name, TEST_EMAIL) @@ -92,6 +93,7 @@ def test_add_bucket_owner(test_bucket): assert "OWNER" in test_bucket.acl.user(TEST_EMAIL).get_roles() +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_remove_bucket_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() @@ -103,6 +105,7 @@ def test_remove_bucket_owner(test_bucket): assert "OWNER" not in test_bucket.acl.user(TEST_EMAIL).get_roles() +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_add_bucket_default_owner(test_bucket): storage_add_bucket_default_owner.add_bucket_default_owner( test_bucket.name, TEST_EMAIL @@ -113,6 +116,7 @@ def test_add_bucket_default_owner(test_bucket): assert "OWNER" in roles +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_remove_bucket_default_owner(test_bucket): test_bucket.acl.user(TEST_EMAIL).grant_owner() test_bucket.acl.save() @@ -146,6 +150,7 @@ def test_print_blob_acl_for_user(test_blob, capsys): assert "OWNER" in out +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_add_blob_owner(test_blob): storage_add_file_owner.add_blob_owner( test_blob.bucket.name, test_blob.name, TEST_EMAIL) @@ -154,6 +159,7 @@ def test_add_blob_owner(test_blob): assert "OWNER" in test_blob.acl.user(TEST_EMAIL).get_roles() +@backoff.on_exception(backoff.expo, HttpError, max_time=60) def test_remove_blob_owner(test_blob): test_blob.acl.user(TEST_EMAIL).grant_owner() test_blob.acl.save() From fd9d96f842097b5260406e6d5b2b94a45dcd07b9 Mon 
Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 29 Sep 2021 20:01:19 +0200 Subject: [PATCH 187/197] chore(deps): update dependency google-api-python-client to v2.23.0 (#6754) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e762b290c..ff27d67cb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.2 -google-api-python-client==2.22.0 \ No newline at end of file +google-api-python-client==2.23.0 \ No newline at end of file From cbb361b6170e3289face1237d93f0127120a9b7e Mon Sep 17 00:00:00 2001 From: Aaron Gabriel Neyer Date: Mon, 4 Oct 2021 12:20:56 -0600 Subject: [PATCH 188/197] fix: update samples for pap, unspecified -> inherited (#6757) * fix: update samples for pap, unspecified->inherited * restore original unspecified while adding new inherited * woops, missed a place * remove test skip #6750 * lint fix * oops * Update storage/cloud-client/storage_set_public_access_prevention_inherited.py Co-authored-by: cojenco * add top level comment * move comment above regionalization tag * update version * change over unspecified test to inherited * this one too * skip inconsistent tests Co-authored-by: cojenco --- public_access_prevention_test.py | 38 ++++++-------- requirements.txt | 4 +- storage_get_public_access_prevention.py | 2 +- ..._set_public_access_prevention_inherited.py | 50 +++++++++++++++++++ 4 files changed, 69 insertions(+), 25 deletions(-) create mode 100644 storage_set_public_access_prevention_inherited.py diff --git a/public_access_prevention_test.py b/public_access_prevention_test.py index 5b19b43c0..40d3924b2 100644 --- a/public_access_prevention_test.py +++ b/public_access_prevention_test.py @@ -16,41 +16,35 @@ import storage_get_public_access_prevention import storage_set_public_access_prevention_enforced +import 
storage_set_public_access_prevention_inherited import storage_set_public_access_prevention_unspecified -@pytest.mark.skip(reason="Unspecified PAP is changing to inherited") +@pytest.mark.skip(reason="Inconsistent due to unspecified->inherited change") def test_get_public_access_prevention(bucket, capsys): short_name = storage_get_public_access_prevention - short_name.get_public_access_prevention( - bucket.name - ) + short_name.get_public_access_prevention(bucket.name) out, _ = capsys.readouterr() - assert ( - f"Public access prevention is unspecified for {bucket.name}." - in out - ) + assert f"Public access prevention is inherited for {bucket.name}." in out def test_set_public_access_prevention_enforced(bucket, capsys): short_name = storage_set_public_access_prevention_enforced - short_name.set_public_access_prevention_enforced( - bucket.name - ) + short_name.set_public_access_prevention_enforced(bucket.name) out, _ = capsys.readouterr() - assert ( - f"Public access prevention is set to enforced for {bucket.name}." - in out - ) + assert f"Public access prevention is set to enforced for {bucket.name}." in out +@pytest.mark.skip(reason="Inconsistent due to unspecified->inherited change") def test_set_public_access_prevention_unspecified(bucket, capsys): short_name = storage_set_public_access_prevention_unspecified - short_name.set_public_access_prevention_unspecified( - bucket.name - ) + short_name.set_public_access_prevention_unspecified(bucket.name) out, _ = capsys.readouterr() - assert ( - f"Public access prevention is 'unspecified' for {bucket.name}." - in out - ) + assert f"Public access prevention is 'unspecified' for {bucket.name}." in out + + +def test_set_public_access_prevention_inherited(bucket, capsys): + short_name = storage_set_public_access_prevention_inherited + short_name.set_public_access_prevention_inherited(bucket.name) + out, _ = capsys.readouterr() + assert f"Public access prevention is 'inherited' for {bucket.name}." 
in out diff --git a/requirements.txt b/requirements.txt index ff27d67cb..f3b01c8eb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 -google-cloud-storage==1.42.2 -google-api-python-client==2.23.0 \ No newline at end of file +google-cloud-storage==1.42.3 +google-api-python-client==2.23.0 diff --git a/storage_get_public_access_prevention.py b/storage_get_public_access_prevention.py index 151cf4ca8..275b84e35 100644 --- a/storage_get_public_access_prevention.py +++ b/storage_get_public_access_prevention.py @@ -21,7 +21,7 @@ def get_public_access_prevention(bucket_name): - """Gets the public access prevention setting (either 'unspecified' or 'enforced') for a bucket.""" + """Gets the public access prevention setting (either 'inherited' or 'enforced') for a bucket.""" # The ID of your GCS bucket # bucket_name = "my-bucket" diff --git a/storage_set_public_access_prevention_inherited.py b/storage_set_public_access_prevention_inherited.py new file mode 100644 index 000000000..97e218f9d --- /dev/null +++ b/storage_set_public_access_prevention_inherited.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +"""Sample that sets public access prevention to inherited. +This sample is used on this page: + https://cloud.google.com/storage/docs/using-public-access-prevention +For more information, see README.md. 
+""" + +# [START storage_set_public_access_prevention_inherited] + +from google.cloud import storage +from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_INHERITED + + +def set_public_access_prevention_inherited(bucket_name): + """Sets the public access prevention status to inherited, so that the bucket inherits its setting from its parent project.""" + # The ID of your GCS bucket + # bucket_name = "my-bucket" + + storage_client = storage.Client() + bucket = storage_client.get_bucket(bucket_name) + + bucket.iam_configuration.public_access_prevention = ( + PUBLIC_ACCESS_PREVENTION_INHERITED + ) + bucket.patch() + + print(f"Public access prevention is 'inherited' for {bucket.name}.") + + +# [END storage_set_public_access_prevention_inherited] + +if __name__ == "__main__": + set_public_access_prevention_inherited(bucket_name=sys.argv[1]) From 12952eca652d8642b5e7f0a10174f0fea93e1c6b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 5 Oct 2021 16:28:59 +0200 Subject: [PATCH 189/197] chore(deps): update dependency google-api-python-client to v2.24.0 (#6795) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f3b01c8eb..f1cb420bf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.3 -google-api-python-client==2.23.0 +google-api-python-client==2.24.0 From cbe5676ef18936381f3e5cebceebea7f2e3d34e1 Mon Sep 17 00:00:00 2001 From: cojenco Date: Tue, 12 Oct 2021 11:20:02 -0700 Subject: [PATCH 190/197] fix(storage): add py-3.10 configs to noxfile config (#6903) * fix(storage): update noxfile_config to add py-3.10 configs * update service account email configs --- noxfile_config.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/noxfile_config.py b/noxfile_config.py index ebd479f7e..463da97de 100644 --- a/noxfile_config.py +++ b/noxfile_config.py @@ -39,6 +39,12 @@ def 
get_service_account_email(): if session == 'py-3.8': return ('py38-storage-test@' 'python-docs-samples-tests.iam.gserviceaccount.com') + if session == 'py-3.9': + return ('py39-storage-test@' + 'python-docs-samples-tests.iam.gserviceaccount.com') + if session == 'py-3.10': + return ('py310-storage-test@' + 'python-docs-samples-tests.iam.gserviceaccount.com') return os.environ['HMAC_KEY_TEST_SERVICE_ACCOUNT'] @@ -58,6 +64,9 @@ def get_cloud_kms_key(): if session == 'py-3.9': return ('projects/python-docs-samples-tests-py39/locations/us/' 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') + if session == 'py-3.10': + return ('projects/python-docs-samples-tests-310/locations/us/' + 'keyRings/gcs-kms-key-ring/cryptoKeys/gcs-kms-key') return os.environ['CLOUD_KMS_KEY'] From 12d7382d710c2243a33d353c42abd1f31176f1de Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 Oct 2021 02:22:13 +0200 Subject: [PATCH 191/197] chore(deps): update dependency google-api-python-client to v2.25.0 (#6901) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-python-client](https://togithub.com/googleapis/google-api-python-client) | `==2.24.0` -> `==2.25.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.25.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.25.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.25.0/compatibility-slim/2.24.0)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/pypi/google-api-python-client/2.25.0/confidence-slim/2.24.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-api-python-client ### [`v2.25.0`](https://togithub.com/googleapis/google-api-python-client/blob/master/CHANGELOG.md#​2250-httpswwwgithubcomgoogleapisgoogle-api-python-clientcomparev2240v2250-2021-10-09) [Compare Source](https://togithub.com/googleapis/google-api-python-client/compare/v2.24.0...v2.25.0) ##### Features - enable self signed jwt for service account credentials ([#​1553](https://www.togithub.com/googleapis/google-api-python-client/issues/1553)) ([1fb3c8e](https://www.github.com/googleapis/google-api-python-client/commit/1fb3c8ec61295adc876fa449e92fe5d682f33cbd))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Never, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/GoogleCloudPlatform/python-docs-samples). --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f1cb420bf..76ac6ee7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ google-cloud-pubsub==2.8.0 google-cloud-storage==1.42.3 -google-api-python-client==2.24.0 +google-api-python-client==2.25.0 From 6982834690faffa18bf6ada09c5c8311ef09c598 Mon Sep 17 00:00:00 2001 From: cojenco Date: Fri, 15 Oct 2021 09:39:21 -0700 Subject: [PATCH 192/197] feat(storage): add retry configurations sample and test (#6900) * storage: add configure_retries sample * add test for storage_configure_retries sample * revise comments per discussion with TW * update comment * address comments add configs demo --- snippets_test.py | 11 +++++++ storage_configure_retries.py | 63 ++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+) create mode 100644 storage_configure_retries.py diff --git a/snippets_test.py b/snippets_test.py index f4d7987fe..dd8e6aeaf 100644 --- a/snippets_test.py +++ b/snippets_test.py @@ -27,6 +27,7 @@ import storage_change_default_storage_class import storage_change_file_storage_class import storage_compose_file +import storage_configure_retries import storage_copy_file import storage_copy_file_archived_generation import storage_cors_configuration @@ -498,3 +499,13 @@ def test_list_blobs_archived_generation(test_blob, capsys): ) out, _ = 
capsys.readouterr() assert str(test_blob.generation) in out + + +def test_storage_configure_retries(test_blob, capsys): + storage_configure_retries.configure_retries(test_blob.bucket.name, test_blob.name) + + # This simply checks if the retry configurations were set and printed as intended. + out, _ = capsys.readouterr() + assert "The following library method is customized to be retried" in out + assert "_should_retry" in out + assert "initial=1.5, maximum=45.0, multiplier=1.2, deadline=500.0" in out diff --git a/storage_configure_retries.py b/storage_configure_retries.py new file mode 100644 index 000000000..9543111b3 --- /dev/null +++ b/storage_configure_retries.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +"""Sample that configures retries on an operation call. +This sample is used on this page: + https://cloud.google.com/storage/docs/retry-strategy +For more information, see README.md. 
+""" + +# [START storage_configure_retries] +from google.cloud import storage +from google.cloud.storage.retry import DEFAULT_RETRY + + +def configure_retries(bucket_name, blob_name): + """Configures retries with customizations.""" + # The ID of your GCS bucket + # bucket_name = "your-bucket-name" + # The ID of your GCS object + # blob_name = "your-object-name" + + storage_client = storage.Client() + bucket = storage_client.bucket(bucket_name) + blob = bucket.blob(blob_name) + + # Customize retry with a deadline of 500 seconds (default=120 seconds). + modified_retry = DEFAULT_RETRY.with_deadline(500.0) + # Customize retry with an initial wait time of 1.5 (default=1.0). + # Customize retry with a wait time multiplier per iteration of 1.2 (default=2.0). + # Customize retry with a maximum wait time of 45.0 (default=60.0). + modified_retry = modified_retry.with_delay(initial=1.5, multiplier=1.2, maximum=45.0) + + # blob.delete() uses DEFAULT_RETRY_IF_GENERATION_SPECIFIED by default. + # Override with modified_retry so the function retries even if the generation + # number is not specified. 
+ print( + f"The following library method is customized to be retried according to the following configurations: {modified_retry}" + ) + + blob.delete(retry=modified_retry) + print("Blob {} deleted with a customized retry strategy.".format(blob_name)) + + +# [END storage_configure_retries] + + +if __name__ == "__main__": + configure_retries(bucket_name=sys.argv[1], blob_name=sys.argv[2]) From 121fa16acbedae826fc0fe9f06a20479a668ecbf Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 18 Oct 2021 11:03:06 -0700 Subject: [PATCH 193/197] samples: add cloud client samples from python-docs-samples --- README.md => samples/snippets/README.md | 0 acl_test.py => samples/snippets/acl_test.py | 0 bucket_lock_test.py => samples/snippets/bucket_lock_test.py | 0 conftest.py => samples/snippets/conftest.py | 0 encryption_test.py => samples/snippets/encryption_test.py | 0 hmac_samples_test.py => samples/snippets/hmac_samples_test.py | 0 iam_test.py => samples/snippets/iam_test.py | 0 .../snippets/notification_polling.py | 0 .../snippets/notification_polling_test.py | 0 noxfile_config.py => samples/snippets/noxfile_config.py | 0 .../snippets/public_access_prevention_test.py | 0 quickstart.py => samples/snippets/quickstart.py | 0 quickstart_test.py => samples/snippets/quickstart_test.py | 0 requester_pays_test.py => samples/snippets/requester_pays_test.py | 0 requirements-test.txt => samples/snippets/requirements-test.txt | 0 requirements.txt => samples/snippets/requirements.txt | 0 snippets_test.py => samples/snippets/snippets_test.py | 0 .../snippets/storage_activate_hmac_key.py | 0 .../snippets/storage_add_bucket_conditional_iam_binding.py | 0 .../snippets/storage_add_bucket_default_owner.py | 0 .../snippets/storage_add_bucket_iam_member.py | 0 .../snippets/storage_add_bucket_label.py | 0 .../snippets/storage_add_bucket_owner.py | 0 .../snippets/storage_add_file_owner.py | 0 .../snippets/storage_bucket_delete_default_kms_key.py | 0 
.../snippets/storage_change_default_storage_class.py | 0 .../snippets/storage_change_file_storage_class.py | 0 .../snippets/storage_compose_file.py | 0 .../snippets/storage_configure_retries.py | 0 storage_copy_file.py => samples/snippets/storage_copy_file.py | 0 .../snippets/storage_copy_file_archived_generation.py | 0 .../snippets/storage_cors_configuration.py | 0 .../snippets/storage_create_bucket.py | 0 .../snippets/storage_create_bucket_class_location.py | 0 .../snippets/storage_create_hmac_key.py | 0 .../snippets/storage_deactivate_hmac_key.py | 0 .../snippets/storage_define_bucket_website_configuration.py | 0 .../snippets/storage_delete_bucket.py | 0 storage_delete_file.py => samples/snippets/storage_delete_file.py | 0 .../snippets/storage_delete_file_archived_generation.py | 0 .../snippets/storage_delete_hmac_key.py | 0 .../snippets/storage_disable_bucket_lifecycle_management.py | 0 .../snippets/storage_disable_default_event_based_hold.py | 0 .../snippets/storage_disable_requester_pays.py | 0 .../snippets/storage_disable_uniform_bucket_level_access.py | 0 .../snippets/storage_disable_versioning.py | 0 .../snippets/storage_download_encrypted_file.py | 0 .../snippets/storage_download_file.py | 0 .../snippets/storage_download_file_requester_pays.py | 0 .../snippets/storage_download_public_file.py | 0 .../snippets/storage_enable_bucket_lifecycle_management.py | 0 .../snippets/storage_enable_default_event_based_hold.py | 0 .../snippets/storage_enable_requester_pays.py | 0 .../snippets/storage_enable_uniform_bucket_level_access.py | 0 .../snippets/storage_enable_versioning.py | 0 .../snippets/storage_generate_encryption_key.py | 0 .../snippets/storage_generate_signed_post_policy_v4.py | 0 .../snippets/storage_generate_signed_url_v2.py | 0 .../snippets/storage_generate_signed_url_v4.py | 0 .../snippets/storage_generate_upload_signed_url_v4.py | 0 .../snippets/storage_get_bucket_labels.py | 0 .../snippets/storage_get_bucket_metadata.py | 0 
.../snippets/storage_get_default_event_based_hold.py | 0 .../snippets/storage_get_hmac_key.py | 0 .../snippets/storage_get_metadata.py | 0 .../snippets/storage_get_public_access_prevention.py | 0 .../snippets/storage_get_requester_pays_status.py | 0 .../snippets/storage_get_retention_policy.py | 0 .../snippets/storage_get_service_account.py | 0 .../snippets/storage_get_uniform_bucket_level_access.py | 0 .../snippets/storage_list_buckets.py | 0 .../snippets/storage_list_file_archived_generations.py | 0 storage_list_files.py => samples/snippets/storage_list_files.py | 0 .../snippets/storage_list_files_with_prefix.py | 0 .../snippets/storage_list_hmac_keys.py | 0 .../snippets/storage_lock_retention_policy.py | 0 storage_make_public.py => samples/snippets/storage_make_public.py | 0 storage_move_file.py => samples/snippets/storage_move_file.py | 0 .../snippets/storage_object_csek_to_cmek.py | 0 .../snippets/storage_object_get_kms_key.py | 0 .../snippets/storage_print_bucket_acl.py | 0 .../snippets/storage_print_bucket_acl_for_user.py | 0 .../snippets/storage_print_file_acl.py | 0 .../snippets/storage_print_file_acl_for_user.py | 0 .../snippets/storage_release_event_based_hold.py | 0 .../snippets/storage_release_temporary_hold.py | 0 .../snippets/storage_remove_bucket_conditional_iam_binding.py | 0 .../snippets/storage_remove_bucket_default_owner.py | 0 .../snippets/storage_remove_bucket_iam_member.py | 0 .../snippets/storage_remove_bucket_label.py | 0 .../snippets/storage_remove_bucket_owner.py | 0 .../snippets/storage_remove_cors_configuration.py | 0 .../snippets/storage_remove_file_owner.py | 0 .../snippets/storage_remove_retention_policy.py | 0 storage_rename_file.py => samples/snippets/storage_rename_file.py | 0 .../snippets/storage_rotate_encryption_key.py | 0 .../snippets/storage_set_bucket_default_kms_key.py | 0 .../snippets/storage_set_bucket_public_iam.py | 0 .../snippets/storage_set_event_based_hold.py | 0 .../snippets/storage_set_metadata.py | 0 
.../snippets/storage_set_public_access_prevention_enforced.py | 0 .../snippets/storage_set_public_access_prevention_inherited.py | 0 .../snippets/storage_set_public_access_prevention_unspecified.py | 0 .../snippets/storage_set_retention_policy.py | 0 .../snippets/storage_set_temporary_hold.py | 0 .../snippets/storage_upload_encrypted_file.py | 0 storage_upload_file.py => samples/snippets/storage_upload_file.py | 0 .../snippets/storage_upload_with_kms_key.py | 0 .../snippets/storage_view_bucket_iam_members.py | 0 .../snippets/uniform_bucket_level_access_test.py | 0 110 files changed, 0 insertions(+), 0 deletions(-) rename README.md => samples/snippets/README.md (100%) rename acl_test.py => samples/snippets/acl_test.py (100%) rename bucket_lock_test.py => samples/snippets/bucket_lock_test.py (100%) rename conftest.py => samples/snippets/conftest.py (100%) rename encryption_test.py => samples/snippets/encryption_test.py (100%) rename hmac_samples_test.py => samples/snippets/hmac_samples_test.py (100%) rename iam_test.py => samples/snippets/iam_test.py (100%) rename notification_polling.py => samples/snippets/notification_polling.py (100%) rename notification_polling_test.py => samples/snippets/notification_polling_test.py (100%) rename noxfile_config.py => samples/snippets/noxfile_config.py (100%) rename public_access_prevention_test.py => samples/snippets/public_access_prevention_test.py (100%) rename quickstart.py => samples/snippets/quickstart.py (100%) rename quickstart_test.py => samples/snippets/quickstart_test.py (100%) rename requester_pays_test.py => samples/snippets/requester_pays_test.py (100%) rename requirements-test.txt => samples/snippets/requirements-test.txt (100%) rename requirements.txt => samples/snippets/requirements.txt (100%) rename snippets_test.py => samples/snippets/snippets_test.py (100%) rename storage_activate_hmac_key.py => samples/snippets/storage_activate_hmac_key.py (100%) rename storage_add_bucket_conditional_iam_binding.py => 
samples/snippets/storage_add_bucket_conditional_iam_binding.py (100%) rename storage_add_bucket_default_owner.py => samples/snippets/storage_add_bucket_default_owner.py (100%) rename storage_add_bucket_iam_member.py => samples/snippets/storage_add_bucket_iam_member.py (100%) rename storage_add_bucket_label.py => samples/snippets/storage_add_bucket_label.py (100%) rename storage_add_bucket_owner.py => samples/snippets/storage_add_bucket_owner.py (100%) rename storage_add_file_owner.py => samples/snippets/storage_add_file_owner.py (100%) rename storage_bucket_delete_default_kms_key.py => samples/snippets/storage_bucket_delete_default_kms_key.py (100%) rename storage_change_default_storage_class.py => samples/snippets/storage_change_default_storage_class.py (100%) rename storage_change_file_storage_class.py => samples/snippets/storage_change_file_storage_class.py (100%) rename storage_compose_file.py => samples/snippets/storage_compose_file.py (100%) rename storage_configure_retries.py => samples/snippets/storage_configure_retries.py (100%) rename storage_copy_file.py => samples/snippets/storage_copy_file.py (100%) rename storage_copy_file_archived_generation.py => samples/snippets/storage_copy_file_archived_generation.py (100%) rename storage_cors_configuration.py => samples/snippets/storage_cors_configuration.py (100%) rename storage_create_bucket.py => samples/snippets/storage_create_bucket.py (100%) rename storage_create_bucket_class_location.py => samples/snippets/storage_create_bucket_class_location.py (100%) rename storage_create_hmac_key.py => samples/snippets/storage_create_hmac_key.py (100%) rename storage_deactivate_hmac_key.py => samples/snippets/storage_deactivate_hmac_key.py (100%) rename storage_define_bucket_website_configuration.py => samples/snippets/storage_define_bucket_website_configuration.py (100%) rename storage_delete_bucket.py => samples/snippets/storage_delete_bucket.py (100%) rename storage_delete_file.py => 
samples/snippets/storage_delete_file.py (100%) rename storage_delete_file_archived_generation.py => samples/snippets/storage_delete_file_archived_generation.py (100%) rename storage_delete_hmac_key.py => samples/snippets/storage_delete_hmac_key.py (100%) rename storage_disable_bucket_lifecycle_management.py => samples/snippets/storage_disable_bucket_lifecycle_management.py (100%) rename storage_disable_default_event_based_hold.py => samples/snippets/storage_disable_default_event_based_hold.py (100%) rename storage_disable_requester_pays.py => samples/snippets/storage_disable_requester_pays.py (100%) rename storage_disable_uniform_bucket_level_access.py => samples/snippets/storage_disable_uniform_bucket_level_access.py (100%) rename storage_disable_versioning.py => samples/snippets/storage_disable_versioning.py (100%) rename storage_download_encrypted_file.py => samples/snippets/storage_download_encrypted_file.py (100%) rename storage_download_file.py => samples/snippets/storage_download_file.py (100%) rename storage_download_file_requester_pays.py => samples/snippets/storage_download_file_requester_pays.py (100%) rename storage_download_public_file.py => samples/snippets/storage_download_public_file.py (100%) rename storage_enable_bucket_lifecycle_management.py => samples/snippets/storage_enable_bucket_lifecycle_management.py (100%) rename storage_enable_default_event_based_hold.py => samples/snippets/storage_enable_default_event_based_hold.py (100%) rename storage_enable_requester_pays.py => samples/snippets/storage_enable_requester_pays.py (100%) rename storage_enable_uniform_bucket_level_access.py => samples/snippets/storage_enable_uniform_bucket_level_access.py (100%) rename storage_enable_versioning.py => samples/snippets/storage_enable_versioning.py (100%) rename storage_generate_encryption_key.py => samples/snippets/storage_generate_encryption_key.py (100%) rename storage_generate_signed_post_policy_v4.py => 
samples/snippets/storage_generate_signed_post_policy_v4.py (100%) rename storage_generate_signed_url_v2.py => samples/snippets/storage_generate_signed_url_v2.py (100%) rename storage_generate_signed_url_v4.py => samples/snippets/storage_generate_signed_url_v4.py (100%) rename storage_generate_upload_signed_url_v4.py => samples/snippets/storage_generate_upload_signed_url_v4.py (100%) rename storage_get_bucket_labels.py => samples/snippets/storage_get_bucket_labels.py (100%) rename storage_get_bucket_metadata.py => samples/snippets/storage_get_bucket_metadata.py (100%) rename storage_get_default_event_based_hold.py => samples/snippets/storage_get_default_event_based_hold.py (100%) rename storage_get_hmac_key.py => samples/snippets/storage_get_hmac_key.py (100%) rename storage_get_metadata.py => samples/snippets/storage_get_metadata.py (100%) rename storage_get_public_access_prevention.py => samples/snippets/storage_get_public_access_prevention.py (100%) rename storage_get_requester_pays_status.py => samples/snippets/storage_get_requester_pays_status.py (100%) rename storage_get_retention_policy.py => samples/snippets/storage_get_retention_policy.py (100%) rename storage_get_service_account.py => samples/snippets/storage_get_service_account.py (100%) rename storage_get_uniform_bucket_level_access.py => samples/snippets/storage_get_uniform_bucket_level_access.py (100%) rename storage_list_buckets.py => samples/snippets/storage_list_buckets.py (100%) rename storage_list_file_archived_generations.py => samples/snippets/storage_list_file_archived_generations.py (100%) rename storage_list_files.py => samples/snippets/storage_list_files.py (100%) rename storage_list_files_with_prefix.py => samples/snippets/storage_list_files_with_prefix.py (100%) rename storage_list_hmac_keys.py => samples/snippets/storage_list_hmac_keys.py (100%) rename storage_lock_retention_policy.py => samples/snippets/storage_lock_retention_policy.py (100%) rename storage_make_public.py => 
samples/snippets/storage_make_public.py (100%) rename storage_move_file.py => samples/snippets/storage_move_file.py (100%) rename storage_object_csek_to_cmek.py => samples/snippets/storage_object_csek_to_cmek.py (100%) rename storage_object_get_kms_key.py => samples/snippets/storage_object_get_kms_key.py (100%) rename storage_print_bucket_acl.py => samples/snippets/storage_print_bucket_acl.py (100%) rename storage_print_bucket_acl_for_user.py => samples/snippets/storage_print_bucket_acl_for_user.py (100%) rename storage_print_file_acl.py => samples/snippets/storage_print_file_acl.py (100%) rename storage_print_file_acl_for_user.py => samples/snippets/storage_print_file_acl_for_user.py (100%) rename storage_release_event_based_hold.py => samples/snippets/storage_release_event_based_hold.py (100%) rename storage_release_temporary_hold.py => samples/snippets/storage_release_temporary_hold.py (100%) rename storage_remove_bucket_conditional_iam_binding.py => samples/snippets/storage_remove_bucket_conditional_iam_binding.py (100%) rename storage_remove_bucket_default_owner.py => samples/snippets/storage_remove_bucket_default_owner.py (100%) rename storage_remove_bucket_iam_member.py => samples/snippets/storage_remove_bucket_iam_member.py (100%) rename storage_remove_bucket_label.py => samples/snippets/storage_remove_bucket_label.py (100%) rename storage_remove_bucket_owner.py => samples/snippets/storage_remove_bucket_owner.py (100%) rename storage_remove_cors_configuration.py => samples/snippets/storage_remove_cors_configuration.py (100%) rename storage_remove_file_owner.py => samples/snippets/storage_remove_file_owner.py (100%) rename storage_remove_retention_policy.py => samples/snippets/storage_remove_retention_policy.py (100%) rename storage_rename_file.py => samples/snippets/storage_rename_file.py (100%) rename storage_rotate_encryption_key.py => samples/snippets/storage_rotate_encryption_key.py (100%) rename storage_set_bucket_default_kms_key.py => 
samples/snippets/storage_set_bucket_default_kms_key.py (100%) rename storage_set_bucket_public_iam.py => samples/snippets/storage_set_bucket_public_iam.py (100%) rename storage_set_event_based_hold.py => samples/snippets/storage_set_event_based_hold.py (100%) rename storage_set_metadata.py => samples/snippets/storage_set_metadata.py (100%) rename storage_set_public_access_prevention_enforced.py => samples/snippets/storage_set_public_access_prevention_enforced.py (100%) rename storage_set_public_access_prevention_inherited.py => samples/snippets/storage_set_public_access_prevention_inherited.py (100%) rename storage_set_public_access_prevention_unspecified.py => samples/snippets/storage_set_public_access_prevention_unspecified.py (100%) rename storage_set_retention_policy.py => samples/snippets/storage_set_retention_policy.py (100%) rename storage_set_temporary_hold.py => samples/snippets/storage_set_temporary_hold.py (100%) rename storage_upload_encrypted_file.py => samples/snippets/storage_upload_encrypted_file.py (100%) rename storage_upload_file.py => samples/snippets/storage_upload_file.py (100%) rename storage_upload_with_kms_key.py => samples/snippets/storage_upload_with_kms_key.py (100%) rename storage_view_bucket_iam_members.py => samples/snippets/storage_view_bucket_iam_members.py (100%) rename uniform_bucket_level_access_test.py => samples/snippets/uniform_bucket_level_access_test.py (100%) diff --git a/README.md b/samples/snippets/README.md similarity index 100% rename from README.md rename to samples/snippets/README.md diff --git a/acl_test.py b/samples/snippets/acl_test.py similarity index 100% rename from acl_test.py rename to samples/snippets/acl_test.py diff --git a/bucket_lock_test.py b/samples/snippets/bucket_lock_test.py similarity index 100% rename from bucket_lock_test.py rename to samples/snippets/bucket_lock_test.py diff --git a/conftest.py b/samples/snippets/conftest.py similarity index 100% rename from conftest.py rename to 
samples/snippets/conftest.py diff --git a/encryption_test.py b/samples/snippets/encryption_test.py similarity index 100% rename from encryption_test.py rename to samples/snippets/encryption_test.py diff --git a/hmac_samples_test.py b/samples/snippets/hmac_samples_test.py similarity index 100% rename from hmac_samples_test.py rename to samples/snippets/hmac_samples_test.py diff --git a/iam_test.py b/samples/snippets/iam_test.py similarity index 100% rename from iam_test.py rename to samples/snippets/iam_test.py diff --git a/notification_polling.py b/samples/snippets/notification_polling.py similarity index 100% rename from notification_polling.py rename to samples/snippets/notification_polling.py diff --git a/notification_polling_test.py b/samples/snippets/notification_polling_test.py similarity index 100% rename from notification_polling_test.py rename to samples/snippets/notification_polling_test.py diff --git a/noxfile_config.py b/samples/snippets/noxfile_config.py similarity index 100% rename from noxfile_config.py rename to samples/snippets/noxfile_config.py diff --git a/public_access_prevention_test.py b/samples/snippets/public_access_prevention_test.py similarity index 100% rename from public_access_prevention_test.py rename to samples/snippets/public_access_prevention_test.py diff --git a/quickstart.py b/samples/snippets/quickstart.py similarity index 100% rename from quickstart.py rename to samples/snippets/quickstart.py diff --git a/quickstart_test.py b/samples/snippets/quickstart_test.py similarity index 100% rename from quickstart_test.py rename to samples/snippets/quickstart_test.py diff --git a/requester_pays_test.py b/samples/snippets/requester_pays_test.py similarity index 100% rename from requester_pays_test.py rename to samples/snippets/requester_pays_test.py diff --git a/requirements-test.txt b/samples/snippets/requirements-test.txt similarity index 100% rename from requirements-test.txt rename to samples/snippets/requirements-test.txt diff --git 
a/requirements.txt b/samples/snippets/requirements.txt similarity index 100% rename from requirements.txt rename to samples/snippets/requirements.txt diff --git a/snippets_test.py b/samples/snippets/snippets_test.py similarity index 100% rename from snippets_test.py rename to samples/snippets/snippets_test.py diff --git a/storage_activate_hmac_key.py b/samples/snippets/storage_activate_hmac_key.py similarity index 100% rename from storage_activate_hmac_key.py rename to samples/snippets/storage_activate_hmac_key.py diff --git a/storage_add_bucket_conditional_iam_binding.py b/samples/snippets/storage_add_bucket_conditional_iam_binding.py similarity index 100% rename from storage_add_bucket_conditional_iam_binding.py rename to samples/snippets/storage_add_bucket_conditional_iam_binding.py diff --git a/storage_add_bucket_default_owner.py b/samples/snippets/storage_add_bucket_default_owner.py similarity index 100% rename from storage_add_bucket_default_owner.py rename to samples/snippets/storage_add_bucket_default_owner.py diff --git a/storage_add_bucket_iam_member.py b/samples/snippets/storage_add_bucket_iam_member.py similarity index 100% rename from storage_add_bucket_iam_member.py rename to samples/snippets/storage_add_bucket_iam_member.py diff --git a/storage_add_bucket_label.py b/samples/snippets/storage_add_bucket_label.py similarity index 100% rename from storage_add_bucket_label.py rename to samples/snippets/storage_add_bucket_label.py diff --git a/storage_add_bucket_owner.py b/samples/snippets/storage_add_bucket_owner.py similarity index 100% rename from storage_add_bucket_owner.py rename to samples/snippets/storage_add_bucket_owner.py diff --git a/storage_add_file_owner.py b/samples/snippets/storage_add_file_owner.py similarity index 100% rename from storage_add_file_owner.py rename to samples/snippets/storage_add_file_owner.py diff --git a/storage_bucket_delete_default_kms_key.py b/samples/snippets/storage_bucket_delete_default_kms_key.py similarity index 
100% rename from storage_bucket_delete_default_kms_key.py rename to samples/snippets/storage_bucket_delete_default_kms_key.py diff --git a/storage_change_default_storage_class.py b/samples/snippets/storage_change_default_storage_class.py similarity index 100% rename from storage_change_default_storage_class.py rename to samples/snippets/storage_change_default_storage_class.py diff --git a/storage_change_file_storage_class.py b/samples/snippets/storage_change_file_storage_class.py similarity index 100% rename from storage_change_file_storage_class.py rename to samples/snippets/storage_change_file_storage_class.py diff --git a/storage_compose_file.py b/samples/snippets/storage_compose_file.py similarity index 100% rename from storage_compose_file.py rename to samples/snippets/storage_compose_file.py diff --git a/storage_configure_retries.py b/samples/snippets/storage_configure_retries.py similarity index 100% rename from storage_configure_retries.py rename to samples/snippets/storage_configure_retries.py diff --git a/storage_copy_file.py b/samples/snippets/storage_copy_file.py similarity index 100% rename from storage_copy_file.py rename to samples/snippets/storage_copy_file.py diff --git a/storage_copy_file_archived_generation.py b/samples/snippets/storage_copy_file_archived_generation.py similarity index 100% rename from storage_copy_file_archived_generation.py rename to samples/snippets/storage_copy_file_archived_generation.py diff --git a/storage_cors_configuration.py b/samples/snippets/storage_cors_configuration.py similarity index 100% rename from storage_cors_configuration.py rename to samples/snippets/storage_cors_configuration.py diff --git a/storage_create_bucket.py b/samples/snippets/storage_create_bucket.py similarity index 100% rename from storage_create_bucket.py rename to samples/snippets/storage_create_bucket.py diff --git a/storage_create_bucket_class_location.py b/samples/snippets/storage_create_bucket_class_location.py similarity index 100% rename 
from storage_create_bucket_class_location.py rename to samples/snippets/storage_create_bucket_class_location.py diff --git a/storage_create_hmac_key.py b/samples/snippets/storage_create_hmac_key.py similarity index 100% rename from storage_create_hmac_key.py rename to samples/snippets/storage_create_hmac_key.py diff --git a/storage_deactivate_hmac_key.py b/samples/snippets/storage_deactivate_hmac_key.py similarity index 100% rename from storage_deactivate_hmac_key.py rename to samples/snippets/storage_deactivate_hmac_key.py diff --git a/storage_define_bucket_website_configuration.py b/samples/snippets/storage_define_bucket_website_configuration.py similarity index 100% rename from storage_define_bucket_website_configuration.py rename to samples/snippets/storage_define_bucket_website_configuration.py diff --git a/storage_delete_bucket.py b/samples/snippets/storage_delete_bucket.py similarity index 100% rename from storage_delete_bucket.py rename to samples/snippets/storage_delete_bucket.py diff --git a/storage_delete_file.py b/samples/snippets/storage_delete_file.py similarity index 100% rename from storage_delete_file.py rename to samples/snippets/storage_delete_file.py diff --git a/storage_delete_file_archived_generation.py b/samples/snippets/storage_delete_file_archived_generation.py similarity index 100% rename from storage_delete_file_archived_generation.py rename to samples/snippets/storage_delete_file_archived_generation.py diff --git a/storage_delete_hmac_key.py b/samples/snippets/storage_delete_hmac_key.py similarity index 100% rename from storage_delete_hmac_key.py rename to samples/snippets/storage_delete_hmac_key.py diff --git a/storage_disable_bucket_lifecycle_management.py b/samples/snippets/storage_disable_bucket_lifecycle_management.py similarity index 100% rename from storage_disable_bucket_lifecycle_management.py rename to samples/snippets/storage_disable_bucket_lifecycle_management.py diff --git a/storage_disable_default_event_based_hold.py 
b/samples/snippets/storage_disable_default_event_based_hold.py similarity index 100% rename from storage_disable_default_event_based_hold.py rename to samples/snippets/storage_disable_default_event_based_hold.py diff --git a/storage_disable_requester_pays.py b/samples/snippets/storage_disable_requester_pays.py similarity index 100% rename from storage_disable_requester_pays.py rename to samples/snippets/storage_disable_requester_pays.py diff --git a/storage_disable_uniform_bucket_level_access.py b/samples/snippets/storage_disable_uniform_bucket_level_access.py similarity index 100% rename from storage_disable_uniform_bucket_level_access.py rename to samples/snippets/storage_disable_uniform_bucket_level_access.py diff --git a/storage_disable_versioning.py b/samples/snippets/storage_disable_versioning.py similarity index 100% rename from storage_disable_versioning.py rename to samples/snippets/storage_disable_versioning.py diff --git a/storage_download_encrypted_file.py b/samples/snippets/storage_download_encrypted_file.py similarity index 100% rename from storage_download_encrypted_file.py rename to samples/snippets/storage_download_encrypted_file.py diff --git a/storage_download_file.py b/samples/snippets/storage_download_file.py similarity index 100% rename from storage_download_file.py rename to samples/snippets/storage_download_file.py diff --git a/storage_download_file_requester_pays.py b/samples/snippets/storage_download_file_requester_pays.py similarity index 100% rename from storage_download_file_requester_pays.py rename to samples/snippets/storage_download_file_requester_pays.py diff --git a/storage_download_public_file.py b/samples/snippets/storage_download_public_file.py similarity index 100% rename from storage_download_public_file.py rename to samples/snippets/storage_download_public_file.py diff --git a/storage_enable_bucket_lifecycle_management.py b/samples/snippets/storage_enable_bucket_lifecycle_management.py similarity index 100% rename from 
storage_enable_bucket_lifecycle_management.py rename to samples/snippets/storage_enable_bucket_lifecycle_management.py diff --git a/storage_enable_default_event_based_hold.py b/samples/snippets/storage_enable_default_event_based_hold.py similarity index 100% rename from storage_enable_default_event_based_hold.py rename to samples/snippets/storage_enable_default_event_based_hold.py diff --git a/storage_enable_requester_pays.py b/samples/snippets/storage_enable_requester_pays.py similarity index 100% rename from storage_enable_requester_pays.py rename to samples/snippets/storage_enable_requester_pays.py diff --git a/storage_enable_uniform_bucket_level_access.py b/samples/snippets/storage_enable_uniform_bucket_level_access.py similarity index 100% rename from storage_enable_uniform_bucket_level_access.py rename to samples/snippets/storage_enable_uniform_bucket_level_access.py diff --git a/storage_enable_versioning.py b/samples/snippets/storage_enable_versioning.py similarity index 100% rename from storage_enable_versioning.py rename to samples/snippets/storage_enable_versioning.py diff --git a/storage_generate_encryption_key.py b/samples/snippets/storage_generate_encryption_key.py similarity index 100% rename from storage_generate_encryption_key.py rename to samples/snippets/storage_generate_encryption_key.py diff --git a/storage_generate_signed_post_policy_v4.py b/samples/snippets/storage_generate_signed_post_policy_v4.py similarity index 100% rename from storage_generate_signed_post_policy_v4.py rename to samples/snippets/storage_generate_signed_post_policy_v4.py diff --git a/storage_generate_signed_url_v2.py b/samples/snippets/storage_generate_signed_url_v2.py similarity index 100% rename from storage_generate_signed_url_v2.py rename to samples/snippets/storage_generate_signed_url_v2.py diff --git a/storage_generate_signed_url_v4.py b/samples/snippets/storage_generate_signed_url_v4.py similarity index 100% rename from storage_generate_signed_url_v4.py rename to 
samples/snippets/storage_generate_signed_url_v4.py diff --git a/storage_generate_upload_signed_url_v4.py b/samples/snippets/storage_generate_upload_signed_url_v4.py similarity index 100% rename from storage_generate_upload_signed_url_v4.py rename to samples/snippets/storage_generate_upload_signed_url_v4.py diff --git a/storage_get_bucket_labels.py b/samples/snippets/storage_get_bucket_labels.py similarity index 100% rename from storage_get_bucket_labels.py rename to samples/snippets/storage_get_bucket_labels.py diff --git a/storage_get_bucket_metadata.py b/samples/snippets/storage_get_bucket_metadata.py similarity index 100% rename from storage_get_bucket_metadata.py rename to samples/snippets/storage_get_bucket_metadata.py diff --git a/storage_get_default_event_based_hold.py b/samples/snippets/storage_get_default_event_based_hold.py similarity index 100% rename from storage_get_default_event_based_hold.py rename to samples/snippets/storage_get_default_event_based_hold.py diff --git a/storage_get_hmac_key.py b/samples/snippets/storage_get_hmac_key.py similarity index 100% rename from storage_get_hmac_key.py rename to samples/snippets/storage_get_hmac_key.py diff --git a/storage_get_metadata.py b/samples/snippets/storage_get_metadata.py similarity index 100% rename from storage_get_metadata.py rename to samples/snippets/storage_get_metadata.py diff --git a/storage_get_public_access_prevention.py b/samples/snippets/storage_get_public_access_prevention.py similarity index 100% rename from storage_get_public_access_prevention.py rename to samples/snippets/storage_get_public_access_prevention.py diff --git a/storage_get_requester_pays_status.py b/samples/snippets/storage_get_requester_pays_status.py similarity index 100% rename from storage_get_requester_pays_status.py rename to samples/snippets/storage_get_requester_pays_status.py diff --git a/storage_get_retention_policy.py b/samples/snippets/storage_get_retention_policy.py similarity index 100% rename from 
storage_get_retention_policy.py rename to samples/snippets/storage_get_retention_policy.py diff --git a/storage_get_service_account.py b/samples/snippets/storage_get_service_account.py similarity index 100% rename from storage_get_service_account.py rename to samples/snippets/storage_get_service_account.py diff --git a/storage_get_uniform_bucket_level_access.py b/samples/snippets/storage_get_uniform_bucket_level_access.py similarity index 100% rename from storage_get_uniform_bucket_level_access.py rename to samples/snippets/storage_get_uniform_bucket_level_access.py diff --git a/storage_list_buckets.py b/samples/snippets/storage_list_buckets.py similarity index 100% rename from storage_list_buckets.py rename to samples/snippets/storage_list_buckets.py diff --git a/storage_list_file_archived_generations.py b/samples/snippets/storage_list_file_archived_generations.py similarity index 100% rename from storage_list_file_archived_generations.py rename to samples/snippets/storage_list_file_archived_generations.py diff --git a/storage_list_files.py b/samples/snippets/storage_list_files.py similarity index 100% rename from storage_list_files.py rename to samples/snippets/storage_list_files.py diff --git a/storage_list_files_with_prefix.py b/samples/snippets/storage_list_files_with_prefix.py similarity index 100% rename from storage_list_files_with_prefix.py rename to samples/snippets/storage_list_files_with_prefix.py diff --git a/storage_list_hmac_keys.py b/samples/snippets/storage_list_hmac_keys.py similarity index 100% rename from storage_list_hmac_keys.py rename to samples/snippets/storage_list_hmac_keys.py diff --git a/storage_lock_retention_policy.py b/samples/snippets/storage_lock_retention_policy.py similarity index 100% rename from storage_lock_retention_policy.py rename to samples/snippets/storage_lock_retention_policy.py diff --git a/storage_make_public.py b/samples/snippets/storage_make_public.py similarity index 100% rename from storage_make_public.py rename to 
samples/snippets/storage_make_public.py diff --git a/storage_move_file.py b/samples/snippets/storage_move_file.py similarity index 100% rename from storage_move_file.py rename to samples/snippets/storage_move_file.py diff --git a/storage_object_csek_to_cmek.py b/samples/snippets/storage_object_csek_to_cmek.py similarity index 100% rename from storage_object_csek_to_cmek.py rename to samples/snippets/storage_object_csek_to_cmek.py diff --git a/storage_object_get_kms_key.py b/samples/snippets/storage_object_get_kms_key.py similarity index 100% rename from storage_object_get_kms_key.py rename to samples/snippets/storage_object_get_kms_key.py diff --git a/storage_print_bucket_acl.py b/samples/snippets/storage_print_bucket_acl.py similarity index 100% rename from storage_print_bucket_acl.py rename to samples/snippets/storage_print_bucket_acl.py diff --git a/storage_print_bucket_acl_for_user.py b/samples/snippets/storage_print_bucket_acl_for_user.py similarity index 100% rename from storage_print_bucket_acl_for_user.py rename to samples/snippets/storage_print_bucket_acl_for_user.py diff --git a/storage_print_file_acl.py b/samples/snippets/storage_print_file_acl.py similarity index 100% rename from storage_print_file_acl.py rename to samples/snippets/storage_print_file_acl.py diff --git a/storage_print_file_acl_for_user.py b/samples/snippets/storage_print_file_acl_for_user.py similarity index 100% rename from storage_print_file_acl_for_user.py rename to samples/snippets/storage_print_file_acl_for_user.py diff --git a/storage_release_event_based_hold.py b/samples/snippets/storage_release_event_based_hold.py similarity index 100% rename from storage_release_event_based_hold.py rename to samples/snippets/storage_release_event_based_hold.py diff --git a/storage_release_temporary_hold.py b/samples/snippets/storage_release_temporary_hold.py similarity index 100% rename from storage_release_temporary_hold.py rename to samples/snippets/storage_release_temporary_hold.py diff --git 
a/storage_remove_bucket_conditional_iam_binding.py b/samples/snippets/storage_remove_bucket_conditional_iam_binding.py similarity index 100% rename from storage_remove_bucket_conditional_iam_binding.py rename to samples/snippets/storage_remove_bucket_conditional_iam_binding.py diff --git a/storage_remove_bucket_default_owner.py b/samples/snippets/storage_remove_bucket_default_owner.py similarity index 100% rename from storage_remove_bucket_default_owner.py rename to samples/snippets/storage_remove_bucket_default_owner.py diff --git a/storage_remove_bucket_iam_member.py b/samples/snippets/storage_remove_bucket_iam_member.py similarity index 100% rename from storage_remove_bucket_iam_member.py rename to samples/snippets/storage_remove_bucket_iam_member.py diff --git a/storage_remove_bucket_label.py b/samples/snippets/storage_remove_bucket_label.py similarity index 100% rename from storage_remove_bucket_label.py rename to samples/snippets/storage_remove_bucket_label.py diff --git a/storage_remove_bucket_owner.py b/samples/snippets/storage_remove_bucket_owner.py similarity index 100% rename from storage_remove_bucket_owner.py rename to samples/snippets/storage_remove_bucket_owner.py diff --git a/storage_remove_cors_configuration.py b/samples/snippets/storage_remove_cors_configuration.py similarity index 100% rename from storage_remove_cors_configuration.py rename to samples/snippets/storage_remove_cors_configuration.py diff --git a/storage_remove_file_owner.py b/samples/snippets/storage_remove_file_owner.py similarity index 100% rename from storage_remove_file_owner.py rename to samples/snippets/storage_remove_file_owner.py diff --git a/storage_remove_retention_policy.py b/samples/snippets/storage_remove_retention_policy.py similarity index 100% rename from storage_remove_retention_policy.py rename to samples/snippets/storage_remove_retention_policy.py diff --git a/storage_rename_file.py b/samples/snippets/storage_rename_file.py similarity index 100% rename from 
storage_rename_file.py rename to samples/snippets/storage_rename_file.py diff --git a/storage_rotate_encryption_key.py b/samples/snippets/storage_rotate_encryption_key.py similarity index 100% rename from storage_rotate_encryption_key.py rename to samples/snippets/storage_rotate_encryption_key.py diff --git a/storage_set_bucket_default_kms_key.py b/samples/snippets/storage_set_bucket_default_kms_key.py similarity index 100% rename from storage_set_bucket_default_kms_key.py rename to samples/snippets/storage_set_bucket_default_kms_key.py diff --git a/storage_set_bucket_public_iam.py b/samples/snippets/storage_set_bucket_public_iam.py similarity index 100% rename from storage_set_bucket_public_iam.py rename to samples/snippets/storage_set_bucket_public_iam.py diff --git a/storage_set_event_based_hold.py b/samples/snippets/storage_set_event_based_hold.py similarity index 100% rename from storage_set_event_based_hold.py rename to samples/snippets/storage_set_event_based_hold.py diff --git a/storage_set_metadata.py b/samples/snippets/storage_set_metadata.py similarity index 100% rename from storage_set_metadata.py rename to samples/snippets/storage_set_metadata.py diff --git a/storage_set_public_access_prevention_enforced.py b/samples/snippets/storage_set_public_access_prevention_enforced.py similarity index 100% rename from storage_set_public_access_prevention_enforced.py rename to samples/snippets/storage_set_public_access_prevention_enforced.py diff --git a/storage_set_public_access_prevention_inherited.py b/samples/snippets/storage_set_public_access_prevention_inherited.py similarity index 100% rename from storage_set_public_access_prevention_inherited.py rename to samples/snippets/storage_set_public_access_prevention_inherited.py diff --git a/storage_set_public_access_prevention_unspecified.py b/samples/snippets/storage_set_public_access_prevention_unspecified.py similarity index 100% rename from storage_set_public_access_prevention_unspecified.py rename to 
samples/snippets/storage_set_public_access_prevention_unspecified.py diff --git a/storage_set_retention_policy.py b/samples/snippets/storage_set_retention_policy.py similarity index 100% rename from storage_set_retention_policy.py rename to samples/snippets/storage_set_retention_policy.py diff --git a/storage_set_temporary_hold.py b/samples/snippets/storage_set_temporary_hold.py similarity index 100% rename from storage_set_temporary_hold.py rename to samples/snippets/storage_set_temporary_hold.py diff --git a/storage_upload_encrypted_file.py b/samples/snippets/storage_upload_encrypted_file.py similarity index 100% rename from storage_upload_encrypted_file.py rename to samples/snippets/storage_upload_encrypted_file.py diff --git a/storage_upload_file.py b/samples/snippets/storage_upload_file.py similarity index 100% rename from storage_upload_file.py rename to samples/snippets/storage_upload_file.py diff --git a/storage_upload_with_kms_key.py b/samples/snippets/storage_upload_with_kms_key.py similarity index 100% rename from storage_upload_with_kms_key.py rename to samples/snippets/storage_upload_with_kms_key.py diff --git a/storage_view_bucket_iam_members.py b/samples/snippets/storage_view_bucket_iam_members.py similarity index 100% rename from storage_view_bucket_iam_members.py rename to samples/snippets/storage_view_bucket_iam_members.py diff --git a/uniform_bucket_level_access_test.py b/samples/snippets/uniform_bucket_level_access_test.py similarity index 100% rename from uniform_bucket_level_access_test.py rename to samples/snippets/uniform_bucket_level_access_test.py From 0a5c42ba4c3760bd51e8fc2018a39a6b8a4ed5ff Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 18 Oct 2021 18:16:58 +0000 Subject: [PATCH 194/197] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- 
samples/snippets/noxfile.py | 270 ++++++++++++++++++++++++++++++++++++ 1 file changed, 270 insertions(+) create mode 100644 samples/snippets/noxfile.py diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py new file mode 100644 index 000000000..93a9122cc --- /dev/null +++ b/samples/snippets/noxfile.py @@ -0,0 +1,270 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==19.10b0" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. 
Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def 
blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) From f609a814a41220827ea14e19c67175d35fd42587 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 18 Oct 2021 11:21:39 -0700 Subject: [PATCH 195/197] add noxfile --- samples/snippets/noxfile.py | 262 ++++++++++++++++++++++++++++++++++++ 1 file changed, 262 insertions(+) create mode 100644 samples/snippets/noxfile.py diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py new file mode 100644 index 000000000..df5a5e7ba --- /dev/null +++ b/samples/snippets/noxfile.py @@ -0,0 +1,262 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. 
+ sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret['GCLOUD_PROJECT'] = os.environ[env_key] # deprecated + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. 
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." 
+ ] + session.run("flake8", *args) + + +# +# Black +# + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. 
+ # Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) From ab7ec099d50aa6ed806b83fe7945cec5ef79ea23 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 18 Oct 2021 18:46:47 +0000 Subject: [PATCH 196/197] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- samples/snippets/noxfile.py | 60 +++++++++++++++++++++---------------- 1 file changed, 34 insertions(+), 26 deletions(-) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index df5a5e7ba..93a9122cc 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -28,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. 
The values here should be in sync @@ -38,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -56,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -77,26 +76,28 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] - ret['GCLOUD_PROJECT'] = os.environ[env_key] # deprecated + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10"] +# All versions used to test samples. 
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) # Error if a python version is missing nox.options.error_on_missing_interpreters = True @@ -145,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -154,7 +155,7 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) @@ -163,9 +164,10 @@ def lint(session: nox.sessions.Session) -> None: # Black # + @nox.session def blacken(session: nox.sessions.Session) -> None: - session.install("black") + session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) @@ -179,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -209,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -219,9 +223,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -231,14 +235,18 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ - # Get root of this repository. - # Assume we don't have directories nested deeper than 10 items. + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): if p is None: break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") From 4f80ae05a1a6195a53451037580c2ab097013cb6 Mon Sep 17 00:00:00 2001 From: Cathy Ouyang Date: Mon, 18 Oct 2021 13:41:52 -0700 Subject: [PATCH 197/197] remove invalid unused region tags --- samples/snippets/notification_polling.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/samples/snippets/notification_polling.py b/samples/snippets/notification_polling.py index 27680dd74..3182db6da 100644 --- a/samples/snippets/notification_polling.py +++ b/samples/snippets/notification_polling.py @@ -56,7 +56,6 @@ def summarize(message): - # [START parse_message] data = message.data.decode("utf-8") attributes = message.attributes @@ -101,12 +100,10 @@ def summarize(message): metageneration=metageneration, ) return description - # [END parse_message] def poll_notifications(project, subscription_name): """Polls a Cloud Pub/Sub subscription for new GCS events for display.""" - # [START poll_notifications] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name @@ -123,7 +120,6 @@ def callback(message): print("Listening for messages on {}".format(subscription_path)) while True: time.sleep(60) - # [END poll_notifications] if __name__ == "__main__":