Adding storage factories for owned objects.
This includes:
- `Bucket<--Client` (bucket owned by client)
- `Batch<--Client`
- `Blob<--Bucket`

I refrained from implementing factories for:
- `BucketACL<--Bucket`
- `ObjectACL<--Blob`
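Taken together, the new factories let callers build the whole object graph locally before any request is made. A minimal usage sketch, assuming default credentials and project are configured; the bucket and blob names are placeholders:

    from gcloud import storage

    client = storage.Client()

    # Bucket <-- Client: purely local, no HTTP request is made here.
    bucket = client.bucket('my-bucket')

    # Blob <-- Bucket: also purely local.
    blob = bucket.blob('my-file.txt')
    blob.upload_from_string('hello world')  # this call does hit the API

    # Batch <-- Client: requests made inside the block are deferred
    # and sent together when the block exits.
    with client.batch():
        blob.delete()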
dhermes committed Jul 21, 2015
1 parent f634292 commit a0a54ef
Showing 9 changed files with 98 additions and 13 deletions.
2 changes: 1 addition & 1 deletion docs/_components/storage-getting-started.rst
@@ -91,7 +91,7 @@ Python built-in ``object``.
If you want to set some data, you just create a ``Blob`` inside your bucket
and store your data inside the blob::

>>> blob = storage.Blob('greeting.txt', bucket=bucket)
>>> blob = bucket.blob('greeting.txt')
>>> blob.upload_from_string('Hello world!')

This creates a :class:`Blob <gcloud.storage.blob.Blob>` object locally and
2 changes: 1 addition & 1 deletion docs/_components/storage-quickstart.rst
@@ -61,7 +61,7 @@ you can create buckets and blobs::
>>> bucket = client.create_bucket('my-new-bucket')
>>> print bucket
<Bucket: my-new-bucket>
>>> blob = storage.Blob('my-test-file.txt', bucket=bucket)
>>> blob = bucket.blob('my-test-file.txt')
>>> print blob
<Blob: my-new-bucket, my-test-file.txt>
>>> blob = blob.upload_from_string('this is test content!')
2 changes: 1 addition & 1 deletion docs/index.rst
@@ -54,5 +54,5 @@ Cloud Storage
from gcloud import storage
client = storage.Client()
bucket = client.get_bucket('<your-bucket-name>')
blob = storage.Blob('my-test-file.txt', bucket=bucket)
blob = bucket.blob('my-test-file.txt')
blob.upload_from_string('this is test content!')
20 changes: 20 additions & 0 deletions gcloud/storage/bucket.py
@@ -98,6 +98,26 @@ def __init__(self, client, name=None):
def __repr__(self):
return '<Bucket: %s>' % self.name

    def blob(self, blob_name, chunk_size=None):
        """Factory constructor for blob object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a blob object owned by this bucket.

        :type blob_name: string
        :param blob_name: The name of the blob to be instantiated.

        :type chunk_size: integer
        :param chunk_size: The size of a chunk of data whenever iterating
                           (1 MB). This must be a multiple of 256 KB per the
                           API specification.

        :rtype: :class:`gcloud.storage.blob.Blob`
        :returns: The blob object created.
        """
        return Blob(name=blob_name, bucket=self, chunk_size=chunk_size)

def exists(self, client=None):
"""Determines whether or not this bucket exists.
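The optional `chunk_size` argument is passed straight through to `Blob`; per the docstring above it must be a multiple of 256 KB. A small sketch under that assumption (bucket name, blob name, and file path are placeholders):

    from gcloud import storage

    client = storage.Client()
    bucket = client.bucket('my-bucket')  # placeholder bucket name

    CHUNK_SIZE = 4 * 256 * 1024  # 1 MB, i.e. four 256 KB units
    blob = bucket.blob('big-file.bin', chunk_size=CHUNK_SIZE)
    blob.upload_from_filename('/tmp/big-file.bin')  # placeholder path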
28 changes: 28 additions & 0 deletions gcloud/storage/client.py
@@ -19,6 +19,7 @@
from gcloud.client import JSONClient
from gcloud.exceptions import NotFound
from gcloud.iterator import Iterator
from gcloud.storage.batch import Batch
from gcloud.storage.bucket import Bucket
from gcloud.storage.connection import Connection

@@ -113,6 +114,33 @@ def current_batch(self):
"""
return self._batch_stack.top

    def bucket(self, bucket_name):
        """Factory constructor for bucket object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a bucket object owned by this client.

        :type bucket_name: string
        :param bucket_name: The name of the bucket to be instantiated.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The bucket object created.
        """
        return Bucket(client=self, name=bucket_name)

    def batch(self):
        """Factory constructor for batch object.

        .. note::
          This will not make an HTTP request; it simply instantiates
          a batch object owned by this client.

        :rtype: :class:`gcloud.storage.batch.Batch`
        :returns: The batch object created.
        """
        return Batch(client=self)

def get_bucket(self, bucket_name):
"""Get a bucket by name.
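The `batch()` factory pairs with `Batch`'s context-manager behavior (used the same way in the system tests below): requests issued inside the `with` block are deferred and sent together as one batched call when the block exits. A minimal sketch with placeholder bucket names:

    from gcloud import storage

    client = storage.Client()

    with client.batch():
        # Each delete is queued and issued when the block exits.
        for name in ('scratch-bucket-1', 'scratch-bucket-2'):
            client.bucket(name).delete()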
2 changes: 1 addition & 1 deletion gcloud/storage/demo/demo.py
@@ -37,7 +37,7 @@
print(list(client.list_buckets()))

# How about we create a new blob inside this bucket.
blob = storage.Blob("my-new-file.txt", bucket=bucket)
blob = bucket.blob("my-new-file.txt")

# Now let's put some data in there.
blob.upload_from_string("this is some data!")
13 changes: 13 additions & 0 deletions gcloud/storage/test_bucket.py
@@ -107,6 +107,19 @@ def test_ctor(self):
self.assertFalse(bucket._default_object_acl.loaded)
self.assertTrue(bucket._default_object_acl.bucket is bucket)

def test_blob(self):
from gcloud.storage.blob import Blob

BUCKET_NAME = 'BUCKET_NAME'
BLOB_NAME = 'BLOB_NAME'

bucket = self._makeOne(name=BUCKET_NAME)
blob = bucket.blob(BLOB_NAME)
self.assertTrue(isinstance(blob, Blob))
self.assertTrue(blob.bucket is bucket)
self.assertTrue(blob.client is bucket.client)
self.assertEqual(blob.name, BLOB_NAME)

def test_exists_miss(self):
from gcloud.exceptions import NotFound

24 changes: 24 additions & 0 deletions gcloud/storage/test_client.py
@@ -89,6 +89,30 @@ def test_connection_getter_with_batch(self):
self.assertTrue(client.connection is batch)
self.assertTrue(client.current_batch is batch)

def test_bucket(self):
from gcloud.storage.bucket import Bucket

PROJECT = object()
CREDENTIALS = _Credentials()
BUCKET_NAME = 'BUCKET_NAME'

client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
bucket = client.bucket(BUCKET_NAME)
self.assertTrue(isinstance(bucket, Bucket))
self.assertTrue(bucket.client is client)
self.assertEqual(bucket.name, BUCKET_NAME)

def test_batch(self):
from gcloud.storage.batch import Batch

PROJECT = object()
CREDENTIALS = _Credentials()

client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)
batch = client.batch()
self.assertTrue(isinstance(batch, Batch))
self.assertTrue(batch._client is client)

def test_get_bucket_miss(self):
from gcloud.exceptions import NotFound

18 changes: 9 additions & 9 deletions system_tests/storage.py
@@ -52,9 +52,9 @@ def setUp(self):
self.case_buckets_to_delete = []

def tearDown(self):
with storage.Batch(CLIENT):
with CLIENT.batch():
for bucket_name in self.case_buckets_to_delete:
storage.Bucket(CLIENT, name=bucket_name).delete()
CLIENT.bucket(bucket_name).delete()

def test_create_bucket(self):
new_bucket_name = 'a-new-bucket'
@@ -115,7 +115,7 @@ def tearDown(self):
class TestStorageWriteFiles(TestStorageFiles):

def test_large_file_write_from_stream(self):
blob = storage.Blob(bucket=self.bucket, name='LargeFile')
blob = self.bucket.blob('LargeFile')
self.assertEqual(blob._properties, {})

file_data = self.FILES['big']
@@ -129,7 +129,7 @@ def test_large_file_write_from_stream(self):
self.assertEqual(md5_hash, file_data['hash'])

def test_small_file_write_from_filename(self):
blob = storage.Blob(bucket=self.bucket, name='SmallFile')
blob = self.bucket.blob('SmallFile')
self.assertEqual(blob._properties, {})

file_data = self.FILES['simple']
@@ -155,12 +155,12 @@ def test_write_metadata(self):
self.assertEqual(blob.content_type, 'image/png')

def test_direct_write_and_read_into_file(self):
blob = storage.Blob(bucket=self.bucket, name='MyBuffer')
blob = self.bucket.blob('MyBuffer')
file_contents = b'Hello World'
blob.upload_from_string(file_contents)
self.case_blobs_to_delete.append(blob)

same_blob = storage.Blob(bucket=self.bucket, name='MyBuffer')
same_blob = self.bucket.blob('MyBuffer')
same_blob.reload() # Initialize properties.
temp_filename = tempfile.mktemp()
with open(temp_filename, 'wb') as file_obj:
@@ -317,7 +317,7 @@ def setUp(self):
with open(logo_path, 'rb') as file_obj:
self.LOCAL_FILE = file_obj.read()

blob = storage.Blob(bucket=self.bucket, name='LogoToSign.jpg')
blob = self.bucket.blob('LogoToSign.jpg')
blob.upload_from_string(self.LOCAL_FILE)
self.case_blobs_to_delete.append(blob)

@@ -327,7 +327,7 @@ def tearDown(self):
blob.delete()

def test_create_signed_read_url(self):
blob = storage.Blob(bucket=self.bucket, name='LogoToSign.jpg')
blob = self.bucket.blob('LogoToSign.jpg')
expiration = int(time.time() + 5)
signed_url = blob.generate_signed_url(expiration, method='GET',
client=CLIENT)
@@ -337,7 +337,7 @@ def test_create_signed_read_url(self):
self.assertEqual(content, self.LOCAL_FILE)

def test_create_signed_delete_url(self):
blob = storage.Blob(bucket=self.bucket, name='LogoToSign.jpg')
blob = self.bucket.blob('LogoToSign.jpg')
expiration = int(time.time() + 283473274)
signed_delete_url = blob.generate_signed_url(expiration,
method='DELETE',