@@ -7,12 +7,13 @@
 
 import os
 from datetime import datetime
+from datetime import timezone
 from azure.cli.command_modules.storage.url_quote_util import encode_for_url, make_encoded_file_url_and_params
 from azure.cli.command_modules.storage.util import (create_blob_service_from_storage_client,
                                                     create_file_share_from_storage_client,
                                                     create_short_lived_share_sas,
                                                     create_short_lived_container_sas,
-                                                    filter_none, collect_blobs, collect_files,
+                                                    filter_none, collect_blobs, collect_blob_objects, collect_files,
                                                     mkdir_p, guess_content_type, normalize_blob_file_path,
                                                     check_precondition_success)
 from knack.log import get_logger
@@ -398,23 +399,26 @@ def _delete_blob(blob_name):
         return client.delete_blob(**delete_blob_args)
 
     logger = get_logger(__name__)
-    source_blobs = list(collect_blobs(client, source_container_name, pattern))
+    source_blobs = list(collect_blob_objects(client, source_container_name, pattern))
 
     if dryrun:
-        if if_modified_since:
-            logger.warning('--if-modified-since argument is ignored when using --dry-run.')
-        if if_unmodified_since:
-            logger.warning('--if-unmodified-since argument is ignored when using --dry-run.')
+        delete_blobs = []
+        if_modified_since_utc = if_modified_since.replace(tzinfo=timezone.utc) if if_modified_since else None
+        if_unmodified_since_utc = if_unmodified_since.replace(tzinfo=timezone.utc) if if_unmodified_since else None
+        for blob in source_blobs:
+            if not if_modified_since or blob[1].properties.last_modified >= if_modified_since_utc:
+                if not if_unmodified_since or blob[1].properties.last_modified <= if_unmodified_since_utc:
+                    delete_blobs.append(blob[0])
         logger.warning('delete action: from %s', source)
         logger.warning('    pattern %s', pattern)
         logger.warning('  container %s', source_container_name)
-        logger.warning('      total %d', len(source_blobs))
+        logger.warning('      total %d', len(delete_blobs))
         logger.warning(' operations')
-        for blob in source_blobs:
+        for blob in delete_blobs:
             logger.warning('  - %s', blob)
         return []
 
-    results = [result for include, result in (_delete_blob(blob) for blob in source_blobs) if include]
+    results = [result for include, result in (_delete_blob(blob[0]) for blob in source_blobs) if include]
     num_failures = len(source_blobs) - len(results)
     if num_failures:
         logger.warning('%s of %s blobs not deleted due to "Failed Precondition"', num_failures, len(source_blobs))
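The rewritten --dryrun branch above must compare the naive datetimes parsed from --if-modified-since/--if-unmodified-since against each blob's timezone-aware last_modified property, which is why it attaches timezone.utc before comparing. A minimal standalone sketch of that filter, using made-up (name, last_modified) pairs rather than the real (blob_name, blob) tuples from collect_blob_objects:

from datetime import datetime, timezone

# Stand-ins for the (blob_name, blob) tuples yielded by collect_blob_objects:
# here each entry is just (name, last_modified).
blobs = [('a.txt', datetime(2020, 1, 1, tzinfo=timezone.utc)),
         ('b.txt', datetime(2020, 6, 1, tzinfo=timezone.utc))]
if_modified_since = datetime(2020, 3, 1)      # naive, as the CLI would parse it
if_unmodified_since = None

# Same normalization and comparison as the dry-run branch above.
if_modified_since_utc = if_modified_since.replace(tzinfo=timezone.utc) if if_modified_since else None
if_unmodified_since_utc = if_unmodified_since.replace(tzinfo=timezone.utc) if if_unmodified_since else None
delete_blobs = [name for name, last_modified in blobs
                if (not if_modified_since or last_modified >= if_modified_since_utc)
                and (not if_unmodified_since or last_modified <= if_unmodified_since_utc)]
print(delete_blobs)  # ['b.txt'] -- only the blob modified after --if-modified-since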
@@ -4,6 +4,7 @@
 # --------------------------------------------------------------------------------------------
 
 import os
+from datetime import datetime
 from azure.cli.testsdk import LiveScenarioTest, StorageAccountPreparer, ResourceGroupPreparer, JMESPathCheck
 from ..storage_test_util import StorageScenarioMixin, StorageTestFilesPreparer
 
@@ -386,6 +387,24 @@ def create_and_populate_container():
         self.storage_cmd('storage blob list -c {}', storage_account_info, src_container).assert_with_checks(
             JMESPathCheck('length(@)', 41))
 
+        # delete recursively with if-modified-since
+        src_container = create_and_populate_container()
+        self.storage_cmd('storage blob delete-batch -s {} --if-modified-since {} --dryrun',
+                         storage_account_info, src_container, datetime.min.strftime('%Y-%m-%dT%H:%MZ'))
+        self.storage_cmd('storage blob delete-batch -s {} --if-modified-since {}',
+                         storage_account_info, src_container, datetime.min.strftime('%Y-%m-%dT%H:%MZ'))
+        self.storage_cmd('storage blob list -c {}', storage_account_info, src_container).assert_with_checks(
+            JMESPathCheck('length(@)', 0))
+
+        # delete recursively with if-unmodified-since
+        src_container = create_and_populate_container()
+        self.storage_cmd('storage blob delete-batch -s {} --if-unmodified-since {} --dryrun',
+                         storage_account_info, src_container, datetime.max.strftime('%Y-%m-%dT%H:%MZ'))
+        self.storage_cmd('storage blob delete-batch -s {} --if-unmodified-since {}',
+                         storage_account_info, src_container, datetime.max.strftime('%Y-%m-%dT%H:%MZ'))
+        self.storage_cmd('storage blob list -c {}', storage_account_info, src_container).assert_with_checks(
+            JMESPathCheck('length(@)', 0))
+
     @ResourceGroupPreparer()
     @StorageAccountPreparer()
     @StorageTestFilesPreparer()
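The new tests use datetime.min and datetime.max to build --if-modified-since/--if-unmodified-since values that match every blob in the freshly populated container, so the follow-up storage blob list check can expect an empty result. For orientation, the standard-library boundary values are:

from datetime import datetime

print(datetime.min)  # 0001-01-01 00:00:00         -> every blob was modified after this
print(datetime.max)  # 9999-12-31 23:59:59.999999  -> every blob was modified before this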
src/azure-cli/azure/cli/command_modules/storage/util.py (32 changes: 19 additions, 13 deletions)
@@ -11,26 +11,32 @@ def collect_blobs(blob_service, container, pattern=None):
     """
     List the blobs in the given blob container, filter the blob by comparing their path to the given pattern.
     """
+    return [name for (name, _) in collect_blob_objects(blob_service, container, pattern)]
+
+
+def collect_blob_objects(blob_service, container, pattern=None):
+    """
+    List the blob name and blob in the given blob container, filter the blob by comparing their path to
+    the given pattern.
+    """
     if not blob_service:
         raise ValueError('missing parameter blob_service')
 
     if not container:
         raise ValueError('missing parameter container')
 
     if not _pattern_has_wildcards(pattern):
-        return [pattern] if blob_service.exists(container, pattern) else []
-
-    results = []
-    for blob in blob_service.list_blobs(container):
-        try:
-            blob_name = blob.name.encode('utf-8') if isinstance(blob.name, unicode) else blob.name
-        except NameError:
-            blob_name = blob.name
-
-        if not pattern or _match_path(blob_name, pattern):
-            results.append(blob_name)
-
-    return results
+        if blob_service.exists(container, pattern):
+            yield pattern, blob_service.get_blob_properties(container, pattern)
+    else:
+        for blob in blob_service.list_blobs(container):
+            try:
+                blob_name = blob.name.encode('utf-8') if isinstance(blob.name, unicode) else blob.name
+            except NameError:
+                blob_name = blob.name
+
+            if not pattern or _match_path(blob_name, pattern):
+                yield blob_name, blob
 
 
 def collect_files(cmd, file_service, share, pattern=None):
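After this change, collect_blob_objects is a generator that yields (blob_name, blob) pairs, and collect_blobs keeps its old list-of-names contract by discarding the blob objects. A hedged usage sketch, assuming blob_service is the track-1 blob service client the CLI builds elsewhere and that the container name and pattern are placeholder values:

from azure.cli.command_modules.storage.util import collect_blobs, collect_blob_objects

blob_service = ...  # replace with an authenticated track-1 blob service client (out of scope here)

# Names only, as before: collect_blobs now just strips the blob objects.
names = collect_blobs(blob_service, 'mycontainer', pattern='logs/*.txt')

# Name plus the full blob object, so callers such as delete-batch --dryrun can
# read blob.properties.last_modified directly from the listing results.
for name, blob in collect_blob_objects(blob_service, 'mycontainer', 'logs/*.txt'):
    print(name, blob.properties.last_modified)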