Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions src/azure-cli/azure/cli/command_modules/storage/_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,6 +268,11 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
min_api='2019-12-12', is_preview=True
)

blobs_type = CLIArgumentType(
nargs='+',
help="space-separated blobs: blobname1 [blobname2 ....]"
)

with self.argument_context('storage') as c:
c.argument('container_name', container_name_type)
c.argument('directory_name', directory_type)
Expand Down Expand Up @@ -1003,6 +1008,20 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
is_preview=True, help="Indicate the priority with which to rehydrate an archived blob. "
"The priority can be set on a blob only once, default value is Standard.")

with self.argument_context('storage blob set-tier-batch') as c:
from azure.cli.command_modules.storage._validators import (blob_rehydrate_priority_validator,
block_blob_tier_validator)
c.register_container_arguments()
c.argument('blobs', blobs_type)
c.argument('blob_type', options_list=('--type', '-t'), arg_type=get_enum_type(['block']))
c.argument('tier', validator=block_blob_tier_validator, help="The tier value to set the blob to.")
c.extra('rehydrate_priority', options_list=('--rehydrate-priority', '-r'),
arg_type=get_enum_type(('High', 'Standard')), validator=blob_rehydrate_priority_validator,
is_preview=True, help="Indicate the priority with which to rehydrate an archived blob. "
"The priority can be set on a blob only once, default value is Standard.")
c.argument('if_tags_match_condition', arg_group='Precondition', options_list=('--tags-condition'),
help="Specify a SQL where clause on blob tags to operate only on blob with a matching value. ")

with self.argument_context('storage blob set-legal-hold') as c:
c.register_blob_arguments()
c.argument('legal_hold', arg_type=get_three_state_flag(),
Expand Down Expand Up @@ -1181,10 +1200,12 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem

with self.argument_context('storage blob delete-batch') as c:
c.ignore('source_container_name')
c.register_precondition_options()
c.argument('source', options_list=('--source', '-s'))
c.argument('delete_snapshots', arg_type=get_enum_type(get_delete_blob_snapshot_type_names()),
help='Required if the blob has associated snapshots.')
c.argument('lease_id', help='The active lease id for the blob.')
c.argument('blobs', blobs_type, help="space-separated blobs: blobname1 [blobname2 ....]")

with self.argument_context('storage blob lease') as c:
c.argument('blob_name', arg_type=blob_name_type)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -501,3 +501,14 @@ def _decode_bytearray(result):
result[k] = base64.urlsafe_b64encode(v).decode()
elif isinstance(v, dict):
_decode_bytearray(v)


def transform_blobs_batch_output(result):
    """Flatten batch sub-responses into plain dicts for CLI output.

    Each entry records the blob URL with its query string stripped (so no
    SAS token or query parameters appear in output), the HTTP status code,
    and the reason phrase of the sub-response.
    """
    def _summarize(response):
        # Keep only the part of the request URL before '?'.
        url, _, _ = response.request.url.partition('?')
        return {
            "partial_blob_url": url,
            "status_code": response.status_code,
            "reason": response.reason,
        }

    return [_summarize(res) for res in result]
Original file line number Diff line number Diff line change
Expand Up @@ -338,7 +338,7 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
resource_type=ResourceType.DATA_STORAGE_BLOB)) as g:
from ._transformers import (transform_blob_list_output, transform_blob_json_output,
transform_blob_upload_output, transform_url_without_encode,
create_boolean_result_output_transformer)
create_boolean_result_output_transformer, transform_blobs_batch_output)
from ._format import transform_blob_output, transform_boolean_for_table
from ._exception_handler import file_related_exception_handler
from ._validators import (process_blob_upload_batch_parameters, process_blob_download_batch_parameters,
Expand All @@ -349,6 +349,8 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
table_transformer=transform_blob_output,
exception_handler=show_exception_handler)
g.storage_custom_command_oauth('set-tier', 'set_blob_tier_v2')
g.storage_custom_command_oauth('set-tier-batch', 'set_blob_tier_batch', client_factory=cf_container_client,
transform=transform_blobs_batch_output)
g.storage_custom_command_oauth('list', 'list_blobs', client_factory=cf_container_client,
transform=transform_blob_list_output,
table_transformer=transform_blob_output)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -763,9 +763,9 @@ def show_blob(cmd, client, container_name, blob_name, snapshot=None, lease_id=No
return blob


def storage_blob_delete_batch(client, source, source_container_name, pattern=None, lease_id=None,
def storage_blob_delete_batch(client, source, source_container_name, blobs=None, pattern=None, lease_id=None,
delete_snapshots=None, if_modified_since=None, if_unmodified_since=None, if_match=None,
if_none_match=None, timeout=None, dryrun=False):
if_none_match=None, if_tags_match_condition=None, timeout=None, dryrun=False):
container_client = client.get_container_client(source_container_name)

from datetime import timezone
Expand All @@ -774,33 +774,25 @@ def storage_blob_delete_batch(client, source, source_container_name, pattern=Non
if if_unmodified_since and not if_unmodified_since.tzinfo:
if_unmodified_since = if_unmodified_since.replace(tzinfo=timezone.utc)

@check_precondition_success
def _delete_blob(blob_name):
delete_blob_args = {
'blob': blob_name,
'lease': lease_id,
'delete_snapshots': delete_snapshots,
'if_modified_since': if_modified_since,
'if_unmodified_since': if_unmodified_since,
'if_match': if_match,
'if_none_match': if_none_match,
'timeout': timeout
}
try:
container_client.delete_blob(**delete_blob_args)
return blob_name
except HttpResponseError as ex:
logger.debug(ex.exc_msg)
return None

source_blobs = list(collect_blob_objects(client, source_container_name, pattern))
if blobs:
source_blobs = blobs
if pattern:
from ..util import _match_path
source_blobs = [blob for blob in blobs if _match_path(blob, pattern)]
else:
source_blobs = list(collect_blob_objects(client, source_container_name, pattern))

if dryrun:
delete_blobs = []
for blob in source_blobs:
if not if_modified_since or blob[1].last_modified >= if_modified_since:
if not if_unmodified_since or blob[1].last_modified <= if_unmodified_since:
delete_blobs.append(blob[0])
if blobs:
logger.warning('if --blobs is specified with --dryrun, blobs are not filtered by --if-modified-since '
'or --if-unmodified-since ')
delete_blobs = source_blobs
else:
for blob in source_blobs:
if not if_modified_since or blob[1].last_modified >= if_modified_since:
if not if_unmodified_since or blob[1].last_modified <= if_unmodified_since:
delete_blobs.append(blob[0])
logger.warning('delete action: from %s', source)
logger.warning(' pattern %s', pattern)
logger.warning(' container %s', source_container_name)
Expand All @@ -810,10 +802,23 @@ def _delete_blob(blob_name):
logger.warning(' - %s', blob)
return []

results = [result for (include, result) in (_delete_blob(blob[0]) for blob in source_blobs) if result]
num_failures = len(source_blobs) - len(results)
if blobs is None:
source_blobs = [blob[0] for blob in source_blobs]

# max single request allows 256 blobs
total_blobs = len(source_blobs)
num_failures = 0
for i in range(0, total_blobs, 256):
results = container_client.delete_blobs(*source_blobs[i:i+256], delete_snapshots=delete_snapshots,
if_modified_since=if_modified_since,
if_unmodified_since=if_unmodified_since,
if_tags_match_condition=if_tags_match_condition,
timeout=timeout,
raise_on_any_failure=False)
num_failures += len([res for res in results if res.status_code != 202])

if num_failures:
logger.warning('%s of %s blobs not deleted due to "Failed Precondition"', num_failures, len(source_blobs))
logger.warning('%s of %s blobs not deleted due to "Failed Precondition"', num_failures, total_blobs)


def generate_sas_blob_uri(cmd, client, permission=None, expiry=None, start=None, id=None, ip=None, # pylint: disable=redefined-builtin
Expand Down Expand Up @@ -961,6 +966,21 @@ def set_blob_tier_v2(client, tier, blob_type='block', rehydrate_priority=None, t
raise ValueError('Blob tier is only applicable to block or page blob.')


def set_blob_tier_batch(client, blobs, tier, blob_type='block', rehydrate_priority=None, if_tags_match_condition=None,
                        **kwargs):
    """Set the access tier on a batch of block blobs.

    Requests are issued in chunks of 256 blobs — a single batch request
    allows at most 256 sub-requests. Per-blob failures do not raise
    (``raise_on_any_failure=False``); the sub-responses are collected and
    returned so the caller/transformer can inspect each one.

    :raises ValueError: if *blob_type* is anything other than ``'block'``.
    """
    if blob_type != 'block':
        raise ValueError('Batch set blob tier is only applicable to block blobs.')

    chunk_size = 256  # max sub-requests per single batch request
    results = []
    for start in range(0, len(blobs), chunk_size):
        chunk = blobs[start:start + chunk_size]
        results.extend(client.set_standard_blob_tier_blobs(
            tier, *chunk,
            rehydrate_priority=rehydrate_priority,
            if_tags_match_condition=if_tags_match_condition,
            raise_on_any_failure=False, **kwargs))
    return results


def acquire_blob_lease(client, lease_duration=-1, **kwargs):
    """Acquire a lease via the lease client and return the resulting lease id.

    NOTE(review): the SDK lease client records the lease id on itself after
    ``acquire`` rather than returning it, so it is read back from
    ``client.id``.
    """
    # **kwargs is a fresh dict local to this call, so mutating it is safe.
    kwargs['lease_duration'] = lease_duration
    client.acquire(**kwargs)
    return client.id
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -537,6 +537,20 @@ def create_and_populate_container():
self.storage_cmd('storage blob list -c {}', storage_account_info, src_container).assert_with_checks(
JMESPathCheck('length(@)', 0))

# delete with --blobs
src_container = create_and_populate_container()
self.storage_cmd('storage blob delete-batch -s {} --blobs apple/file_1 apple/file_2',
storage_account_info, src_container)
self.storage_cmd('storage blob list -c {}', storage_account_info, src_container).assert_with_checks(
JMESPathCheck('length(@)', 39))

# test with --tags-condition
src_container = create_and_populate_container()
self.storage_cmd('storage blob delete-batch -s {} --tags-condition tagname=tag1',
storage_account_info, src_container)
self.storage_cmd('storage blob list -c {}', storage_account_info, src_container).assert_with_checks(
JMESPathCheck('length(@)', 41))

@ResourceGroupPreparer()
@StorageAccountPreparer()
@StorageTestFilesPreparer()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -800,6 +800,32 @@ def test_storage_block_blob_set_tier(self, resource_group, storage_account):
.assert_with_checks(JMESPathCheck('properties.blobTier', 'Archive'),
JMESPathCheck('properties.rehydrationStatus', 'rehydrate-pending-to-cold'))

@ResourceGroupPreparer()
@StorageAccountPreparer(kind='StorageV2')
def test_storage_block_blob_set_tier_batch(self, resource_group, storage_account):
    # Scenario test: upload two block blobs, then change their access tier
    # with a single `storage blob set-tier-batch` call per target tier.
    source_file = self.create_temp_file(16)
    account_info = self.get_account_info(resource_group, storage_account)
    container_name = self.create_container(account_info)

    blob_1_name = self.create_random_name(prefix='blob', length=24)
    blob_2_name = self.create_random_name(prefix='blob', length=24)

    self.storage_cmd('storage blob upload -c {} -n {} -f "{}"', account_info,
                     container_name, blob_1_name, source_file)
    self.storage_cmd('storage blob upload -c {} -n {} -f "{}"', account_info,
                     container_name, blob_2_name, source_file)
    # The command's output is a list of per-blob sub-response dicts;
    # status_code 200 is asserted for every blob in the batch.
    result = self.storage_cmd('storage blob set-tier-batch -c {} --blobs {} --tier Cool',
                              account_info, container_name, blob_1_name + " " + blob_2_name).get_output_in_json()
    for res in result:
        self.assertEqual(res["status_code"], 200)
    result = self.storage_cmd('storage blob set-tier-batch -c {} --blobs {} --tier Cold',
                              account_info, container_name, blob_1_name + " " + blob_2_name).get_output_in_json()
    for res in result:
        self.assertEqual(res["status_code"], 200)
    result = self.storage_cmd('storage blob set-tier-batch -c {} --blobs {} --tier Archive',
                              account_info, container_name, blob_1_name + " " + blob_2_name).get_output_in_json()
    for res in result:
        self.assertEqual(res["status_code"], 200)

@api_version_constraint(ResourceType.DATA_STORAGE_BLOB, min_api='2020-10-02')
class StorageBlobImmutabilityTests(StorageScenarioMixin, ScenarioTest):
Expand Down
Loading