Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

{Storage} az storage file metadata/copy: Track2 SDK Migration #22824

Merged
merged 10 commits into from
Jun 14, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/azure-cli/azure/cli/command_modules/storage/_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,3 +123,7 @@ def transform_file_directory_result(result):
return_list.append(each)

return return_list


def transform_metadata_show(result):
    """Output transformer: reduce a file-properties response to its
    user-defined metadata dictionary for ``az storage file metadata show``.

    :param result: service response object exposing a ``metadata`` attribute.
    :return: the user-defined metadata mapping carried on the response.
    """
    metadata = getattr(result, 'metadata')
    return metadata
28 changes: 28 additions & 0 deletions src/azure-cli/azure/cli/command_modules/storage/_help.py
Original file line number Diff line number Diff line change
Expand Up @@ -1801,6 +1801,18 @@
helps['storage file copy start'] = """
type: command
short-summary: Copy a file asynchronously.
parameters:
- name: --source-uri -u
type: string
short-summary: >
A URL of up to 2 KB in length that specifies an Azure file or blob.
The value should be URL-encoded as it would appear in a request URI.
If the source is in another account, the source must either be public
or must be authenticated via a shared access signature. If the source
is public, no authentication is required.
Examples:
https://myaccount.file.core.windows.net/myshare/mydir/myfile
https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken.
examples:
- name: Copy a file asynchronously.
text: |
Expand Down Expand Up @@ -1995,6 +2007,22 @@
short-summary: Manage file metadata.
"""

helps['storage file metadata show'] = """
type: command
short-summary: Return all user-defined metadata for the file.
examples:
- name: Show metadata for the file
text: az storage file metadata show -s MyShare --path /path/to/file
"""

helps['storage file metadata update'] = """
type: command
short-summary: Update file metadata.
examples:
- name: Update metadata for the file
text: az storage file metadata update -s MyShare --path /path/to/file --metadata key1=value1
"""

helps['storage file update'] = """
type: command
short-summary: Set system properties on the file.
Expand Down
55 changes: 36 additions & 19 deletions src/azure-cli/azure/cli/command_modules/storage/_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -1827,8 +1827,39 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
c.argument('share_name', share_name_type, options_list=('--destination-share', '-s'),
help='Name of the destination share. The share must exist.')

with self.argument_context('storage file copy start') as c:
from .completers import dir_path_completer
from azure.cli.command_modules.storage._validators import validate_source_uri
c.register_path_argument(options_list=('--destination-path', '-p'))
c.register_source_uri_arguments(validator=validate_source_uri)
c.extra('share_name', share_name_type, options_list=('--destination-share', '-s'), required=True,
help='Name of the destination share. The share must exist.')
c.extra('file_snapshot', default=None, arg_group='Copy Source',
help='The file snapshot for the source storage account.')
c.extra('metadata', nargs='+',
help='Metadata in space-separated key=value pairs. This overwrites any existing metadata.',
validator=validate_metadata)
c.extra('timeout', help='Request timeout in seconds. Applies to each call to the service.', type=int)

with self.argument_context('storage file copy cancel') as c:
c.register_path_argument(options_list=('--destination-path', '-p'))
c.extra('share_name', share_name_type, options_list=('--destination-share', '-s'), required=True,
help='Name of the destination share. The share must exist.')
c.extra('timeout', help='Request timeout in seconds. Applies to each call to the service.', type=int)

with self.argument_context('storage file copy start-batch') as c:
c.argument('share_name', share_name_type, options_list=('--destination-share'),
help='Name of the destination share. The share must exist.')

with self.argument_context('storage file copy start-batch', arg_group='Copy Source') as c:
from ._validators import get_source_file_or_blob_service_client_track2
c.argument('source_client', ignore_type, validator=get_source_file_or_blob_service_client_track2)
c.extra('source_account_name')
c.extra('source_account_key')
c.extra('source_uri')
c.argument('source_sas')
c.argument('source_container')
c.argument('source_share')

with self.argument_context('storage file delete') as c:
c.register_path_argument()
Expand Down Expand Up @@ -1880,7 +1911,6 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
c.ignore('sas_token')

with self.argument_context('storage file list') as c:
from .completers import dir_path_completer
c.extra('share_name', share_name_type, required=True)
c.extra('snapshot', help="A string that represents the snapshot version, if applicable.")
c.argument('directory_name', options_list=('--path', '-p'), help='The directory path within the file share.',
Expand All @@ -1892,9 +1922,14 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem

with self.argument_context('storage file metadata show') as c:
c.register_path_argument()
c.extra('share_name', share_name_type, required=True)
c.extra('snapshot', help="A string that represents the snapshot version, if applicable.")
c.extra('timeout', help='Request timeout in seconds. Applies to each call to the service.', type=int)

with self.argument_context('storage file metadata update') as c:
c.register_path_argument()
c.extra('share_name', share_name_type, required=True)
c.extra('timeout', help='Request timeout in seconds. Applies to each call to the service.', type=int)

with self.argument_context('storage file resize') as c:
c.register_path_argument()
Expand Down Expand Up @@ -1951,24 +1986,6 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
from ._validators import process_file_batch_source_parameters
c.argument('source', options_list=('--source', '-s'), validator=process_file_batch_source_parameters)

with self.argument_context('storage file copy start') as c:
from azure.cli.command_modules.storage._validators import validate_source_uri

c.register_path_argument(options_list=('--destination-path', '-p'))
c.register_source_uri_arguments(validator=validate_source_uri)
c.extra('file_snapshot', default=None, arg_group='Copy Source',
help='The file snapshot for the source storage account.')

with self.argument_context('storage file copy start-batch', arg_group='Copy Source') as c:
from ._validators import get_source_file_or_blob_service_client
c.argument('source_client', ignore_type, validator=get_source_file_or_blob_service_client)
c.extra('source_account_name')
c.extra('source_account_key')
c.extra('source_uri')
c.argument('source_sas')
c.argument('source_container')
c.argument('source_share')

with self.argument_context('storage cors list') as c:
c.extra('services', validator=get_char_options_validator('bfqt', 'services'), default='bfqt',
options_list='--services', required=False)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -650,7 +650,7 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
c.argument('source_lease_id', arg_group='Copy Source')

with self.argument_context('storage blob copy start') as c:
from azure.cli.command_modules.storage._validators import validate_source_uri
from azure.cli.command_modules.storage._validators_azure_stack import validate_source_uri

c.register_source_uri_arguments(validator=validate_source_uri)
c.argument('requires_sync', arg_type=get_three_state_flag(),
Expand Down Expand Up @@ -987,7 +987,7 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
c.argument('source', options_list=('--source', '-s'), validator=process_file_batch_source_parameters)

with self.argument_context('storage file copy start') as c:
from azure.cli.command_modules.storage._validators import validate_source_uri
from azure.cli.command_modules.storage._validators_azure_stack import validate_source_uri

c.register_path_argument(options_list=('--destination-path', '-p'))
c.register_source_uri_arguments(validator=validate_source_uri)
Expand Down
18 changes: 14 additions & 4 deletions src/azure-cli/azure/cli/command_modules/storage/_validators.py
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,8 @@ def process_blob_source_uri(cmd, namespace):


def validate_source_uri(cmd, namespace): # pylint: disable=too-many-statements
from .util import create_short_lived_blob_sas, create_short_lived_file_sas
from .util import create_short_lived_blob_sas, create_short_lived_blob_sas_v2, \
create_short_lived_file_sas, create_short_lived_file_sas_v2
usage_string = \
'Invalid usage: {}. Supply only one of the following argument sets to specify source:' \
'\n\t --source-uri [--source-sas]' \
Expand Down Expand Up @@ -394,12 +395,21 @@ def validate_source_uri(cmd, namespace): # pylint: disable=too-many-statements
# Both source account name and either key or sas (or both) are now available
if not source_sas:
# generate a sas token even in the same account when the source and destination are not the same kind.
prefix = cmd.command_kwargs['resource_type'].value[0]
if valid_file_source and (ns.get('container_name', None) or not same_account):
dir_name, file_name = os.path.split(path) if path else (None, '')
source_sas = create_short_lived_file_sas(cmd, source_account_name, source_account_key, share,
dir_name, file_name)
if is_storagev2(prefix):
source_sas = create_short_lived_file_sas_v2(cmd, source_account_name, source_account_key, share,
dir_name, file_name)
else:
source_sas = create_short_lived_file_sas(cmd, source_account_name, source_account_key, share,
dir_name, file_name)
elif valid_blob_source and (ns.get('share_name', None) or not same_account):
source_sas = create_short_lived_blob_sas(cmd, source_account_name, source_account_key, container, blob)
if is_storagev2(prefix):
source_sas = create_short_lived_blob_sas_v2(cmd, source_account_name, source_account_key, container,
blob)
else:
source_sas = create_short_lived_blob_sas(cmd, source_account_name, source_account_key, container, blob)

query_params = []
if source_sas:
Expand Down
13 changes: 7 additions & 6 deletions src/azure-cli/azure/cli/command_modules/storage/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -673,20 +673,15 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
from ._exception_handler import file_related_exception_handler
g.storage_command('download', 'get_file_to_path', exception_handler=file_related_exception_handler)
g.storage_command('upload', 'create_file_from_path', exception_handler=file_related_exception_handler)
g.storage_command('metadata show', 'get_file_metadata',
exception_handler=show_exception_handler)
g.storage_command('metadata update', 'set_file_metadata')
g.storage_command('copy start', 'copy_file')
g.storage_command('copy cancel', 'abort_copy_file')
g.storage_custom_command('upload-batch', 'storage_file_upload_batch')
g.storage_custom_command(
'download-batch', 'storage_file_download_batch')
g.storage_custom_command('delete-batch', 'storage_file_delete_batch')
g.storage_custom_command('copy start-batch', 'storage_file_copy_batch')

with self.command_group('storage file', command_type=file_client_sdk,
custom_command_type=get_custom_sdk('file', cf_share_file_client)) as g:
from ._transformers import transform_file_show_result
from ._format import transform_metadata_show
g.storage_custom_command('list', 'list_share_files', client_factory=cf_share_client,
transform=transform_file_directory_result,
table_transformer=transform_file_output)
Expand All @@ -701,6 +696,12 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
exception_handler=show_exception_handler)
g.storage_custom_command('update', 'file_updates', resource_type=ResourceType.DATA_STORAGE_FILESHARE)
g.storage_custom_command('exists', 'file_exists', transform=create_boolean_result_output_transformer('exists'))
g.storage_command('metadata show', 'get_file_properties', exception_handler=show_exception_handler,
transform=transform_metadata_show)
g.storage_command('metadata update', 'set_file_metadata')
g.storage_custom_command('copy start', 'storage_file_copy', resource_type=ResourceType.DATA_STORAGE_FILESHARE)
g.storage_command('copy cancel', 'abort_copy')
g.storage_custom_command('copy start-batch', 'storage_file_copy_batch', client_factory=cf_share_client)

with self.command_group('storage cors', get_custom_sdk('cors', multi_service_properties_factory)) as g:
from ._transformers import transform_cors_list_output
Expand Down
Loading