From 114ad3620ab81d4b32b09ff045430b156197d4cc Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Thu, 26 Nov 2020 10:06:28 +0000 Subject: [PATCH] CodeGen from PR 11826 in Azure/azure-rest-api-specs Merge b97299c968df5f99b724bd1231fd2161731d3b8f into cf617af566918903c7916e025d47d31f25648b06 --- .../azure/mgmt/streamanalytics/__init__.py | 19 + .../mgmt/streamanalytics/_configuration.py | 48 + .../_stream_analytics_management_client.py | 88 + .../mgmt/streamanalytics/models/__init__.py | 346 ++ .../mgmt/streamanalytics/models/_models.py | 3875 +++++++++++++++++ .../streamanalytics/models/_models_py3.py | 3875 +++++++++++++++++ .../streamanalytics/models/_paged_models.py | 118 + ...tream_analytics_management_client_enums.py | 112 + .../streamanalytics/operations/__init__.py | 32 + .../operations/_clusters_operations.py | 616 +++ .../operations/_functions_operations.py | 635 +++ .../operations/_inputs_operations.py | 555 +++ .../streamanalytics/operations/_operations.py | 102 + .../operations/_outputs_operations.py | 550 +++ .../_private_endpoints_operations.py | 348 ++ .../operations/_streaming_jobs_operations.py | 754 ++++ .../operations/_subscriptions_operations.py | 103 + .../operations/_transformations_operations.py | 298 ++ .../azure/mgmt/streamanalytics/version.py | 13 + 19 files changed, 12487 insertions(+) create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_paged_models.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py create mode 100644 
sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py create mode 100644 sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/version.py diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py new file mode 100644 index 000000000000..48fbd7046351 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from ._configuration import StreamAnalyticsManagementClientConfiguration +from ._stream_analytics_management_client import StreamAnalyticsManagementClient +__all__ = ['StreamAnalyticsManagementClient', 'StreamAnalyticsManagementClientConfiguration'] + +from .version import VERSION + +__version__ = VERSION + diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py new file mode 100644 index 000000000000..cb6e6e56a9d1 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +from msrestazure import AzureConfiguration + +from .version import VERSION + + +class StreamAnalyticsManagementClientConfiguration(AzureConfiguration): + """Configuration for StreamAnalyticsManagementClient + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credentials: Credentials needed for the client to connect to Azure. + :type credentials: :mod:`A msrestazure Credentials + object` + :param subscription_id: The ID of the target subscription. 
+ :type subscription_id: str + :param str base_url: Service URL + """ + + def __init__( + self, credentials, subscription_id, base_url=None): + + if credentials is None: + raise ValueError("Parameter 'credentials' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + if not base_url: + base_url = 'https://management.azure.com' + + super(StreamAnalyticsManagementClientConfiguration, self).__init__(base_url) + + # Starting Autorest.Python 4.0.64, make connection pool activated by default + self.keep_alive = True + + self.add_user_agent('azure-mgmt-streamanalytics/{}'.format(VERSION)) + self.add_user_agent('Azure-SDK-For-Python') + + self.credentials = credentials + self.subscription_id = subscription_id diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py new file mode 100644 index 000000000000..19ff5d2c8ddc --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.service_client import SDKClient +from msrest import Serializer, Deserializer + +from ._configuration import StreamAnalyticsManagementClientConfiguration +from .operations import FunctionsOperations +from .operations import InputsOperations +from .operations import OutputsOperations +from .operations import StreamingJobsOperations +from .operations import SubscriptionsOperations +from .operations import TransformationsOperations +from .operations import Operations +from .operations import ClustersOperations +from .operations import PrivateEndpointsOperations +from . import models + + +class StreamAnalyticsManagementClient(SDKClient): + """Stream Analytics Client + + :ivar config: Configuration for client. 
+ :vartype config: StreamAnalyticsManagementClientConfiguration + + :ivar functions: Functions operations + :vartype functions: azure.mgmt.streamanalytics.operations.FunctionsOperations + :ivar inputs: Inputs operations + :vartype inputs: azure.mgmt.streamanalytics.operations.InputsOperations + :ivar outputs: Outputs operations + :vartype outputs: azure.mgmt.streamanalytics.operations.OutputsOperations + :ivar streaming_jobs: StreamingJobs operations + :vartype streaming_jobs: azure.mgmt.streamanalytics.operations.StreamingJobsOperations + :ivar subscriptions: Subscriptions operations + :vartype subscriptions: azure.mgmt.streamanalytics.operations.SubscriptionsOperations + :ivar transformations: Transformations operations + :vartype transformations: azure.mgmt.streamanalytics.operations.TransformationsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.streamanalytics.operations.Operations + :ivar clusters: Clusters operations + :vartype clusters: azure.mgmt.streamanalytics.operations.ClustersOperations + :ivar private_endpoints: PrivateEndpoints operations + :vartype private_endpoints: azure.mgmt.streamanalytics.operations.PrivateEndpointsOperations + + :param credentials: Credentials needed for the client to connect to Azure. + :type credentials: :mod:`A msrestazure Credentials + object` + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + """ + + def __init__( + self, credentials, subscription_id, base_url=None): + + self.config = StreamAnalyticsManagementClientConfiguration(credentials, subscription_id, base_url) + super(StreamAnalyticsManagementClient, self).__init__(self.config.credentials, self.config) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.functions = FunctionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.inputs = InputsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.outputs = OutputsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.transformations = TransformationsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self.config, self._serialize, self._deserialize) + self.clusters = ClustersOperations( + self._client, self.config, self._serialize, self._deserialize) + self.private_endpoints = PrivateEndpointsOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py new file mode 100644 index 000000000000..498f8ae645e8 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -0,0 +1,346 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AggregateFunctionProperties + from ._models_py3 import AvroSerialization + from ._models_py3 import AzureDataLakeStoreOutputDataSource + from ._models_py3 import AzureFunctionOutputDataSource + from ._models_py3 import AzureMachineLearningServiceFunctionBinding + from ._models_py3 import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import AzureMachineLearningServiceInputColumn + from ._models_py3 import AzureMachineLearningServiceInputs + from ._models_py3 import AzureMachineLearningServiceOutputColumn + from ._models_py3 import AzureMachineLearningStudioFunctionBinding + from ._models_py3 import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import AzureMachineLearningStudioInputColumn + from ._models_py3 import AzureMachineLearningStudioInputs + from ._models_py3 import AzureMachineLearningStudioOutputColumn + from ._models_py3 import AzureSqlDatabaseDataSourceProperties + from ._models_py3 import AzureSqlDatabaseOutputDataSource + from ._models_py3 import AzureSqlReferenceInputDataSource + from ._models_py3 import AzureSqlReferenceInputDataSourceProperties + from ._models_py3 import AzureSynapseDataSourceProperties + from ._models_py3 import AzureSynapseOutputDataSource + from ._models_py3 import AzureTableOutputDataSource + from ._models_py3 import BlobDataSourceProperties + from ._models_py3 import BlobOutputDataSource + from ._models_py3 import BlobReferenceInputDataSource + from ._models_py3 import BlobStreamInputDataSource + from ._models_py3 import Cluster + from ._models_py3 import ClusterInfo + from ._models_py3 import ClusterJob + from ._models_py3 import ClusterProperties + from ._models_py3 import ClusterSku + from ._models_py3 import Compression + from ._models_py3 import CSharpFunctionBinding + from ._models_py3 import CSharpFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import CsvSerialization + from ._models_py3 import CustomClrSerialization + from ._models_py3 import DiagnosticCondition + from ._models_py3 import Diagnostics + from ._models_py3 import DocumentDbOutputDataSource + from ._models_py3 import Error, ErrorException + from ._models_py3 import ErrorDetails + from ._models_py3 import ErrorError + from ._models_py3 import ErrorResponse + from ._models_py3 import EventHubDataSourceProperties + from ._models_py3 import EventHubOutputDataSource + from ._models_py3 import EventHubStreamInputDataSource + from ._models_py3 import EventHubV2OutputDataSource + from ._models_py3 import EventHubV2StreamInputDataSource + from ._models_py3 import External + from ._models_py3 import Function + from ._models_py3 import FunctionBinding + from ._models_py3 import FunctionInput + from ._models_py3 import FunctionOutput + from ._models_py3 import FunctionProperties + from ._models_py3 import FunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import Identity + from ._models_py3 import Input + from ._models_py3 import InputProperties + from ._models_py3 import IoTHubStreamInputDataSource + from ._models_py3 import JavaScriptFunctionBinding + from ._models_py3 import JavaScriptFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import JobStorageAccount + from ._models_py3 import JsonSerialization 
+ from ._models_py3 import OAuthBasedDataSourceProperties + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import Output + from ._models_py3 import OutputDataSource + from ._models_py3 import ParquetSerialization + from ._models_py3 import PowerBIOutputDataSource + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointProperties + from ._models_py3 import PrivateLinkConnectionState + from ._models_py3 import PrivateLinkServiceConnection + from ._models_py3 import ProxyResource + from ._models_py3 import ReferenceInputDataSource + from ._models_py3 import ReferenceInputProperties + from ._models_py3 import Resource + from ._models_py3 import ResourceTestStatus + from ._models_py3 import ScalarFunctionProperties + from ._models_py3 import Serialization + from ._models_py3 import ServiceBusDataSourceProperties + from ._models_py3 import ServiceBusQueueOutputDataSource + from ._models_py3 import ServiceBusTopicOutputDataSource + from ._models_py3 import StartStreamingJobParameters + from ._models_py3 import StorageAccount + from ._models_py3 import StreamingJob + from ._models_py3 import StreamingJobSku + from ._models_py3 import StreamInputDataSource + from ._models_py3 import StreamInputProperties + from ._models_py3 import SubResource + from ._models_py3 import SubscriptionQuota + from ._models_py3 import SubscriptionQuotasListResult + from ._models_py3 import TrackedResource + from ._models_py3 import Transformation +except (SyntaxError, ImportError): + from ._models import AggregateFunctionProperties + from ._models import AvroSerialization + from ._models import AzureDataLakeStoreOutputDataSource + from ._models import AzureFunctionOutputDataSource + from ._models import AzureMachineLearningServiceFunctionBinding + from ._models import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters + from ._models import AzureMachineLearningServiceInputColumn + from ._models import AzureMachineLearningServiceInputs + from ._models import AzureMachineLearningServiceOutputColumn + from ._models import AzureMachineLearningStudioFunctionBinding + from ._models import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters + from ._models import AzureMachineLearningStudioInputColumn + from ._models import AzureMachineLearningStudioInputs + from ._models import AzureMachineLearningStudioOutputColumn + from ._models import AzureSqlDatabaseDataSourceProperties + from ._models import AzureSqlDatabaseOutputDataSource + from ._models import AzureSqlReferenceInputDataSource + from ._models import AzureSqlReferenceInputDataSourceProperties + from ._models import AzureSynapseDataSourceProperties + from ._models import AzureSynapseOutputDataSource + from ._models import AzureTableOutputDataSource + from ._models import BlobDataSourceProperties + from ._models import BlobOutputDataSource + from ._models import BlobReferenceInputDataSource + from ._models import BlobStreamInputDataSource + from ._models import Cluster + from ._models import ClusterInfo + from ._models import ClusterJob + from ._models import ClusterProperties + from ._models import ClusterSku + from ._models import Compression + from ._models import CSharpFunctionBinding + from ._models import CSharpFunctionRetrieveDefaultDefinitionParameters + from ._models import CsvSerialization + from ._models import CustomClrSerialization + from ._models import DiagnosticCondition + from ._models import Diagnostics + from ._models import 
DocumentDbOutputDataSource + from ._models import Error, ErrorException + from ._models import ErrorDetails + from ._models import ErrorError + from ._models import ErrorResponse + from ._models import EventHubDataSourceProperties + from ._models import EventHubOutputDataSource + from ._models import EventHubStreamInputDataSource + from ._models import EventHubV2OutputDataSource + from ._models import EventHubV2StreamInputDataSource + from ._models import External + from ._models import Function + from ._models import FunctionBinding + from ._models import FunctionInput + from ._models import FunctionOutput + from ._models import FunctionProperties + from ._models import FunctionRetrieveDefaultDefinitionParameters + from ._models import Identity + from ._models import Input + from ._models import InputProperties + from ._models import IoTHubStreamInputDataSource + from ._models import JavaScriptFunctionBinding + from ._models import JavaScriptFunctionRetrieveDefaultDefinitionParameters + from ._models import JobStorageAccount + from ._models import JsonSerialization + from ._models import OAuthBasedDataSourceProperties + from ._models import Operation + from ._models import OperationDisplay + from ._models import Output + from ._models import OutputDataSource + from ._models import ParquetSerialization + from ._models import PowerBIOutputDataSource + from ._models import PrivateEndpoint + from ._models import PrivateEndpointProperties + from ._models import PrivateLinkConnectionState + from ._models import PrivateLinkServiceConnection + from ._models import ProxyResource + from ._models import ReferenceInputDataSource + from ._models import ReferenceInputProperties + from ._models import Resource + from ._models import ResourceTestStatus + from ._models import ScalarFunctionProperties + from ._models import Serialization + from ._models import ServiceBusDataSourceProperties + from ._models import ServiceBusQueueOutputDataSource + from ._models import ServiceBusTopicOutputDataSource + from ._models import StartStreamingJobParameters + from ._models import StorageAccount + from ._models import StreamingJob + from ._models import StreamingJobSku + from ._models import StreamInputDataSource + from ._models import StreamInputProperties + from ._models import SubResource + from ._models import SubscriptionQuota + from ._models import SubscriptionQuotasListResult + from ._models import TrackedResource + from ._models import Transformation +from ._paged_models import ClusterJobPaged +from ._paged_models import ClusterPaged +from ._paged_models import FunctionPaged +from ._paged_models import InputPaged +from ._paged_models import OperationPaged +from ._paged_models import OutputPaged +from ._paged_models import PrivateEndpointPaged +from ._paged_models import StreamingJobPaged +from ._stream_analytics_management_client_enums import ( + UdfType, + AuthenticationMode, + Encoding, + JsonOutputSerializationFormat, + EventSerializationType, + StreamingJobSkuName, + JobType, + OutputStartMode, + EventsOutOfOrderPolicy, + OutputErrorPolicy, + CompatibilityLevel, + ContentStoragePolicy, + ClusterSkuName, + ClusterProvisioningState, + JobState, +) + +__all__ = [ + 'AggregateFunctionProperties', + 'AvroSerialization', + 'AzureDataLakeStoreOutputDataSource', + 'AzureFunctionOutputDataSource', + 'AzureMachineLearningServiceFunctionBinding', + 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', + 'AzureMachineLearningServiceInputColumn', + 'AzureMachineLearningServiceInputs', + 
'AzureMachineLearningServiceOutputColumn', + 'AzureMachineLearningStudioFunctionBinding', + 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', + 'AzureMachineLearningStudioInputColumn', + 'AzureMachineLearningStudioInputs', + 'AzureMachineLearningStudioOutputColumn', + 'AzureSqlDatabaseDataSourceProperties', + 'AzureSqlDatabaseOutputDataSource', + 'AzureSqlReferenceInputDataSource', + 'AzureSqlReferenceInputDataSourceProperties', + 'AzureSynapseDataSourceProperties', + 'AzureSynapseOutputDataSource', + 'AzureTableOutputDataSource', + 'BlobDataSourceProperties', + 'BlobOutputDataSource', + 'BlobReferenceInputDataSource', + 'BlobStreamInputDataSource', + 'Cluster', + 'ClusterInfo', + 'ClusterJob', + 'ClusterProperties', + 'ClusterSku', + 'Compression', + 'CSharpFunctionBinding', + 'CSharpFunctionRetrieveDefaultDefinitionParameters', + 'CsvSerialization', + 'CustomClrSerialization', + 'DiagnosticCondition', + 'Diagnostics', + 'DocumentDbOutputDataSource', + 'Error', 'ErrorException', + 'ErrorDetails', + 'ErrorError', + 'ErrorResponse', + 'EventHubDataSourceProperties', + 'EventHubOutputDataSource', + 'EventHubStreamInputDataSource', + 'EventHubV2OutputDataSource', + 'EventHubV2StreamInputDataSource', + 'External', + 'Function', + 'FunctionBinding', + 'FunctionInput', + 'FunctionOutput', + 'FunctionProperties', + 'FunctionRetrieveDefaultDefinitionParameters', + 'Identity', + 'Input', + 'InputProperties', + 'IoTHubStreamInputDataSource', + 'JavaScriptFunctionBinding', + 'JavaScriptFunctionRetrieveDefaultDefinitionParameters', + 'JobStorageAccount', + 'JsonSerialization', + 'OAuthBasedDataSourceProperties', + 'Operation', + 'OperationDisplay', + 'Output', + 'OutputDataSource', + 'ParquetSerialization', + 'PowerBIOutputDataSource', + 'PrivateEndpoint', + 'PrivateEndpointProperties', + 'PrivateLinkConnectionState', + 'PrivateLinkServiceConnection', + 'ProxyResource', + 'ReferenceInputDataSource', + 'ReferenceInputProperties', + 'Resource', + 'ResourceTestStatus', + 'ScalarFunctionProperties', + 'Serialization', + 'ServiceBusDataSourceProperties', + 'ServiceBusQueueOutputDataSource', + 'ServiceBusTopicOutputDataSource', + 'StartStreamingJobParameters', + 'StorageAccount', + 'StreamingJob', + 'StreamingJobSku', + 'StreamInputDataSource', + 'StreamInputProperties', + 'SubResource', + 'SubscriptionQuota', + 'SubscriptionQuotasListResult', + 'TrackedResource', + 'Transformation', + 'FunctionPaged', + 'InputPaged', + 'OutputPaged', + 'StreamingJobPaged', + 'OperationPaged', + 'ClusterPaged', + 'ClusterJobPaged', + 'PrivateEndpointPaged', + 'UdfType', + 'AuthenticationMode', + 'Encoding', + 'JsonOutputSerializationFormat', + 'EventSerializationType', + 'StreamingJobSkuName', + 'JobType', + 'OutputStartMode', + 'EventsOutOfOrderPolicy', + 'OutputErrorPolicy', + 'CompatibilityLevel', + 'ContentStoragePolicy', + 'ClusterSkuName', + 'ClusterProvisioningState', + 'JobState', +] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py new file mode 100644 index 000000000000..ac0d154cd946 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py @@ -0,0 +1,3875 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model +from msrest.exceptions import HttpOperationError + + +class FunctionProperties(Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ScalarFunctionProperties, AggregateFunctionProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar etag: The current entity tag for the function. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] + :param output: + :type output: ~azure.mgmt.streamanalytics.models.FunctionOutput + :param binding: + :type binding: ~azure.mgmt.streamanalytics.models.FunctionBinding + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Scalar': 'ScalarFunctionProperties', 'Aggregate': 'AggregateFunctionProperties'} + } + + def __init__(self, **kwargs): + super(FunctionProperties, self).__init__(**kwargs) + self.etag = None + self.inputs = kwargs.get('inputs', None) + self.output = kwargs.get('output', None) + self.binding = kwargs.get('binding', None) + self.type = None + + +class AggregateFunctionProperties(FunctionProperties): + """The properties that are associated with an aggregate function. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar etag: The current entity tag for the function. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] + :param output: + :type output: ~azure.mgmt.streamanalytics.models.FunctionOutput + :param binding: + :type binding: ~azure.mgmt.streamanalytics.models.FunctionBinding + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AggregateFunctionProperties, self).__init__(**kwargs) + self.type = 'Aggregate' + + +class Serialization(Model): + """Describes how data from an input is serialized or how data is serialized + when written to an output. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetSerialization, CustomClrSerialization, + CsvSerialization, JsonSerialization, AvroSerialization + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Parquet': 'ParquetSerialization', 'CustomClr': 'CustomClrSerialization', 'Csv': 'CsvSerialization', 'Json': 'JsonSerialization', 'Avro': 'AvroSerialization'} + } + + def __init__(self, **kwargs): + super(Serialization, self).__init__(**kwargs) + self.type = None + + +class AvroSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param properties: The properties that are associated with the Avro + serialization type. Required on PUT (CreateOrReplace) requests. + :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AvroSerialization, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.type = 'Avro' + + +class OutputDataSource(Model): + """Describes the data source that output will be written to. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobOutputDataSource, AzureTableOutputDataSource, + EventHubOutputDataSource, EventHubV2OutputDataSource, + AzureSqlDatabaseOutputDataSource, AzureSynapseOutputDataSource, + DocumentDbOutputDataSource, AzureFunctionOutputDataSource, + ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, + PowerBIOutputDataSource, AzureDataLakeStoreOutputDataSource + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource'} + } + + def __init__(self, **kwargs): + super(OutputDataSource, self).__init__(**kwargs) + self.type = None + + +class AzureDataLakeStoreOutputDataSource(OutputDataSource): + """Describes an Azure Data Lake Store output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid + access token that can then be used to authenticate with the data source. A + valid refresh token is currently only obtainable via the Azure Portal. It + is recommended to put a dummy string value here when creating the data + source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on + PUT (CreateOrReplace) requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the + user that was used to obtain the refresh token. Use this property to help + remember which user was used to obtain the refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was + used to obtain the refresh token. Use this property to help remember which + user was used to obtain the refresh token. + :type token_user_display_name: str + :param account_name: The name of the Azure Data Lake Store account. + Required on PUT (CreateOrReplace) requests. + :type account_name: str + :param tenant_id: The tenant id of the user used to obtain the refresh + token. Required on PUT (CreateOrReplace) requests. + :type tenant_id: str + :param file_path_prefix: The location of the file to which the output + should be written to. Required on PUT (CreateOrReplace) requests. + :type file_path_prefix: str + :param date_format: The date format. Wherever {date} appears in + filePathPrefix, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + filePathPrefix, the value of this property is used as the time format + instead. + :type time_format: str + :param authentication_mode: Authentication Mode. 
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'file_path_prefix': {'key': 'properties.filePathPrefix', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreOutputDataSource, self).__init__(**kwargs) + self.refresh_token = kwargs.get('refresh_token', None) + self.token_user_principal_name = kwargs.get('token_user_principal_name', None) + self.token_user_display_name = kwargs.get('token_user_display_name', None) + self.account_name = kwargs.get('account_name', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.file_path_prefix = kwargs.get('file_path_prefix', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.type = 'Microsoft.DataLake/Accounts' + + +class AzureFunctionOutputDataSource(OutputDataSource): + """Defines the metadata of AzureFunctionOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param function_app_name: The name of your Azure Functions app. + :type function_app_name: str + :param function_name: The name of the function in your Azure Functions + app. + :type function_name: str + :param api_key: If you want to use an Azure Function from another + subscription, you can do so by providing the key to access your function. + :type api_key: str + :param max_batch_size: A property that lets you set the maximum size for + each output batch that's sent to your Azure function. The input unit is in + bytes. By default, this value is 262,144 bytes (256 KB). + :type max_batch_size: float + :param max_batch_count: A property that lets you specify the maximum + number of events in each batch that's sent to Azure Functions. The default + value is 100. 
+ :type max_batch_count: float + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, + 'function_name': {'key': 'properties.functionName', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + } + + def __init__(self, **kwargs): + super(AzureFunctionOutputDataSource, self).__init__(**kwargs) + self.function_app_name = kwargs.get('function_app_name', None) + self.function_name = kwargs.get('function_name', None) + self.api_key = kwargs.get('api_key', None) + self.max_batch_size = kwargs.get('max_batch_size', None) + self.max_batch_count = kwargs.get('max_batch_count', None) + self.type = 'Microsoft.AzureFunction' + + +class FunctionBinding(Model): + """The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureMachineLearningStudioFunctionBinding, + JavaScriptFunctionBinding, CSharpFunctionBinding, + AzureMachineLearningServiceFunctionBinding + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding'} + } + + def __init__(self, **kwargs): + super(FunctionBinding, self).__init__(**kwargs) + self.type = None + + +class AzureMachineLearningServiceFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning web service. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure + Machine Learning web service. + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response + endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning web service + endpoint. + :type inputs: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + :param outputs: A list of outputs from the Azure Machine Learning web + service endpoint execution. + :type outputs: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of + rows for every Azure ML RRS execute request. Default is 1000. + :type batch_size: int + :param number_of_parallel_requests: The number of parallel requests that + will be sent per partition of your job to the machine learning service. + Default is 1. 
+ :type number_of_parallel_requests: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.batch_size = kwargs.get('batch_size', None) + self.number_of_parallel_requests = kwargs.get('number_of_parallel_requests', None) + self.type = 'Microsoft.MachineLearningServices' + + +class FunctionRetrieveDefaultDefinitionParameters(Model): + """Parameters used to specify the type of function to retrieve the default + definition for. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: + AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, + AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, + JavaScriptFunctionRetrieveDefaultDefinitionParameters, + CSharpFunctionRetrieveDefaultDefinitionParameters + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + } + + _subtype_map = { + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters'} + } + + def __init__(self, **kwargs): + super(FunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = None + + +class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an + Azure Machine Learning web service function. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the + Azure Machine Learning web service. + :type execute_endpoint: str + :param udf_type: The function type. 
Possible values include: 'Scalar' + :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.execute_endpoint = kwargs.get('execute_endpoint', None) + self.udf_type = kwargs.get('udf_type', None) + self.binding_type = 'Microsoft.MachineLearningServices' + + +class AzureMachineLearningServiceInputColumn(Model): + """Describes an input column for the Azure Machine Learning web service + endpoint. + + :param name: The name of the input column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + input column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input + maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + self.map_to = kwargs.get('map_to', None) + + +class AzureMachineLearningServiceInputs(Model): + """The inputs for the Azure Machine Learning web service endpoint. + + :param name: The name of the input. This is the name provided while + authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine + Learning web service endpoint. + :type column_names: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.column_names = kwargs.get('column_names', None) + + +class AzureMachineLearningServiceOutputColumn(Model): + """Describes an output column for the Azure Machine Learning web service + endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + output column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input + maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + self.map_to = kwargs.get('map_to', None) + + +class AzureMachineLearningStudioFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning Studio. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
+ :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure + Machine Learning Studio. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response + endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning Studio endpoint. + :type inputs: + ~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputs + :param outputs: A list of outputs from the Azure Machine Learning Studio + endpoint execution. + :type outputs: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of + rows for every Azure ML RRS execute request. Default is 1000. + :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'Microsoft.MachineLearning/WebService' + + +class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an + Azure Machine Learning Studio function. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the + Azure Machine Learning Studio. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs + :type execute_endpoint: str + :param udf_type: The function type. Possible values include: 'Scalar' + :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.execute_endpoint = kwargs.get('execute_endpoint', None) + self.udf_type = kwargs.get('udf_type', None) + self.binding_type = 'Microsoft.MachineLearning/WebService' + + +class AzureMachineLearningStudioInputColumn(Model): + """Describes an input column for the Azure Machine Learning Studio endpoint. + + :param name: The name of the input column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + input column. 
A list of valid Azure Machine Learning data types is + described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx + . + :type data_type: str + :param map_to: The zero based index of the function parameter this input + maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + self.map_to = kwargs.get('map_to', None) + + +class AzureMachineLearningStudioInputs(Model): + """The inputs for the Azure Machine Learning Studio endpoint. + + :param name: The name of the input. This is the name provided while + authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine + Learning Studio endpoint. + :type column_names: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.column_names = kwargs.get('column_names', None) + + +class AzureMachineLearningStudioOutputColumn(Model): + """Describes an output column for the Azure Machine Learning Studio endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + output column. A list of valid Azure Machine Learning data types is + described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx + . + :type data_type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.data_type = kwargs.get('data_type', None) + + +class AzureSqlDatabaseDataSourceProperties(Model): + """The properties that are associated with an Azure SQL database data source. + + :param server: The name of the SQL server containing the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure + SQL database. Required on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max Batch count for write to SQL database; the + default value is 10,000. Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max Writer count; currently only 1 (single + writer) and 0 (based on query partition) are available. Optional on PUT + requests. + :type max_writer_count: float + :param authentication_mode: Authentication Mode.
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDatabaseDataSourceProperties, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + self.table = kwargs.get('table', None) + self.max_batch_count = kwargs.get('max_batch_count', None) + self.max_writer_count = kwargs.get('max_writer_count', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class AzureSqlDatabaseOutputDataSource(OutputDataSource): + """Describes an Azure SQL database output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param server: The name of the SQL server containing the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure + SQL database. Required on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max Batch count for write to SQL database; the + default value is 10,000. Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max Writer count; currently only 1 (single + writer) and 0 (based on query partition) are available. Optional on PUT + requests. + :type max_writer_count: float + :param authentication_mode: Authentication Mode.
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'properties.maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDatabaseOutputDataSource, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.user = kwargs.get('user', None) + self.password = kwargs.get('password', None) + self.table = kwargs.get('table', None) + self.max_batch_count = kwargs.get('max_batch_count', None) + self.max_writer_count = kwargs.get('max_writer_count', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.type = 'Microsoft.Sql/Server/Database' + + +class ReferenceInputDataSource(Model): + """Describes an input data source that contains reference data. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobReferenceInputDataSource, + AzureSqlReferenceInputDataSource + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource'} + } + + def __init__(self, **kwargs): + super(ReferenceInputDataSource, self).__init__(**kwargs) + self.type = None + + +class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): + """Describes an Azure SQL database reference input data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param properties: + :type properties: + ~azure.mgmt.streamanalytics.models.AzureSqlReferenceInputDataSourceProperties + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, + } + + def __init__(self, **kwargs): + super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.type = 'Microsoft.Sql/Server/Database' + + +class AzureSqlReferenceInputDataSourceProperties(Model): + """AzureSqlReferenceInputDataSourceProperties. + + :param server: This element is associated with the datasource element. + This is the name of the server that contains the database that will be + written to. + :type server: str + :param database: This element is associated with the datasource element. + This is the name of the database that output will be written to. + :type database: str + :param user: This element is associated with the datasource element. 
This
+ is the user name that will be used to connect to the SQL Database
+ instance.
+ :type user: str
+ :param password: This element is associated with the datasource element.
+ This is the password that will be used to connect to the SQL Database
+ instance.
+ :type password: str
+ :param table: This element is associated with the datasource element. The
+ name of the table in the Azure SQL database.
+ :type table: str
+ :param refresh_type: This element is associated with the datasource
+ element. This element is of enum type. It indicates which data refresh
+ option to use: Static, RefreshPeriodicallyWithFull, or
+ RefreshPeriodicallyWithDelta.
+ :type refresh_type: str
+ :param refresh_rate: This element is associated with the datasource
+ element. This indicates how frequently the data will be fetched from the
+ database. It is of DateTime format.
+ :type refresh_rate: str
+ :param full_snapshot_query: This element is associated with the datasource
+ element. This query is used to fetch data from the SQL database.
+ :type full_snapshot_query: str
+ :param delta_snapshot_query: This element is associated with the
+ datasource element. This query is used to fetch incremental changes from
+ the SQL database. To use this option, we recommend using temporal tables
+ in Azure SQL Database.
+ :type delta_snapshot_query: str
+ """
+
+ _attribute_map = {
+ 'server': {'key': 'server', 'type': 'str'},
+ 'database': {'key': 'database', 'type': 'str'},
+ 'user': {'key': 'user', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'table': {'key': 'table', 'type': 'str'},
+ 'refresh_type': {'key': 'refreshType', 'type': 'str'},
+ 'refresh_rate': {'key': 'refreshRate', 'type': 'str'},
+ 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'},
+ 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs)
+ self.server = kwargs.get('server', None)
+ self.database = kwargs.get('database', None)
+ self.user = kwargs.get('user', None)
+ self.password = kwargs.get('password', None)
+ self.table = kwargs.get('table', None)
+ self.refresh_type = kwargs.get('refresh_type', None)
+ self.refresh_rate = kwargs.get('refresh_rate', None)
+ self.full_snapshot_query = kwargs.get('full_snapshot_query', None)
+ self.delta_snapshot_query = kwargs.get('delta_snapshot_query', None)
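+
+
+# Editor's note: a minimal usage sketch, not part of the generated module.
+# It shows a periodically refreshed SQL reference input using the refresh
+# options documented above; the server, credentials, refresh rate, and
+# query below are hypothetical.
+#
+#     props = AzureSqlReferenceInputDataSourceProperties(
+#         server='myserver.database.windows.net',
+#         database='referencedata',
+#         user='reader',
+#         password='<password>',
+#         refresh_type='RefreshPeriodicallyWithFull',
+#         refresh_rate='00:05:00',
+#         full_snapshot_query='SELECT DeviceId, Model FROM dbo.DeviceCatalog',
+#     )
+#     reference_input = AzureSqlReferenceInputDataSource(properties=props)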
+
+
+class AzureSynapseDataSourceProperties(Model):
+ """The properties that are associated with an Azure Synapse data source.
+
+ :param server: The name of the SQL server containing the Azure SQL
+ database. Required on PUT (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type database: str
+ :param table: The name of the table in the Azure SQL database. Required on
+ PUT (CreateOrReplace) requests.
+ :type table: str
+ :param user: The user name that will be used to connect to the Azure SQL
+ database. Required on PUT (CreateOrReplace) requests.
+ :type user: str
+ :param password: The password that will be used to connect to the Azure
+ SQL database. Required on PUT (CreateOrReplace) requests.
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'server': {'key': 'server', 'type': 'str'},
+ 'database': {'key': 'database', 'type': 'str'},
+ 'table': {'key': 'table', 'type': 'str'},
+ 'user': {'key': 'user', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureSynapseDataSourceProperties, self).__init__(**kwargs)
+ self.server = kwargs.get('server', None)
+ self.database = kwargs.get('database', None)
+ self.table = kwargs.get('table', None)
+ self.user = kwargs.get('user', None)
+ self.password = kwargs.get('password', None)
+
+
+class AzureSynapseOutputDataSource(OutputDataSource):
+ """Describes an Azure Synapse output data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param server: The name of the SQL server containing the Azure SQL
+ database. Required on PUT (CreateOrReplace) requests.
+ :type server: str
+ :param database: The name of the Azure SQL database. Required on PUT
+ (CreateOrReplace) requests.
+ :type database: str
+ :param table: The name of the table in the Azure SQL database. Required on
+ PUT (CreateOrReplace) requests.
+ :type table: str
+ :param user: The user name that will be used to connect to the Azure SQL
+ database. Required on PUT (CreateOrReplace) requests.
+ :type user: str
+ :param password: The password that will be used to connect to the Azure
+ SQL database. Required on PUT (CreateOrReplace) requests.
+ :type password: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'server': {'key': 'properties.server', 'type': 'str'},
+ 'database': {'key': 'properties.database', 'type': 'str'},
+ 'table': {'key': 'properties.table', 'type': 'str'},
+ 'user': {'key': 'properties.user', 'type': 'str'},
+ 'password': {'key': 'properties.password', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureSynapseOutputDataSource, self).__init__(**kwargs)
+ self.server = kwargs.get('server', None)
+ self.database = kwargs.get('database', None)
+ self.table = kwargs.get('table', None)
+ self.user = kwargs.get('user', None)
+ self.password = kwargs.get('password', None)
+ self.type = 'Microsoft.Sql/Server/DataWarehouse'
+
+
+class AzureTableOutputDataSource(OutputDataSource):
+ """Describes an Azure Table output data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param account_name: The name of the Azure Storage account. Required on
+ PUT (CreateOrReplace) requests.
+ :type account_name: str
+ :param account_key: The account key for the Azure Storage account.
+ Required on PUT (CreateOrReplace) requests.
+ :type account_key: str
+ :param table: The name of the Azure Table. Required on PUT
+ (CreateOrReplace) requests.
+ :type table: str
+ :param partition_key: This element indicates the name of a column from the
+ SELECT statement in the query that will be used as the partition key for
+ the Azure Table. Required on PUT (CreateOrReplace) requests.
+ :type partition_key: str
+ :param row_key: This element indicates the name of a column from the
+ SELECT statement in the query that will be used as the row key for the
+ Azure Table. Required on PUT (CreateOrReplace) requests.
+ :type row_key: str + :param columns_to_remove: If specified, each item in the array is the name + of a column to remove (if present) from output event entities. + :type columns_to_remove: list[str] + :param batch_size: The number of rows to write to the Azure Table at a + time. + :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'row_key': {'key': 'properties.rowKey', 'type': 'str'}, + 'columns_to_remove': {'key': 'properties.columnsToRemove', 'type': '[str]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AzureTableOutputDataSource, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.account_key = kwargs.get('account_key', None) + self.table = kwargs.get('table', None) + self.partition_key = kwargs.get('partition_key', None) + self.row_key = kwargs.get('row_key', None) + self.columns_to_remove = kwargs.get('columns_to_remove', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'Microsoft.Storage/Table' + + +class BlobDataSourceProperties(Model): + """The properties that are associated with a blob data source. + + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. + :type time_format: str + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(BlobDataSourceProperties, self).__init__(**kwargs) + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + + +class BlobOutputDataSource(OutputDataSource): + """Describes a blob output data source. 
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(BlobOutputDataSource, self).__init__(**kwargs) + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.type = 'Microsoft.Storage/Blob' + + +class BlobReferenceInputDataSource(ReferenceInputDataSource): + """Describes a blob input data source that contains reference data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. 
It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. + :type time_format: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(BlobReferenceInputDataSource, self).__init__(**kwargs) + self.storage_accounts = kwargs.get('storage_accounts', None) + self.container = kwargs.get('container', None) + self.path_pattern = kwargs.get('path_pattern', None) + self.date_format = kwargs.get('date_format', None) + self.time_format = kwargs.get('time_format', None) + self.type = 'Microsoft.Storage/Blob' + + +class StreamInputDataSource(Model): + """Describes an input data source that contains stream data. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobStreamInputDataSource, EventHubStreamInputDataSource, + EventHubV2StreamInputDataSource, IoTHubStreamInputDataSource + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Storage/Blob': 'BlobStreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource'} + } + + def __init__(self, **kwargs): + super(StreamInputDataSource, self).__init__(**kwargs) + self.type = None + + +class BlobStreamInputDataSource(StreamInputDataSource): + """Describes a blob input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. 
See
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input
+ or
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output
+ for a more detailed explanation and example.
+ :type path_pattern: str
+ :param date_format: The date format. Wherever {date} appears in
+ pathPattern, the value of this property is used as the date format
+ instead.
+ :type date_format: str
+ :param time_format: The time format. Wherever {time} appears in
+ pathPattern, the value of this property is used as the time format
+ instead.
+ :type time_format: str
+ :param source_partition_count: The partition count of the blob input data
+ source. Range 1 - 256.
+ :type source_partition_count: int
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'},
+ 'container': {'key': 'properties.container', 'type': 'str'},
+ 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'},
+ 'date_format': {'key': 'properties.dateFormat', 'type': 'str'},
+ 'time_format': {'key': 'properties.timeFormat', 'type': 'str'},
+ 'source_partition_count': {'key': 'properties.sourcePartitionCount', 'type': 'int'},
+ }
+
+ def __init__(self, **kwargs):
+ super(BlobStreamInputDataSource, self).__init__(**kwargs)
+ self.storage_accounts = kwargs.get('storage_accounts', None)
+ self.container = kwargs.get('container', None)
+ self.path_pattern = kwargs.get('path_pattern', None)
+ self.date_format = kwargs.get('date_format', None)
+ self.time_format = kwargs.get('time_format', None)
+ self.source_partition_count = kwargs.get('source_partition_count', None)
+ self.type = 'Microsoft.Storage/Blob'
+
+
+class CloudError(Model):
+ """CloudError.
+ """
+
+ _attribute_map = {
+ }
+
+
+class Resource(Model):
+ """Resource.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+ :vartype id: str
+ :ivar name: The name of the resource
+ :vartype name: str
+ :ivar type: The type of the resource. Ex-
+ Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+
+
+class TrackedResource(Resource):
+ """The resource model definition for an ARM tracked top-level resource.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+ :vartype id: str
+ :ivar name: The name of the resource
+ :vartype name: str
+ :ivar type: The type of the resource. Ex-
+ Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ :param tags: Resource tags.
+ :type tags: dict[str, str] + :param location: The geo-location where the resource lives + :type location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs.get('location', None) + + +class Cluster(TrackedResource): + """A Stream Analytics Cluster object. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: The geo-location where the resource lives + :type location: str + :param sku: + :type sku: ~azure.mgmt.streamanalytics.models.ClusterSku + :ivar etag: The current entity tag for the cluster. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param properties: The properties associated with a Stream Analytics + cluster. + :type properties: ~azure.mgmt.streamanalytics.models.ClusterProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ClusterProperties'}, + } + + def __init__(self, **kwargs): + super(Cluster, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.etag = None + self.properties = kwargs.get('properties', None) + + +class ClusterInfo(Model): + """The properties associated with a Stream Analytics cluster. + + :param id: The resource id of cluster. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ClusterInfo, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + + +class ClusterJob(Model): + """A streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource ID of the streaming job. + :vartype id: str + :ivar streaming_units: The number of streaming units that are used by the + streaming job. 
+ :vartype streaming_units: int
+ :param job_state: Possible values include: 'Created', 'Starting',
+ 'Running', 'Stopping', 'Stopped', 'Deleting', 'Failed', 'Degraded',
+ 'Restarting', 'Scaling'
+ :type job_state: str or ~azure.mgmt.streamanalytics.models.JobState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'streaming_units': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'streaming_units': {'key': 'streamingUnits', 'type': 'int'},
+ 'job_state': {'key': 'jobState', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ClusterJob, self).__init__(**kwargs)
+ self.id = None
+ self.streaming_units = None
+ self.job_state = kwargs.get('job_state', None)
+
+
+class ClusterProperties(Model):
+ """The properties associated with a Stream Analytics cluster.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar created_date: The date this cluster was created.
+ :vartype created_date: datetime
+ :ivar cluster_id: Unique identifier for the cluster.
+ :vartype cluster_id: str
+ :param provisioning_state: Possible values include: 'Succeeded', 'Failed',
+ 'Canceled', 'InProgress'
+ :type provisioning_state: str or
+ ~azure.mgmt.streamanalytics.models.ClusterProvisioningState
+ :ivar capacity_allocated: Represents the number of streaming units
+ currently being used on the cluster.
+ :vartype capacity_allocated: int
+ :ivar capacity_assigned: Represents the sum of the SUs of all streaming
+ jobs associated with the cluster. If all of the jobs were running, this
+ would be the capacity allocated.
+ :vartype capacity_assigned: int
+ """
+
+ _validation = {
+ 'created_date': {'readonly': True},
+ 'cluster_id': {'readonly': True},
+ 'capacity_allocated': {'readonly': True},
+ 'capacity_assigned': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
+ 'cluster_id': {'key': 'clusterId', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'capacity_allocated': {'key': 'capacityAllocated', 'type': 'int'},
+ 'capacity_assigned': {'key': 'capacityAssigned', 'type': 'int'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ClusterProperties, self).__init__(**kwargs)
+ self.created_date = None
+ self.cluster_id = None
+ self.provisioning_state = kwargs.get('provisioning_state', None)
+ self.capacity_allocated = None
+ self.capacity_assigned = None
+
+
+class ClusterSku(Model):
+ """The SKU of the cluster. This determines the size/capacity of the cluster.
+ Required on PUT (CreateOrUpdate) requests.
+
+ :param name: Specifies the SKU name of the cluster. Required on PUT
+ (CreateOrUpdate) requests. Possible values include: 'Default'
+ :type name: str or ~azure.mgmt.streamanalytics.models.ClusterSkuName
+ :param capacity: Denotes the number of streaming units the cluster can
+ support. Valid values for this property are multiples of 36 with a minimum
+ value of 36 and maximum value of 216. Required on PUT (CreateOrUpdate)
+ requests.
+ :type capacity: int
+ """
+
+ _validation = {
+ 'capacity': {'maximum': 216, 'minimum': 36},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capacity': {'key': 'capacity', 'type': 'int'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ClusterSku, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.capacity = kwargs.get('capacity', None)
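+
+
+# Editor's note: a minimal usage sketch, not part of the generated module.
+# The smallest valid cluster SKU per the docstring and _validation above
+# (capacity is a multiple of 36, between 36 and 216); the location is an
+# example value.
+#
+#     sku = ClusterSku(name='Default', capacity=36)
+#     cluster = Cluster(location='westus', sku=sku)
+#
+# A capacity such as 24 violates the 'minimum': 36 constraint and is
+# rejected by msrest's client-side validation when the model is serialized.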
+
+
+class Compression(Model):
+ """Describes how input data is compressed.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Compression, self).__init__(**kwargs)
+ self.type = kwargs.get('type', None)
+
+
+class CSharpFunctionBinding(FunctionBinding):
+ """The binding to a CSharp function.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param script: The CSharp code containing a single function definition.
+ :type script: str
+ :param dll_path: The path to the DLL that contains the function
+ implementation.
+ :type dll_path: str
+ :param class_property: The name of the class that contains the function.
+ :type class_property: str
+ :param method: The name of the method that implements the function.
+ :type method: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'script': {'key': 'properties.script', 'type': 'str'},
+ 'dll_path': {'key': 'properties.dllPath', 'type': 'str'},
+ 'class_property': {'key': 'properties.class', 'type': 'str'},
+ 'method': {'key': 'properties.method', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CSharpFunctionBinding, self).__init__(**kwargs)
+ self.script = kwargs.get('script', None)
+ self.dll_path = kwargs.get('dll_path', None)
+ self.class_property = kwargs.get('class_property', None)
+ self.method = kwargs.get('method', None)
+ self.type = 'Microsoft.StreamAnalytics/CLRUdf'
+
+
+class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters):
+ """The parameters needed to retrieve the default function definition for a
+ CSharp function.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param binding_type: Required. Constant filled by server.
+ :type binding_type: str
+ :param script: The CSharp code containing a single function definition.
+ :type script: str
+ :param udf_type: The function type. Possible values include: 'Scalar'
+ :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType
+ """
+
+ _validation = {
+ 'binding_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'binding_type': {'key': 'bindingType', 'type': 'str'},
+ 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'},
+ 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
+ self.script = kwargs.get('script', None)
+ self.udf_type = kwargs.get('udf_type', None)
+ self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf'
+
+
+class CsvSerialization(Serialization):
+ """Describes how data from an input is serialized or how data is serialized
+ when written to an output in CSV format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param field_delimiter: Specifies the delimiter that will be used to
+ separate comma-separated value (CSV) records. See
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input
+ or
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output
+ for a list of supported values. Required on PUT (CreateOrReplace)
+ requests.
+ :type field_delimiter: str
+ :param encoding: Specifies the encoding of the incoming data in the case
+ of input and the encoding of outgoing data in the case of output. Required
+ on PUT (CreateOrReplace) requests. Possible values include: 'UTF8'
+ :type encoding: str or ~azure.mgmt.streamanalytics.models.Encoding
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'},
+ 'encoding': {'key': 'properties.encoding', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CsvSerialization, self).__init__(**kwargs)
+ self.field_delimiter = kwargs.get('field_delimiter', None)
+ self.encoding = kwargs.get('encoding', None)
+ self.type = 'Csv'
+
+
+class CustomClrSerialization(Serialization):
+ """Describes how data from an input is serialized or how data is serialized
+ when written to an output in custom format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param serialization_dll_path: The serialization library path.
+ :type serialization_dll_path: str
+ :param serialization_class_name: The serialization class name.
+ :type serialization_class_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'},
+ 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CustomClrSerialization, self).__init__(**kwargs)
+ self.serialization_dll_path = kwargs.get('serialization_dll_path', None)
+ self.serialization_class_name = kwargs.get('serialization_class_name', None)
+ self.type = 'CustomClr'
+
+
+class DiagnosticCondition(Model):
+ """A condition applicable to the resource, or to the job overall, that
+ warrants customer attention.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar since: The UTC timestamp of when the condition started. Customers
+ should be able to find a corresponding event in the ops log around this
+ time.
+ :vartype since: str
+ :ivar code: The opaque diagnostic code.
+ :vartype code: str
+ :ivar message: The human-readable message describing the condition in
+ detail. Localized in the Accept-Language of the client request.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'since': {'readonly': True},
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'since': {'key': 'since', 'type': 'str'},
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DiagnosticCondition, self).__init__(**kwargs)
+ self.since = None
+ self.code = None
+ self.message = None
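+
+
+# Editor's note: a minimal usage sketch, not part of the generated module.
+# A CSV serialization as it would be attached to an input or output; the
+# delimiter and encoding shown are the common choices.
+#
+#     serialization = CsvSerialization(field_delimiter=',', encoding='UTF8')
+#
+# The 'type' discriminator is set to 'Csv' by the constructor, so only the
+# properties need to be supplied.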
+
+
+class Diagnostics(Model):
+ """Describes conditions applicable to the Input, Output, or the job overall,
+ that warrant customer attention.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar conditions: A collection of zero or more conditions applicable to
+ the resource, or to the job overall, that warrant customer attention.
+ :vartype conditions:
+ list[~azure.mgmt.streamanalytics.models.DiagnosticCondition]
+ """
+
+ _validation = {
+ 'conditions': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'conditions': {'key': 'conditions', 'type': '[DiagnosticCondition]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Diagnostics, self).__init__(**kwargs)
+ self.conditions = None
+
+
+class DocumentDbOutputDataSource(OutputDataSource):
+ """Describes a DocumentDB output data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param account_id: The DocumentDB account name or ID. Required on PUT
+ (CreateOrReplace) requests.
+ :type account_id: str
+ :param account_key: The account key for the DocumentDB account. Required
+ on PUT (CreateOrReplace) requests.
+ :type account_key: str
+ :param database: The name of the DocumentDB database. Required on PUT
+ (CreateOrReplace) requests.
+ :type database: str
+ :param collection_name_pattern: The collection name pattern for the
+ collections to be used. The collection name format can be constructed
+ using the optional {partition} token, where partitions start from 0. See
+ the DocumentDB section of
+ https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output
+ for more information. Required on PUT (CreateOrReplace) requests.
+ :type collection_name_pattern: str
+ :param partition_key: The name of the field in output events used to
+ specify the key for partitioning output across collections. If
+ 'collectionNamePattern' contains the {partition} token, this property is
+ required to be specified.
+ :type partition_key: str
+ :param document_id: The name of the field in output events used to specify
+ the primary key on which insert or update operations are based.
+ :type document_id: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'account_id': {'key': 'properties.accountId', 'type': 'str'},
+ 'account_key': {'key': 'properties.accountKey', 'type': 'str'},
+ 'database': {'key': 'properties.database', 'type': 'str'},
+ 'collection_name_pattern': {'key': 'properties.collectionNamePattern', 'type': 'str'},
+ 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'},
+ 'document_id': {'key': 'properties.documentId', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DocumentDbOutputDataSource, self).__init__(**kwargs)
+ self.account_id = kwargs.get('account_id', None)
+ self.account_key = kwargs.get('account_key', None)
+ self.database = kwargs.get('database', None)
+ self.collection_name_pattern = kwargs.get('collection_name_pattern', None)
+ self.partition_key = kwargs.get('partition_key', None)
+ self.document_id = kwargs.get('document_id', None)
+ self.type = 'Microsoft.Storage/DocumentDB'
+
+
+class Error(Model):
+ """Common error representation.
+
+ :param error: Error definition properties.
+ :type error: ~azure.mgmt.streamanalytics.models.ErrorError
+ """
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorError'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Error, self).__init__(**kwargs)
+ self.error = kwargs.get('error', None)
+
+
+class ErrorException(HttpOperationError):
+ """Server responded with exception of type: 'Error'.
+
+ :param deserialize: A deserializer
+ :param response: Server response to be deserialized.
+ """ + + def __init__(self, deserialize, response, *args): + + super(ErrorException, self).__init__(deserialize, response, 'Error', *args) + + +class ErrorDetails(Model): + """Common error details representation. + + :param code: Error code. + :type code: str + :param target: Error target. + :type target: str + :param message: Error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ErrorDetails, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.target = kwargs.get('target', None) + self.message = kwargs.get('message', None) + + +class ErrorError(Model): + """Error definition properties. + + :param code: Error code. + :type code: str + :param message: Error message. + :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~azure.mgmt.streamanalytics.models.ErrorDetails] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + } + + def __init__(self, **kwargs): + super(ErrorError, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + + +class ErrorResponse(Model): + """Describes the error that occurred. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar code: Error code associated with the error that occurred. + :vartype code: str + :ivar message: Describes the error in detail. + :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + + +class ServiceBusDataSourceProperties(Model): + """The common properties that are associated with Service Bus data sources + (Queues, Topics, Event Hubs, etc.). + + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. 
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ServiceBusDataSourceProperties, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + + +class EventHubDataSourceProperties(ServiceBusDataSourceProperties): + """The common properties that are associated with Event Hub data sources. + + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EventHubDataSourceProperties, self).__init__(**kwargs) + self.event_hub_name = kwargs.get('event_hub_name', None) + + +class EventHubOutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. 
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which + partition to send event data. + :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(EventHubOutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.partition_key = kwargs.get('partition_key', None) + self.property_columns = kwargs.get('property_columns', None) + self.type = 'Microsoft.ServiceBus/EventHub' + + +class EventHubStreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that + should be used to read events from the Event Hub. Specifying distinct + consumer group names for multiple inputs allows each of those inputs to + receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. 
+ :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EventHubStreamInputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.consumer_group_name = kwargs.get('consumer_group_name', None) + self.type = 'Microsoft.ServiceBus/EventHub' + + +class EventHubV2OutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which + partition to send event data. 
+ :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(EventHubV2OutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.partition_key = kwargs.get('partition_key', None) + self.property_columns = kwargs.get('property_columns', None) + self.type = 'Microsoft.EventHub/EventHub' + + +class EventHubV2StreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that + should be used to read events from the Event Hub. Specifying distinct + consumer group names for multiple inputs allows each of those inputs to + receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. 
+ :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EventHubV2StreamInputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.event_hub_name = kwargs.get('event_hub_name', None) + self.consumer_group_name = kwargs.get('consumer_group_name', None) + self.type = 'Microsoft.EventHub/EventHub' + + +class External(Model): + """The storage account where the custom code artifacts are located. + + :param storage_account: + :type storage_account: ~azure.mgmt.streamanalytics.models.StorageAccount + :param container: + :type container: str + :param path: + :type path: str + """ + + _attribute_map = { + 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(External, self).__init__(**kwargs) + self.storage_account = kwargs.get('storage_account', None) + self.container = kwargs.get('container', None) + self.path = kwargs.get('path', None) + + +class SubResource(Model): + """The base sub-resource model definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = kwargs.get('name', None) + self.type = None + + +class Function(SubResource): + """A function object, containing all information associated with the named + function. All functions are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param properties: The properties that are associated with a function. 
+ :type properties: ~azure.mgmt.streamanalytics.models.FunctionProperties
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'FunctionProperties'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Function, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+ class FunctionInput(Model):
+ """Describes one input parameter of a function.
+
+ :param data_type: The (Azure Stream Analytics supported) data type of the
+ function input parameter. A list of valid Azure Stream Analytics data
+ types is described at
+ https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx
+ :type data_type: str
+ :param is_configuration_parameter: A flag indicating if the parameter is a
+ configuration parameter. True if this input parameter is expected to be a
+ constant. Default is false.
+ :type is_configuration_parameter: bool
+ """
+
+ _attribute_map = {
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ 'is_configuration_parameter': {'key': 'isConfigurationParameter', 'type': 'bool'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FunctionInput, self).__init__(**kwargs)
+ self.data_type = kwargs.get('data_type', None)
+ self.is_configuration_parameter = kwargs.get('is_configuration_parameter', None)
+
+
+ class FunctionOutput(Model):
+ """Describes the output of a function.
+
+ :param data_type: The (Azure Stream Analytics supported) data type of the
+ function output. A list of valid Azure Stream Analytics data types is
+ described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx
+ :type data_type: str
+ """
+
+ _attribute_map = {
+ 'data_type': {'key': 'dataType', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FunctionOutput, self).__init__(**kwargs)
+ self.data_type = kwargs.get('data_type', None)
+
+
+ class Identity(Model):
+ """Describes how identity is verified.
+
+ :param tenant_id:
+ :type tenant_id: str
+ :param principal_id:
+ :type principal_id: str
+ :param type:
+ :type type: str
+ """
+
+ _attribute_map = {
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Identity, self).__init__(**kwargs)
+ self.tenant_id = kwargs.get('tenant_id', None)
+ self.principal_id = kwargs.get('principal_id', None)
+ self.type = kwargs.get('type', None)
+
+
+ class Input(SubResource):
+ """An input object, containing all information associated with the named
+ input. All inputs are contained under a streaming job.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar id: Resource Id
+ :vartype id: str
+ :param name: Resource name
+ :type name: str
+ :ivar type: Resource type
+ :vartype type: str
+ :param properties: The properties that are associated with an input.
+ Required on PUT (CreateOrReplace) requests.
+ :type properties: ~azure.mgmt.streamanalytics.models.InputProperties
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'InputProperties'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Input, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+ class InputProperties(Model):
+ """The properties that are associated with an input.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: StreamInputProperties, ReferenceInputProperties
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param serialization: Describes how data from an input is serialized or
+ how data is serialized when written to an output. Required on PUT
+ (CreateOrReplace) requests.
+ :type serialization: ~azure.mgmt.streamanalytics.models.Serialization
+ :ivar diagnostics: Describes conditions applicable to the Input, Output,
+ or the job overall, that warrant customer attention.
+ :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics
+ :ivar etag: The current entity tag for the input. This is an opaque
+ string. You can use it to detect whether the resource has changed between
+ requests. You can also use it in the If-Match or If-None-Match headers for
+ write operations for optimistic concurrency.
+ :vartype etag: str
+ :param compression:
+ :type compression: ~azure.mgmt.streamanalytics.models.Compression
+ :param partition_key: Describes a key in the input data that is used for
+ partitioning the input data.
+ :type partition_key: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'diagnostics': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'serialization': {'key': 'serialization', 'type': 'Serialization'},
+ 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'compression': {'key': 'compression', 'type': 'Compression'},
+ 'partition_key': {'key': 'partitionKey', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'Stream': 'StreamInputProperties', 'Reference': 'ReferenceInputProperties'}
+ }
+
+ def __init__(self, **kwargs):
+ super(InputProperties, self).__init__(**kwargs)
+ self.serialization = kwargs.get('serialization', None)
+ self.diagnostics = None
+ self.etag = None
+ self.compression = kwargs.get('compression', None)
+ self.partition_key = kwargs.get('partition_key', None)
+ self.type = None
+
+
+ class IoTHubStreamInputDataSource(StreamInputDataSource):
+ """Describes an IoT Hub input data source that contains stream data.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param iot_hub_namespace: The name or the URI of the IoT Hub. Required on
+ PUT (CreateOrReplace) requests.
+ :type iot_hub_namespace: str
+ :param shared_access_policy_name: The shared access policy name for the
+ IoT Hub. This policy must contain at least the Service connect permission.
+ Required on PUT (CreateOrReplace) requests.
+ :type shared_access_policy_name: str
+ :param shared_access_policy_key: The shared access policy key for the
+ specified shared access policy. Required on PUT (CreateOrReplace)
+ requests.
+ :type shared_access_policy_key: str
+ :param consumer_group_name: The name of an IoT Hub Consumer Group that
+ should be used to read events from the IoT Hub. If not specified, the
+ input uses the IoT Hub’s default consumer group.
+ :type consumer_group_name: str
+ :param endpoint: The IoT Hub endpoint to connect to (i.e. messages/events,
+ messages/operationsMonitoringEvents, etc.).
+ :type endpoint: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'iot_hub_namespace': {'key': 'properties.iotHubNamespace', 'type': 'str'},
+ 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'},
+ 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'},
+ 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'},
+ 'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(IoTHubStreamInputDataSource, self).__init__(**kwargs)
+ self.iot_hub_namespace = kwargs.get('iot_hub_namespace', None)
+ self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None)
+ self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None)
+ self.consumer_group_name = kwargs.get('consumer_group_name', None)
+ self.endpoint = kwargs.get('endpoint', None)
+ self.type = 'Microsoft.Devices/IotHubs'
+
+
+ class JavaScriptFunctionBinding(FunctionBinding):
+ """The binding to a JavaScript function.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param script: The JavaScript code containing a single function
+ definition. For example: 'function (x, y) { return x + y; }'
+ :type script: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'script': {'key': 'properties.script', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JavaScriptFunctionBinding, self).__init__(**kwargs)
+ self.script = kwargs.get('script', None)
+ self.type = 'Microsoft.StreamAnalytics/JavascriptUdf'
+
+
+ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters):
+ """The parameters needed to retrieve the default function definition for a
+ JavaScript function.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param binding_type: Required. Constant filled by server.
+ :type binding_type: str
+ :param script: The JavaScript code containing a single function
+ definition. For example: 'function (x, y) { return x + y; }'.
+ :type script: str
+ :param udf_type: The function type.
Possible values include: 'Scalar'
+ :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType
+ """
+
+ _validation = {
+ 'binding_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'binding_type': {'key': 'bindingType', 'type': 'str'},
+ 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'},
+ 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs)
+ self.script = kwargs.get('script', None)
+ self.udf_type = kwargs.get('udf_type', None)
+ self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf'
+
+
+ class StorageAccount(Model):
+ """The properties that are associated with an Azure Storage account.
+
+ :param account_name: The name of the Azure Storage account. Required on
+ PUT (CreateOrReplace) requests.
+ :type account_name: str
+ :param account_key: The account key for the Azure Storage account.
+ Required on PUT (CreateOrReplace) requests.
+ :type account_key: str
+ """
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'account_key': {'key': 'accountKey', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(StorageAccount, self).__init__(**kwargs)
+ self.account_name = kwargs.get('account_name', None)
+ self.account_key = kwargs.get('account_key', None)
+
+
+ class JobStorageAccount(StorageAccount):
+ """The properties that are associated with an Azure Storage account with MSI.
+
+ :param account_name: The name of the Azure Storage account. Required on
+ PUT (CreateOrReplace) requests.
+ :type account_name: str
+ :param account_key: The account key for the Azure Storage account.
+ Required on PUT (CreateOrReplace) requests.
+ :type account_key: str
+ :param authentication_mode: Authentication Mode. Possible values include:
+ 'Msi', 'UserToken', 'ConnectionString'
+ :type authentication_mode: str or
+ ~azure.mgmt.streamanalytics.models.AuthenticationMode
+ """
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'account_key': {'key': 'accountKey', 'type': 'str'},
+ 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JobStorageAccount, self).__init__(**kwargs)
+ self.authentication_mode = kwargs.get('authentication_mode', None)
+
+
+ class JsonSerialization(Serialization):
+ """Describes how data from an input is serialized or how data is serialized
+ when written to an output in JSON format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param encoding: Specifies the encoding of the incoming data in the case
+ of input and the encoding of outgoing data in the case of output. Required
+ on PUT (CreateOrReplace) requests. Possible values include: 'UTF8'
+ :type encoding: str or ~azure.mgmt.streamanalytics.models.Encoding
+ :param format: This property applies only to JSON serialization of
+ outputs. It is not applicable to inputs. This property specifies the
+ format of the JSON the output will be written in. The currently supported
+ values are 'lineSeparated' indicating the output will be formatted by
+ having each JSON object separated by a new line and 'array' indicating the
+ output will be formatted as an array of JSON objects. Default value is
+ 'lineSeparated' if left null.
Possible values include: 'LineSeparated', 'Array'
+ :type format: str or
+ ~azure.mgmt.streamanalytics.models.JsonOutputSerializationFormat
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'encoding': {'key': 'properties.encoding', 'type': 'str'},
+ 'format': {'key': 'properties.format', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JsonSerialization, self).__init__(**kwargs)
+ self.encoding = kwargs.get('encoding', None)
+ self.format = kwargs.get('format', None)
+ self.type = 'Json'
+
+
+ class OAuthBasedDataSourceProperties(Model):
+ """The properties that are associated with data sources that use OAuth as
+ their authentication model.
+
+ :param refresh_token: A refresh token that can be used to obtain a valid
+ access token that can then be used to authenticate with the data source. A
+ valid refresh token is currently only obtainable via the Azure Portal. It
+ is recommended to put a dummy string value here when creating the data
+ source and then go to the Azure Portal to authenticate the data source,
+ which will update this property with a valid refresh token. Required on
+ PUT (CreateOrReplace) requests.
+ :type refresh_token: str
+ :param token_user_principal_name: The user principal name (UPN) of the
+ user that was used to obtain the refresh token. Use this property to help
+ remember which user was used to obtain the refresh token.
+ :type token_user_principal_name: str
+ :param token_user_display_name: The user display name of the user that was
+ used to obtain the refresh token. Use this property to help remember which
+ user was used to obtain the refresh token.
+ :type token_user_display_name: str
+ """
+
+ _attribute_map = {
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'},
+ 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OAuthBasedDataSourceProperties, self).__init__(**kwargs)
+ self.refresh_token = kwargs.get('refresh_token', None)
+ self.token_user_principal_name = kwargs.get('token_user_principal_name', None)
+ self.token_user_display_name = kwargs.get('token_user_display_name', None)
+
+
+ class Operation(Model):
+ """A Stream Analytics REST API operation.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar name: The name of the operation being performed on this particular
+ object.
+ :vartype name: str
+ :ivar display: Contains the localized display information for this
+ particular operation / action.
+ :vartype display: ~azure.mgmt.streamanalytics.models.OperationDisplay
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'display': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Operation, self).__init__(**kwargs)
+ self.name = None
+ self.display = None
+
+
+ class OperationDisplay(Model):
+ """Contains the localized display information for this particular operation /
+ action.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar provider: The localized friendly form of the resource provider name.
+ :vartype provider: str
+ :ivar resource: The localized friendly form of the resource type related
+ to this action/operation.
+ :vartype resource: str + :ivar operation: The localized friendly name for the operation. + :vartype operation: str + :ivar description: The localized friendly description for the operation. + :vartype description: str + """ + + _validation = { + 'provider': {'readonly': True}, + 'resource': {'readonly': True}, + 'operation': {'readonly': True}, + 'description': {'readonly': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + self.description = None + + +class Output(SubResource): + """An output object, containing all information associated with the named + output. All outputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param datasource: Describes the data source that output will be written + to. Required on PUT (CreateOrReplace) requests. + :type datasource: ~azure.mgmt.streamanalytics.models.OutputDataSource + :param time_window: + :type time_window: str + :param size_window: + :type size_window: float + :param serialization: Describes how data from an input is serialized or + how data is serialized when written to an output. Required on PUT + (CreateOrReplace) requests. + :type serialization: ~azure.mgmt.streamanalytics.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, + or the job overall, that warrant customer attention. + :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics + :ivar etag: The current entity tag for the output. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'datasource': {'key': 'properties.datasource', 'type': 'OutputDataSource'}, + 'time_window': {'key': 'properties.timeWindow', 'type': 'str'}, + 'size_window': {'key': 'properties.sizeWindow', 'type': 'float'}, + 'serialization': {'key': 'properties.serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'properties.diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Output, self).__init__(**kwargs) + self.datasource = kwargs.get('datasource', None) + self.time_window = kwargs.get('time_window', None) + self.size_window = kwargs.get('size_window', None) + self.serialization = kwargs.get('serialization', None) + self.diagnostics = None + self.etag = None + + +class ParquetSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in Parquet format. + + All required parameters must be populated in order to send to Azure. 
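+
+ Example (editor's sketch, not part of the generated code; the output
+ name below is hypothetical): constructing this serialization and
+ attaching it to an output::
+
+     serialization = ParquetSerialization(properties={})
+     output = Output(name='parquet-output', serialization=serialization)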
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param properties: The properties that are associated with the Parquet
+ serialization type. Required on PUT (CreateOrReplace) requests.
+ :type properties: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ParquetSerialization, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+ self.type = 'Parquet'
+
+
+ class PowerBIOutputDataSource(OutputDataSource):
+ """Describes a Power BI output data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param refresh_token: A refresh token that can be used to obtain a valid
+ access token that can then be used to authenticate with the data source. A
+ valid refresh token is currently only obtainable via the Azure Portal. It
+ is recommended to put a dummy string value here when creating the data
+ source and then go to the Azure Portal to authenticate the data source,
+ which will update this property with a valid refresh token. Required on
+ PUT (CreateOrReplace) requests.
+ :type refresh_token: str
+ :param token_user_principal_name: The user principal name (UPN) of the
+ user that was used to obtain the refresh token. Use this property to help
+ remember which user was used to obtain the refresh token.
+ :type token_user_principal_name: str
+ :param token_user_display_name: The user display name of the user that was
+ used to obtain the refresh token. Use this property to help remember which
+ user was used to obtain the refresh token.
+ :type token_user_display_name: str
+ :param dataset: The name of the Power BI dataset. Required on PUT
+ (CreateOrReplace) requests.
+ :type dataset: str
+ :param table: The name of the Power BI table under the specified dataset.
+ Required on PUT (CreateOrReplace) requests.
+ :type table: str
+ :param group_id: The ID of the Power BI group.
+ :type group_id: str
+ :param group_name: The name of the Power BI group. Use this property to
+ help remember which specific Power BI group ID was used.
+ :type group_name: str
+ :param authentication_mode: Authentication Mode.
Possible values include:
+ 'Msi', 'UserToken', 'ConnectionString'
+ :type authentication_mode: str or
+ ~azure.mgmt.streamanalytics.models.AuthenticationMode
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'},
+ 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'},
+ 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'},
+ 'dataset': {'key': 'properties.dataset', 'type': 'str'},
+ 'table': {'key': 'properties.table', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'group_name': {'key': 'properties.groupName', 'type': 'str'},
+ 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(PowerBIOutputDataSource, self).__init__(**kwargs)
+ self.refresh_token = kwargs.get('refresh_token', None)
+ self.token_user_principal_name = kwargs.get('token_user_principal_name', None)
+ self.token_user_display_name = kwargs.get('token_user_display_name', None)
+ self.dataset = kwargs.get('dataset', None)
+ self.table = kwargs.get('table', None)
+ self.group_id = kwargs.get('group_id', None)
+ self.group_name = kwargs.get('group_name', None)
+ self.authentication_mode = kwargs.get('authentication_mode', None)
+ self.type = 'PowerBI'
+
+
+ class ProxyResource(Resource):
+ """The resource model definition for an ARM proxy resource. It will have
+ everything other than required location and tags.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+ :vartype id: str
+ :ivar name: The name of the resource
+ :vartype name: str
+ :ivar type: The type of the resource. Ex-
+ Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ProxyResource, self).__init__(**kwargs)
+
+
+ class PrivateEndpoint(ProxyResource):
+ """Complete information about the private endpoint.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar id: Fully qualified resource Id for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+ :vartype id: str
+ :ivar name: The name of the resource
+ :vartype name: str
+ :ivar type: The type of the resource. Ex-
+ Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+ :vartype type: str
+ :param properties: The properties associated with a private endpoint.
+ :type properties:
+ ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties
+ :ivar etag: Unique opaque string (generally a GUID) that represents the
+ metadata state of the resource (private endpoint) and changes whenever the
+ resource is updated. Required on PUT (CreateOrUpdate) requests.
+ :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateEndpointProperties'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PrivateEndpoint, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.etag = None + + +class PrivateEndpointProperties(Model): + """The properties associated with a private endpoint. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar created_date: The date when this private endpoint was created. + :vartype created_date: str + :param manual_private_link_service_connections: A list of connections to + the remote resource. Immutable after it is set. + :type manual_private_link_service_connections: + list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] + """ + + _validation = { + 'created_date': {'readonly': True}, + } + + _attribute_map = { + 'created_date': {'key': 'createdDate', 'type': 'str'}, + 'manual_private_link_service_connections': {'key': 'manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, + } + + def __init__(self, **kwargs): + super(PrivateEndpointProperties, self).__init__(**kwargs) + self.created_date = None + self.manual_private_link_service_connections = kwargs.get('manual_private_link_service_connections', None) + + +class PrivateLinkConnectionState(Model): + """A collection of read-only information about the state of the connection to + the private remote resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar status: Indicates whether the connection has been + Approved/Rejected/Removed by the owner of the remote resource/service. + :vartype status: str + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service + provider require any updates on the consumer. + :vartype actions_required: str + """ + + _validation = { + 'status': {'readonly': True}, + 'description': {'readonly': True}, + 'actions_required': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = None + self.description = None + self.actions_required = None + + +class PrivateLinkServiceConnection(Model): + """A grouping of information about the connection to the remote resource. + + :param private_link_service_id: The resource id of the private link + service. Required on PUT (CreateOrUpdate) requests. + :type private_link_service_id: str + :param group_ids: The ID(s) of the group(s) obtained from the remote + resource that this private endpoint should connect to. Required on PUT + (CreateOrUpdate) requests. + :type group_ids: list[str] + :param request_message: A message passed to the owner of the remote + resource with this connection request. Restricted to 140 chars. 
+ :type request_message: str
+ :param private_link_service_connection_state: A collection of read-only
+ information about the state of the connection to the private remote
+ resource.
+ :type private_link_service_connection_state:
+ ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState
+ """
+
+ _attribute_map = {
+ 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'},
+ 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'},
+ }
+
+ def __init__(self, **kwargs):
+ super(PrivateLinkServiceConnection, self).__init__(**kwargs)
+ self.private_link_service_id = kwargs.get('private_link_service_id', None)
+ self.group_ids = kwargs.get('group_ids', None)
+ self.request_message = kwargs.get('request_message', None)
+ self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
+
+
+ class ReferenceInputProperties(InputProperties):
+ """The properties that are associated with an input containing reference data.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param serialization: Describes how data from an input is serialized or
+ how data is serialized when written to an output. Required on PUT
+ (CreateOrReplace) requests.
+ :type serialization: ~azure.mgmt.streamanalytics.models.Serialization
+ :ivar diagnostics: Describes conditions applicable to the Input, Output,
+ or the job overall, that warrant customer attention.
+ :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics
+ :ivar etag: The current entity tag for the input. This is an opaque
+ string. You can use it to detect whether the resource has changed between
+ requests. You can also use it in the If-Match or If-None-Match headers for
+ write operations for optimistic concurrency.
+ :vartype etag: str
+ :param compression:
+ :type compression: ~azure.mgmt.streamanalytics.models.Compression
+ :param partition_key: Describes a key in the input data that is used for
+ partitioning the input data.
+ :type partition_key: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param datasource: Describes an input data source that contains reference
+ data. Required on PUT (CreateOrReplace) requests.
+ :type datasource:
+ ~azure.mgmt.streamanalytics.models.ReferenceInputDataSource
+ """
+
+ _validation = {
+ 'diagnostics': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'serialization': {'key': 'serialization', 'type': 'Serialization'},
+ 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'compression': {'key': 'compression', 'type': 'Compression'},
+ 'partition_key': {'key': 'partitionKey', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'datasource': {'key': 'datasource', 'type': 'ReferenceInputDataSource'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ReferenceInputProperties, self).__init__(**kwargs)
+ self.datasource = kwargs.get('datasource', None)
+ self.type = 'Reference'
+
+
+ class ResourceTestStatus(Model):
+ """Describes the status of the test operation along with error information, if
+ applicable.
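+
+ Example (editor's sketch, not part of the generated code; ``poller`` is
+ a hypothetical long-running-operation handle returned by an input or
+ output test operation, and the status string is assumed)::
+
+     test_status = poller.result()
+     if test_status.status != 'TestSucceeded':  # status value assumed
+         print(test_status.error)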
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar status: The status of the test operation. + :vartype status: str + :ivar error: Describes the error that occurred. + :vartype error: ~azure.mgmt.streamanalytics.models.ErrorResponse + """ + + _validation = { + 'status': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__(self, **kwargs): + super(ResourceTestStatus, self).__init__(**kwargs) + self.status = None + self.error = None + + +class ScalarFunctionProperties(FunctionProperties): + """The properties that are associated with a scalar function. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar etag: The current entity tag for the function. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] + :param output: + :type output: ~azure.mgmt.streamanalytics.models.FunctionOutput + :param binding: + :type binding: ~azure.mgmt.streamanalytics.models.FunctionBinding + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ScalarFunctionProperties, self).__init__(**kwargs) + self.type = 'Scalar' + + +class ServiceBusQueueOutputDataSource(OutputDataSource): + """Describes a Service Bus Queue output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param queue_name: The name of the Service Bus Queue. Required on PUT + (CreateOrReplace) requests. + :type queue_name: str + :param property_columns: A string array of the names of output columns to + be attached to Service Bus messages as custom properties. 
+ :type property_columns: list[str] + :param system_property_columns: + :type system_property_columns: dict[str, str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.queue_name = kwargs.get('queue_name', None) + self.property_columns = kwargs.get('property_columns', None) + self.system_property_columns = kwargs.get('system_property_columns', None) + self.type = 'Microsoft.ServiceBus/Queue' + + +class ServiceBusTopicOutputDataSource(OutputDataSource): + """Describes a Service Bus Topic output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param topic_name: The name of the Service Bus Topic. Required on PUT + (CreateOrReplace) requests. + :type topic_name: str + :param property_columns: A string array of the names of output columns to + be attached to Service Bus messages as custom properties. 
+ :type property_columns: list[str] + :param system_property_columns: + :type system_property_columns: dict[str, str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'topic_name': {'key': 'properties.topicName', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(ServiceBusTopicOutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = kwargs.get('service_bus_namespace', None) + self.shared_access_policy_name = kwargs.get('shared_access_policy_name', None) + self.shared_access_policy_key = kwargs.get('shared_access_policy_key', None) + self.authentication_mode = kwargs.get('authentication_mode', None) + self.topic_name = kwargs.get('topic_name', None) + self.property_columns = kwargs.get('property_columns', None) + self.system_property_columns = kwargs.get('system_property_columns', None) + self.type = 'Microsoft.ServiceBus/Topic' + + +class StartStreamingJobParameters(Model): + """Parameters supplied to the Start Streaming Job operation. + + :param output_start_mode: Value may be JobStartTime, CustomTime, or + LastOutputEventTime to indicate whether the starting point of the output + event stream should start whenever the job is started, start at a custom + user time stamp specified via the outputStartTime property, or start from + the last event output time. Possible values include: 'JobStartTime', + 'CustomTime', 'LastOutputEventTime' + :type output_start_mode: str or + ~azure.mgmt.streamanalytics.models.OutputStartMode + :param output_start_time: Value is either an ISO-8601 formatted time stamp + that indicates the starting point of the output event stream, or null to + indicate that the output event stream will start whenever the streaming + job is started. This property must have a value if outputStartMode is set + to CustomTime. + :type output_start_time: datetime + """ + + _attribute_map = { + 'output_start_mode': {'key': 'outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'outputStartTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(StartStreamingJobParameters, self).__init__(**kwargs) + self.output_start_mode = kwargs.get('output_start_mode', None) + self.output_start_time = kwargs.get('output_start_time', None) + + +class StreamingJob(TrackedResource): + """A streaming job object, containing all information associated with the + named streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: Resource tags. 
+ :type tags: dict[str, str]
+ :param location: The geo-location where the resource lives
+ :type location: str
+ :param sku: Describes the SKU of the streaming job. Required on PUT
+ (CreateOrReplace) requests.
+ :type sku: ~azure.mgmt.streamanalytics.models.StreamingJobSku
+ :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is
+ generated upon creation of the streaming job.
+ :vartype job_id: str
+ :ivar provisioning_state: Describes the provisioning status of the
+ streaming job.
+ :vartype provisioning_state: str
+ :ivar job_state: Describes the state of the streaming job.
+ :vartype job_state: str
+ :param job_type: Describes the type of the job. Valid modes are 'Cloud'
+ and 'Edge'. Possible values include: 'Cloud', 'Edge'
+ :type job_type: str or ~azure.mgmt.streamanalytics.models.JobType
+ :param output_start_mode: This property should only be utilized when it is
+ desired that the job be started immediately upon creation. Value may be
+ JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the
+ starting point of the output event stream should start whenever the job is
+ started, start at a custom user time stamp specified via the
+ outputStartTime property, or start from the last event output time.
+ Possible values include: 'JobStartTime', 'CustomTime',
+ 'LastOutputEventTime'
+ :type output_start_mode: str or
+ ~azure.mgmt.streamanalytics.models.OutputStartMode
+ :param output_start_time: Value is either an ISO-8601 formatted time stamp
+ that indicates the starting point of the output event stream, or null to
+ indicate that the output event stream will start whenever the streaming
+ job is started. This property must have a value if outputStartMode is set
+ to CustomTime.
+ :type output_start_time: datetime
+ :ivar last_output_event_time: Value is either an ISO-8601 formatted
+ timestamp indicating the last output event time of the streaming job or
+ null indicating that output has not yet been produced. In case of multiple
+ outputs or multiple streams, this shows the latest value in that set.
+ :vartype last_output_event_time: datetime
+ :param events_out_of_order_policy: Indicates the policy to apply to events
+ that arrive out of order in the input event stream. Possible values
+ include: 'Adjust', 'Drop'
+ :type events_out_of_order_policy: str or
+ ~azure.mgmt.streamanalytics.models.EventsOutOfOrderPolicy
+ :param output_error_policy: Indicates the policy to apply to events that
+ arrive at the output and cannot be written to the external storage due to
+ being malformed (missing column values, column values of wrong type or
+ size). Possible values include: 'Stop', 'Drop'
+ :type output_error_policy: str or
+ ~azure.mgmt.streamanalytics.models.OutputErrorPolicy
+ :param events_out_of_order_max_delay_in_seconds: The maximum tolerable
+ delay in seconds where out-of-order events can be adjusted to be back in
+ order.
+ :type events_out_of_order_max_delay_in_seconds: int
+ :param events_late_arrival_max_delay_in_seconds: The maximum tolerable
+ delay in seconds where events arriving late could be included. Supported
+ range is -1 to 1814399 (20.23:59:59 days) and -1 is used to specify wait
+ indefinitely. If the property is absent, it is interpreted to have a value
+ of -1.
+ :type events_late_arrival_max_delay_in_seconds: int
+ :param data_locale: The data locale of the Stream Analytics job.
Value
+ should be the name of a supported .NET Culture from the set
+ https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx.
+ Defaults to 'en-US' if none specified.
+ :type data_locale: str
+ :param compatibility_level: Controls certain runtime behaviors of the
+ streaming job. Possible values include: '1.0'
+ :type compatibility_level: str or
+ ~azure.mgmt.streamanalytics.models.CompatibilityLevel
+ :ivar created_date: Value is an ISO-8601 formatted UTC timestamp
+ indicating when the streaming job was created.
+ :vartype created_date: datetime
+ :param inputs: A list of one or more inputs to the streaming job. The name
+ property for each input is required when specifying this property in a PUT
+ request. This property cannot be modified via a PATCH operation. You must
+ use the PATCH API available for the individual input.
+ :type inputs: list[~azure.mgmt.streamanalytics.models.Input]
+ :param transformation: Indicates the query and the number of streaming
+ units to use for the streaming job. The name property of the
+ transformation is required when specifying this property in a PUT request.
+ This property cannot be modified via a PATCH operation. You must use the
+ PATCH API available for the individual transformation.
+ :type transformation: ~azure.mgmt.streamanalytics.models.Transformation
+ :param outputs: A list of one or more outputs for the streaming job. The
+ name property for each output is required when specifying this property in
+ a PUT request. This property cannot be modified via a PATCH operation. You
+ must use the PATCH API available for the individual output.
+ :type outputs: list[~azure.mgmt.streamanalytics.models.Output]
+ :param functions: A list of one or more functions for the streaming job.
+ The name property for each function is required when specifying this
+ property in a PUT request. This property cannot be modified via a PATCH
+ operation. You must use the PATCH API available for the individual
+ function.
+ :type functions: list[~azure.mgmt.streamanalytics.models.Function]
+ :ivar etag: The current entity tag for the streaming job. This is an
+ opaque string. You can use it to detect whether the resource has changed
+ between requests. You can also use it in the If-Match or If-None-Match
+ headers for write operations for optimistic concurrency.
+ :vartype etag: str
+ :param job_storage_account:
+ :type job_storage_account:
+ ~azure.mgmt.streamanalytics.models.JobStorageAccount
+ :param content_storage_policy: Valid values are JobStorageAccount and
+ SystemAccount. If set to JobStorageAccount, this requires the user to also
+ specify the jobStorageAccount property. Possible values include:
+ 'SystemAccount', 'JobStorageAccount'
+ :type content_storage_policy: str or
+ ~azure.mgmt.streamanalytics.models.ContentStoragePolicy
+ :param externals: The storage account where the custom code artifacts are
+ located.
+ :type externals: ~azure.mgmt.streamanalytics.models.External
+ :param cluster: The cluster that streaming jobs will run on.
+ :type cluster: ~azure.mgmt.streamanalytics.models.ClusterInfo
+ :param identity: Describes the system-assigned managed identity assigned
+ to this job that can be used to authenticate with inputs and outputs.
+ :type identity: ~azure.mgmt.streamanalytics.models.Identity + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'job_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_state': {'readonly': True}, + 'last_output_event_time': {'readonly': True}, + 'created_date': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'job_id': {'key': 'properties.jobId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'job_state': {'key': 'properties.jobState', 'type': 'str'}, + 'job_type': {'key': 'properties.jobType', 'type': 'str'}, + 'output_start_mode': {'key': 'properties.outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'properties.outputStartTime', 'type': 'iso-8601'}, + 'last_output_event_time': {'key': 'properties.lastOutputEventTime', 'type': 'iso-8601'}, + 'events_out_of_order_policy': {'key': 'properties.eventsOutOfOrderPolicy', 'type': 'str'}, + 'output_error_policy': {'key': 'properties.outputErrorPolicy', 'type': 'str'}, + 'events_out_of_order_max_delay_in_seconds': {'key': 'properties.eventsOutOfOrderMaxDelayInSeconds', 'type': 'int'}, + 'events_late_arrival_max_delay_in_seconds': {'key': 'properties.eventsLateArrivalMaxDelayInSeconds', 'type': 'int'}, + 'data_locale': {'key': 'properties.dataLocale', 'type': 'str'}, + 'compatibility_level': {'key': 'properties.compatibilityLevel', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'iso-8601'}, + 'inputs': {'key': 'properties.inputs', 'type': '[Input]'}, + 'transformation': {'key': 'properties.transformation', 'type': 'Transformation'}, + 'outputs': {'key': 'properties.outputs', 'type': '[Output]'}, + 'functions': {'key': 'properties.functions', 'type': '[Function]'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, + 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, + 'externals': {'key': 'properties.externals', 'type': 'External'}, + 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + } + + def __init__(self, **kwargs): + super(StreamingJob, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.job_id = None + self.provisioning_state = None + self.job_state = None + self.job_type = kwargs.get('job_type', None) + self.output_start_mode = kwargs.get('output_start_mode', None) + self.output_start_time = kwargs.get('output_start_time', None) + self.last_output_event_time = None + self.events_out_of_order_policy = kwargs.get('events_out_of_order_policy', None) + self.output_error_policy = kwargs.get('output_error_policy', None) + self.events_out_of_order_max_delay_in_seconds = kwargs.get('events_out_of_order_max_delay_in_seconds', None) + self.events_late_arrival_max_delay_in_seconds = kwargs.get('events_late_arrival_max_delay_in_seconds', None) + self.data_locale = kwargs.get('data_locale', None) + self.compatibility_level = kwargs.get('compatibility_level', None) + self.created_date = None + self.inputs = kwargs.get('inputs', None) + self.transformation 
= kwargs.get('transformation', None)
+ self.outputs = kwargs.get('outputs', None)
+ self.functions = kwargs.get('functions', None)
+ self.etag = None
+ self.job_storage_account = kwargs.get('job_storage_account', None)
+ self.content_storage_policy = kwargs.get('content_storage_policy', None)
+ self.externals = kwargs.get('externals', None)
+ self.cluster = kwargs.get('cluster', None)
+ self.identity = kwargs.get('identity', None)
+
+
+ class StreamingJobSku(Model):
+ """The properties that are associated with a SKU.
+
+ :param name: The name of the SKU. Required on PUT (CreateOrReplace)
+ requests. Possible values include: 'Standard'
+ :type name: str or ~azure.mgmt.streamanalytics.models.StreamingJobSkuName
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(StreamingJobSku, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+
+
+ class StreamInputProperties(InputProperties):
+ """The properties that are associated with an input containing stream data.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param serialization: Describes how data from an input is serialized or
+ how data is serialized when written to an output. Required on PUT
+ (CreateOrReplace) requests.
+ :type serialization: ~azure.mgmt.streamanalytics.models.Serialization
+ :ivar diagnostics: Describes conditions applicable to the Input, Output,
+ or the job overall, that warrant customer attention.
+ :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics
+ :ivar etag: The current entity tag for the input. This is an opaque
+ string. You can use it to detect whether the resource has changed between
+ requests. You can also use it in the If-Match or If-None-Match headers for
+ write operations for optimistic concurrency.
+ :vartype etag: str
+ :param compression:
+ :type compression: ~azure.mgmt.streamanalytics.models.Compression
+ :param partition_key: Describes a key in the input data that is used for
+ partitioning the input data.
+ :type partition_key: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param datasource: Describes an input data source that contains stream
+ data. Required on PUT (CreateOrReplace) requests.
+ :type datasource: ~azure.mgmt.streamanalytics.models.StreamInputDataSource
+ """
+
+ _validation = {
+ 'diagnostics': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'serialization': {'key': 'serialization', 'type': 'Serialization'},
+ 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'compression': {'key': 'compression', 'type': 'Compression'},
+ 'partition_key': {'key': 'partitionKey', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'datasource': {'key': 'datasource', 'type': 'StreamInputDataSource'},
+ }
+
+ def __init__(self, **kwargs):
+ super(StreamInputProperties, self).__init__(**kwargs)
+ self.datasource = kwargs.get('datasource', None)
+ self.type = 'Stream'
+
+
+ class SubscriptionQuota(SubResource):
+ """Describes the current quota for the subscription.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
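+
+ Example (editor's sketch, not part of the generated code; ``quotas`` is
+ a hypothetical SubscriptionQuotasListResult returned by the
+ subscriptions operations)::
+
+     for quota in quotas.value:
+         print(quota.name, quota.current_count, quota.max_count)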
+ + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :ivar max_count: The max permitted usage of this resource. + :vartype max_count: int + :ivar current_count: The current usage of this resource. + :vartype current_count: int + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'max_count': {'readonly': True}, + 'current_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_count': {'key': 'properties.maxCount', 'type': 'int'}, + 'current_count': {'key': 'properties.currentCount', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(SubscriptionQuota, self).__init__(**kwargs) + self.max_count = None + self.current_count = None + + +class SubscriptionQuotasListResult(Model): + """Result of the GetQuotas operation. It contains a list of quotas for the + subscription in a particular region. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar value: List of quotas for the subscription in a particular region. + :vartype value: list[~azure.mgmt.streamanalytics.models.SubscriptionQuota] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SubscriptionQuota]'}, + } + + def __init__(self, **kwargs): + super(SubscriptionQuotasListResult, self).__init__(**kwargs) + self.value = None + + +class Transformation(SubResource): + """A transformation object, containing all information associated with the + named transformation. All transformations are contained under a streaming + job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param streaming_units: Specifies the number of streaming units that the + streaming job uses. + :type streaming_units: int + :param query: Specifies the query that will be run in the streaming job. + You can learn more about the Stream Analytics Query Language (SAQL) here: + https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT + (CreateOrReplace) requests. + :type query: str + :ivar etag: The current entity tag for the transformation. This is an + opaque string. You can use it to detect whether the resource has changed + between requests. You can also use it in the If-Match or If-None-Match + headers for write operations for optimistic concurrency. 
+ :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'query': {'key': 'properties.query', 'type': 'str'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Transformation, self).__init__(**kwargs) + self.streaming_units = kwargs.get('streaming_units', None) + self.query = kwargs.get('query', None) + self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py new file mode 100644 index 000000000000..e32d8fb85b18 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -0,0 +1,3875 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model +from msrest.exceptions import HttpOperationError + + +class FunctionProperties(Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ScalarFunctionProperties, AggregateFunctionProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar etag: The current entity tag for the function. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] + :param output: + :type output: ~azure.mgmt.streamanalytics.models.FunctionOutput + :param binding: + :type binding: ~azure.mgmt.streamanalytics.models.FunctionBinding + :param type: Required. Constant filled by server. 
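The `_subtype_map` below is what drives polymorphic deserialization: msrest reads the discriminator (`type`) from the payload and instantiates the registered subclass. A hedged sketch, again assuming msrest's `Model.deserialize` classmethod:

```python
from azure.mgmt.streamanalytics.models import FunctionProperties

# 'Scalar' is looked up in FunctionProperties._subtype_map, so the result is
# a ScalarFunctionProperties instance rather than the abstract base class.
props = FunctionProperties.deserialize({
    'type': 'Scalar',
    'properties': {'inputs': [], 'output': None, 'binding': None},
})
print(type(props).__name__)  # -> ScalarFunctionProperties
```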
+ :type type: str + """ + + _validation = { + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Scalar': 'ScalarFunctionProperties', 'Aggregate': 'AggregateFunctionProperties'} + } + + def __init__(self, *, inputs=None, output=None, binding=None, **kwargs) -> None: + super(FunctionProperties, self).__init__(**kwargs) + self.etag = None + self.inputs = inputs + self.output = output + self.binding = binding + self.type = None + + +class AggregateFunctionProperties(FunctionProperties): + """The properties that are associated with an aggregate function. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar etag: The current entity tag for the function. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] + :param output: + :type output: ~azure.mgmt.streamanalytics.models.FunctionOutput + :param binding: + :type binding: ~azure.mgmt.streamanalytics.models.FunctionBinding + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, inputs=None, output=None, binding=None, **kwargs) -> None: + super(AggregateFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) + self.type = 'Aggregate' + + +class Serialization(Model): + """Describes how data from an input is serialized or how data is serialized + when written to an output. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetSerialization, CustomClrSerialization, + CsvSerialization, JsonSerialization, AvroSerialization + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Parquet': 'ParquetSerialization', 'CustomClr': 'CustomClrSerialization', 'Csv': 'CsvSerialization', 'Json': 'JsonSerialization', 'Avro': 'AvroSerialization'} + } + + def __init__(self, **kwargs) -> None: + super(Serialization, self).__init__(**kwargs) + self.type = None + + +class AvroSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
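For concreteness, a sketch of choosing a wire format for an input or output (illustrative; `CsvSerialization` and its `field_delimiter`/`encoding` parameters are sibling models generated later in this file and are assumed here):

```python
from azure.mgmt.streamanalytics.models import AvroSerialization, CsvSerialization

# Each subclass pins its own discriminator on the wire, e.g.
# {'type': 'Avro', ...} or {'type': 'Csv', ...}.
avro = AvroSerialization(properties={})
csv = CsvSerialization(field_delimiter=',', encoding='UTF8')
```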
+ :type type: str + :param properties: The properties that are associated with the Avro + serialization type. Required on PUT (CreateOrReplace) requests. + :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(AvroSerialization, self).__init__(**kwargs) + self.properties = properties + self.type = 'Avro' + + +class OutputDataSource(Model): + """Describes the data source that output will be written to. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobOutputDataSource, AzureTableOutputDataSource, + EventHubOutputDataSource, EventHubV2OutputDataSource, + AzureSqlDatabaseOutputDataSource, AzureSynapseOutputDataSource, + DocumentDbOutputDataSource, AzureFunctionOutputDataSource, + ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, + PowerBIOutputDataSource, AzureDataLakeStoreOutputDataSource + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource'} + } + + def __init__(self, **kwargs) -> None: + super(OutputDataSource, self).__init__(**kwargs) + self.type = None + + +class AzureDataLakeStoreOutputDataSource(OutputDataSource): + """Describes an Azure Data Lake Store output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid + access token that can then be used to authenticate with the data source. A + valid refresh token is currently only obtainable via the Azure Portal. It + is recommended to put a dummy string value here when creating the data + source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on + PUT (CreateOrReplace) requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the + user that was used to obtain the refresh token. Use this property to help + remember which user was used to obtain the refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was + used to obtain the refresh token. Use this property to help remember which + user was used to obtain the refresh token. 
+ :type token_user_display_name: str + :param account_name: The name of the Azure Data Lake Store account. + Required on PUT (CreateOrReplace) requests. + :type account_name: str + :param tenant_id: The tenant id of the user used to obtain the refresh + token. Required on PUT (CreateOrReplace) requests. + :type tenant_id: str + :param file_path_prefix: The location of the file to which the output + should be written. Required on PUT (CreateOrReplace) requests. + :type file_path_prefix: str + :param date_format: The date format. Wherever {date} appears in + filePathPrefix, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + filePathPrefix, the value of this property is used as the time format + instead. + :type time_format: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'file_path_prefix': {'key': 'properties.filePathPrefix', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__(self, *, refresh_token: str=None, token_user_principal_name: str=None, token_user_display_name: str=None, account_name: str=None, tenant_id: str=None, file_path_prefix: str=None, date_format: str=None, time_format: str=None, authentication_mode=None, **kwargs) -> None: + super(AzureDataLakeStoreOutputDataSource, self).__init__(**kwargs) + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + self.account_name = account_name + self.tenant_id = tenant_id + self.file_path_prefix = file_path_prefix + self.date_format = date_format + self.time_format = time_format + self.authentication_mode = authentication_mode + self.type = 'Microsoft.DataLake/Accounts' + + +class AzureFunctionOutputDataSource(OutputDataSource): + """Defines the metadata of AzureFunctionOutputDataSource. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param function_app_name: The name of your Azure Functions app. + :type function_app_name: str + :param function_name: The name of the function in your Azure Functions + app. + :type function_name: str + :param api_key: If you want to use an Azure Function from another + subscription, you can do so by providing the key to access your function. + :type api_key: str + :param max_batch_size: A property that lets you set the maximum size for + each output batch that's sent to your Azure function. The input unit is in + bytes. By default, this value is 262,144 bytes (256 KB).
+ :type max_batch_size: float + :param max_batch_count: A property that lets you specify the maximum + number of events in each batch that's sent to Azure Functions. The default + value is 100. + :type max_batch_count: float + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, + 'function_name': {'key': 'properties.functionName', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + } + + def __init__(self, *, function_app_name: str=None, function_name: str=None, api_key: str=None, max_batch_size: float=None, max_batch_count: float=None, **kwargs) -> None: + super(AzureFunctionOutputDataSource, self).__init__(**kwargs) + self.function_app_name = function_app_name + self.function_name = function_name + self.api_key = api_key + self.max_batch_size = max_batch_size + self.max_batch_count = max_batch_count + self.type = 'Microsoft.AzureFunction' + + +class FunctionBinding(Model): + """The physical binding of the function. For example, in the Azure Machine + Learning web service’s case, this describes the endpoint. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureMachineLearningStudioFunctionBinding, + JavaScriptFunctionBinding, CSharpFunctionBinding, + AzureMachineLearningServiceFunctionBinding + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding'} + } + + def __init__(self, **kwargs) -> None: + super(FunctionBinding, self).__init__(**kwargs) + self.type = None + + +class AzureMachineLearningServiceFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning web service. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure + Machine Learning web service. + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response + endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning web service + endpoint. + :type inputs: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + :param outputs: A list of outputs from the Azure Machine Learning web + service endpoint execution. + :type outputs: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of + rows for every Azure ML RRS execute request. Default is 1000. + :type batch_size: int + :param number_of_parallel_requests: The number of parallel requests that + will be sent per partition of your job to the machine learning service. + Default is 1. 
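A usage sketch for the Azure Functions output defined above (illustrative values; the batching parameters mirror the documented service defaults):

```python
from azure.mgmt.streamanalytics.models import AzureFunctionOutputDataSource

function_output = AzureFunctionOutputDataSource(
    function_app_name='example-function-app',
    function_name='ProcessEvents',
    api_key='<function-key>',  # only needed for a function in another subscription
    max_batch_size=262144,     # bytes; matches the documented 256 KB default
    max_batch_count=100,       # events per batch; matches the documented default
)
```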
+ :type number_of_parallel_requests: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, + } + + def __init__(self, *, endpoint: str=None, api_key: str=None, inputs=None, outputs=None, batch_size: int=None, number_of_parallel_requests: int=None, **kwargs) -> None: + super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) + self.endpoint = endpoint + self.api_key = api_key + self.inputs = inputs + self.outputs = outputs + self.batch_size = batch_size + self.number_of_parallel_requests = number_of_parallel_requests + self.type = 'Microsoft.MachineLearningServices' + + +class FunctionRetrieveDefaultDefinitionParameters(Model): + """Parameters used to specify the type of function to retrieve the default + definition for. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: + AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, + AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, + JavaScriptFunctionRetrieveDefaultDefinitionParameters, + CSharpFunctionRetrieveDefaultDefinitionParameters + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + } + + _subtype_map = { + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters'} + } + + def __init__(self, **kwargs) -> None: + super(FunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = None + + +class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an + Azure Machine Learning web service function. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the + Azure Machine Learning web service. + :type execute_endpoint: str + :param udf_type: The function type. 
Possible values include: 'Scalar' + :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'}, + } + + def __init__(self, *, execute_endpoint: str=None, udf_type=None, **kwargs) -> None: + super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.execute_endpoint = execute_endpoint + self.udf_type = udf_type + self.binding_type = 'Microsoft.MachineLearningServices' + + +class AzureMachineLearningServiceInputColumn(Model): + """Describes an input column for the Azure Machine Learning web service + endpoint. + + :param name: The name of the input column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + input column. + :type data_type: str + :param map_to: The zero based index of the function parameter this input + maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__(self, *, name: str=None, data_type: str=None, map_to: int=None, **kwargs) -> None: + super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + self.name = name + self.data_type = data_type + self.map_to = map_to + + +class AzureMachineLearningServiceInputs(Model): + """The inputs for the Azure Machine Learning web service endpoint. + + :param name: The name of the input. This is the name provided while + authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine + Learning web service endpoint. + :type column_names: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + } + + def __init__(self, *, name: str=None, column_names=None, **kwargs) -> None: + super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + self.name = name + self.column_names = column_names + + +class AzureMachineLearningServiceOutputColumn(Model): + """Describes an output column for the Azure Machine Learning web service + endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + output column. + :type data_type: str + :param map_to: The zero based index of the function parameter this output + maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__(self, *, name: str=None, data_type: str=None, map_to: int=None, **kwargs) -> None: + super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) + self.name = name + self.data_type = data_type + self.map_to = map_to + + +class AzureMachineLearningStudioFunctionBinding(FunctionBinding): + """The binding to an Azure Machine Learning Studio. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server.
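To show how the binding and its column models fit together, a sketch of an Azure Machine Learning web service binding (the endpoint, key, and column values are placeholders):

```python
from azure.mgmt.streamanalytics.models import (
    AzureMachineLearningServiceFunctionBinding,
    AzureMachineLearningServiceInputColumn,
    AzureMachineLearningServiceOutputColumn,
)

binding = AzureMachineLearningServiceFunctionBinding(
    endpoint='https://example-endpoint.azureml.net/score',  # placeholder RRS endpoint
    api_key='<api-key>',
    inputs=[AzureMachineLearningServiceInputColumn(
        name='tweet', data_type='nvarchar(max)', map_to=0)],
    outputs=[AzureMachineLearningServiceOutputColumn(
        name='sentiment', data_type='nvarchar(max)', map_to=0)],
    batch_size=1000,                # 1-10000 rows per RRS request
    number_of_parallel_requests=1,  # per partition of the job
)
```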
+ :type type: str + :param endpoint: The Request-Response execute endpoint of the Azure + Machine Learning Studio. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs + :type endpoint: str + :param api_key: The API key used to authenticate with Request-Response + endpoint. + :type api_key: str + :param inputs: The inputs for the Azure Machine Learning Studio endpoint. + :type inputs: + ~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputs + :param outputs: A list of outputs from the Azure Machine Learning Studio + endpoint execution. + :type outputs: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioOutputColumn] + :param batch_size: Number between 1 and 10000 describing maximum number of + rows for every Azure ML RRS execute request. Default is 1000. + :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__(self, *, endpoint: str=None, api_key: str=None, inputs=None, outputs=None, batch_size: int=None, **kwargs) -> None: + super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) + self.endpoint = endpoint + self.api_key = api_key + self.inputs = inputs + self.outputs = outputs + self.batch_size = batch_size + self.type = 'Microsoft.MachineLearning/WebService' + + +class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for an + Azure Machine Learning Studio function. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + :param execute_endpoint: The Request-Response execute endpoint of the + Azure Machine Learning Studio. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs + :type execute_endpoint: str + :param udf_type: The function type. Possible values include: 'Scalar' + :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'}, + } + + def __init__(self, *, execute_endpoint: str=None, udf_type=None, **kwargs) -> None: + super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.execute_endpoint = execute_endpoint + self.udf_type = udf_type + self.binding_type = 'Microsoft.MachineLearning/WebService' + + +class AzureMachineLearningStudioInputColumn(Model): + """Describes an input column for the Azure Machine Learning Studio endpoint. + + :param name: The name of the input column. 
+ :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + input column. A list of valid Azure Machine Learning data types is + described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx + . + :type data_type: str + :param map_to: The zero based index of the function parameter this input + maps to. + :type map_to: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'map_to': {'key': 'mapTo', 'type': 'int'}, + } + + def __init__(self, *, name: str=None, data_type: str=None, map_to: int=None, **kwargs) -> None: + super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) + self.name = name + self.data_type = data_type + self.map_to = map_to + + +class AzureMachineLearningStudioInputs(Model): + """The inputs for the Azure Machine Learning Studio endpoint. + + :param name: The name of the input. This is the name provided while + authoring the endpoint. + :type name: str + :param column_names: A list of input columns for the Azure Machine + Learning Studio endpoint. + :type column_names: + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputColumn] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, + } + + def __init__(self, *, name: str=None, column_names=None, **kwargs) -> None: + super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) + self.name = name + self.column_names = column_names + + +class AzureMachineLearningStudioOutputColumn(Model): + """Describes an output column for the Azure Machine Learning Studio endpoint. + + :param name: The name of the output column. + :type name: str + :param data_type: The (Azure Machine Learning supported) data type of the + output column. A list of valid Azure Machine Learning data types is + described at https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx + . + :type data_type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, data_type: str=None, **kwargs) -> None: + super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) + self.name = name + self.data_type = data_type + + +class AzureSqlDatabaseDataSourceProperties(Model): + """The properties that are associated with an Azure SQL database data source. + + :param server: The name of the SQL server containing the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure + SQL database. Required on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max batch count for writes to the SQL database; + the default value is 10,000. Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max writer count; currently only 1 (single + writer) and 0 (based on query partition) are available. Optional on PUT + requests.
+ :type max_writer_count: float + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'max_batch_count': {'key': 'maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__(self, *, server: str=None, database: str=None, user: str=None, password: str=None, table: str=None, max_batch_count: float=None, max_writer_count: float=None, authentication_mode=None, **kwargs) -> None: + super(AzureSqlDatabaseDataSourceProperties, self).__init__(**kwargs) + self.server = server + self.database = database + self.user = user + self.password = password + self.table = table + self.max_batch_count = max_batch_count + self.max_writer_count = max_writer_count + self.authentication_mode = authentication_mode + + +class AzureSqlDatabaseOutputDataSource(OutputDataSource): + """Describes an Azure SQL database output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param server: The name of the SQL server containing the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param user: The user name that will be used to connect to the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure + SQL database. Required on PUT (CreateOrReplace) requests. + :type password: str + :param table: The name of the table in the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type table: str + :param max_batch_count: Max batch count for writes to the SQL database; + the default value is 10,000. Optional on PUT requests. + :type max_batch_count: float + :param max_writer_count: Max writer count; currently only 1 (single + writer) and 0 (based on query partition) are available. Optional on PUT + requests. + :type max_writer_count: float + :param authentication_mode: Authentication Mode.
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, + 'max_writer_count': {'key': 'properties.maxWriterCount', 'type': 'float'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__(self, *, server: str=None, database: str=None, user: str=None, password: str=None, table: str=None, max_batch_count: float=None, max_writer_count: float=None, authentication_mode=None, **kwargs) -> None: + super(AzureSqlDatabaseOutputDataSource, self).__init__(**kwargs) + self.server = server + self.database = database + self.user = user + self.password = password + self.table = table + self.max_batch_count = max_batch_count + self.max_writer_count = max_writer_count + self.authentication_mode = authentication_mode + self.type = 'Microsoft.Sql/Server/Database' + + +class ReferenceInputDataSource(Model): + """Describes an input data source that contains reference data. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobReferenceInputDataSource, + AzureSqlReferenceInputDataSource + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Storage/Blob': 'BlobReferenceInputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlReferenceInputDataSource'} + } + + def __init__(self, **kwargs) -> None: + super(ReferenceInputDataSource, self).__init__(**kwargs) + self.type = None + + +class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): + """Describes an Azure SQL database reference input data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param properties: + :type properties: + ~azure.mgmt.streamanalytics.models.AzureSqlReferenceInputDataSourceProperties + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) + self.properties = properties + self.type = 'Microsoft.Sql/Server/Database' + + +class AzureSqlReferenceInputDataSourceProperties(Model): + """AzureSqlReferenceInputDataSourceProperties. + + :param server: This element is associated with the datasource element. + This is the name of the server that contains the database that will be + read from. + :type server: str + :param database: This element is associated with the datasource element. + This is the name of the database that reference data will be read from.
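A usage sketch for the SQL output model just defined (placeholder names; `max_writer_count` semantics are as documented above):

```python
from azure.mgmt.streamanalytics.models import AzureSqlDatabaseOutputDataSource

sql_output = AzureSqlDatabaseOutputDataSource(
    server='example-sqlserver',
    database='exampledb',
    user='sqluser',
    password='<password>',
    table='Events',
    max_batch_count=10000,  # documented default
    max_writer_count=1,     # 1 = single writer, 0 = one writer per query partition
)
```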
+ :type database: str + :param user: This element is associated with the datasource element. This + is the user name that will be used to connect to the SQL Database + instance. + :type user: str + :param password: This element is associated with the datasource element. + This is the password that will be used to connect to the SQL Database + instance. + :type password: str + :param table: This element is associated with the datasource element. The + name of the table in the Azure SQL database. + :type table: str + :param refresh_type: This element is associated with the datasource + element. This element is of enum type. It indicates which data refresh + option to use: Static, RefreshPeriodicallyWithFull, or + RefreshPeriodicallyWithDelta. + :type refresh_type: str + :param refresh_rate: This element is associated with the datasource + element. This indicates how frequently the data will be fetched from the + database. It is of DateTime format. + :type refresh_rate: str + :param full_snapshot_query: This element is associated with the datasource + element. This query is used to fetch data from the SQL database. + :type full_snapshot_query: str + :param delta_snapshot_query: This element is associated with the + datasource element. This query is used to fetch incremental changes from + the SQL database. To use this option, we recommend using temporal tables + in Azure SQL Database. + :type delta_snapshot_query: str + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'refresh_type': {'key': 'refreshType', 'type': 'str'}, + 'refresh_rate': {'key': 'refreshRate', 'type': 'str'}, + 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'}, + 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'}, + } + + def __init__(self, *, server: str=None, database: str=None, user: str=None, password: str=None, table: str=None, refresh_type: str=None, refresh_rate: str=None, full_snapshot_query: str=None, delta_snapshot_query: str=None, **kwargs) -> None: + super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs) + self.server = server + self.database = database + self.user = user + self.password = password + self.table = table + self.refresh_type = refresh_type + self.refresh_rate = refresh_rate + self.full_snapshot_query = full_snapshot_query + self.delta_snapshot_query = delta_snapshot_query + + +class AzureSynapseDataSourceProperties(Model): + """The properties that are associated with an Azure Synapse data source. + + :param server: The name of the SQL server containing the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param table: The name of the table in the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type table: str + :param user: The user name that will be used to connect to the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure + SQL database. Required on PUT (CreateOrReplace) requests.
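And a sketch of the reference-input counterpart, wiring the properties bag into its datasource wrapper (placeholder names and queries; the refresh values follow the enum and DateTime-format notes above):

```python
from azure.mgmt.streamanalytics.models import (
    AzureSqlReferenceInputDataSource,
    AzureSqlReferenceInputDataSourceProperties,
)

reference_input = AzureSqlReferenceInputDataSource(
    properties=AzureSqlReferenceInputDataSourceProperties(
        server='example-sqlserver',
        database='refdb',
        user='sqluser',
        password='<password>',
        refresh_type='RefreshPeriodicallyWithFull',
        refresh_rate='00:05:00',
        full_snapshot_query='SELECT Id, Value FROM dbo.ReferenceData',
    ),
)
```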
+ :type password: str + """ + + _attribute_map = { + 'server': {'key': 'server', 'type': 'str'}, + 'database': {'key': 'database', 'type': 'str'}, + 'table': {'key': 'table', 'type': 'str'}, + 'user': {'key': 'user', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__(self, *, server: str=None, database: str=None, table: str=None, user: str=None, password: str=None, **kwargs) -> None: + super(AzureSynapseDataSourceProperties, self).__init__(**kwargs) + self.server = server + self.database = database + self.table = table + self.user = user + self.password = password + + +class AzureSynapseOutputDataSource(OutputDataSource): + """Describes an Azure Synapse output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param server: The name of the SQL server containing the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type server: str + :param database: The name of the Azure SQL database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param table: The name of the table in the Azure SQL database. Required on + PUT (CreateOrReplace) requests. + :type table: str + :param user: The user name that will be used to connect to the Azure SQL + database. Required on PUT (CreateOrReplace) requests. + :type user: str + :param password: The password that will be used to connect to the Azure + SQL database. Required on PUT (CreateOrReplace) requests. + :type password: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + } + + def __init__(self, *, server: str=None, database: str=None, table: str=None, user: str=None, password: str=None, **kwargs) -> None: + super(AzureSynapseOutputDataSource, self).__init__(**kwargs) + self.server = server + self.database = database + self.table = table + self.user = user + self.password = password + self.type = 'Microsoft.Sql/Server/DataWarehouse' + + +class AzureTableOutputDataSource(OutputDataSource): + """Describes an Azure Table output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param account_name: The name of the Azure Storage account. Required on + PUT (CreateOrReplace) requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. + Required on PUT (CreateOrReplace) requests. + :type account_key: str + :param table: The name of the Azure Table. Required on PUT + (CreateOrReplace) requests. + :type table: str + :param partition_key: This element indicates the name of a column from the + SELECT statement in the query that will be used as the partition key for + the Azure Table. Required on PUT (CreateOrReplace) requests. + :type partition_key: str + :param row_key: This element indicates the name of a column from the + SELECT statement in the query that will be used as the row key for the + Azure Table. Required on PUT (CreateOrReplace) requests. 
+ :type row_key: str + :param columns_to_remove: If specified, each item in the array is the name + of a column to remove (if present) from output event entities. + :type columns_to_remove: list[str] + :param batch_size: The number of rows to write to the Azure Table at a + time. + :type batch_size: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'properties.accountName', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'row_key': {'key': 'properties.rowKey', 'type': 'str'}, + 'columns_to_remove': {'key': 'properties.columnsToRemove', 'type': '[str]'}, + 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, + } + + def __init__(self, *, account_name: str=None, account_key: str=None, table: str=None, partition_key: str=None, row_key: str=None, columns_to_remove=None, batch_size: int=None, **kwargs) -> None: + super(AzureTableOutputDataSource, self).__init__(**kwargs) + self.account_name = account_name + self.account_key = account_key + self.table = table + self.partition_key = partition_key + self.row_key = row_key + self.columns_to_remove = columns_to_remove + self.batch_size = batch_size + self.type = 'Microsoft.Storage/Table' + + +class BlobDataSourceProperties(Model): + """The properties that are associated with a blob data source. + + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. 
+ :type time_format: str + """ + + _attribute_map = { + 'storage_accounts': {'key': 'storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path_pattern': {'key': 'pathPattern', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'str'}, + 'time_format': {'key': 'timeFormat', 'type': 'str'}, + } + + def __init__(self, *, storage_accounts=None, container: str=None, path_pattern: str=None, date_format: str=None, time_format: str=None, **kwargs) -> None: + super(BlobDataSourceProperties, self).__init__(**kwargs) + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + + +class BlobOutputDataSource(OutputDataSource): + """Describes a blob output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. + :type time_format: str + :param authentication_mode: Authentication Mode. 
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + } + + def __init__(self, *, storage_accounts=None, container: str=None, path_pattern: str=None, date_format: str=None, time_format: str=None, authentication_mode=None, **kwargs) -> None: + super(BlobOutputDataSource, self).__init__(**kwargs) + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + self.authentication_mode = authentication_mode + self.type = 'Microsoft.Storage/Blob' + + +class BlobReferenceInputDataSource(ReferenceInputDataSource): + """Describes a blob input data source that contains reference data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. 
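A sketch of the {date}/{time} token expansion described above for blob outputs (placeholder account and container; `StorageAccount` is a sibling model generated elsewhere in this file):

```python
from azure.mgmt.streamanalytics.models import BlobOutputDataSource, StorageAccount

# With pathPattern 'logs/{date}/{time}' and the formats below, blobs land
# under paths such as 'logs/2020/11/26/10'.
blob_output = BlobOutputDataSource(
    storage_accounts=[StorageAccount(account_name='examplestorage',
                                     account_key='<account-key>')],
    container='output-container',
    path_pattern='logs/{date}/{time}',
    date_format='yyyy/MM/dd',
    time_format='HH',
)
```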
+ :type time_format: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + } + + def __init__(self, *, storage_accounts=None, container: str=None, path_pattern: str=None, date_format: str=None, time_format: str=None, **kwargs) -> None: + super(BlobReferenceInputDataSource, self).__init__(**kwargs) + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + self.type = 'Microsoft.Storage/Blob' + + +class StreamInputDataSource(Model): + """Describes an input data source that contains stream data. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobStreamInputDataSource, EventHubStreamInputDataSource, + EventHubV2StreamInputDataSource, IoTHubStreamInputDataSource + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Microsoft.Storage/Blob': 'BlobStreamInputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubStreamInputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2StreamInputDataSource', 'Microsoft.Devices/IotHubs': 'IoTHubStreamInputDataSource'} + } + + def __init__(self, **kwargs) -> None: + super(StreamInputDataSource, self).__init__(**kwargs) + self.type = None + + +class BlobStreamInputDataSource(StreamInputDataSource): + """Describes a blob input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param storage_accounts: A list of one or more Azure Storage accounts. + Required on PUT (CreateOrReplace) requests. + :type storage_accounts: + list[~azure.mgmt.streamanalytics.models.StorageAccount] + :param container: The name of a container within the associated Storage + account. This container contains either the blob(s) to be read from or + written to. Required on PUT (CreateOrReplace) requests. + :type container: str + :param path_pattern: The blob path pattern. Not a regular expression. It + represents a pattern against which blob names will be matched to determine + whether or not they should be included as input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a more detailed explanation and example. + :type path_pattern: str + :param date_format: The date format. Wherever {date} appears in + pathPattern, the value of this property is used as the date format + instead. + :type date_format: str + :param time_format: The time format. Wherever {time} appears in + pathPattern, the value of this property is used as the time format + instead. + :type time_format: str + :param source_partition_count: The partition count of the blob input data + source. 
Range 1 - 256. + :type source_partition_count: int + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccount]'}, + 'container': {'key': 'properties.container', 'type': 'str'}, + 'path_pattern': {'key': 'properties.pathPattern', 'type': 'str'}, + 'date_format': {'key': 'properties.dateFormat', 'type': 'str'}, + 'time_format': {'key': 'properties.timeFormat', 'type': 'str'}, + 'source_partition_count': {'key': 'properties.sourcePartitionCount', 'type': 'int'}, + } + + def __init__(self, *, storage_accounts=None, container: str=None, path_pattern: str=None, date_format: str=None, time_format: str=None, source_partition_count: int=None, **kwargs) -> None: + super(BlobStreamInputDataSource, self).__init__(**kwargs) + self.storage_accounts = storage_accounts + self.container = container + self.path_pattern = path_pattern + self.date_format = date_format + self.time_format = time_format + self.source_partition_count = source_partition_count + self.type = 'Microsoft.Storage/Blob' + + +class CloudError(Model): + """CloudError. + """ + + _attribute_map = { + } + + +class Resource(Model): + """Resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class TrackedResource(Resource): + """The resource model definition for an ARM tracked top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: Resource tags.
+ :type tags: dict[str, str] + :param location: The geo-location where the resource lives. + :type location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__(self, *, tags=None, location: str=None, **kwargs) -> None: + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location + + + class Cluster(TrackedResource): + """A Stream Analytics Cluster object. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: The geo-location where the resource lives. + :type location: str + :param sku: The SKU of the cluster. + :type sku: ~azure.mgmt.streamanalytics.models.ClusterSku + :ivar etag: The current entity tag for the cluster. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param properties: The properties associated with a Stream Analytics + cluster. + :type properties: ~azure.mgmt.streamanalytics.models.ClusterProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ClusterProperties'}, + } + + def __init__(self, *, tags=None, location: str=None, sku=None, properties=None, **kwargs) -> None: + super(Cluster, self).__init__(tags=tags, location=location, **kwargs) + self.sku = sku + self.etag = None + self.properties = properties + + + class ClusterInfo(Model): + """A reference to a Stream Analytics cluster. + + :param id: The resource id of the cluster. + :type id: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, *, id: str=None, **kwargs) -> None: + super(ClusterInfo, self).__init__(**kwargs) + self.id = id + + + class ClusterJob(Model): + """A streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource ID of the streaming job. + :vartype id: str + :ivar streaming_units: The number of streaming units that are used by the + streaming job.
+ :vartype streaming_units: int + :param job_state: Possible values include: 'Created', 'Starting', + 'Running', 'Stopping', 'Stopped', 'Deleting', 'Failed', 'Degraded', + 'Restarting', 'Scaling' + :type job_state: str or ~azure.mgmt.streamanalytics.models.JobState + """ + + _validation = { + 'id': {'readonly': True}, + 'streaming_units': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'streaming_units': {'key': 'streamingUnits', 'type': 'int'}, + 'job_state': {'key': 'jobState', 'type': 'str'}, + } + + def __init__(self, *, job_state=None, **kwargs) -> None: + super(ClusterJob, self).__init__(**kwargs) + self.id = None + self.streaming_units = None + self.job_state = job_state + + +class ClusterProperties(Model): + """The properties associated with a Stream Analytics cluster. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar created_date: The date this cluster was created. + :vartype created_date: datetime + :ivar cluster_id: Unique identifier for the cluster. + :vartype cluster_id: str + :param provisioning_state: Possible values include: 'Succeeded', 'Failed', + 'Canceled', 'InProgress' + :type provisioning_state: str or + ~azure.mgmt.streamanalytics.models.ClusterProvisioningState + :ivar capacity_allocated: Represents the number of streaming units + currently being used on the cluster. + :vartype capacity_allocated: int + :ivar capacity_assigned: Represents the sum of the SUs of all streaming + jobs associated with the cluster. If all of the jobs were running, this + would be the capacity allocated. + :vartype capacity_assigned: int + """ + + _validation = { + 'created_date': {'readonly': True}, + 'cluster_id': {'readonly': True}, + 'capacity_allocated': {'readonly': True}, + 'capacity_assigned': {'readonly': True}, + } + + _attribute_map = { + 'created_date': {'key': 'createdDate', 'type': 'iso-8601'}, + 'cluster_id': {'key': 'clusterId', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'capacity_allocated': {'key': 'capacityAllocated', 'type': 'int'}, + 'capacity_assigned': {'key': 'capacityAssigned', 'type': 'int'}, + } + + def __init__(self, *, provisioning_state=None, **kwargs) -> None: + super(ClusterProperties, self).__init__(**kwargs) + self.created_date = None + self.cluster_id = None + self.provisioning_state = provisioning_state + self.capacity_allocated = None + self.capacity_assigned = None + + +class ClusterSku(Model): + """The SKU of the cluster. This determines the size/capacity of the cluster. + Required on PUT (CreateOrUpdate) requests. + + :param name: Specifies the SKU name of the cluster. Required on PUT + (CreateOrUpdate) requests. Possible values include: 'Default' + :type name: str or ~azure.mgmt.streamanalytics.models.ClusterSkuName + :param capacity: Denotes the number of streaming units the cluster can + support. Valid values for this property are multiples of 36 with a minimum + value of 36 and maximum value of 216. Required on PUT (CreateOrUpdate) + requests. + :type capacity: int + """ + + _validation = { + 'capacity': {'maximum': 216, 'minimum': 36}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__(self, *, name=None, capacity: int=None, **kwargs) -> None: + super(ClusterSku, self).__init__(**kwargs) + self.name = name + self.capacity = capacity + + +class Compression(Model): + """Describes how input data is compressed. 
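+ + A minimal construction sketch (the 'GZip' value is one illustrative compression type, not the only value the service accepts):: + + compression = Compression(type='GZip')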
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of compression that the input uses. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, **kwargs) -> None: + super(Compression, self).__init__(**kwargs) + self.type = type + + + class CSharpFunctionBinding(FunctionBinding): + """The binding to a CSharp function. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param script: The Csharp code containing a single function definition. + :type script: str + :param dll_path: The path of the assembly (DLL) that contains the + function. + :type dll_path: str + :param class_property: The name of the class that contains the function. + :type class_property: str + :param method: The name of the method that implements the function. + :type method: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'script': {'key': 'properties.script', 'type': 'str'}, + 'dll_path': {'key': 'properties.dllPath', 'type': 'str'}, + 'class_property': {'key': 'properties.class', 'type': 'str'}, + 'method': {'key': 'properties.method', 'type': 'str'}, + } + + def __init__(self, *, script: str=None, dll_path: str=None, class_property: str=None, method: str=None, **kwargs) -> None: + super(CSharpFunctionBinding, self).__init__(**kwargs) + self.script = script + self.dll_path = dll_path + self.class_property = class_property + self.method = method + self.type = 'Microsoft.StreamAnalytics/CLRUdf' + + + class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for a + CSharp function. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + :param script: The CSharp code containing a single function definition. + :type script: str + :param udf_type: The function type. Possible values include: 'Scalar' + :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'}, + } + + def __init__(self, *, script: str=None, udf_type=None, **kwargs) -> None: + super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.script = script + self.udf_type = udf_type + self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' + + + class CsvSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in CSV format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param field_delimiter: Specifies the delimiter that will be used to + separate comma-separated value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input + or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for a list of supported values.
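+ Typical delimiters include ',' (comma), ';' (semicolon), and tab; these are illustrative examples rather than the full list.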
Required on PUT (CreateOrReplace) + requests. + :type field_delimiter: str + :param encoding: Specifies the encoding of the incoming data in the case + of input and the encoding of outgoing data in the case of output. Required + on PUT (CreateOrReplace) requests. Possible values include: 'UTF8' + :type encoding: str or ~azure.mgmt.streamanalytics.models.Encoding + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'}, + 'encoding': {'key': 'properties.encoding', 'type': 'str'}, + } + + def __init__(self, *, field_delimiter: str=None, encoding=None, **kwargs) -> None: + super(CsvSerialization, self).__init__(**kwargs) + self.field_delimiter = field_delimiter + self.encoding = encoding + self.type = 'Csv' + + + class CustomClrSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in custom format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param serialization_dll_path: The serialization library path. + :type serialization_dll_path: str + :param serialization_class_name: The serialization class name. + :type serialization_class_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'}, + 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'}, + } + + def __init__(self, *, serialization_dll_path: str=None, serialization_class_name: str=None, **kwargs) -> None: + super(CustomClrSerialization, self).__init__(**kwargs) + self.serialization_dll_path = serialization_dll_path + self.serialization_class_name = serialization_class_name + self.type = 'CustomClr' + + + class DiagnosticCondition(Model): + """Condition applicable to the resource, or to the job overall, that warrants + customer attention. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar since: The UTC timestamp of when the condition started. Customers + should be able to find a corresponding event in the ops log around this + time. + :vartype since: str + :ivar code: The opaque diagnostic code. + :vartype code: str + :ivar message: The human-readable message describing the condition in + detail. Localized in the Accept-Language of the client request. + :vartype message: str + """ + + _validation = { + 'since': {'readonly': True}, + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'since': {'key': 'since', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(DiagnosticCondition, self).__init__(**kwargs) + self.since = None + self.code = None + self.message = None + + + class Diagnostics(Model): + """Describes conditions applicable to the Input, Output, or the job overall, + that warrant customer attention. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar conditions: A collection of zero or more conditions applicable to + the resource, or to the job overall, that warrant customer attention.
+ :vartype conditions: + list[~azure.mgmt.streamanalytics.models.DiagnosticCondition] + """ + + _validation = { + 'conditions': {'readonly': True}, + } + + _attribute_map = { + 'conditions': {'key': 'conditions', 'type': '[DiagnosticCondition]'}, + } + + def __init__(self, **kwargs) -> None: + super(Diagnostics, self).__init__(**kwargs) + self.conditions = None + + + class DocumentDbOutputDataSource(OutputDataSource): + """Describes a DocumentDB output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param account_id: The DocumentDB account name or ID. Required on PUT + (CreateOrReplace) requests. + :type account_id: str + :param account_key: The account key for the DocumentDB account. Required + on PUT (CreateOrReplace) requests. + :type account_key: str + :param database: The name of the DocumentDB database. Required on PUT + (CreateOrReplace) requests. + :type database: str + :param collection_name_pattern: The collection name pattern for the + collections to be used. The collection name format can be constructed + using the optional {partition} token, where partitions start from 0. See + the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output + for more information. Required on PUT (CreateOrReplace) requests. + :type collection_name_pattern: str + :param partition_key: The name of the field in output events used to + specify the key for partitioning output across collections. If + 'collectionNamePattern' contains the {partition} token, this property is + required to be specified. + :type partition_key: str + :param document_id: The name of the field in output events used to specify + the primary key on which insert or update operations are based. + :type document_id: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + 'account_key': {'key': 'properties.accountKey', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'collection_name_pattern': {'key': 'properties.collectionNamePattern', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'document_id': {'key': 'properties.documentId', 'type': 'str'}, + } + + def __init__(self, *, account_id: str=None, account_key: str=None, database: str=None, collection_name_pattern: str=None, partition_key: str=None, document_id: str=None, **kwargs) -> None: + super(DocumentDbOutputDataSource, self).__init__(**kwargs) + self.account_id = account_id + self.account_key = account_key + self.database = database + self.collection_name_pattern = collection_name_pattern + self.partition_key = partition_key + self.document_id = document_id + self.type = 'Microsoft.Storage/DocumentDB' + + + class Error(Model): + """Common error representation. + + :param error: Error definition properties. + :type error: ~azure.mgmt.streamanalytics.models.ErrorError + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorError'}, + } + + def __init__(self, *, error=None, **kwargs) -> None: + super(Error, self).__init__(**kwargs) + self.error = error + + + class ErrorException(HttpOperationError): + """Server responded with exception of type: 'Error'. + + :param deserialize: A deserializer + :param response: Server response to be deserialized.
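+ + Illustrative handling sketch (the operation call, resource names, and the nested error attribute access are assumptions about the generated client, not verbatim API documentation):: + + try: + client.clusters.get('example-rg', 'example-cluster') + except ErrorException as err: + details = err.error.error  # deserialized ErrorError model (assumed shape) + print(details.code, details.message)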
+ """ + + def __init__(self, deserialize, response, *args): + + super(ErrorException, self).__init__(deserialize, response, 'Error', *args) + + +class ErrorDetails(Model): + """Common error details representation. + + :param code: Error code. + :type code: str + :param target: Error target. + :type target: str + :param message: Error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, code: str=None, target: str=None, message: str=None, **kwargs) -> None: + super(ErrorDetails, self).__init__(**kwargs) + self.code = code + self.target = target + self.message = message + + +class ErrorError(Model): + """Error definition properties. + + :param code: Error code. + :type code: str + :param message: Error message. + :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~azure.mgmt.streamanalytics.models.ErrorDetails] + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + } + + def __init__(self, *, code: str=None, message: str=None, target: str=None, details=None, **kwargs) -> None: + super(ErrorError, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class ErrorResponse(Model): + """Describes the error that occurred. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar code: Error code associated with the error that occurred. + :vartype code: str + :ivar message: Describes the error in detail. + :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + + +class ServiceBusDataSourceProperties(Model): + """The common properties that are associated with Service Bus data sources + (Queues, Topics, Event Hubs, etc.). + + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. 
Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, **kwargs) -> None: + super(ServiceBusDataSourceProperties, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + + +class EventHubDataSourceProperties(ServiceBusDataSourceProperties): + """The common properties that are associated with Event Hub data sources. + + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + """ + + _attribute_map = { + 'service_bus_namespace': {'key': 'serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'eventHubName', 'type': 'str'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, event_hub_name: str=None, **kwargs) -> None: + super(EventHubDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) + self.event_hub_name = event_hub_name + + +class EventHubOutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. 
+ :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which + partition to send event data. + :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, event_hub_name: str=None, partition_key: str=None, property_columns=None, **kwargs) -> None: + super(EventHubOutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.partition_key = partition_key + self.property_columns = property_columns + self.type = 'Microsoft.ServiceBus/EventHub' + + +class EventHubStreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. 
Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that + should be used to read events from the Event Hub. Specifying distinct + consumer group names for multiple inputs allows each of those inputs to + receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. + :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, event_hub_name: str=None, consumer_group_name: str=None, **kwargs) -> None: + super(EventHubStreamInputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.consumer_group_name = consumer_group_name + self.type = 'Microsoft.ServiceBus/EventHub' + + +class EventHubV2OutputDataSource(OutputDataSource): + """Describes an Event Hub output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param partition_key: The key/column that is used to determine to which + partition to send event data. 
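+ For example, an event field such as 'deviceId' (an illustrative name, not a required value).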
+ :type partition_key: str + :param property_columns: + :type property_columns: list[str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'partition_key': {'key': 'properties.partitionKey', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, event_hub_name: str=None, partition_key: str=None, property_columns=None, **kwargs) -> None: + super(EventHubV2OutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.partition_key = partition_key + self.property_columns = property_columns + self.type = 'Microsoft.EventHub/EventHub' + + +class EventHubV2StreamInputDataSource(StreamInputDataSource): + """Describes an Event Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param event_hub_name: The name of the Event Hub. Required on PUT + (CreateOrReplace) requests. + :type event_hub_name: str + :param consumer_group_name: The name of an Event Hub Consumer Group that + should be used to read events from the Event Hub. Specifying distinct + consumer group names for multiple inputs allows each of those inputs to + receive the same events from the Event Hub. If not specified, the input + uses the Event Hub’s default consumer group. 
+ :type consumer_group_name: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.eventHubName', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, event_hub_name: str=None, consumer_group_name: str=None, **kwargs) -> None: + super(EventHubV2StreamInputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.event_hub_name = event_hub_name + self.consumer_group_name = consumer_group_name + self.type = 'Microsoft.EventHub/EventHub' + + +class External(Model): + """The storage account where the custom code artifacts are located. + + :param storage_account: + :type storage_account: ~azure.mgmt.streamanalytics.models.StorageAccount + :param container: + :type container: str + :param path: + :type path: str + """ + + _attribute_map = { + 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, + 'container': {'key': 'container', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, storage_account=None, container: str=None, path: str=None, **kwargs) -> None: + super(External, self).__init__(**kwargs) + self.storage_account = storage_account + self.container = container + self.path = path + + +class SubResource(Model): + """The base sub-resource model definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = name + self.type = None + + +class Function(SubResource): + """A function object, containing all information associated with the named + function. All functions are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param properties: The properties that are associated with a function. 
+ :type properties: ~azure.mgmt.streamanalytics.models.FunctionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'FunctionProperties'}, + } + + def __init__(self, *, name: str=None, properties=None, **kwargs) -> None: + super(Function, self).__init__(name=name, **kwargs) + self.properties = properties + + + class FunctionInput(Model): + """Describes one input parameter of a function. + + :param data_type: The (Azure Stream Analytics supported) data type of the + function input parameter. A list of valid Azure Stream Analytics data + types is described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx + :type data_type: str + :param is_configuration_parameter: A flag indicating if the parameter is a + configuration parameter. True if this input parameter is expected to be a + constant. Default is false. + :type is_configuration_parameter: bool + """ + + _attribute_map = { + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'is_configuration_parameter': {'key': 'isConfigurationParameter', 'type': 'bool'}, + } + + def __init__(self, *, data_type: str=None, is_configuration_parameter: bool=None, **kwargs) -> None: + super(FunctionInput, self).__init__(**kwargs) + self.data_type = data_type + self.is_configuration_parameter = is_configuration_parameter + + + class FunctionOutput(Model): + """Describes the output of a function. + + :param data_type: The (Azure Stream Analytics supported) data type of the + function output. A list of valid Azure Stream Analytics data types is + described at https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx + :type data_type: str + """ + + _attribute_map = { + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__(self, *, data_type: str=None, **kwargs) -> None: + super(FunctionOutput, self).__init__(**kwargs) + self.data_type = data_type + + + class Identity(Model): + """Describes how identity is verified. + + :param tenant_id: The tenant id of the identity. + :type tenant_id: str + :param principal_id: The principal id of the identity. + :type principal_id: str + :param type: The type of identity, e.g. 'SystemAssigned'. + :type type: str + """ + + _attribute_map = { + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, tenant_id: str=None, principal_id: str=None, type: str=None, **kwargs) -> None: + super(Identity, self).__init__(**kwargs) + self.tenant_id = tenant_id + self.principal_id = principal_id + self.type = type + + + class Input(SubResource): + """An input object, containing all information associated with the named + input. All inputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param properties: The properties that are associated with an input. + Required on PUT (CreateOrReplace) requests.
+ :type properties: ~azure.mgmt.streamanalytics.models.InputProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'InputProperties'}, + } + + def __init__(self, *, name: str=None, properties=None, **kwargs) -> None: + super(Input, self).__init__(name=name, **kwargs) + self.properties = properties + + + class InputProperties(Model): + """The properties that are associated with an input. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: StreamInputProperties, ReferenceInputProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param serialization: Describes how data from an input is serialized or + how data is serialized when written to an output. Required on PUT + (CreateOrReplace) requests. + :type serialization: ~azure.mgmt.streamanalytics.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, + or the job overall, that warrant customer attention. + :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics + :ivar etag: The current entity tag for the input. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param compression: Describes how input data is compressed. + :type compression: ~azure.mgmt.streamanalytics.models.Compression + :param partition_key: Describes a key in the input data that is used for + partitioning the input data. + :type partition_key: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'serialization': {'key': 'serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'compression': {'key': 'compression', 'type': 'Compression'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Stream': 'StreamInputProperties', 'Reference': 'ReferenceInputProperties'} + } + + def __init__(self, *, serialization=None, compression=None, partition_key: str=None, **kwargs) -> None: + super(InputProperties, self).__init__(**kwargs) + self.serialization = serialization + self.diagnostics = None + self.etag = None + self.compression = compression + self.partition_key = partition_key + self.type = None + + + class IoTHubStreamInputDataSource(StreamInputDataSource): + """Describes an IoT Hub input data source that contains stream data. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param iot_hub_namespace: The name or the URI of the IoT Hub. Required on + PUT (CreateOrReplace) requests. + :type iot_hub_namespace: str + :param shared_access_policy_name: The shared access policy name for the + IoT Hub. This policy must contain at least the Service connect permission. + Required on PUT (CreateOrReplace) requests.
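+ The built-in 'service' policy is a common choice (named here as an example, not a requirement).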
+ :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param consumer_group_name: The name of an IoT Hub Consumer Group that + should be used to read events from the IoT Hub. If not specified, the + input uses the IoT Hub’s default consumer group. + :type consumer_group_name: str + :param endpoint: The IoT Hub endpoint to connect to (e.g. messages/events, + messages/operationsMonitoringEvents, etc.). + :type endpoint: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'iot_hub_namespace': {'key': 'properties.iotHubNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'consumer_group_name': {'key': 'properties.consumerGroupName', 'type': 'str'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + } + + def __init__(self, *, iot_hub_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, consumer_group_name: str=None, endpoint: str=None, **kwargs) -> None: + super(IoTHubStreamInputDataSource, self).__init__(**kwargs) + self.iot_hub_namespace = iot_hub_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.consumer_group_name = consumer_group_name + self.endpoint = endpoint + self.type = 'Microsoft.Devices/IotHubs' + + + class JavaScriptFunctionBinding(FunctionBinding): + """The binding to a JavaScript function. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param script: The JavaScript code containing a single function + definition. For example: 'function (x, y) { return x + y; }' + :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'script': {'key': 'properties.script', 'type': 'str'}, + } + + def __init__(self, *, script: str=None, **kwargs) -> None: + super(JavaScriptFunctionBinding, self).__init__(**kwargs) + self.script = script + self.type = 'Microsoft.StreamAnalytics/JavascriptUdf' + + + class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): + """The parameters needed to retrieve the default function definition for a + JavaScript function. + + All required parameters must be populated in order to send to Azure. + + :param binding_type: Required. Constant filled by server. + :type binding_type: str + :param script: The JavaScript code containing a single function + definition. For example: 'function (x, y) { return x + y; }'. + :type script: str + :param udf_type: The function type.
Possible values include: 'Scalar' + :type udf_type: str or ~azure.mgmt.streamanalytics.models.UdfType + """ + + _validation = { + 'binding_type': {'required': True}, + } + + _attribute_map = { + 'binding_type': {'key': 'bindingType', 'type': 'str'}, + 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, + 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'UdfType'}, + } + + def __init__(self, *, script: str=None, udf_type=None, **kwargs) -> None: + super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.script = script + self.udf_type = udf_type + self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' + + + class StorageAccount(Model): + """The properties that are associated with an Azure Storage account. + + :param account_name: The name of the Azure Storage account. Required on + PUT (CreateOrReplace) requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. + Required on PUT (CreateOrReplace) requests. + :type account_key: str + """ + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + } + + def __init__(self, *, account_name: str=None, account_key: str=None, **kwargs) -> None: + super(StorageAccount, self).__init__(**kwargs) + self.account_name = account_name + self.account_key = account_key + + + class JobStorageAccount(StorageAccount): + """The properties that are associated with an Azure Storage account with MSI. + + :param account_name: The name of the Azure Storage account. Required on + PUT (CreateOrReplace) requests. + :type account_name: str + :param account_key: The account key for the Azure Storage account. + Required on PUT (CreateOrReplace) requests. + :type account_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + """ + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'account_key': {'key': 'accountKey', 'type': 'str'}, + 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, + } + + def __init__(self, *, account_name: str=None, account_key: str=None, authentication_mode=None, **kwargs) -> None: + super(JobStorageAccount, self).__init__(account_name=account_name, account_key=account_key, **kwargs) + self.authentication_mode = authentication_mode + + + class JsonSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param encoding: Specifies the encoding of the incoming data in the case + of input and the encoding of outgoing data in the case of output. Required + on PUT (CreateOrReplace) requests. Possible values include: 'UTF8' + :type encoding: str or ~azure.mgmt.streamanalytics.models.Encoding + :param format: This property applies only to JSON serialization of + outputs. It is not applicable to inputs. This property specifies the + format of the JSON in which the output will be written. The currently + supported values are 'lineSeparated' indicating the output will be + formatted by having each JSON object separated by a new line and 'array' + indicating the output will be formatted as an array of JSON objects.
Default value is 'lineSeparated' + if left null. Possible values include: 'LineSeparated', 'Array' + :type format: str or + ~azure.mgmt.streamanalytics.models.JsonOutputSerializationFormat + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'encoding': {'key': 'properties.encoding', 'type': 'str'}, + 'format': {'key': 'properties.format', 'type': 'str'}, + } + + def __init__(self, *, encoding=None, format=None, **kwargs) -> None: + super(JsonSerialization, self).__init__(**kwargs) + self.encoding = encoding + self.format = format + self.type = 'Json' + + + class OAuthBasedDataSourceProperties(Model): + """The properties that are associated with data sources that use OAuth as + their authentication model. + + :param refresh_token: A refresh token that can be used to obtain a valid + access token that can then be used to authenticate with the data source. A + valid refresh token is currently only obtainable via the Azure Portal. It + is recommended to put a dummy string value here when creating the data + source and then go to the Azure Portal to authenticate the data source, + which will update this property with a valid refresh token. Required on + PUT (CreateOrReplace) requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the + user that was used to obtain the refresh token. Use this property to help + remember which user was used to obtain the refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was + used to obtain the refresh token. Use this property to help remember which + user was used to obtain the refresh token. + :type token_user_display_name: str + """ + + _attribute_map = { + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'token_user_principal_name': {'key': 'tokenUserPrincipalName', 'type': 'str'}, + 'token_user_display_name': {'key': 'tokenUserDisplayName', 'type': 'str'}, + } + + def __init__(self, *, refresh_token: str=None, token_user_principal_name: str=None, token_user_display_name: str=None, **kwargs) -> None: + super(OAuthBasedDataSourceProperties, self).__init__(**kwargs) + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + + + class Operation(Model): + """A Stream Analytics REST API operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The name of the operation being performed on this particular + object. + :vartype name: str + :ivar display: Contains the localized display information for this + particular operation / action. + :vartype display: ~azure.mgmt.streamanalytics.models.OperationDisplay + """ + + _validation = { + 'name': {'readonly': True}, + 'display': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__(self, **kwargs) -> None: + super(Operation, self).__init__(**kwargs) + self.name = None + self.display = None + + + class OperationDisplay(Model): + """Contains the localized display information for this particular operation / + action. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provider: The localized friendly form of the resource provider name.
+ :vartype provider: str + :ivar resource: The localized friendly form of the resource type related + to this action/operation. + :vartype resource: str + :ivar operation: The localized friendly name for the operation. + :vartype operation: str + :ivar description: The localized friendly description for the operation. + :vartype description: str + """ + + _validation = { + 'provider': {'readonly': True}, + 'resource': {'readonly': True}, + 'operation': {'readonly': True}, + 'description': {'readonly': True}, + } + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(OperationDisplay, self).__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + self.description = None + + +class Output(SubResource): + """An output object, containing all information associated with the named + output. All outputs are contained under a streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param datasource: Describes the data source that output will be written + to. Required on PUT (CreateOrReplace) requests. + :type datasource: ~azure.mgmt.streamanalytics.models.OutputDataSource + :param time_window: + :type time_window: str + :param size_window: + :type size_window: float + :param serialization: Describes how data from an input is serialized or + how data is serialized when written to an output. Required on PUT + (CreateOrReplace) requests. + :type serialization: ~azure.mgmt.streamanalytics.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, + or the job overall, that warrant customer attention. + :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics + :ivar etag: The current entity tag for the output. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. 
+ :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'datasource': {'key': 'properties.datasource', 'type': 'OutputDataSource'}, + 'time_window': {'key': 'properties.timeWindow', 'type': 'str'}, + 'size_window': {'key': 'properties.sizeWindow', 'type': 'float'}, + 'serialization': {'key': 'properties.serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'properties.diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, datasource=None, time_window: str=None, size_window: float=None, serialization=None, **kwargs) -> None: + super(Output, self).__init__(name=name, **kwargs) + self.datasource = datasource + self.time_window = time_window + self.size_window = size_window + self.serialization = serialization + self.diagnostics = None + self.etag = None + + + class ParquetSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized + when written to an output in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param properties: The properties that are associated with the Parquet + serialization type. Required on PUT (CreateOrReplace) requests. + :type properties: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(ParquetSerialization, self).__init__(**kwargs) + self.properties = properties + self.type = 'Parquet' + + + class PowerBIOutputDataSource(OutputDataSource): + """Describes a Power BI output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param refresh_token: A refresh token that can be used to obtain a valid + access token that can then be used to authenticate with the data source. A + valid refresh token is currently only obtainable via the Azure Portal. It + is recommended to put a dummy string value here when creating the data + source and then go to the Azure Portal to authenticate the data source, + which will update this property with a valid refresh token. Required on + PUT (CreateOrReplace) requests. + :type refresh_token: str + :param token_user_principal_name: The user principal name (UPN) of the + user that was used to obtain the refresh token. Use this property to help + remember which user was used to obtain the refresh token. + :type token_user_principal_name: str + :param token_user_display_name: The user display name of the user that was + used to obtain the refresh token. Use this property to help remember which + user was used to obtain the refresh token. + :type token_user_display_name: str + :param dataset: The name of the Power BI dataset. Required on PUT + (CreateOrReplace) requests. + :type dataset: str + :param table: The name of the Power BI table under the specified dataset. + Required on PUT (CreateOrReplace) requests. + :type table: str + :param group_id: The ID of the Power BI group.
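+ This is typically the workspace (group) GUID that appears in Power BI URLs.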
+    :type group_id: str
+    :param group_name: The name of the Power BI group. Use this property to
+     help remember which specific Power BI group ID was used.
+    :type group_name: str
+    :param authentication_mode: Authentication Mode. Possible values include:
+     'Msi', 'UserToken', 'ConnectionString'
+    :type authentication_mode: str or
+     ~azure.mgmt.streamanalytics.models.AuthenticationMode
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'},
+        'token_user_principal_name': {'key': 'properties.tokenUserPrincipalName', 'type': 'str'},
+        'token_user_display_name': {'key': 'properties.tokenUserDisplayName', 'type': 'str'},
+        'dataset': {'key': 'properties.dataset', 'type': 'str'},
+        'table': {'key': 'properties.table', 'type': 'str'},
+        'group_id': {'key': 'properties.groupId', 'type': 'str'},
+        'group_name': {'key': 'properties.groupName', 'type': 'str'},
+        'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'},
+    }
+
+    def __init__(self, *, refresh_token: str=None, token_user_principal_name: str=None, token_user_display_name: str=None, dataset: str=None, table: str=None, group_id: str=None, group_name: str=None, authentication_mode=None, **kwargs) -> None:
+        super(PowerBIOutputDataSource, self).__init__(**kwargs)
+        self.refresh_token = refresh_token
+        self.token_user_principal_name = token_user_principal_name
+        self.token_user_display_name = token_user_display_name
+        self.dataset = dataset
+        self.table = table
+        self.group_id = group_id
+        self.group_name = group_name
+        self.authentication_mode = authentication_mode
+        self.type = 'PowerBI'
+
+
+class ProxyResource(Resource):
+    """The resource model definition for an ARM proxy resource. It will have
+    everything other than required location and tags.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar id: Fully qualified resource Id for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+    :vartype id: str
+    :ivar name: The name of the resource
+    :vartype name: str
+    :ivar type: The type of the resource. Ex -
+     Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(ProxyResource, self).__init__(**kwargs)
+
+
+class PrivateEndpoint(ProxyResource):
+    """Complete information about the private endpoint.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar id: Fully qualified resource Id for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+    :vartype id: str
+    :ivar name: The name of the resource
+    :vartype name: str
+    :ivar type: The type of the resource. Ex -
+     Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    :param properties: The properties associated with a private endpoint.
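+     A construction sketch (illustrative; the ARM IDs are placeholders)::
+
+         pe = PrivateEndpoint(properties=PrivateEndpointProperties(
+             manual_private_link_service_connections=[
+                 PrivateLinkServiceConnection(
+                     private_link_service_id='<private-link-service-arm-id>',
+                     group_ids=['<group-id-from-remote-resource>'])]))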
+ :type properties: + ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties + :ivar etag: Unique opaque string (generally a GUID) that represents the + metadata state of the resource (private endpoint) and changes whenever the + resource is updated. Required on PUT (CreateOrUpdate) requests. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateEndpointProperties'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(PrivateEndpoint, self).__init__(**kwargs) + self.properties = properties + self.etag = None + + +class PrivateEndpointProperties(Model): + """The properties associated with a private endpoint. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar created_date: The date when this private endpoint was created. + :vartype created_date: str + :param manual_private_link_service_connections: A list of connections to + the remote resource. Immutable after it is set. + :type manual_private_link_service_connections: + list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] + """ + + _validation = { + 'created_date': {'readonly': True}, + } + + _attribute_map = { + 'created_date': {'key': 'createdDate', 'type': 'str'}, + 'manual_private_link_service_connections': {'key': 'manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, + } + + def __init__(self, *, manual_private_link_service_connections=None, **kwargs) -> None: + super(PrivateEndpointProperties, self).__init__(**kwargs) + self.created_date = None + self.manual_private_link_service_connections = manual_private_link_service_connections + + +class PrivateLinkConnectionState(Model): + """A collection of read-only information about the state of the connection to + the private remote resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar status: Indicates whether the connection has been + Approved/Rejected/Removed by the owner of the remote resource/service. + :vartype status: str + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service + provider require any updates on the consumer. + :vartype actions_required: str + """ + + _validation = { + 'status': {'readonly': True}, + 'description': {'readonly': True}, + 'actions_required': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = None + self.description = None + self.actions_required = None + + +class PrivateLinkServiceConnection(Model): + """A grouping of information about the connection to the remote resource. + + :param private_link_service_id: The resource id of the private link + service. Required on PUT (CreateOrUpdate) requests. 
+ :type private_link_service_id: str + :param group_ids: The ID(s) of the group(s) obtained from the remote + resource that this private endpoint should connect to. Required on PUT + (CreateOrUpdate) requests. + :type group_ids: list[str] + :param request_message: A message passed to the owner of the remote + resource with this connection request. Restricted to 140 chars. + :type request_message: str + :param private_link_service_connection_state: A collection of read-only + information about the state of the connection to the private remote + resource. + :type private_link_service_connection_state: + ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'}, + 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, + 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__(self, *, private_link_service_id: str=None, group_ids=None, request_message: str=None, private_link_service_connection_state=None, **kwargs) -> None: + super(PrivateLinkServiceConnection, self).__init__(**kwargs) + self.private_link_service_id = private_link_service_id + self.group_ids = group_ids + self.request_message = request_message + self.private_link_service_connection_state = private_link_service_connection_state + + +class ReferenceInputProperties(InputProperties): + """The properties that are associated with an input containing reference data. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param serialization: Describes how data from an input is serialized or + how data is serialized when written to an output. Required on PUT + (CreateOrReplace) requests. + :type serialization: ~azure.mgmt.streamanalytics.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, + or the job overall, that warrant customer attention. + :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics + :ivar etag: The current entity tag for the input. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param compression: + :type compression: ~azure.mgmt.streamanalytics.models.Compression + :param partition_key: partitionKey Describes a key in the input data which + is used for partitioning the input data + :type partition_key: str + :param type: Required. Constant filled by server. + :type type: str + :param datasource: Describes an input data source that contains reference + data. Required on PUT (CreateOrReplace) requests. 
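+     For example (a sketch; ``my_serialization`` and ``my_reference_source``
+     are placeholders for a concrete ``Serialization`` and
+     ``ReferenceInputDataSource`` subclass)::
+
+         props = ReferenceInputProperties(
+             serialization=my_serialization,
+             datasource=my_reference_source)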
+ :type datasource: + ~azure.mgmt.streamanalytics.models.ReferenceInputDataSource + """ + + _validation = { + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'serialization': {'key': 'serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'compression': {'key': 'compression', 'type': 'Compression'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'datasource': {'key': 'datasource', 'type': 'ReferenceInputDataSource'}, + } + + def __init__(self, *, serialization=None, compression=None, partition_key: str=None, datasource=None, **kwargs) -> None: + super(ReferenceInputProperties, self).__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) + self.datasource = datasource + self.type = 'Reference' + + +class ResourceTestStatus(Model): + """Describes the status of the test operation along with error information, if + applicable. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar status: The status of the test operation. + :vartype status: str + :ivar error: Describes the error that occurred. + :vartype error: ~azure.mgmt.streamanalytics.models.ErrorResponse + """ + + _validation = { + 'status': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__(self, **kwargs) -> None: + super(ResourceTestStatus, self).__init__(**kwargs) + self.status = None + self.error = None + + +class ScalarFunctionProperties(FunctionProperties): + """The properties that are associated with a scalar function. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar etag: The current entity tag for the function. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param inputs: + :type inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] + :param output: + :type output: ~azure.mgmt.streamanalytics.models.FunctionOutput + :param binding: + :type binding: ~azure.mgmt.streamanalytics.models.FunctionBinding + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'etag': {'key': 'etag', 'type': 'str'}, + 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, + 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, + 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, inputs=None, output=None, binding=None, **kwargs) -> None: + super(ScalarFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) + self.type = 'Scalar' + + +class ServiceBusQueueOutputDataSource(OutputDataSource): + """Describes a Service Bus Queue output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
+ :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. + :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param queue_name: The name of the Service Bus Queue. Required on PUT + (CreateOrReplace) requests. + :type queue_name: str + :param property_columns: A string array of the names of output columns to + be attached to Service Bus messages as custom properties. + :type property_columns: list[str] + :param system_property_columns: + :type system_property_columns: dict[str, str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, queue_name: str=None, property_columns=None, system_property_columns=None, **kwargs) -> None: + super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.queue_name = queue_name + self.property_columns = property_columns + self.system_property_columns = system_property_columns + self.type = 'Microsoft.ServiceBus/Queue' + + +class ServiceBusTopicOutputDataSource(OutputDataSource): + """Describes a Service Bus Topic output data source. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param service_bus_namespace: The namespace that is associated with the + desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on + PUT (CreateOrReplace) requests. + :type service_bus_namespace: str + :param shared_access_policy_name: The shared access policy name for the + Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT + (CreateOrReplace) requests. + :type shared_access_policy_name: str + :param shared_access_policy_key: The shared access policy key for the + specified shared access policy. Required on PUT (CreateOrReplace) + requests. 
+ :type shared_access_policy_key: str + :param authentication_mode: Authentication Mode. Possible values include: + 'Msi', 'UserToken', 'ConnectionString' + :type authentication_mode: str or + ~azure.mgmt.streamanalytics.models.AuthenticationMode + :param topic_name: The name of the Service Bus Topic. Required on PUT + (CreateOrReplace) requests. + :type topic_name: str + :param property_columns: A string array of the names of output columns to + be attached to Service Bus messages as custom properties. + :type property_columns: list[str] + :param system_property_columns: + :type system_property_columns: dict[str, str] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'}, + 'shared_access_policy_name': {'key': 'properties.sharedAccessPolicyName', 'type': 'str'}, + 'shared_access_policy_key': {'key': 'properties.sharedAccessPolicyKey', 'type': 'str'}, + 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, + 'topic_name': {'key': 'properties.topicName', 'type': 'str'}, + 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + } + + def __init__(self, *, service_bus_namespace: str=None, shared_access_policy_name: str=None, shared_access_policy_key: str=None, authentication_mode=None, topic_name: str=None, property_columns=None, system_property_columns=None, **kwargs) -> None: + super(ServiceBusTopicOutputDataSource, self).__init__(**kwargs) + self.service_bus_namespace = service_bus_namespace + self.shared_access_policy_name = shared_access_policy_name + self.shared_access_policy_key = shared_access_policy_key + self.authentication_mode = authentication_mode + self.topic_name = topic_name + self.property_columns = property_columns + self.system_property_columns = system_property_columns + self.type = 'Microsoft.ServiceBus/Topic' + + +class StartStreamingJobParameters(Model): + """Parameters supplied to the Start Streaming Job operation. + + :param output_start_mode: Value may be JobStartTime, CustomTime, or + LastOutputEventTime to indicate whether the starting point of the output + event stream should start whenever the job is started, start at a custom + user time stamp specified via the outputStartTime property, or start from + the last event output time. Possible values include: 'JobStartTime', + 'CustomTime', 'LastOutputEventTime' + :type output_start_mode: str or + ~azure.mgmt.streamanalytics.models.OutputStartMode + :param output_start_time: Value is either an ISO-8601 formatted time stamp + that indicates the starting point of the output event stream, or null to + indicate that the output event stream will start whenever the streaming + job is started. This property must have a value if outputStartMode is set + to CustomTime. 
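+     For example, to start output from a fixed instant (illustrative)::
+
+         import datetime
+
+         params = StartStreamingJobParameters(
+             output_start_mode='CustomTime',
+             output_start_time=datetime.datetime(2020, 1, 1))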
+ :type output_start_time: datetime + """ + + _attribute_map = { + 'output_start_mode': {'key': 'outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'outputStartTime', 'type': 'iso-8601'}, + } + + def __init__(self, *, output_start_mode=None, output_start_time=None, **kwargs) -> None: + super(StartStreamingJobParameters, self).__init__(**kwargs) + self.output_start_mode = output_start_mode + self.output_start_time = output_start_time + + +class StreamingJob(TrackedResource): + """A streaming job object, containing all information associated with the + named streaming job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. Ex- + Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: The geo-location where the resource lives + :type location: str + :param sku: Describes the SKU of the streaming job. Required on PUT + (CreateOrReplace) requests. + :type sku: ~azure.mgmt.streamanalytics.models.StreamingJobSku + :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is + generated upon creation of the streaming job. + :vartype job_id: str + :ivar provisioning_state: Describes the provisioning status of the + streaming job. + :vartype provisioning_state: str + :ivar job_state: Describes the state of the streaming job. + :vartype job_state: str + :param job_type: Describes the type of the job. Valid modes are `Cloud` + and 'Edge'. Possible values include: 'Cloud', 'Edge' + :type job_type: str or ~azure.mgmt.streamanalytics.models.JobType + :param output_start_mode: This property should only be utilized when it is + desired that the job be started immediately upon creation. Value may be + JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the + starting point of the output event stream should start whenever the job is + started, start at a custom user time stamp specified via the + outputStartTime property, or start from the last event output time. + Possible values include: 'JobStartTime', 'CustomTime', + 'LastOutputEventTime' + :type output_start_mode: str or + ~azure.mgmt.streamanalytics.models.OutputStartMode + :param output_start_time: Value is either an ISO-8601 formatted time stamp + that indicates the starting point of the output event stream, or null to + indicate that the output event stream will start whenever the streaming + job is started. This property must have a value if outputStartMode is set + to CustomTime. + :type output_start_time: datetime + :ivar last_output_event_time: Value is either an ISO-8601 formatted + timestamp indicating the last output event time of the streaming job or + null indicating that output has not yet been produced. In case of multiple + outputs or multiple streams, this shows the latest value in that set. + :vartype last_output_event_time: datetime + :param events_out_of_order_policy: Indicates the policy to apply to events + that arrive out of order in the input event stream. 
Possible values
+     include: 'Adjust', 'Drop'
+    :type events_out_of_order_policy: str or
+     ~azure.mgmt.streamanalytics.models.EventsOutOfOrderPolicy
+    :param output_error_policy: Indicates the policy to apply to events that
+     arrive at the output and cannot be written to the external storage due to
+     being malformed (missing column values, column values of wrong type or
+     size). Possible values include: 'Stop', 'Drop'
+    :type output_error_policy: str or
+     ~azure.mgmt.streamanalytics.models.OutputErrorPolicy
+    :param events_out_of_order_max_delay_in_seconds: The maximum tolerable
+     delay in seconds where out-of-order events can be adjusted to be back in
+     order.
+    :type events_out_of_order_max_delay_in_seconds: int
+    :param events_late_arrival_max_delay_in_seconds: The maximum tolerable
+     delay in seconds where events arriving late could be included. Supported
+     range is -1 to 1814399 (20.23:59:59 days), and -1 is used to specify
+     waiting indefinitely. If the property is absent, it is interpreted to
+     have a value of -1.
+    :type events_late_arrival_max_delay_in_seconds: int
+    :param data_locale: The data locale of the stream analytics job. Value
+     should be the name of a supported .NET Culture from the set
+     https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx.
+     Defaults to 'en-US' if none specified.
+    :type data_locale: str
+    :param compatibility_level: Controls certain runtime behaviors of the
+     streaming job. Possible values include: '1.0'
+    :type compatibility_level: str or
+     ~azure.mgmt.streamanalytics.models.CompatibilityLevel
+    :ivar created_date: Value is an ISO-8601 formatted UTC timestamp
+     indicating when the streaming job was created.
+    :vartype created_date: datetime
+    :param inputs: A list of one or more inputs to the streaming job. The name
+     property for each input is required when specifying this property in a
+     PUT request. This property cannot be modified via a PATCH operation. You
+     must use the PATCH API available for the individual input.
+    :type inputs: list[~azure.mgmt.streamanalytics.models.Input]
+    :param transformation: Indicates the query and the number of streaming
+     units to use for the streaming job. The name property of the
+     transformation is required when specifying this property in a PUT
+     request. This property cannot be modified via a PATCH operation. You must
+     use the PATCH API available for the individual transformation.
+    :type transformation: ~azure.mgmt.streamanalytics.models.Transformation
+    :param outputs: A list of one or more outputs for the streaming job. The
+     name property for each output is required when specifying this property
+     in a PUT request. This property cannot be modified via a PATCH operation.
+     You must use the PATCH API available for the individual output.
+    :type outputs: list[~azure.mgmt.streamanalytics.models.Output]
+    :param functions: A list of one or more functions for the streaming job.
+     The name property for each function is required when specifying this
+     property in a PUT request. This property cannot be modified via a PATCH
+     operation. You must use the PATCH API available for the individual
+     function.
+    :type functions: list[~azure.mgmt.streamanalytics.models.Function]
+    :ivar etag: The current entity tag for the streaming job. This is an
+     opaque string. You can use it to detect whether the resource has changed
+     between requests. You can also use it in the If-Match or If-None-Match
+     headers for write operations for optimistic concurrency.
+ :vartype etag: str + :param job_storage_account: + :type job_storage_account: + ~azure.mgmt.streamanalytics.models.JobStorageAccount + :param content_storage_policy: Valid values are JobStorageAccount and + SystemAccount. If set to JobStorageAccount, this requires the user to also + specify jobStorageAccount property. Possible values include: + 'SystemAccount', 'JobStorageAccount' + :type content_storage_policy: str or + ~azure.mgmt.streamanalytics.models.ContentStoragePolicy + :param externals: The storage account where the custom code artifacts are + located. + :type externals: ~azure.mgmt.streamanalytics.models.External + :param cluster: The cluster which streaming jobs will run on. + :type cluster: ~azure.mgmt.streamanalytics.models.ClusterInfo + :param identity: Describes the system-assigned managed identity assigned + to this job that can be used to authenticate with inputs and outputs. + :type identity: ~azure.mgmt.streamanalytics.models.Identity + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'job_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_state': {'readonly': True}, + 'last_output_event_time': {'readonly': True}, + 'created_date': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'job_id': {'key': 'properties.jobId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'job_state': {'key': 'properties.jobState', 'type': 'str'}, + 'job_type': {'key': 'properties.jobType', 'type': 'str'}, + 'output_start_mode': {'key': 'properties.outputStartMode', 'type': 'str'}, + 'output_start_time': {'key': 'properties.outputStartTime', 'type': 'iso-8601'}, + 'last_output_event_time': {'key': 'properties.lastOutputEventTime', 'type': 'iso-8601'}, + 'events_out_of_order_policy': {'key': 'properties.eventsOutOfOrderPolicy', 'type': 'str'}, + 'output_error_policy': {'key': 'properties.outputErrorPolicy', 'type': 'str'}, + 'events_out_of_order_max_delay_in_seconds': {'key': 'properties.eventsOutOfOrderMaxDelayInSeconds', 'type': 'int'}, + 'events_late_arrival_max_delay_in_seconds': {'key': 'properties.eventsLateArrivalMaxDelayInSeconds', 'type': 'int'}, + 'data_locale': {'key': 'properties.dataLocale', 'type': 'str'}, + 'compatibility_level': {'key': 'properties.compatibilityLevel', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'iso-8601'}, + 'inputs': {'key': 'properties.inputs', 'type': '[Input]'}, + 'transformation': {'key': 'properties.transformation', 'type': 'Transformation'}, + 'outputs': {'key': 'properties.outputs', 'type': '[Output]'}, + 'functions': {'key': 'properties.functions', 'type': '[Function]'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, + 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, + 'externals': {'key': 'properties.externals', 'type': 'External'}, + 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + } + + def __init__(self, *, tags=None, location: str=None, sku=None, job_type=None, 
output_start_mode=None, output_start_time=None, events_out_of_order_policy=None, output_error_policy=None, events_out_of_order_max_delay_in_seconds: int=None, events_late_arrival_max_delay_in_seconds: int=None, data_locale: str=None, compatibility_level=None, inputs=None, transformation=None, outputs=None, functions=None, job_storage_account=None, content_storage_policy=None, externals=None, cluster=None, identity=None, **kwargs) -> None: + super(StreamingJob, self).__init__(tags=tags, location=location, **kwargs) + self.sku = sku + self.job_id = None + self.provisioning_state = None + self.job_state = None + self.job_type = job_type + self.output_start_mode = output_start_mode + self.output_start_time = output_start_time + self.last_output_event_time = None + self.events_out_of_order_policy = events_out_of_order_policy + self.output_error_policy = output_error_policy + self.events_out_of_order_max_delay_in_seconds = events_out_of_order_max_delay_in_seconds + self.events_late_arrival_max_delay_in_seconds = events_late_arrival_max_delay_in_seconds + self.data_locale = data_locale + self.compatibility_level = compatibility_level + self.created_date = None + self.inputs = inputs + self.transformation = transformation + self.outputs = outputs + self.functions = functions + self.etag = None + self.job_storage_account = job_storage_account + self.content_storage_policy = content_storage_policy + self.externals = externals + self.cluster = cluster + self.identity = identity + + +class StreamingJobSku(Model): + """The properties that are associated with a SKU. + + :param name: The name of the SKU. Required on PUT (CreateOrReplace) + requests. Possible values include: 'Standard' + :type name: str or ~azure.mgmt.streamanalytics.models.StreamingJobSkuName + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name=None, **kwargs) -> None: + super(StreamingJobSku, self).__init__(**kwargs) + self.name = name + + +class StreamInputProperties(InputProperties): + """The properties that are associated with an input containing stream data. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param serialization: Describes how data from an input is serialized or + how data is serialized when written to an output. Required on PUT + (CreateOrReplace) requests. + :type serialization: ~azure.mgmt.streamanalytics.models.Serialization + :ivar diagnostics: Describes conditions applicable to the Input, Output, + or the job overall, that warrant customer attention. + :vartype diagnostics: ~azure.mgmt.streamanalytics.models.Diagnostics + :ivar etag: The current entity tag for the input. This is an opaque + string. You can use it to detect whether the resource has changed between + requests. You can also use it in the If-Match or If-None-Match headers for + write operations for optimistic concurrency. + :vartype etag: str + :param compression: + :type compression: ~azure.mgmt.streamanalytics.models.Compression + :param partition_key: partitionKey Describes a key in the input data which + is used for partitioning the input data + :type partition_key: str + :param type: Required. Constant filled by server. + :type type: str + :param datasource: Describes an input data source that contains stream + data. Required on PUT (CreateOrReplace) requests. 
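+     For example (a sketch; ``my_serialization`` and ``my_stream_source`` are
+     placeholders for a concrete ``Serialization`` and
+     ``StreamInputDataSource`` subclass)::
+
+         props = StreamInputProperties(
+             serialization=my_serialization,
+             datasource=my_stream_source)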
+ :type datasource: ~azure.mgmt.streamanalytics.models.StreamInputDataSource + """ + + _validation = { + 'diagnostics': {'readonly': True}, + 'etag': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'serialization': {'key': 'serialization', 'type': 'Serialization'}, + 'diagnostics': {'key': 'diagnostics', 'type': 'Diagnostics'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'compression': {'key': 'compression', 'type': 'Compression'}, + 'partition_key': {'key': 'partitionKey', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'datasource': {'key': 'datasource', 'type': 'StreamInputDataSource'}, + } + + def __init__(self, *, serialization=None, compression=None, partition_key: str=None, datasource=None, **kwargs) -> None: + super(StreamInputProperties, self).__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) + self.datasource = datasource + self.type = 'Stream' + + +class SubscriptionQuota(SubResource): + """Describes the current quota for the subscription. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :ivar max_count: The max permitted usage of this resource. + :vartype max_count: int + :ivar current_count: The current usage of this resource. + :vartype current_count: int + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'max_count': {'readonly': True}, + 'current_count': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_count': {'key': 'properties.maxCount', 'type': 'int'}, + 'current_count': {'key': 'properties.currentCount', 'type': 'int'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(SubscriptionQuota, self).__init__(name=name, **kwargs) + self.max_count = None + self.current_count = None + + +class SubscriptionQuotasListResult(Model): + """Result of the GetQuotas operation. It contains a list of quotas for the + subscription in a particular region. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar value: List of quotas for the subscription in a particular region. + :vartype value: list[~azure.mgmt.streamanalytics.models.SubscriptionQuota] + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SubscriptionQuota]'}, + } + + def __init__(self, **kwargs) -> None: + super(SubscriptionQuotasListResult, self).__init__(**kwargs) + self.value = None + + +class Transformation(SubResource): + """A transformation object, containing all information associated with the + named transformation. All transformations are contained under a streaming + job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param streaming_units: Specifies the number of streaming units that the + streaming job uses. + :type streaming_units: int + :param query: Specifies the query that will be run in the streaming job. + You can learn more about the Stream Analytics Query Language (SAQL) here: + https://msdn.microsoft.com/library/azure/dn834998 . 
Required on PUT + (CreateOrReplace) requests. + :type query: str + :ivar etag: The current entity tag for the transformation. This is an + opaque string. You can use it to detect whether the resource has changed + between requests. You can also use it in the If-Match or If-None-Match + headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'query': {'key': 'properties.query', 'type': 'str'}, + 'etag': {'key': 'properties.etag', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, streaming_units: int=None, query: str=None, **kwargs) -> None: + super(Transformation, self).__init__(name=name, **kwargs) + self.streaming_units = streaming_units + self.query = query + self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_paged_models.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_paged_models.py new file mode 100644 index 000000000000..ad9238f0df94 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_paged_models.py @@ -0,0 +1,118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
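+#
+# Usage sketch (illustrative, with hypothetical names; assumes a configured
+# StreamAnalyticsManagementClient instance named `client`): the Paged
+# containers below are what the list operations return, and they fetch
+# further pages lazily as you iterate:
+#
+#     for job in client.streaming_jobs.list_by_resource_group('my-rg'):
+#         print(job.name)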
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class FunctionPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Function <azure.mgmt.streamanalytics.models.Function>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Function]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(FunctionPaged, self).__init__(*args, **kwargs)
+
+
+class InputPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Input <azure.mgmt.streamanalytics.models.Input>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Input]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(InputPaged, self).__init__(*args, **kwargs)
+
+
+class OutputPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Output <azure.mgmt.streamanalytics.models.Output>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Output]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(OutputPaged, self).__init__(*args, **kwargs)
+
+
+class StreamingJobPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`StreamingJob <azure.mgmt.streamanalytics.models.StreamingJob>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[StreamingJob]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(StreamingJobPaged, self).__init__(*args, **kwargs)
+
+
+class OperationPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Operation <azure.mgmt.streamanalytics.models.Operation>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Operation]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(OperationPaged, self).__init__(*args, **kwargs)
+
+
+class ClusterPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Cluster <azure.mgmt.streamanalytics.models.Cluster>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Cluster]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(ClusterPaged, self).__init__(*args, **kwargs)
+
+
+class ClusterJobPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`ClusterJob <azure.mgmt.streamanalytics.models.ClusterJob>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[ClusterJob]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(ClusterJobPaged, self).__init__(*args, **kwargs)
+
+
+class PrivateEndpointPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`PrivateEndpoint <azure.mgmt.streamanalytics.models.PrivateEndpoint>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[PrivateEndpoint]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+        super(PrivateEndpointPaged, self).__init__(*args, **kwargs)
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py
new file mode 100644
index 000000000000..3a953a262c1d
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py
@@ -0,0 +1,112 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+
+
+class UdfType(str, Enum):
+
+    scalar = "Scalar"
+
+
+class AuthenticationMode(str, Enum):
+
+    msi = "Msi"
+    user_token = "UserToken"
+    connection_string = "ConnectionString"
+
+
+class Encoding(str, Enum):
+
+    utf8 = "UTF8"
+
+
+class JsonOutputSerializationFormat(str, Enum):
+
+    line_separated = "LineSeparated"
+    array = "Array"
+
+
+class EventSerializationType(str, Enum):
+
+    csv = "Csv"
+    avro = "Avro"
+    json = "Json"
+    custom_clr = "CustomClr"
+    parquet = "Parquet"
+
+
+class StreamingJobSkuName(str, Enum):
+
+    standard = "Standard"
+
+
+class JobType(str, Enum):
+
+    cloud = "Cloud"
+    edge = "Edge"
+
+
+class OutputStartMode(str, Enum):
+
+    job_start_time = "JobStartTime"
+    custom_time = "CustomTime"
+    last_output_event_time = "LastOutputEventTime"
+
+
+class EventsOutOfOrderPolicy(str, Enum):
+
+    adjust = "Adjust"
+    drop = "Drop"
+
+
+class OutputErrorPolicy(str, Enum):
+
+    stop = "Stop"
+    drop = "Drop"
+
+
+class CompatibilityLevel(str, Enum):
+
+    one_full_stop_zero = "1.0"
+
+
+class ContentStoragePolicy(str, Enum):
+
+    system_account = "SystemAccount"
+    job_storage_account = "JobStorageAccount"
+
+
+class ClusterSkuName(str, Enum):
+
+    default = "Default"  #: The default SKU.
+
+
+class ClusterProvisioningState(str, Enum):
+
+    succeeded = "Succeeded"  #: The cluster provisioning succeeded.
+    failed = "Failed"  #: The cluster provisioning failed.
+    canceled = "Canceled"  #: The cluster provisioning was canceled.
+    in_progress = "InProgress"  #: The cluster provisioning was in progress.
+
+
+class JobState(str, Enum):
+
+    created = "Created"  #: The job is currently in the Created state.
+    starting = "Starting"  #: The job is currently in the Starting state.
+    running = "Running"  #: The job is currently in the Running state.
+    stopping = "Stopping"  #: The job is currently in the Stopping state.
+    stopped = "Stopped"  #: The job is currently in the Stopped state.
+    deleting = "Deleting"  #: The job is currently in the Deleting state.
+    failed = "Failed"  #: The job is currently in the Failed state.
+    degraded = "Degraded"  #: The job is currently in the Degraded state.
+    restarting = "Restarting"  #: The job is currently in the Restarting state.
+    scaling = "Scaling"  #: The job is currently in the Scaling state.
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py
new file mode 100644
index 000000000000..76c40b0b0bcc
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py
@@ -0,0 +1,32 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
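+#
+# Usage sketch (illustrative): these operations classes are not constructed
+# directly; a StreamAnalyticsManagementClient instance creates them and
+# attaches them as attributes, e.g. `client.clusters`,
+# `client.streaming_jobs`, `client.private_endpoints`.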
+# -------------------------------------------------------------------------- + +from ._functions_operations import FunctionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations +from ._streaming_jobs_operations import StreamingJobsOperations +from ._subscriptions_operations import SubscriptionsOperations +from ._transformations_operations import TransformationsOperations +from ._operations import Operations +from ._clusters_operations import ClustersOperations +from ._private_endpoints_operations import PrivateEndpointsOperations + +__all__ = [ + 'FunctionsOperations', + 'InputsOperations', + 'OutputsOperations', + 'StreamingJobsOperations', + 'SubscriptionsOperations', + 'TransformationsOperations', + 'Operations', + 'ClustersOperations', + 'PrivateEndpointsOperations', +] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py new file mode 100644 index 000000000000..d97d5b1a548c --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -0,0 +1,616 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class ClustersOperations(object): + """ClustersOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2020-03-01-preview". 
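+
+    Example (a minimal sketch; ``client`` is assumed to be a configured
+    ``StreamAnalyticsManagementClient``, the ``ClusterSku`` model is assumed
+    from the generated models package, and all resource names are
+    placeholders)::
+
+        from azure.mgmt.streamanalytics import models
+
+        cluster = models.Cluster(
+            location='westus2',
+            sku=models.ClusterSku(name='Default', capacity=36))
+        poller = client.clusters.create_or_update(cluster, 'my-rg', 'my-cluster')
+        cluster = poller.result()  # blocks until the long-running operation completes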
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2020-03-01-preview" + + self.config = config + + + def _create_or_update_initial( + self, cluster, resource_group_name, cluster_name, if_match=None, if_none_match=None, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(cluster, 'Cluster') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Cluster', response) + if response.status_code == 201: + deserialized = self._deserialize('Cluster', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def create_or_update( + self, cluster, resource_group_name, cluster_name, if_match=None, if_none_match=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates a Stream Analytics Cluster or replaces an already existing + cluster. + + :param cluster: The definition of the cluster that will be used to + create a new cluster or replace the existing one. + :type cluster: ~azure.mgmt.streamanalytics.models.Cluster + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param if_match: The ETag of the resource. Omit this value to always + overwrite the current record set. Specify the last-seen ETag value to + prevent accidentally overwriting concurrent changes. 
+ :type if_match: str + :param if_none_match: Set to '*' to allow a new resource to be + created, but to prevent updating an existing record set. Other values + will result in a 412 Pre-condition Failed response. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns Cluster or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.streamanalytics.models.Cluster] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.streamanalytics.models.Cluster]] + :raises: + :class:`ErrorException` + """ + raw_result = self._create_or_update_initial( + cluster=cluster, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + if_match=if_match, + if_none_match=if_none_match, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('Cluster', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} + + + def _update_initial( + self, cluster, resource_group_name, cluster_name, if_match=None, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(cluster, 'Cluster') + + # Construct and send request + request = self._client.patch(url, 
query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + raise models.ErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Cluster', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def update( + self, cluster, resource_group_name, cluster_name, if_match=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Updates an existing cluster. This can be used to partially update (ie. + update one or two properties) a cluster without affecting the rest of + the cluster definition. + + :param cluster: The properties specified here will overwrite the + corresponding properties in the existing cluster (ie. Those properties + will be updated). + :type cluster: ~azure.mgmt.streamanalytics.models.Cluster + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param if_match: The ETag of the resource. Omit this value to always + overwrite the current record set. Specify the last-seen ETag value to + prevent accidentally overwriting concurrent changes. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns Cluster or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.streamanalytics.models.Cluster] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.streamanalytics.models.Cluster]] + :raises: + :class:`ErrorException` + """ + raw_result = self._update_initial( + cluster=cluster, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + if_match=if_match, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('Cluster', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} + + def get( + self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config): + """Gets information about the specified cluster. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. 
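+     For example (illustrative; names are placeholders)::
+
+         cluster = client.clusters.get('my-rg', 'my-cluster')
+         print(cluster.name, cluster.id)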
+ :type cluster_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Cluster or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Cluster or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} + + + def _delete_initial( + self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = 
self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.ErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete( + self, resource_group_name, cluster_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes the specified cluster. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorException` + """ + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}'} + + def list_by_subscription( + self, custom_headers=None, raw=False, **operation_config): + """Lists all of the clusters in the given subscription. + + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
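+ Example (an illustrative sketch, not generated code; assumes ``client``
+ is an already-authenticated StreamAnalyticsManagementClient and that
+ this operations group is exposed as ``client.clusters``)::
+
+ for cluster in client.clusters.list_by_subscription():
+     print(cluster.name)
+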
+ :return: An iterator like instance of Cluster + :rtype: + ~azure.mgmt.streamanalytics.models.ClusterPaged[~azure.mgmt.streamanalytics.models.Cluster] + :raises: + :class:`ErrorException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ClusterPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters'} + + def list_by_resource_group( + self, resource_group_name, custom_headers=None, raw=False, **operation_config): + """Lists all of the clusters in the given resource group. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
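+ Example (illustrative only; the resource group name is a placeholder
+ and ``client`` is assumed to be a configured
+ StreamAnalyticsManagementClient)::
+
+ for cluster in client.clusters.list_by_resource_group('my-rg'):
+     print(cluster.id)
+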
+ :return: An iterator like instance of Cluster + :rtype: + ~azure.mgmt.streamanalytics.models.ClusterPaged[~azure.mgmt.streamanalytics.models.Cluster] + :raises: + :class:`ErrorException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ClusterPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters'} + + def list_streaming_jobs( + self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config): + """Lists all of the streaming jobs in the given cluster. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
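+ Example (illustrative only; names are placeholders, and the ``id``
+ attribute is assumed from the ClusterJob model)::
+
+ for job in client.clusters.list_streaming_jobs('my-rg', 'my-cluster'):
+     print(job.id)
+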
+ :return: An iterator like instance of ClusterJob + :rtype: + ~azure.mgmt.streamanalytics.models.ClusterJobPaged[~azure.mgmt.streamanalytics.models.ClusterJob] + :raises: + :class:`ErrorException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_streaming_jobs.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ClusterJobPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_streaming_jobs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py new file mode 100644 index 000000000000..e6e91ddcec3f --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -0,0 +1,635 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class FunctionsOperations(object): + """FunctionsOperations operations. 
+
+ You should not instantiate this class directly, but create a Client instance that will create it for you and attach it as an attribute.
+
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview".
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self.api_version = "2017-04-01-preview"
+
+ self.config = config
+
+ def create_or_replace(
+ self, resource_group_name, job_name, function_name, if_match=None, if_none_match=None, name=None, properties=None, custom_headers=None, raw=False, **operation_config):
+ """Creates a function or replaces an already existing function under an
+ existing streaming job.
+
+ :param resource_group_name: The name of the resource group. The name
+ is case insensitive.
+ :type resource_group_name: str
+ :param job_name: The name of the streaming job.
+ :type job_name: str
+ :param function_name: The name of the function.
+ :type function_name: str
+ :param if_match: The ETag of the function. Omit this value to always
+ overwrite the current function. Specify the last-seen ETag value to
+ prevent accidentally overwriting concurrent changes.
+ :type if_match: str
+ :param if_none_match: Set to '*' to allow a new function to be
+ created, but to prevent updating an existing function. Other values
+ will result in a 412 Precondition Failed response.
+ :type if_none_match: str
+ :param name: Resource name
+ :type name: str
+ :param properties: The properties that are associated with a function.
+ :type properties:
+ ~azure.mgmt.streamanalytics.models.FunctionProperties
+ :param dict custom_headers: headers that will be added to the request
+ :param bool raw: returns the direct response alongside the
+ deserialized response
+ :param operation_config: :ref:`Operation configuration
+ overrides`.
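+ Example (an illustrative sketch; assumes ``client`` is a configured
+ StreamAnalyticsManagementClient and ``props`` is a FunctionProperties
+ instance, e.g. a ScalarFunctionProperties describing the function)::
+
+ function = client.functions.create_or_replace(
+     'my-rg', 'my-job', 'my-function', properties=props)
+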
+ :return: Function or ClientRawResponse if raw=true
+ :rtype: ~azure.mgmt.streamanalytics.models.Function or
+ ~msrest.pipeline.ClientRawResponse
+ :raises: :class:`CloudError`
+ """
+ function = models.Function(name=name, properties=properties)
+
+ # Construct URL
+ url = self.create_or_replace.metadata['url']
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'jobName': self._serialize.url("job_name", job_name, 'str'),
+ 'functionName': self._serialize.url("function_name", function_name, 'str')
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+ if self.config.generate_client_request_id:
+ header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+ if custom_headers:
+ header_parameters.update(custom_headers)
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ if if_none_match is not None:
+ header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
+ if self.config.accept_language is not None:
+ header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+ # Construct body
+ body_content = self._serialize.body(function, 'Function')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters, body_content)
+ response = self._client.send(request, stream=False, **operation_config)
+
+ if response.status_code not in [200, 201]:
+ exp = CloudError(response)
+ exp.request_id = response.headers.get('x-ms-request-id')
+ raise exp
+
+ header_dict = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Function', response)
+ header_dict = {
+ 'ETag': 'str',
+ }
+ if response.status_code == 201:
+ deserialized = self._deserialize('Function', response)
+ header_dict = {
+ 'ETag': 'str',
+ }
+
+ if raw:
+ client_raw_response = ClientRawResponse(deserialized, response)
+ client_raw_response.add_headers(header_dict)
+ return client_raw_response
+
+ return deserialized
+ create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'}
+
+ def update(
+ self, resource_group_name, job_name, function_name, if_match=None, name=None, properties=None, custom_headers=None, raw=False, **operation_config):
+ """Updates an existing function under an existing streaming job. This can
+ be used to partially update (i.e. update one or two properties) a
+ function without affecting the rest of the job or function definition.
+
+ :param resource_group_name: The name of the resource group. The name
+ is case insensitive.
+ :type resource_group_name: str
+ :param job_name: The name of the streaming job.
+ :type job_name: str
+ :param function_name: The name of the function.
+ :type function_name: str + :param if_match: The ETag of the function. Omit this value to always + overwrite the current function. Specify the last-seen ETag value to + prevent accidentally overwriting concurrent changes. + :type if_match: str + :param name: Resource name + :type name: str + :param properties: The properties that are associated with a function. + :type properties: + ~azure.mgmt.streamanalytics.models.FunctionProperties + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Function or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Function or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + function = models.Function(name=name, properties=properties) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(function, 'Function') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Function', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} + + def delete( + self, resource_group_name, job_name, function_name, custom_headers=None, raw=False, **operation_config): + """Deletes a function from the streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. 
+ :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} + + def get( + self, resource_group_name, job_name, function_name, custom_headers=None, raw=False, **operation_config): + """Gets details about the specified function. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
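+ Example (illustrative only; names are placeholders)::
+
+ function = client.functions.get('my-rg', 'my-job', 'my-function')
+ print(function.name)
+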
+ :return: Function or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Function or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Function', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}'} + + def list_by_streaming_job( + self, resource_group_name, job_name, select=None, custom_headers=None, raw=False, **operation_config): + """Lists all of the functions under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a + comma-separated list of structural properties to include in the + response, or "*" to include all properties. By default, all properties + are returned except diagnostics. Currently only accepts '*' as a valid + value. + :type select: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
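+ Example (illustrative only; ``select='*'`` is the only value the
+ service currently accepts, per the parameter description above)::
+
+ for function in client.functions.list_by_streaming_job(
+         'my-rg', 'my-job', select='*'):
+     print(function.name)
+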
+ :return: An iterator like instance of Function + :rtype: + ~azure.mgmt.streamanalytics.models.FunctionPaged[~azure.mgmt.streamanalytics.models.Function] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.FunctionPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions'} + + + def _test_initial( + self, resource_group_name, job_name, function_name, name=None, properties=None, custom_headers=None, raw=False, **operation_config): + function = None + if name is not None or properties is not None: + function = models.Function(name=name, properties=properties) + + # Construct URL + url = self.test.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if 
self.config.generate_client_request_id:
+ header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+ if custom_headers:
+ header_parameters.update(custom_headers)
+ if self.config.accept_language is not None:
+ header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+ # Construct body
+ if function is not None:
+ body_content = self._serialize.body(function, 'Function')
+ else:
+ body_content = None
+
+ # Construct and send request
+ request = self._client.post(url, query_parameters, header_parameters, body_content)
+ response = self._client.send(request, stream=False, **operation_config)
+
+ if response.status_code not in [200, 202]:
+ exp = CloudError(response)
+ exp.request_id = response.headers.get('x-ms-request-id')
+ raise exp
+
+ deserialized = None
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('ResourceTestStatus', response)
+
+ if raw:
+ client_raw_response = ClientRawResponse(deserialized, response)
+ return client_raw_response
+
+ return deserialized
+
+ def test(
+ self, resource_group_name, job_name, function_name, name=None, properties=None, custom_headers=None, raw=False, polling=True, **operation_config):
+ """Tests whether the information provided for a function is valid. This
+ can range from testing the connection to the underlying web service
+ behind the function to making sure the function code provided is
+ syntactically correct.
+
+ :param resource_group_name: The name of the resource group. The name
+ is case insensitive.
+ :type resource_group_name: str
+ :param job_name: The name of the streaming job.
+ :type job_name: str
+ :param function_name: The name of the function.
+ :type function_name: str
+ :param name: Resource name
+ :type name: str
+ :param properties: The properties that are associated with a function.
+ :type properties: + ~azure.mgmt.streamanalytics.models.FunctionProperties + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns ResourceTestStatus or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.streamanalytics.models.ResourceTestStatus]] + :raises: :class:`CloudError` + """ + raw_result = self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + function_name=function_name, + name=name, + properties=properties, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ResourceTestStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test'} + + def retrieve_default_definition( + self, resource_group_name, job_name, function_name, function_retrieve_default_definition_parameters=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the default definition of a function based on the parameters + specified. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param function_name: The name of the function. + :type function_name: str + :param function_retrieve_default_definition_parameters: Parameters + used to specify the type of function to retrieve the default + definition for. + :type function_retrieve_default_definition_parameters: + ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
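+ Example (an illustrative sketch; assumes ``params`` is a concrete
+ FunctionRetrieveDefaultDefinitionParameters subclass instance for the
+ function type in question)::
+
+ function = client.functions.retrieve_default_definition(
+     'my-rg', 'my-job', 'my-function',
+     function_retrieve_default_definition_parameters=params)
+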
+ :return: Function or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Function or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.retrieve_default_definition.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if function_retrieve_default_definition_parameters is not None: + body_content = self._serialize.body(function_retrieve_default_definition_parameters, 'FunctionRetrieveDefaultDefinitionParameters') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Function', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py new file mode 100644 index 000000000000..25698f3e9f9d --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -0,0 +1,555 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+import uuid
+from msrest.pipeline import ClientRawResponse
+from msrestazure.azure_exceptions import CloudError
+from msrest.polling import LROPoller, NoPolling
+from msrestazure.polling.arm_polling import ARMPolling
+
+from .. import models
+
+
+class InputsOperations(object):
+ """InputsOperations operations.
+
+ You should not instantiate this class directly, but create a Client instance that will create it for you and attach it as an attribute.
+
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview".
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self.api_version = "2017-04-01-preview"
+
+ self.config = config
+
+ def create_or_replace(
+ self, resource_group_name, job_name, input_name, if_match=None, if_none_match=None, name=None, properties=None, custom_headers=None, raw=False, **operation_config):
+ """Creates an input or replaces an already existing input under an
+ existing streaming job.
+
+ :param resource_group_name: The name of the resource group. The name
+ is case insensitive.
+ :type resource_group_name: str
+ :param job_name: The name of the streaming job.
+ :type job_name: str
+ :param input_name: The name of the input.
+ :type input_name: str
+ :param if_match: The ETag of the input. Omit this value to always
+ overwrite the current input. Specify the last-seen ETag value to
+ prevent accidentally overwriting concurrent changes.
+ :type if_match: str
+ :param if_none_match: Set to '*' to allow a new input to be created,
+ but to prevent updating an existing input. Other values will result in
+ a 412 Precondition Failed response.
+ :type if_none_match: str
+ :param name: Resource name
+ :type name: str
+ :param properties: The properties that are associated with an input.
+ Required on PUT (CreateOrReplace) requests.
+ :type properties: ~azure.mgmt.streamanalytics.models.InputProperties
+ :param dict custom_headers: headers that will be added to the request
+ :param bool raw: returns the direct response alongside the
+ deserialized response
+ :param operation_config: :ref:`Operation configuration
+ overrides`.
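+ Example (an illustrative sketch; assumes ``props`` is an
+ InputProperties instance, e.g. a StreamInputProperties with a
+ configured datasource and serialization)::
+
+ input = client.inputs.create_or_replace(
+     'my-rg', 'my-job', 'my-input', properties=props)
+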
+ :return: Input or ClientRawResponse if raw=true
+ :rtype: ~azure.mgmt.streamanalytics.models.Input or
+ ~msrest.pipeline.ClientRawResponse
+ :raises: :class:`CloudError`
+ """
+ input = models.Input(name=name, properties=properties)
+
+ # Construct URL
+ url = self.create_or_replace.metadata['url']
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+ 'jobName': self._serialize.url("job_name", job_name, 'str'),
+ 'inputName': self._serialize.url("input_name", input_name, 'str')
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {}
+ query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
+
+ # Construct headers
+ header_parameters = {}
+ header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+ if self.config.generate_client_request_id:
+ header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+ if custom_headers:
+ header_parameters.update(custom_headers)
+ if if_match is not None:
+ header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
+ if if_none_match is not None:
+ header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
+ if self.config.accept_language is not None:
+ header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+ # Construct body
+ body_content = self._serialize.body(input, 'Input')
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters, body_content)
+ response = self._client.send(request, stream=False, **operation_config)
+
+ if response.status_code not in [200, 201]:
+ exp = CloudError(response)
+ exp.request_id = response.headers.get('x-ms-request-id')
+ raise exp
+
+ header_dict = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Input', response)
+ header_dict = {
+ 'ETag': 'str',
+ }
+ if response.status_code == 201:
+ deserialized = self._deserialize('Input', response)
+ header_dict = {
+ 'ETag': 'str',
+ }
+
+ if raw:
+ client_raw_response = ClientRawResponse(deserialized, response)
+ client_raw_response.add_headers(header_dict)
+ return client_raw_response
+
+ return deserialized
+ create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'}
+
+ def update(
+ self, resource_group_name, job_name, input_name, if_match=None, name=None, properties=None, custom_headers=None, raw=False, **operation_config):
+ """Updates an existing input under an existing streaming job. This can be
+ used to partially update (i.e. update one or two properties) an input
+ without affecting the rest of the job or input definition.
+
+ :param resource_group_name: The name of the resource group. The name
+ is case insensitive.
+ :type resource_group_name: str
+ :param job_name: The name of the streaming job.
+ :type job_name: str
+ :param input_name: The name of the input.
+ :type input_name: str
+ :param if_match: The ETag of the input. Omit this value to always
+ overwrite the current input.
Specify the last-seen ETag value to + prevent accidentally overwriting concurrent changes. + :type if_match: str + :param name: Resource name + :type name: str + :param properties: The properties that are associated with an input. + Required on PUT (CreateOrReplace) requests. + :type properties: ~azure.mgmt.streamanalytics.models.InputProperties + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Input or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Input or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + input = models.Input(name=name, properties=properties) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(input, 'Input') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Input', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} + + def delete( + self, resource_group_name, job_name, input_name, custom_headers=None, raw=False, **operation_config): + """Deletes an input from the streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. 
+ :type input_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} + + def get( + self, resource_group_name, job_name, input_name, custom_headers=None, raw=False, **operation_config): + """Gets details about the specified input. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
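+ Example (illustrative only; names are placeholders)::
+
+ input = client.inputs.get('my-rg', 'my-job', 'my-input')
+ print(input.name)
+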
+ :return: Input or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Input or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Input', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}'} + + def list_by_streaming_job( + self, resource_group_name, job_name, select=None, custom_headers=None, raw=False, **operation_config): + """Lists all of the inputs under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a + comma-separated list of structural properties to include in the + response, or "*" to include all properties. By default, all properties + are returned except diagnostics. Currently only accepts '*' as a valid + value. + :type select: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
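+ Example (illustrative only; assumes the operations group is exposed
+ as ``client.inputs``)::
+
+ for input in client.inputs.list_by_streaming_job('my-rg', 'my-job'):
+     print(input.name)
+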
+ :return: An iterator like instance of Input + :rtype: + ~azure.mgmt.streamanalytics.models.InputPaged[~azure.mgmt.streamanalytics.models.Input] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.InputPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs'} + + + def _test_initial( + self, resource_group_name, job_name, input_name, name=None, properties=None, custom_headers=None, raw=False, **operation_config): + input = None + if name is not None or properties is not None: + input = models.Input(name=name, properties=properties) + + # Construct URL + url = self.test.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + 
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if input is not None: + body_content = self._serialize.body(input, 'Input') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def test( + self, resource_group_name, job_name, input_name, name=None, properties=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Tests whether an input’s datasource is reachable and usable by the + Azure Stream Analytics service. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param input_name: The name of the input. + :type input_name: str + :param name: Resource name + :type name: str + :param properties: The properties that are associated with an input. + Required on PUT (CreateOrReplace) requests. + :type properties: ~azure.mgmt.streamanalytics.models.InputProperties + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns ResourceTestStatus or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.streamanalytics.models.ResourceTestStatus]] + :raises: :class:`CloudError` + """ + raw_result = self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + input_name=input_name, + name=name, + properties=properties, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ResourceTestStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test'} diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py
new file mode 100644
index 000000000000..1f8381bfd164
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py
@@ -0,0 +1,102 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+import uuid
+from msrest.pipeline import ClientRawResponse
+from msrestazure.azure_exceptions import CloudError
+
+from .. import models
+
+
+class Operations(object):
+    """Operations operations.
+
+    You should not instantiate this class directly; instead, create a Client instance that will create it for you and attach it as an attribute.
+
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview".
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self.api_version = "2017-04-01-preview"
+
+        self.config = config
+
+    def list(
+            self, custom_headers=None, raw=False, **operation_config):
+        """Lists all of the available Stream Analytics related operations.
+
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides`.
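+
+        Example (hypothetical sketch; ``client`` is an already-configured
+        ``StreamAnalyticsManagementClient``)::
+
+            # Enumerates every operation exposed by the
+            # Microsoft.StreamAnalytics resource provider.
+            for op in client.operations.list():
+                print(op.name)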
+ :return: An iterator like instance of Operation + :rtype: + ~azure.mgmt.streamanalytics.models.OperationPaged[~azure.mgmt.streamanalytics.models.Operation] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/providers/Microsoft.StreamAnalytics/operations'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py new file mode 100644 index 000000000000..688fa913c2f1 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -0,0 +1,550 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class OutputsOperations(object): + """OutputsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview". 
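+
+    The group is typically reached through the client rather than
+    constructed directly (hypothetical sketch; the ``outputs`` attribute
+    name follows the usual AutoRest convention and is assumed here)::
+
+        outputs_ops = client.outputs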
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2017-04-01-preview" + + self.config = config + + def create_or_replace( + self, output, resource_group_name, job_name, output_name, if_match=None, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Creates an output or replaces an already existing output under an + existing streaming job. + + :param output: The definition of the output that will be used to + create a new output or replace the existing one under the streaming + job. + :type output: ~azure.mgmt.streamanalytics.models.Output + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param if_match: The ETag of the output. Omit this value to always + overwrite the current output. Specify the last-seen ETag value to + prevent accidentally overwriting concurrent changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new output to be created, + but to prevent updating an existing output. Other values will result + in a 412 Pre-condition Failed response. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Output or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Output or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_or_replace.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(output, 'Output') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = 
self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200, 201]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        header_dict = {}
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('Output', response)
+            header_dict = {
+                'ETag': 'str',
+            }
+        if response.status_code == 201:
+            deserialized = self._deserialize('Output', response)
+            header_dict = {
+                'ETag': 'str',
+            }
+
+        if raw:
+            client_raw_response = ClientRawResponse(deserialized, response)
+            client_raw_response.add_headers(header_dict)
+            return client_raw_response
+
+        return deserialized
+    create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'}
+
+    def update(
+            self, output, resource_group_name, job_name, output_name, if_match=None, custom_headers=None, raw=False, **operation_config):
+        """Updates an existing output under an existing streaming job. This can
+        be used to partially update (i.e., update one or two properties) an
+        output without affecting the rest of the job or output definition.
+
+        :param output: An Output object. The properties specified here will
+         overwrite the corresponding properties in the existing output (i.e.,
+         those properties will be updated). Any properties that are set to
+         null here will mean that the corresponding property in the existing
+         output will remain the same and not change as a result of this PATCH
+         operation.
+        :type output: ~azure.mgmt.streamanalytics.models.Output
+        :param resource_group_name: The name of the resource group. The name
+         is case insensitive.
+        :type resource_group_name: str
+        :param job_name: The name of the streaming job.
+        :type job_name: str
+        :param output_name: The name of the output.
+        :type output_name: str
+        :param if_match: The ETag of the output. Omit this value to always
+         overwrite the current output. Specify the last-seen ETag value to
+         prevent accidentally overwriting concurrent changes.
+        :type if_match: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides`.
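+
+        Example (hypothetical sketch; an empty ``Output`` is sent for
+        brevity, which amounts to a no-op PATCH; in practice set only the
+        properties you want to overwrite)::
+
+            from azure.mgmt.streamanalytics.models import Output
+
+            patch = Output()
+            updated = client.outputs.update(patch, 'my-rg', 'my-job', 'my-output')
+            print(updated.name)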
+ :return: Output or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Output or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(output, 'Output') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Output', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} + + def delete( + self, resource_group_name, job_name, output_name, custom_headers=None, raw=False, **operation_config): + """Deletes an output from the streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
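+
+        Example (hypothetical sketch; resource names are placeholders)::
+
+            # Returns None; the service answers 200 or 204 on success.
+            client.outputs.delete('my-rg', 'my-job', 'my-output')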
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} + + def get( + self, resource_group_name, job_name, output_name, custom_headers=None, raw=False, **operation_config): + """Gets details about the specified output. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
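+
+        Example (hypothetical sketch; pass ``raw=True`` instead to also get
+        the ETag response header via a ``ClientRawResponse``)::
+
+            output = client.outputs.get('my-rg', 'my-job', 'my-output')
+            print(output.name)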
+ :return: Output or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Output or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Output', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}'} + + def list_by_streaming_job( + self, resource_group_name, job_name, select=None, custom_headers=None, raw=False, **operation_config): + """Lists all of the outputs under the specified streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param select: The $select OData query parameter. This is a + comma-separated list of structural properties to include in the + response, or "*" to include all properties. By default, all properties + are returned except diagnostics. Currently only accepts '*' as a valid + value. + :type select: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
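+
+        Example (hypothetical sketch; '*' is currently the only value the
+        service accepts for $select, per the parameter description above)::
+
+            for output in client.outputs.list_by_streaming_job(
+                    'my-rg', 'my-job', select='*'):
+                print(output.name)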
+ :return: An iterator like instance of Output + :rtype: + ~azure.mgmt.streamanalytics.models.OutputPaged[~azure.mgmt.streamanalytics.models.Output] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_streaming_job.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.OutputPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_streaming_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs'} + + + def _test_initial( + self, resource_group_name, job_name, output_name, output=None, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.test.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + 
if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if output is not None: + body_content = self._serialize.body(output, 'Output') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ResourceTestStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def test( + self, resource_group_name, job_name, output_name, output=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Tests whether an output’s datasource is reachable and usable by the + Azure Stream Analytics service. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_name: The name of the output. + :type output_name: str + :param output: If the output specified does not already exist, this + parameter must contain the full output definition intended to be + tested. If the output specified already exists, this parameter can be + left null to test the existing output as is or if specified, the + properties specified will overwrite the corresponding properties in + the existing output (exactly like a PATCH operation) and the resulting + output will be tested. 
+ :type output: ~azure.mgmt.streamanalytics.models.Output + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns ResourceTestStatus or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.streamanalytics.models.ResourceTestStatus] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.streamanalytics.models.ResourceTestStatus]] + :raises: :class:`CloudError` + """ + raw_result = self._test_initial( + resource_group_name=resource_group_name, + job_name=job_name, + output_name=output_name, + output=output, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ResourceTestStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + test.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py new file mode 100644 index 000000000000..f05d35c23ee5 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -0,0 +1,348 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class PrivateEndpointsOperations(object): + """PrivateEndpointsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2020-03-01-preview". 
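+
+    Unlike the job-scoped operation groups above, which target API version
+    2017-04-01-preview, this group targets 2020-03-01-preview and raises
+    ``ErrorException`` instead of ``CloudError`` on failed requests.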
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2020-03-01-preview" + + self.config = config + + def create_or_update( + self, resource_group_name, cluster_name, private_endpoint_name, if_match=None, if_none_match=None, properties=None, custom_headers=None, raw=False, **operation_config): + """Creates a Stream Analytics Private Endpoint or replaces an already + existing Private Endpoint. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :param if_match: The ETag of the resource. Omit this value to always + overwrite the current record set. Specify the last-seen ETag value to + prevent accidentally overwriting concurrent changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new resource to be + created, but to prevent updating an existing record set. Other values + will result in a 412 Pre-condition Failed response. + :type if_none_match: str + :param properties: The properties associated with a private endpoint. + :type properties: + ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PrivateEndpoint or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorException` + """ + private_endpoint = models.PrivateEndpoint(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = 
self._serialize.body(private_endpoint, 'PrivateEndpoint') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpoint', response) + if response.status_code == 201: + deserialized = self._deserialize('PrivateEndpoint', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} + + def get( + self, resource_group_name, cluster_name, private_endpoint_name, custom_headers=None, raw=False, **operation_config): + """Gets information about the specified Private Endpoint. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. + :type private_endpoint_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PrivateEndpoint or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpoint', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return 
client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} + + + def _delete_initial( + self, resource_group_name, cluster_name, private_endpoint_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.ErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete( + self, resource_group_name, cluster_name, private_endpoint_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Delete the specified private endpoint. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param private_endpoint_name: The name of the private endpoint. 
+ :type private_endpoint_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorException` + """ + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + private_endpoint_name=private_endpoint_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}'} + + def list_by_cluster( + self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config): + """Lists the private endpoints in the cluster. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the cluster. + :type cluster_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
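+
+        Example (hypothetical sketch; the cluster and resource group names
+        are placeholders)::
+
+            for endpoint in client.private_endpoints.list_by_cluster(
+                    'my-rg', 'my-cluster'):
+                print(endpoint.name)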
+ :return: An iterator like instance of PrivateEndpoint + :rtype: + ~azure.mgmt.streamanalytics.models.PrivateEndpointPaged[~azure.mgmt.streamanalytics.models.PrivateEndpoint] + :raises: + :class:`ErrorException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_cluster.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.PrivateEndpointPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_cluster.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py new file mode 100644 index 000000000000..a0e139227ff9 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -0,0 +1,754 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class StreamingJobsOperations(object): + """StreamingJobsOperations operations. 
+ + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2017-04-01-preview" + + self.config = config + + + def _create_or_replace_initial( + self, streaming_job, resource_group_name, job_name, if_match=None, if_none_match=None, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.create_or_replace.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(streaming_job, 'StreamingJob') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + header_dict = {} + + if response.status_code == 200: + deserialized = self._deserialize('StreamingJob', response) + header_dict = { + 'ETag': 'str', + } + if response.status_code == 201: + deserialized = self._deserialize('StreamingJob', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + def create_or_replace( + self, streaming_job, resource_group_name, job_name, if_match=None, if_none_match=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates a streaming job or replaces an already existing streaming job. 
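+
+        This is a long-running operation; the returned poller can be blocked
+        on with ``.result()``. Example (hypothetical sketch; the StreamingJob
+        construction is abbreviated and the field values are placeholders)::
+
+            from azure.mgmt.streamanalytics.models import StreamingJob
+
+            job_definition = StreamingJob(location='West US')
+            poller = client.streaming_jobs.create_or_replace(
+                job_definition, 'my-rg', 'my-job')
+            job = poller.result()  # blocks until the service finishes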
+
+        :param streaming_job: The definition of the streaming job that will be
+         used to create a new streaming job or replace the existing one.
+        :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob
+        :param resource_group_name: The name of the resource group. The name
+         is case insensitive.
+        :type resource_group_name: str
+        :param job_name: The name of the streaming job.
+        :type job_name: str
+        :param if_match: The ETag of the streaming job. Omit this value to
+         always overwrite the current record set. Specify the last-seen ETag
+         value to prevent accidentally overwriting concurrent changes.
+        :type if_match: str
+        :param if_none_match: Set to '*' to allow a new streaming job to be
+         created, but to prevent updating an existing record set. Other values
+         will result in a 412 Pre-condition Failed response.
+        :type if_none_match: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns StreamingJob or
+         ClientRawResponse if raw==True
+        :rtype:
+         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.streamanalytics.models.StreamingJob]
+         or
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.streamanalytics.models.StreamingJob]]
+        :raises: :class:`CloudError`
+        """
+        raw_result = self._create_or_replace_initial(
+            streaming_job=streaming_job,
+            resource_group_name=resource_group_name,
+            job_name=job_name,
+            if_match=if_match,
+            if_none_match=if_none_match,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            header_dict = {
+                'ETag': 'str',
+            }
+            deserialized = self._deserialize('StreamingJob', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                client_raw_response.add_headers(header_dict)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'}
+
+    def update(
+            self, streaming_job, resource_group_name, job_name, if_match=None, custom_headers=None, raw=False, **operation_config):
+        """Updates an existing streaming job. This can be used to partially
+        update (i.e., update one or two properties) a streaming job without
+        affecting the rest of the job definition.
+
+        :param streaming_job: A streaming job object. The properties specified
+         here will overwrite the corresponding properties in the existing
+         streaming job (i.e., those properties will be updated). Any
+         properties that are set to null here will mean that the corresponding
+         property in the existing streaming job will remain the same and not
+         change as a result of this PATCH operation.
+        :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob
+        :param resource_group_name: The name of the resource group. The name
+         is case insensitive.
+ :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param if_match: The ETag of the streaming job. Omit this value to + always overwrite the current streaming job. Specify the last-seen ETag + value to prevent accidentally overwriting concurrent changes. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: StreamingJob or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(streaming_job, 'StreamingJob') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('StreamingJob', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} + + + def _delete_initial( + self, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = 
self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete( + self, resource_group_name, job_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes a streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + job_name=job_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} + + def get( + self, resource_group_name, job_name, expand=None, custom_headers=None, raw=False, **operation_config): + """Gets details about the specified streaming job. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param expand: The $expand OData query parameter. This is a + comma-separated list of additional streaming job properties to include + in the response, beyond the default set returned when this parameter + is absent. 
The default set is all streaming job properties other than + 'inputs', 'transformation', 'outputs', and 'functions'. + :type expand: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: StreamingJob or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.StreamingJob or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('StreamingJob', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}'} + + def list_by_resource_group( + self, resource_group_name, expand=None, custom_headers=None, raw=False, **operation_config): + """Lists all of the streaming jobs in the specified resource group. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param expand: The $expand OData query parameter. This is a + comma-separated list of additional streaming job properties to include + in the response, beyond the default set returned when this parameter + is absent. The default set is all streaming job properties other than + 'inputs', 'transformation', 'outputs', and 'functions'. + :type expand: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of StreamingJob + :rtype: + ~azure.mgmt.streamanalytics.models.StreamingJobPaged[~azure.mgmt.streamanalytics.models.StreamingJob] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.StreamingJobPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs'} + + def list( + self, expand=None, custom_headers=None, raw=False, **operation_config): + """Lists all of the streaming jobs in the given subscription. + + :param expand: The $expand OData query parameter. This is a + comma-separated list of additional streaming job properties to include + in the response, beyond the default set returned when this parameter + is absent. The default set is all streaming job properties other than + 'inputs', 'transformation', 'outputs', and 'functions'. + :type expand: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of StreamingJob + :rtype: + ~azure.mgmt.streamanalytics.models.StreamingJobPaged[~azure.mgmt.streamanalytics.models.StreamingJob] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if expand is not None: + query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.StreamingJobPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs'} + + + def _start_initial( + self, resource_group_name, job_name, output_start_mode=None, output_start_time=None, custom_headers=None, raw=False, **operation_config): + start_job_parameters = None + if output_start_mode is not None or output_start_time is not None: + start_job_parameters = models.StartStreamingJobParameters(output_start_mode=output_start_mode, output_start_time=output_start_time) + + # Construct URL + url = self.start.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", 
self.config.accept_language, 'str') + + # Construct body + if start_job_parameters is not None: + body_content = self._serialize.body(start_job_parameters, 'StartStreamingJobParameters') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def start( + self, resource_group_name, job_name, output_start_mode=None, output_start_time=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Starts a streaming job. Once a job is started it will start processing + input events and produce output. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param output_start_mode: Value may be JobStartTime, CustomTime, or + LastOutputEventTime to indicate whether the starting point of the + output event stream should start whenever the job is started, start at + a custom user time stamp specified via the outputStartTime property, + or start from the last event output time. Possible values include: + 'JobStartTime', 'CustomTime', 'LastOutputEventTime' + :type output_start_mode: str or + ~azure.mgmt.streamanalytics.models.OutputStartMode + :param output_start_time: Value is either an ISO-8601 formatted time + stamp that indicates the starting point of the output event stream, or + null to indicate that the output event stream will start whenever the + streaming job is started. This property must have a value if + outputStartMode is set to CustomTime. 
+ :type output_start_time: datetime + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._start_initial( + resource_group_name=resource_group_name, + job_name=job_name, + output_start_mode=output_start_mode, + output_start_time=output_start_time, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start'} + + + def _stop_initial( + self, resource_group_name, job_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.stop.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def stop( + self, resource_group_name, job_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Stops a running streaming job. This will cause a running streaming job + to stop processing input events and producing output. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. 
+ :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + job_name=job_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py new file mode 100644 index 000000000000..e4531e012c48 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -0,0 +1,103 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class SubscriptionsOperations(object): + """SubscriptionsOperations operations. + + You should not instantiate this class directly; instead, create a Client instance that will create it for you and attach it as an attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview".
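+ + A minimal usage sketch (illustrative; the configured ``client`` instance, its ``subscriptions`` attribute, and the quota field names are assumptions, not defined in this file):: + + result = client.subscriptions.list_quotas('West US') + for quota in result.value: + print(quota.name, quota.current_count, quota.max_count)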
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2017-04-01-preview" + + self.config = config + + def list_quotas( + self, location, custom_headers=None, raw=False, **operation_config): + """Retrieves the subscription's current quota information in a particular + region. + + :param location: The region in which to retrieve the subscription's + quota information. You can find out which regions Azure Stream + Analytics is supported in here: + https://azure.microsoft.com/en-us/regions/ + :type location: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SubscriptionQuotasListResult or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.streamanalytics.models.SubscriptionQuotasListResult or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.list_quotas.metadata['url'] + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SubscriptionQuotasListResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_quotas.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py new file mode 100644 index 000000000000..a1ce9238daf8 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -0,0 +1,298 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class TransformationsOperations(object): + """TransformationsOperations operations. + + You should not instantiate this class directly; instead, create a Client instance that will create it for you and attach it as an attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2017-04-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2017-04-01-preview" + + self.config = config + + def create_or_replace( + self, transformation, resource_group_name, job_name, transformation_name, if_match=None, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Creates a transformation or replaces an already existing transformation + under an existing streaming job. + + :param transformation: The definition of the transformation that will + be used to create a new transformation or replace the existing one + under the streaming job. + :type transformation: + ~azure.mgmt.streamanalytics.models.Transformation + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :param if_match: The ETag of the transformation. Omit this value to + always overwrite the current transformation. Specify the last-seen + ETag value to prevent accidentally overwriting concurrent changes. + :type if_match: str + :param if_none_match: Set to '*' to allow a new transformation to be + created, but to prevent updating an existing transformation. Other + values will result in a 412 Precondition Failed response. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`.
+ :return: Transformation or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Transformation or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_or_replace.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(transformation, 'Transformation') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Transformation', response) + header_dict = { + 'ETag': 'str', + } + if response.status_code == 201: + deserialized = self._deserialize('Transformation', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + create_or_replace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} + + def update( + self, transformation, resource_group_name, job_name, transformation_name, if_match=None, custom_headers=None, raw=False, **operation_config): + """Updates an existing transformation under an existing streaming job. + This can be used to partially update (i.e. update one or two properties) + a transformation without affecting the rest of the job or transformation + definition. + + :param transformation: A Transformation object. The properties + specified here will overwrite the corresponding properties in the + existing transformation (i.e. those properties will be updated).
Any + properties that are set to null here will mean that the corresponding + property in the existing transformation will remain the same and not + change as a result of this PATCH operation. + :type transformation: + ~azure.mgmt.streamanalytics.models.Transformation + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :param if_match: The ETag of the transformation. Omit this value to + always overwrite the current transformation. Specify the last-seen + ETag value to prevent accidentally overwriting concurrent changes. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Transformation or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Transformation or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(transformation, 'Transformation') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Transformation', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} + + def get( + self, resource_group_name, job_name, transformation_name, custom_headers=None, raw=False, **operation_config): + """Gets details about the specified transformation. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param transformation_name: The name of the transformation. + :type transformation_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Transformation or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.streamanalytics.models.Transformation or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'transformationName': self._serialize.url("transformation_name", transformation_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Transformation', response) + header_dict = { + 'ETag': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}'} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/version.py new file mode 100644 index 000000000000..e0ec669828cb --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/version.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +VERSION = "0.1.0" +
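A minimal end-to-end sketch of the generated client (the credentials object, resource names, and client attribute names below are placeholders following the usual AutoRest conventions, not values taken from this patch):

    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

    client = StreamAnalyticsManagementClient(credentials, 'subscription-id')

    # start() returns an LROPoller because the operation is long-running
    # on the service side; wait() blocks until the job is running.
    poller = client.streaming_jobs.start(
        'my-rg', 'my-job', output_start_mode='JobStartTime')
    poller.wait()

    # list() returns a paged iterator that lazily fetches follow-up pages.
    for job in client.streaming_jobs.list(expand='inputs,outputs'):
        print(job.name, job.job_state)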