diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst
index f43faed0449..f03bb41574e 100644
--- a/src/datafactory/HISTORY.rst
+++ b/src/datafactory/HISTORY.rst
@@ -3,6 +3,16 @@
 Release History
 ===============
 
+0.3.0
++++++
+* [BREAKING CHANGE] Renamed command subgroup `az datafactory factory` to `az datafactory`.
+* [BREAKING CHANGE] `az datafactory integration-runtime managed create`: `--type-properties-compute-properties` renamed to `--compute-properties`,
+  `--type-properties-ssis-properties` renamed to `--ssis-properties`.
+* [BREAKING CHANGE] `az datafactory integration-runtime self-hosted create`: `--type-properties-linked-info` renamed to `--linked-info`.
+* [BREAKING CHANGE] `az datafactory linked-service update`: `--properties` renamed to `--linked-service`.
+* [BREAKING CHANGE] `az datafactory dataset update`: `--properties` renamed to `--dataset`.
+* [BREAKING CHANGE] `az datafactory trigger update`: `--properties` renamed to `--trigger`.
+
 0.2.1
 +++++
 * az datafactory factory create: Enable managed identity by default
diff --git a/src/datafactory/azext_datafactory/azext_metadata.json b/src/datafactory/azext_datafactory/azext_metadata.json
index 4f48fa652a5..cfc30c747c7 100644
--- a/src/datafactory/azext_datafactory/azext_metadata.json
+++ b/src/datafactory/azext_datafactory/azext_metadata.json
@@ -1,4 +1,4 @@
 {
     "azext.isExperimental": true,
-    "azext.minCliCoreVersion": "2.11.0"
+    "azext.minCliCoreVersion": "2.15.0"
 }
\ No newline at end of file
diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py
index 69459245c8d..7db87b484da 100644
--- a/src/datafactory/azext_datafactory/generated/_client_factory.py
+++ b/src/datafactory/azext_datafactory/generated/_client_factory.py
@@ -11,46 +11,46 @@
 
 def cf_datafactory_cl(cli_ctx, *_):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
-    from ..vendored_sdks.datafactory import DataFactoryManagementClient
+    from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient
     return get_mgmt_service_client(cli_ctx,
                                    DataFactoryManagementClient)
 
 
 def cf_factory(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).factory
+    return cf_datafactory_cl(cli_ctx).factories
 
 
 def cf_integration_runtime(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).integration_runtime
+    return cf_datafactory_cl(cli_ctx).integration_runtimes
 
 
 def cf_integration_runtime_node(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).integration_runtime_node
+    return cf_datafactory_cl(cli_ctx).integration_runtime_nodes
 
 
 def cf_linked_service(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).linked_service
+    return cf_datafactory_cl(cli_ctx).linked_services
 
 
 def cf_dataset(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).dataset
+    return cf_datafactory_cl(cli_ctx).datasets
 
 
 def cf_pipeline(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).pipeline
+    return cf_datafactory_cl(cli_ctx).pipelines
 
 
 def cf_pipeline_run(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).pipeline_run
+    return cf_datafactory_cl(cli_ctx).pipeline_runs
 
 
 def cf_activity_run(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).activity_run
+    return cf_datafactory_cl(cli_ctx).activity_runs
 
 
 def cf_trigger(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).trigger
+    return cf_datafactory_cl(cli_ctx).triggers
 
 
 def cf_trigger_run(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).trigger_run
+    return cf_datafactory_cl(cli_ctx).trigger_runs
diff --git
a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index f42e472af34..fd2ab1dcd0e 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -12,32 +12,35 @@ from knack.help_files import helps -helps['datafactory factory'] = """ +helps['datafactory'] = """ type: group - short-summary: datafactory factory + short-summary: Manage factory with datafactory """ -helps['datafactory factory list'] = """ +helps['datafactory list'] = """ type: command - short-summary: "Lists factories under the specified subscription." + short-summary: "Lists factories. And Lists factories under the specified subscription." examples: - name: Factories_ListByResourceGroup text: |- - az datafactory factory list --resource-group "exampleResourceGroup" + az datafactory list --resource-group "exampleResourceGroup" + - name: Factories_List + text: |- + az datafactory list """ -helps['datafactory factory show'] = """ +helps['datafactory show'] = """ type: command short-summary: "Gets a factory." examples: - name: Factories_Get text: |- - az datafactory factory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" + az datafactory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory factory create'] = """ +helps['datafactory create'] = """ type: command - short-summary: "Creates or updates a factory." + short-summary: "Create a factory." parameters: - name: --factory-vsts-configuration short-summary: "Factory's VSTS repo information." @@ -69,30 +72,30 @@ examples: - name: Factories_CreateOrUpdate text: |- - az datafactory factory create --location "East US" --name "exampleFactoryName" --resource-group \ + az datafactory create --location "East US" --name "exampleFactoryName" --resource-group \ "exampleResourceGroup" """ -helps['datafactory factory update'] = """ +helps['datafactory update'] = """ type: command short-summary: "Updates a factory." examples: - name: Factories_Update text: |- - az datafactory factory update --name "exampleFactoryName" --tags exampleTag="exampleValue" \ ---resource-group "exampleResourceGroup" + az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ +"exampleResourceGroup" """ -helps['datafactory factory delete'] = """ +helps['datafactory delete'] = """ type: command short-summary: "Deletes a factory." examples: - name: Factories_Delete text: |- - az datafactory factory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" + az datafactory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory factory configure-factory-repo'] = """ +helps['datafactory configure-factory-repo'] = """ type: command short-summary: "Updates a factory's repo information." 
parameters: @@ -126,36 +129,36 @@ examples: - name: Factories_ConfigureFactoryRepo text: |- - az datafactory factory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1\ -234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ + az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-1234\ +5678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ --factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ repository-name="repo" root-folder="/" tenant-id="" --location "East US" """ -helps['datafactory factory get-data-plane-access'] = """ +helps['datafactory get-data-plane-access'] = """ type: command short-summary: "Get Data Plane access." examples: - name: Factories_GetDataPlaneAccess text: |- - az datafactory factory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" \ + az datafactory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" \ --expire-time "2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ "2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" """ -helps['datafactory factory get-git-hub-access-token'] = """ +helps['datafactory get-git-hub-access-token'] = """ type: command short-summary: "Get GitHub Access Token." examples: - name: Factories_GetGitHubAccessToken text: |- - az datafactory factory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code \ -"some" --git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" + az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ +--git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime'] = """ type: group - short-summary: datafactory integration-runtime + short-summary: Manage integration runtime with datafactory """ helps['datafactory integration-runtime list'] = """ @@ -180,7 +183,7 @@ helps['datafactory integration-runtime linked-integration-runtime'] = """ type: group - short-summary: datafactory integration-runtime sub group linked-integration-runtime + short-summary: Manage integration runtime with datafactory sub group linked-integration-runtime """ helps['datafactory integration-runtime linked-integration-runtime create'] = """ @@ -192,28 +195,27 @@ az datafactory integration-runtime linked-integration-runtime create --name \ "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" --location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" \ --subscription-id "061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" \ ---integration-runtime-name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --subscription-id \ -"12345678-1234-1234-1234-12345678abc" +--integration-runtime-name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime managed'] = """ type: group - short-summary: datafactory integration-runtime sub group managed + short-summary: Manage integration runtime with datafactory sub group managed """ helps['datafactory integration-runtime managed create'] = """ type: command - short-summary: "Creates or updates an integration runtime." 
+ short-summary: "Create an integration runtime." """ helps['datafactory integration-runtime self-hosted'] = """ type: group - short-summary: datafactory integration-runtime sub group self-hosted + short-summary: Manage integration runtime with datafactory sub group self-hosted """ helps['datafactory integration-runtime self-hosted create'] = """ type: command - short-summary: "Creates or updates an integration runtime." + short-summary: "Create an integration runtime." examples: - name: IntegrationRuntimes_Create text: |- @@ -361,7 +363,7 @@ helps['datafactory integration-runtime-node'] = """ type: group - short-summary: datafactory integration-runtime-node + short-summary: Manage integration runtime node with datafactory """ helps['datafactory integration-runtime-node show'] = """ @@ -407,7 +409,7 @@ helps['datafactory linked-service'] = """ type: group - short-summary: datafactory linked-service + short-summary: Manage linked service with datafactory """ helps['datafactory linked-service list'] = """ @@ -432,7 +434,7 @@ helps['datafactory linked-service create'] = """ type: command - short-summary: "Creates or updates a linked service." + short-summary: "Create a linked service." examples: - name: LinkedServices_Create text: |- @@ -444,7 +446,7 @@ helps['datafactory linked-service update'] = """ type: command - short-summary: "Creates or updates a linked service." + short-summary: "Update a linked service." examples: - name: LinkedServices_Update text: |- @@ -464,7 +466,7 @@ helps['datafactory dataset'] = """ type: group - short-summary: datafactory dataset + short-summary: Manage dataset with datafactory """ helps['datafactory dataset list'] = """ @@ -488,7 +490,7 @@ helps['datafactory dataset create'] = """ type: command - short-summary: "Creates or updates a dataset." + short-summary: "Create a dataset." examples: - name: Datasets_Create text: |- @@ -502,7 +504,7 @@ helps['datafactory dataset update'] = """ type: command - short-summary: "Creates or updates a dataset." + short-summary: "Update a dataset." parameters: - name: --folder short-summary: "The folder that this Dataset is in. If not specified, Dataset will appear at the root level." @@ -531,7 +533,7 @@ helps['datafactory pipeline'] = """ type: group - short-summary: datafactory pipeline + short-summary: Manage pipeline with datafactory """ helps['datafactory pipeline list'] = """ @@ -556,7 +558,7 @@ helps['datafactory pipeline create'] = """ type: command - short-summary: "Creates or updates a pipeline." + short-summary: "Create a pipeline." examples: - name: Pipelines_Create text: |- @@ -570,13 +572,13 @@ :{\\"type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipe\ line().parameters.OutputBlobNameList\\"}}}],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameLi\ st\\":{\\"type\\":\\"Array\\"}},\\"variables\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":\ -{\\"JobId\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.JobId\\"}}}" --name "examplePipeline" \ ---resource-group "exampleResourceGroup" +{\\"JobId\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.JobId\\"}},\\"duration\\":\\"0.00:10:00\ +\\"}" --name "examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline update'] = """ type: command - short-summary: "Creates or updates a pipeline." + short-summary: "Update a pipeline." 
examples: - name: Pipelines_Update text: |- @@ -588,8 +590,8 @@ \\"Expression\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDa\ taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\ type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\ -).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --name \ -"examplePipeline" --resource-group "exampleResourceGroup" +).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration \ +"0.00:10:00" --name "examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline delete'] = """ @@ -615,7 +617,7 @@ helps['datafactory pipeline-run'] = """ type: group - short-summary: datafactory pipeline-run + short-summary: Manage pipeline run with datafactory """ helps['datafactory pipeline-run show'] = """ @@ -675,7 +677,7 @@ helps['datafactory activity-run'] = """ type: group - short-summary: datafactory activity-run + short-summary: Manage activity run with datafactory """ helps['datafactory activity-run query-by-pipeline-run'] = """ @@ -715,7 +717,7 @@ helps['datafactory trigger'] = """ type: group - short-summary: datafactory trigger + short-summary: Manage trigger with datafactory """ helps['datafactory trigger list'] = """ @@ -739,7 +741,7 @@ helps['datafactory trigger create'] = """ type: command - short-summary: "Creates or updates a trigger." + short-summary: "Create a trigger." examples: - name: Triggers_Create text: |- @@ -753,7 +755,7 @@ helps['datafactory trigger update'] = """ type: command - short-summary: "Creates or updates a trigger." + short-summary: "Update a trigger." examples: - name: Triggers_Update text: |- @@ -843,7 +845,7 @@ helps['datafactory trigger-run'] = """ type: group - short-summary: datafactory trigger-run + short-summary: Manage trigger run with datafactory """ helps['datafactory trigger-run cancel'] = """ diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 85dcca4f8b3..2162b81c231 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -32,52 +32,52 @@ def load_arguments(self, _): - with self.argument_context('datafactory factory list') as c: + with self.argument_context('datafactory list') as c: c.argument('resource_group_name', resource_group_name_type) - with self.argument_context('datafactory factory show') as c: + with self.argument_context('datafactory show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') c.argument('if_none_match', type=str, help='ETag of the factory entity. Should only be specified for get. If ' 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - with self.argument_context('datafactory factory create') as c: + with self.argument_context('datafactory create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.') c.argument('if_match', type=str, help='ETag of the factory entity. 
Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') - c.argument('location', arg_type=get_location_type(self.cli_ctx), + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('tags', tags_type) - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS ' + c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s ' + c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' 'GitHub repo information.', arg_group='RepoConfiguration') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' 'value: json-string/@json-file.') - with self.argument_context('datafactory factory update') as c: + with self.argument_context('datafactory update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') c.argument('tags', tags_type) - with self.argument_context('datafactory factory delete') as c: + with self.argument_context('datafactory delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') - with self.argument_context('datafactory factory configure-factory-repo') as c: + with self.argument_context('datafactory configure-factory-repo') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') c.argument('factory_resource_id', type=str, help='The factory resource id.') - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS ' + c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s ' + c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' 'GitHub repo information.', arg_group='RepoConfiguration') - with self.argument_context('datafactory factory get-data-plane-access') as c: + with self.argument_context('datafactory get-data-plane-access') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') @@ -92,7 +92,7 @@ def load_arguments(self, _): c.argument('expire_time', type=str, help='Expiration time for the token. 
Maximum duration for the token is ' 'eight hours and by default the token will expire in eight hours.') - with self.argument_context('datafactory factory get-git-hub-access-token') as c: + with self.argument_context('datafactory get-git-hub-access-token') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') @@ -122,7 +122,7 @@ def load_arguments(self, _): 'belongs to.') c.argument('data_factory_name', type=str, help='The name of the data factory that the linked integration ' 'runtime belongs to.') - c.argument('location', arg_type=get_location_type(self.cli_ctx), + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) with self.argument_context('datafactory integration-runtime managed create') as c: @@ -133,10 +133,10 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') - c.argument('type_properties_compute_properties', type=validate_file_or_dict, help='The compute resource for ' - 'managed integration runtime. Expected value: json-string/@json-file.') - c.argument('type_properties_ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed ' - 'integration runtime. Expected value: json-string/@json-file.') + c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed ' + 'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') + c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration ' + 'runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') with self.argument_context('datafactory integration-runtime self-hosted create') as c: c.argument('resource_group_name', resource_group_name_type) @@ -146,8 +146,8 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') - c.argument('type_properties_linked_info', type=validate_file_or_dict, help='The base definition of a linked ' - 'integration runtime. Expected value: json-string/@json-file.') + c.argument('linked_info', type=validate_file_or_dict, help='The base definition of a linked integration ' + 'runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') with self.argument_context('datafactory integration-runtime update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -301,7 +301,7 @@ def load_arguments(self, _): 'json-string/@json-file.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'linked service. 
Expected value: json-string/@json-file.') - c.ignore('properties') + c.ignore('linked_service') with self.argument_context('datafactory linked-service delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -351,9 +351,9 @@ def load_arguments(self, _): 'json-string/@json-file.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'Dataset. Expected value: json-string/@json-file.') - c.argument('folder', action=AddFolder, nargs='*', help='The folder that this Dataset is in. If not specified, ' + c.argument('folder', action=AddFolder, nargs='+', help='The folder that this Dataset is in. If not specified, ' 'Dataset will appear at the root level.') - c.ignore('properties') + c.ignore('dataset') with self.argument_context('datafactory dataset delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -402,7 +402,10 @@ def load_arguments(self, _): 'Pipeline. Expected value: json-string/@json-file.') c.argument('run_dimensions', type=validate_file_or_dict, help='Dimensions emitted by Pipeline. Expected value: ' 'json-string/@json-file.') - c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.') + c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring ' + 'Metric is fired. Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric') + c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.', + arg_group='Folder') c.ignore('pipeline') with self.argument_context('datafactory pipeline delete') as c: @@ -450,8 +453,8 @@ def load_arguments(self, _): 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' 'format.') - c.argument('filters', action=AddFilters, nargs='*', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='*', help='List of OrderBy option.') + c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') + c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') with self.argument_context('datafactory activity-run query-by-pipeline-run') as c: c.argument('resource_group_name', resource_group_name_type) @@ -463,8 +466,8 @@ def load_arguments(self, _): 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' 'format.') - c.argument('filters', action=AddFilters, nargs='*', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='*', help='List of OrderBy option.') + c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') + c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') with self.argument_context('datafactory trigger list') as c: c.argument('resource_group_name', resource_group_name_type) @@ -498,7 +501,7 @@ def load_arguments(self, _): c.argument('description', type=str, help='Trigger description.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'trigger. 
Expected value: json-string/@json-file.') - c.ignore('properties') + c.ignore('trigger') with self.argument_context('datafactory trigger delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -567,8 +570,8 @@ def load_arguments(self, _): 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' 'format.') - c.argument('filters', action=AddFilters, nargs='*', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='*', help='List of OrderBy option.') + c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') + c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') with self.argument_context('datafactory trigger-run rerun') as c: c.argument('resource_group_name', resource_group_name_type) diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index ec9616c8672..f645d72981a 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -45,6 +45,10 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['root_folder'] = v[0] elif kl == 'last-commit-id': d['last_commit_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter factory_vsts_configuration. All possible ' + 'keys are: project-name, tenant-id, account-name, repository-name, ' + 'collaboration-branch, root-folder, last-commit-id'.format(k)) d['type'] = 'FactoryVSTSConfiguration' return d @@ -78,6 +82,10 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['root_folder'] = v[0] elif kl == 'last-commit-id': d['last_commit_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter factory_git_hub_configuration. All ' + 'possible keys are: host-name, account-name, repository-name, collaboration-branch, ' + 'root-folder, last-commit-id'.format(k)) d['type'] = 'FactoryGitHubConfiguration' return d @@ -101,6 +109,9 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use v = properties[k] if kl == 'name': d['name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'. + format(k)) return d @@ -127,6 +138,9 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['operator'] = v[0] elif kl == 'values': d['values'] = v + else: + raise CLIError('Unsupported Key {} is provided for parameter filters. All possible keys are: operand, ' + 'operator, values'.format(k)) return d @@ -151,4 +165,7 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['order_by'] = v[0] elif kl == 'order': d['order'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter order_by. 
All possible keys are: ' + 'order-by, order'.format(k)) return d diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index cfc2a3de83b..df59d171a88 100644 --- a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -17,27 +17,26 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_factory datafactory_factory = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factory_operations#FactoryOperations.{' - '}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperatio' + 'ns.{}', client_factory=cf_factory) - with self.command_group('datafactory factory', datafactory_factory, client_factory=cf_factory, - is_experimental=True) as g: - g.custom_command('list', 'datafactory_factory_list') - g.custom_show_command('show', 'datafactory_factory_show') - g.custom_command('create', 'datafactory_factory_create') - g.custom_command('update', 'datafactory_factory_update') - g.custom_command('delete', 'datafactory_factory_delete', confirmation=True) - g.custom_command('configure-factory-repo', 'datafactory_factory_configure_factory_repo') - g.custom_command('get-data-plane-access', 'datafactory_factory_get_data_plane_access') - g.custom_command('get-git-hub-access-token', 'datafactory_factory_get_git_hub_access_token') + with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory, is_experimental=True) as g: + g.custom_command('list', 'datafactory_list') + g.custom_show_command('show', 'datafactory_show') + g.custom_command('create', 'datafactory_create') + g.custom_command('update', 'datafactory_update') + g.custom_command('delete', 'datafactory_delete', confirmation=True) + g.custom_command('configure-factory-repo', 'datafactory_configure_factory_repo') + g.custom_command('get-data-plane-access', 'datafactory_get_data_plane_access') + g.custom_command('get-git-hub-access-token', 'datafactory_get_git_hub_access_token') from azext_datafactory.generated._client_factory import cf_integration_runtime datafactory_integration_runtime = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_operations#Integra' - 'tionRuntimeOperations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtimes_operations#Integr' + 'ationRuntimesOperations.{}', client_factory=cf_integration_runtime) with self.command_group('datafactory integration-runtime', datafactory_integration_runtime, - client_factory=cf_integration_runtime, is_experimental=True) as g: + client_factory=cf_integration_runtime) as g: g.custom_command('list', 'datafactory_integration_runtime_list') g.custom_show_command('show', 'datafactory_integration_runtime_show') g.custom_command('linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runti' @@ -60,11 +59,11 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_integration_runtime_node datafactory_integration_runtime_node = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_node_operations#In' - 'tegrationRuntimeNodeOperations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_nodes_operations#I' + 
'ntegrationRuntimeNodesOperations.{}', client_factory=cf_integration_runtime_node) with self.command_group('datafactory integration-runtime-node', datafactory_integration_runtime_node, - client_factory=cf_integration_runtime_node, is_experimental=True) as g: + client_factory=cf_integration_runtime_node) as g: g.custom_show_command('show', 'datafactory_integration_runtime_node_show') g.custom_command('update', 'datafactory_integration_runtime_node_update') g.custom_command('delete', 'datafactory_integration_runtime_node_delete', confirmation=True) @@ -72,39 +71,36 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_linked_service datafactory_linked_service = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._linked_service_operations#LinkedServic' - 'eOperations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._linked_services_operations#LinkedServi' + 'cesOperations.{}', client_factory=cf_linked_service) - with self.command_group('datafactory linked-service', datafactory_linked_service, client_factory=cf_linked_service, - is_experimental=True) as g: + with self.command_group('datafactory linked-service', datafactory_linked_service, + client_factory=cf_linked_service) as g: g.custom_command('list', 'datafactory_linked_service_list') g.custom_show_command('show', 'datafactory_linked_service_show') g.custom_command('create', 'datafactory_linked_service_create') - g.generic_update_command('update', setter_arg_name='properties', custom_func_name='' - 'datafactory_linked_service_update') + g.generic_update_command('update', setter_arg_name='linked_service', + custom_func_name='datafactory_linked_service_update') g.custom_command('delete', 'datafactory_linked_service_delete', confirmation=True) from azext_datafactory.generated._client_factory import cf_dataset datafactory_dataset = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._dataset_operations#DatasetOperations.{' - '}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._datasets_operations#DatasetsOperations' + '.{}', client_factory=cf_dataset) - with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset, - is_experimental=True) as g: + with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset) as g: g.custom_command('list', 'datafactory_dataset_list') g.custom_show_command('show', 'datafactory_dataset_show') g.custom_command('create', 'datafactory_dataset_create') - g.generic_update_command('update', setter_arg_name='properties', - custom_func_name='datafactory_dataset_update') + g.generic_update_command('update', setter_arg_name='dataset', custom_func_name='datafactory_dataset_update') g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True) from azext_datafactory.generated._client_factory import cf_pipeline datafactory_pipeline = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_operations#PipelineOperations' - '.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipelines_operations#PipelinesOperatio' + 'ns.{}', client_factory=cf_pipeline) - with self.command_group('datafactory pipeline', datafactory_pipeline, client_factory=cf_pipeline, - is_experimental=True) as g: + with self.command_group('datafactory pipeline', datafactory_pipeline, client_factory=cf_pipeline) as g: g.custom_command('list', 
'datafactory_pipeline_list') g.custom_show_command('show', 'datafactory_pipeline_show') g.custom_command('create', 'datafactory_pipeline_create') @@ -114,36 +110,34 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_pipeline_run datafactory_pipeline_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_run_operations#PipelineRunOpe' - 'rations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_runs_operations#PipelineRunsO' + 'perations.{}', client_factory=cf_pipeline_run) - with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, client_factory=cf_pipeline_run, - is_experimental=True) as g: + with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, + client_factory=cf_pipeline_run) as g: g.custom_show_command('show', 'datafactory_pipeline_run_show') g.custom_command('cancel', 'datafactory_pipeline_run_cancel') g.custom_command('query-by-factory', 'datafactory_pipeline_run_query_by_factory') from azext_datafactory.generated._client_factory import cf_activity_run datafactory_activity_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._activity_run_operations#ActivityRunOpe' - 'rations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._activity_runs_operations#ActivityRunsO' + 'perations.{}', client_factory=cf_activity_run) - with self.command_group('datafactory activity-run', datafactory_activity_run, client_factory=cf_activity_run, - is_experimental=True) as g: + with self.command_group('datafactory activity-run', datafactory_activity_run, + client_factory=cf_activity_run) as g: g.custom_command('query-by-pipeline-run', 'datafactory_activity_run_query_by_pipeline_run') from azext_datafactory.generated._client_factory import cf_trigger datafactory_trigger = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_operations#TriggerOperations.{' - '}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._triggers_operations#TriggersOperations' + '.{}', client_factory=cf_trigger) - with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger, - is_experimental=True) as g: + with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger) as g: g.custom_command('list', 'datafactory_trigger_list') g.custom_show_command('show', 'datafactory_trigger_show') g.custom_command('create', 'datafactory_trigger_create') - g.generic_update_command('update', setter_arg_name='properties', - custom_func_name='datafactory_trigger_update') + g.generic_update_command('update', setter_arg_name='trigger', custom_func_name='datafactory_trigger_update') g.custom_command('delete', 'datafactory_trigger_delete', confirmation=True) g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status') g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory') @@ -156,11 +150,10 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_trigger_run datafactory_trigger_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_run_operations#TriggerRunOpera' - 'tions.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_runs_operations#TriggerRunsOpe' + 'rations.{}', 
client_factory=cf_trigger_run) - with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run, - is_experimental=True) as g: + with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run) as g: g.custom_command('cancel', 'datafactory_trigger_run_cancel') g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') g.custom_command('rerun', 'datafactory_trigger_run_rerun') diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index 513e21ca96d..c269c1999ff 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -7,39 +7,39 @@ # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- +# pylint: disable=line-too-long # pylint: disable=too-many-lines # pylint: disable=unused-argument -import json from knack.util import CLIError from azure.cli.core.util import sdk_no_wait -def datafactory_factory_list(client, - resource_group_name=None): +def datafactory_list(client, + resource_group_name=None): if resource_group_name: return client.list_by_resource_group(resource_group_name=resource_group_name) return client.list() -def datafactory_factory_show(client, - resource_group_name, - factory_name, - if_none_match=None): +def datafactory_show(client, + resource_group_name, + factory_name, + if_none_match=None): return client.get(resource_group_name=resource_group_name, factory_name=factory_name, if_none_match=if_none_match) -def datafactory_factory_create(client, - resource_group_name, - factory_name, - if_match=None, - location=None, - tags=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None, - global_parameters=None): +def datafactory_create(client, + resource_group_name, + factory_name, + if_match=None, + location=None, + tags=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None, + global_parameters=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -49,38 +49,43 @@ def datafactory_factory_create(client, raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + factory = {} + factory['location'] = location + factory['tags'] = tags + factory['repo_configuration'] = repo_configuration + factory['global_parameters'] = global_parameters + factory['encryption'] = {} + factory['identity'] = {} return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, if_match=if_match, - location=location, - tags=tags, - identity=None, - repo_configuration=repo_configuration, - global_parameters=global_parameters) + factory=factory) -def datafactory_factory_update(client, - resource_group_name, - factory_name, - tags=None): +def datafactory_update(client, + resource_group_name, + factory_name, + tags=None): + factory_update_parameters = {} + factory_update_parameters['tags'] = tags + factory_update_parameters['identity'] = {} return client.update(resource_group_name=resource_group_name, factory_name=factory_name, - tags=tags, - identity={"type": "SystemAssigned"}) + factory_update_parameters=factory_update_parameters) -def 
datafactory_factory_delete(client, - resource_group_name, - factory_name): +def datafactory_delete(client, + resource_group_name, + factory_name): return client.delete(resource_group_name=resource_group_name, factory_name=factory_name) -def datafactory_factory_configure_factory_repo(client, - location, - factory_resource_id=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None): +def datafactory_configure_factory_repo(client, + location, + factory_resource_id=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -90,39 +95,45 @@ def datafactory_factory_configure_factory_repo(client, raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + factory_repo_update = {} + factory_repo_update['factory_resource_id'] = factory_resource_id + factory_repo_update['repo_configuration'] = repo_configuration return client.configure_factory_repo(location_id=location, - factory_resource_id=factory_resource_id, - repo_configuration=repo_configuration) + factory_repo_update=factory_repo_update) -def datafactory_factory_get_data_plane_access(client, - resource_group_name, - factory_name, - permissions=None, - access_resource_path=None, - profile_name=None, - start_time=None, - expire_time=None): +def datafactory_get_data_plane_access(client, + resource_group_name, + factory_name, + permissions=None, + access_resource_path=None, + profile_name=None, + start_time=None, + expire_time=None): + policy = {} + policy['permissions'] = permissions + policy['access_resource_path'] = access_resource_path + policy['profile_name'] = profile_name + policy['start_time'] = start_time + policy['expire_time'] = expire_time return client.get_data_plane_access(resource_group_name=resource_group_name, factory_name=factory_name, - permissions=permissions, - access_resource_path=access_resource_path, - profile_name=profile_name, - start_time=start_time, - expire_time=expire_time) - - -def datafactory_factory_get_git_hub_access_token(client, - resource_group_name, - factory_name, - git_hub_access_code, - git_hub_access_token_base_url, - git_hub_client_id=None): + policy=policy) + + +def datafactory_get_git_hub_access_token(client, + resource_group_name, + factory_name, + git_hub_access_code, + git_hub_access_token_base_url, + git_hub_client_id=None): + git_hub_access_token_request = {} + git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code + git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id + git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url return client.get_git_hub_access_token(resource_group_name=resource_group_name, factory_name=factory_name, - git_hub_access_code=git_hub_access_code, - git_hub_client_id=git_hub_client_id, - git_hub_access_token_base_url=git_hub_access_token_base_url) + git_hub_access_token_request=git_hub_access_token_request) def datafactory_integration_runtime_list(client, @@ -151,13 +162,15 @@ def datafactory_integration_runtime_linked_integration_runtime_create(client, subscription_id=None, data_factory_name=None, location=None): + create_linked_integration_runtime_request = {} + create_linked_integration_runtime_request['name'] = name + 
create_linked_integration_runtime_request['subscription_id'] = subscription_id + create_linked_integration_runtime_request['data_factory_name'] = data_factory_name + create_linked_integration_runtime_request['data_factory_location'] = location return client.create_linked_integration_runtime(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - name=name, - subscription_id=subscription_id, - data_factory_name=data_factory_name, - data_factory_location=location) + create_linked_integration_runtime_request=create_linked_integration_runtime_request) def datafactory_integration_runtime_managed_create(client, @@ -166,18 +179,19 @@ def datafactory_integration_runtime_managed_create(client, integration_runtime_name, if_match=None, description=None, - type_properties_compute_properties=None, - type_properties_ssis_properties=None): - properties = {} - properties['type'] = 'Managed' - properties['description'] = description - properties['compute_properties'] = type_properties_compute_properties - properties['ssis_properties'] = type_properties_ssis_properties + compute_properties=None, + ssis_properties=None): + integration_runtime = {} + integration_runtime['properties'] = {} + integration_runtime['properties']['type'] = 'Managed' + integration_runtime['properties']['description'] = description + integration_runtime['properties']['compute_properties'] = compute_properties + integration_runtime['properties']['ssis_properties'] = ssis_properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, if_match=if_match, - properties=properties) + integration_runtime=integration_runtime) def datafactory_integration_runtime_self_hosted_create(client, @@ -186,16 +200,17 @@ def datafactory_integration_runtime_self_hosted_create(client, integration_runtime_name, if_match=None, description=None, - type_properties_linked_info=None): - properties = {} - properties['type'] = 'SelfHosted' - properties['description'] = description - properties['linked_info'] = type_properties_linked_info + linked_info=None): + integration_runtime = {} + integration_runtime['properties'] = {} + integration_runtime['properties']['type'] = 'SelfHosted' + integration_runtime['properties']['description'] = description + integration_runtime['properties']['linked_info'] = linked_info return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, if_match=if_match, - properties=properties) + integration_runtime=integration_runtime) def datafactory_integration_runtime_update(client, @@ -204,11 +219,13 @@ def datafactory_integration_runtime_update(client, integration_runtime_name, auto_update=None, update_delay_offset=None): + update_integration_runtime_request = {} + update_integration_runtime_request['auto_update'] = auto_update + update_integration_runtime_request['update_delay_offset'] = update_delay_offset return client.update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - auto_update=auto_update, - update_delay_offset=update_delay_offset) + update_integration_runtime_request=update_integration_runtime_request) def datafactory_integration_runtime_delete(client, @@ -251,9 +268,9 @@ def datafactory_integration_runtime_list_auth_key(client, resource_group_name, factory_name, integration_runtime_name): - return 
client.list_auth_key(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) + return client.list_auth_keys(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) def datafactory_integration_runtime_regenerate_auth_key(client, @@ -261,10 +278,12 @@ def datafactory_integration_runtime_regenerate_auth_key(client, factory_name, integration_runtime_name, key_name=None): + regenerate_key_parameters = {} + regenerate_key_parameters['key_name'] = key_name return client.regenerate_auth_key(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - key_name=key_name) + regenerate_key_parameters=regenerate_key_parameters) def datafactory_integration_runtime_remove_link(client, @@ -272,10 +291,12 @@ def datafactory_integration_runtime_remove_link(client, factory_name, integration_runtime_name, linked_factory_name): - return client.remove_link(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - linked_factory_name=linked_factory_name) + linked_integration_runtime_request = {} + linked_integration_runtime_request['linked_factory_name'] = linked_factory_name + return client.remove_links(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + linked_integration_runtime_request=linked_integration_runtime_request) def datafactory_integration_runtime_start(client, @@ -337,11 +358,13 @@ def datafactory_integration_runtime_node_update(client, integration_runtime_name, node_name, concurrent_jobs_limit=None): + update_integration_runtime_node_request = {} + update_integration_runtime_node_request['concurrent_jobs_limit'] = concurrent_jobs_limit return client.update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, - concurrent_jobs_limit=concurrent_jobs_limit) + update_integration_runtime_node_request=update_integration_runtime_node_request) def datafactory_integration_runtime_node_delete(client, @@ -390,11 +413,13 @@ def datafactory_linked_service_create(client, linked_service_name, properties, if_match=None): + linked_service = {} + linked_service['properties'] = properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, if_match=if_match, - properties=properties) + linked_service=linked_service) def datafactory_linked_service_update(instance, @@ -414,7 +439,7 @@ def datafactory_linked_service_update(instance, instance.properties.parameters = parameters if annotations is not None: instance.properties.annotations = annotations - return instance.properties + return instance def datafactory_linked_service_delete(client, @@ -450,11 +475,13 @@ def datafactory_dataset_create(client, dataset_name, properties, if_match=None): + dataset = {} + dataset['properties'] = properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, if_match=if_match, - properties=properties) + dataset=dataset) def datafactory_dataset_update(instance, @@ -483,7 +510,7 @@ def datafactory_dataset_update(instance, instance.properties.annotations = annotations if folder is not None: instance.properties.folder = folder - return instance.properties + return 
instance def datafactory_dataset_delete(client, @@ -538,6 +565,7 @@ def datafactory_pipeline_update(instance, concurrency=None, annotations=None, run_dimensions=None, + duration=None, folder_name=None): if description is not None: instance.description = description @@ -553,8 +581,10 @@ def datafactory_pipeline_update(instance, instance.annotations = annotations if run_dimensions is not None: instance.run_dimensions = run_dimensions + if duration is not None: + instance.elapsed_time_metric.duration = duration if folder_name is not None: - instance.name_folder_name = folder_name + instance.folder.name = folder_name return instance @@ -614,13 +644,15 @@ def datafactory_pipeline_run_query_by_factory(client, continuation_token=None, filters=None, order_by=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['last_updated_after'] = last_updated_after + filter_parameters['last_updated_before'] = last_updated_before + filter_parameters['filters'] = filters + filter_parameters['order_by'] = order_by return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token_parameter=continuation_token, - last_updated_after=last_updated_after, - last_updated_before=last_updated_before, - filters=filters, - order_by=order_by) + filter_parameters=filter_parameters) def datafactory_activity_run_query_by_pipeline_run(client, @@ -632,14 +664,16 @@ def datafactory_activity_run_query_by_pipeline_run(client, continuation_token=None, filters=None, order_by=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['last_updated_after'] = last_updated_after + filter_parameters['last_updated_before'] = last_updated_before + filter_parameters['filters'] = filters + filter_parameters['order_by'] = order_by return client.query_by_pipeline_run(resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, - continuation_token_parameter=continuation_token, - last_updated_after=last_updated_after, - last_updated_before=last_updated_before, - filters=filters, - order_by=order_by) + filter_parameters=filter_parameters) def datafactory_trigger_list(client, @@ -666,11 +700,13 @@ def datafactory_trigger_create(client, trigger_name, properties, if_match=None): + trigger = {} + trigger['properties'] = properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, if_match=if_match, - properties=properties) + trigger=trigger) def datafactory_trigger_update(instance, @@ -684,7 +720,7 @@ def datafactory_trigger_update(instance, instance.properties.description = description if annotations is not None: instance.properties.annotations = annotations - return instance.properties + return instance def datafactory_trigger_delete(client, @@ -710,10 +746,12 @@ def datafactory_trigger_query_by_factory(client, factory_name, continuation_token=None, parent_trigger_name=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['parent_trigger_name'] = parent_trigger_name return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token_parameter=continuation_token, - parent_trigger_name=parent_trigger_name) + filter_parameters=filter_parameters) def datafactory_trigger_start(client, @@ -746,7 +784,7 @@ def datafactory_trigger_subscribe_to_event(client, trigger_name, 
no_wait=False): return sdk_no_wait(no_wait, - client.begin_subscribe_to_event, + client.begin_subscribe_to_events, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name) @@ -758,7 +796,7 @@ def datafactory_trigger_unsubscribe_from_event(client, trigger_name, no_wait=False): return sdk_no_wait(no_wait, - client.begin_unsubscribe_from_event, + client.begin_unsubscribe_from_events, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name) @@ -783,13 +821,15 @@ def datafactory_trigger_run_query_by_factory(client, continuation_token=None, filters=None, order_by=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['last_updated_after'] = last_updated_after + filter_parameters['last_updated_before'] = last_updated_before + filter_parameters['filters'] = filters + filter_parameters['order_by'] = order_by return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token_parameter=continuation_token, - last_updated_after=last_updated_after, - last_updated_before=last_updated_before, - filters=filters, - order_by=order_by) + filter_parameters=filter_parameters) def datafactory_trigger_run_rerun(client, diff --git a/src/datafactory/azext_datafactory/manual/_help.py b/src/datafactory/azext_datafactory/manual/_help.py new file mode 100644 index 00000000000..cbc8eb2f3e5 --- /dev/null +++ b/src/datafactory/azext_datafactory/manual/_help.py @@ -0,0 +1,91 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.help_files import helps + + +helps['datafactory create'] = """ + type: command + short-summary: "Create a factory." + parameters: + - name: --factory-vsts-configuration --vsts-config + short-summary: "Factory's VSTS repo information." + long-summary: | + Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \ +repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX + + project-name: Required. VSTS project name. + tenant-id: VSTS tenant id. + type: Required. Type of repo configuration. + account-name: Required. Account name. + repository-name: Required. Repository name. + collaboration-branch: Required. Collaboration branch. + root-folder: Required. Root folder. + last-commit-id: Last commit id. + - name: --factory-git-hub-configuration --github-config + short-summary: "Factory's GitHub repo information." + long-summary: | + Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \ +collaboration-branch=XX root-folder=XX last-commit-id=XX + + host-name: GitHub Enterprise host name. For example: https://github.mydomain.com + type: Required. Type of repo configuration. + account-name: Required. Account name. + repository-name: Required. Repository name. + collaboration-branch: Required. Collaboration branch. + root-folder: Required. Root folder. + last-commit-id: Last commit id. 
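For illustration only (the account, repository, and branch values here are placeholders, not taken from the generated examples), the GitHub shorthand documented above is passed the same way as the VSTS one on create:

               az datafactory create --location "East US" --name "exampleFactoryName" --resource-group \
"exampleResourceGroup" --github-config account-name="myOrg" repository-name="myRepo" \
collaboration-branch="master" root-folder="/" last-commit-id=""

Note that the manual custom command (datafactory_create, further below) raises a CLIError when both repo configurations are supplied, so at most one of --vsts-config / --github-config can be used per call.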
+    examples:
+      - name: Factories_CreateOrUpdate
+        text: |-
+               az datafactory create --location "East US" --name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+"""
+
+helps['datafactory configure-factory-repo'] = """
+    type: command
+    short-summary: "Updates a factory's repo information."
+    parameters:
+      - name: --factory-vsts-configuration --vsts-config
+        short-summary: "Factory's VSTS repo information."
+        long-summary: |
+            Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX \
+repository-name=XX collaboration-branch=XX root-folder=XX last-commit-id=XX
+
+            project-name: Required. VSTS project name.
+            tenant-id: VSTS tenant id.
+            type: Required. Type of repo configuration.
+            account-name: Required. Account name.
+            repository-name: Required. Repository name.
+            collaboration-branch: Required. Collaboration branch.
+            root-folder: Required. Root folder.
+            last-commit-id: Last commit id.
+      - name: --factory-git-hub-configuration --github-config
+        short-summary: "Factory's GitHub repo information."
+        long-summary: |
+            Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX \
+collaboration-branch=XX root-folder=XX last-commit-id=XX
+
+            host-name: GitHub Enterprise host name. For example: https://github.mydomain.com
+            type: Required. Type of repo configuration.
+            account-name: Required. Account name.
+            repository-name: Required. Repository name.
+            collaboration-branch: Required. Collaboration branch.
+            root-folder: Required. Root folder.
+            last-commit-id: Last commit id.
+    examples:
+      - name: Factories_ConfigureFactoryRepo
+        text: |-
+               az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-1234\
+5678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \
+--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \
+repository-name="repo" root-folder="/" tenant-id="" --location "East US"
+"""
diff --git a/src/datafactory/azext_datafactory/manual/_params.py b/src/datafactory/azext_datafactory/manual/_params.py
new file mode 100644
index 00000000000..2fac10a33d2
--- /dev/null
+++ b/src/datafactory/azext_datafactory/manual/_params.py
@@ -0,0 +1,35 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# -------------------------------------------------------------------------- + +from azext_datafactory.action import ( + AddFactoryVstsConfiguration, + AddFactoryGitHubConfiguration +) + + +def load_arguments(self, _): + + with self.argument_context('datafactory create') as c: + c.argument('factory_vsts_configuration', options_list=['--vsts-config', '--factory-vsts-configuration'], + action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS repo information.', + arg_group='RepoConfiguration') + c.argument('factory_git_hub_configuration', + options_list=['--github-config', '--factory-git-hub-configuration'], + action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s GitHub repo information.', + arg_group='RepoConfiguration') + + with self.argument_context('datafactory configure-factory-repo') as c: + c.argument('factory_vsts_configuration', options_list=['--vsts-config', '--factory-vsts-configuration'], + action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS repo information.', + arg_group='RepoConfiguration') + c.argument('factory_git_hub_configuration', + options_list=['--github-config', '--factory-git-hub-configuration'], + action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s GitHub repo information.', + arg_group='RepoConfiguration') diff --git a/src/datafactory/azext_datafactory/manual/custom.py b/src/datafactory/azext_datafactory/manual/custom.py index 5b91fcd37d9..7a96c61335b 100644 --- a/src/datafactory/azext_datafactory/manual/custom.py +++ b/src/datafactory/azext_datafactory/manual/custom.py @@ -11,30 +11,45 @@ from knack.util import CLIError -def datafactory_factory_create(client, - resource_group_name, - factory_name, - if_match=None, - location=None, - tags=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None, - global_parameters=None): +def datafactory_create(client, + resource_group_name, + factory_name, + if_match=None, + location=None, + tags=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None, + global_parameters=None): from azext_datafactory.vendored_sdks.datafactory.models import FactoryIdentity + from azext_datafactory.vendored_sdks.datafactory.models import FactoryIdentityType all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) if factory_git_hub_configuration is not None: all_repo_configuration.append(factory_git_hub_configuration) if len(all_repo_configuration) > 1: - raise CLIError('At most one of --factory-vsts-configuration, --factory-git-hub-configuration is needed for ' - 'repo configuration!') + raise CLIError('At most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' + 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + factory = {} + factory['location'] = location + factory['tags'] = tags + factory['repo_configuration'] = repo_configuration + factory['global_parameters'] = global_parameters + factory['encryption'] = {} + factory['identity'] = FactoryIdentity(type=FactoryIdentityType.SYSTEM_ASSIGNED) return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, if_match=if_match, - location=location, - tags=tags, - identity=FactoryIdentity(), - repo_configuration=repo_configuration, - global_parameters=global_parameters) + factory=factory) + + +def datafactory_update(client, + resource_group_name, + factory_name, + tags=None): + factory_update_parameters 
= {} + factory_update_parameters['tags'] = tags + return client.update(resource_group_name=resource_group_name, + factory_name=factory_name, + factory_update_parameters=factory_update_parameters) diff --git a/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py index 0db57afba69..64fc8cefe48 100644 --- a/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/manual/tests/latest/test_datafactory_scenario.py @@ -10,7 +10,7 @@ # EXAMPLE: IntegrationRuntimes_Create -def step_integrationruntimes_create(test, rg): +def step_integration_runtime_create(test, rg): test.cmd('az datafactory integration-runtime self-hosted create ' '--factory-name "{myFactory}" ' '--description "A selfhosted integration runtime" ' @@ -22,7 +22,7 @@ def step_integrationruntimes_create(test, rg): ]) -def step_triggerruns_rerun(test, rg): +def step_trigger_run_rerun(test, rg): test.cmd('az datafactory trigger-run rerun ' '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' @@ -31,7 +31,7 @@ def step_triggerruns_rerun(test, rg): checks=[]) -def step_pipelines_createrun(test, rg): +def step_pipeline_create_run(test, rg): output = test.cmd('az datafactory pipeline create-run ' '--factory-name "{myFactory}" ' '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' @@ -41,7 +41,7 @@ def step_pipelines_createrun(test, rg): return output -def step_pipelineruns_cancel(test, rg): +def step_pipeline_run_cancel(test, rg): test.cmd('az datafactory pipeline-run cancel ' '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' @@ -49,7 +49,7 @@ def step_pipelineruns_cancel(test, rg): checks=[]) -def step_pipelineruns_get(test, rg): +def step_pipeline_run_show(test, rg): test.cmd('az datafactory pipeline-run show ' '--factory-name "{myFactory}" ' '--resource-group "{rg}" ' @@ -57,7 +57,7 @@ def step_pipelineruns_get(test, rg): checks=[]) -def step_pipelines_update(test, rg): +def step_pipeline_update(test, rg): test.cmd('az datafactory pipeline update ' '--factory-name "{myFactory}" ' '--description "Test Update description" ' @@ -66,7 +66,7 @@ def step_pipelines_update(test, rg): checks=[]) -def step_triggerruns_querybyfactory(test, rg): +def step_trigger_run_query_by_factory(test, rg): output = test.cmd('az datafactory trigger-run query-by-factory ' '--factory-name "{myFactory}" ' '--last-updated-after "{myStartTime}" ' @@ -76,16 +76,16 @@ def step_triggerruns_querybyfactory(test, rg): return output -def step_integrationruntimes_managed_create(test, rg): +def step_integration_runtime_managed_create(test, rg): test.cmd('az datafactory integration-runtime managed create ' '--factory-name "{myFactory}" ' '--name "{myIntegrationRuntime}" ' '--resource-group "{rg}" ' '--description "Managed Integration Runtime" ' - '--type-properties-compute-properties "{{\\"location\\":' + '--compute-properties "{{\\"location\\":' '\\"East US 2\\",\\"nodeSize\\":\\"Standard_D2_v3\\",' '\\"numberOfNodes\\":1,\\"maxParallelExecutionsPerNode\\":2}}" ' - '--type-properties-ssis-properties "{{\\"edition\\":\\"Standard' + '--ssis-properties "{{\\"edition\\":\\"Standard' '\\",\\"licenseType\\":\\"LicenseIncluded\\"}}" ', checks=[ test.check('name', "{myIntegrationRuntime}"), @@ -93,7 +93,7 @@ def step_integrationruntimes_managed_create(test, rg): ]) -def step_pipelines_wait_create(test, rg): +def step_pipeline_wait_create(test, rg): test.cmd('az datafactory 
pipeline create ' '--factory-name "{myFactory}" ' '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"Wait1\\",' @@ -108,7 +108,7 @@ def step_pipelines_wait_create(test, rg): ]) -def step_triggers_tumble_create(test, rg): +def step_trigger_tumble_create(test, rg): test.cmd('az datafactory trigger create ' '--resource-group "{rg}" ' '--properties "{{\\"description\\":\\"trumblingwindowtrigger' @@ -132,38 +132,38 @@ def step_triggers_tumble_create(test, rg): def call_managed_integrationruntime_scenario(test, rg): from ....tests.latest import test_datafactory_scenario as g - g.setup(test, rg) - g.step_factories_createorupdate(test, rg) - step_integrationruntimes_managed_create(test, rg) - g.step_integrationruntimes_get(test, rg) + g.setup_scenario(test, rg) + g.step_create(test, rg) + step_integration_runtime_managed_create(test, rg) + g.step_integration_runtime_show(test, rg) test.kwargs.update({'myIntegrationRuntime2': test.kwargs.get('myIntegrationRuntime')}) - g.step_integrationruntimes_start(test, rg) - g.step_integrationruntimes_stop(test, rg) - g.step_integrationruntimes_delete(test, rg) - g.step_factories_delete(test, rg) - g.cleanup(test, rg) + g.step_integration_runtime_start(test, rg) + g.step_integration_runtime_stop(test, rg) + g.step_integration_runtime_delete(test, rg) + g.step_delete(test, rg) + g.cleanup_scenario(test, rg) def call_triggerrun_scenario(test, rg): from ....tests.latest import test_datafactory_scenario as g import time - g.setup(test, rg) - g.step_factories_createorupdate(test, rg) - step_pipelines_wait_create(test, rg) - createrun_res = g.step_pipelines_createrun(test, rg) + g.setup_scenario(test, rg) + g.step_create(test, rg) + step_pipeline_wait_create(test, rg) + createrun_res = step_pipeline_create_run(test, rg) time.sleep(5) test.kwargs.update({'myRunId': createrun_res.get('runId')}) - g.step_pipelineruns_get(test, rg) - g.step_activityruns_querybypipelinerun(test, rg) - createrun_res = g.step_pipelines_createrun(test, rg) + step_pipeline_run_show(test, rg) + g.step_activity_run_query_by_pipeline_run(test, rg) + createrun_res = step_pipeline_create_run(test, rg) test.kwargs.update({'myRunId': createrun_res.get('runId')}) - g.step_pipelineruns_cancel(test, rg) - step_triggers_tumble_create(test, rg) - g.step_triggers_start(test, rg) - g.step_triggers_get(test, rg) + step_pipeline_run_cancel(test, rg) + step_trigger_tumble_create(test, rg) + g.step_trigger_start(test, rg) + g.step_trigger_show(test, rg) maxRound = 2 while True: - triggerrun_res = g.step_triggerruns_querybyfactory(test, rg) + triggerrun_res = step_trigger_run_query_by_factory(test, rg) if len(triggerrun_res['value']) > 0 and triggerrun_res['value'][0]['status'] == 'Succeeded': test.kwargs.update({'myRunId': triggerrun_res['value'][0]['triggerRunId']}) break @@ -175,79 +175,74 @@ def call_triggerrun_scenario(test, rg): else: break if maxRound > 0: - g.step_triggerruns_rerun(test, rg) - g.step_triggerruns_querybyfactory(test, rg) - g.step_triggers_stop(test, rg) - g.step_triggers_delete(test, rg) - g.step_pipelines_delete(test, rg) - g.step_factories_delete(test, rg) + step_trigger_run_rerun(test, rg) + step_trigger_run_query_by_factory(test, rg) + g.step_trigger_stop(test, rg) + g.step_trigger_delete(test, rg) + g.step_pipeline_delete(test, rg) + g.step_delete(test, rg) + g.cleanup_scenario(test, rg) def call_main_scenario(test, rg): from ....tests.latest import test_datafactory_scenario as g - g.setup(test, rg) - g.step_factories_createorupdate(test, rg) - g.step_factories_update(test, rg) - 
g.step_linkedservices_create(test, rg) - g.step_linkedservices_update(test, rg) - g.step_datasets_create(test, rg) - g.step_datasets_update(test, rg) - g.step_pipelines_create(test, rg) - g.step_pipelines_update(test, rg) - g.step_triggers_create(test, rg) - g.step_triggers_update(test, rg) - g.step_integrationruntimes_create(test, rg) - g.step_integrationruntimes_update(test, rg) - g.step_pipelines_createrun(test, rg) - g.step_integrationruntimes_get(test, rg) - g.step_reruntriggers_listbytrigger(test, rg) - g.step_linkedservices_get(test, rg) - # g.step_pipelineruns_get(test, rg) - g.step_pipelines_get(test, rg) - g.step_datasets_get(test, rg) - g.step_triggers_get(test, rg) - g.step_integrationruntimes_listbyfactory(test, rg) - g.step_linkedservices_listbyfactory(test, rg) - g.step_pipelines_listbyfactory(test, rg) - g.step_triggers_listbyfactory(test, rg) - g.step_datasets_listbyfactory(test, rg) - g.step_factories_get(test, rg) - g.step_factories_listbyresourcegroup(test, rg) - g.step_factories_list(test, rg) - g.step_operations_list(test, rg) - # g.step_reruntriggers_cancel(test, rg) - # g.step_reruntriggers_start(test, rg) - # g.step_reruntriggers_stop(test, rg) - g.step_integrationruntimes_regenerateauthkey(test, rg) - # g.step_triggerruns_rerun(test, rg) - # g.step_integrationruntimes_getconnectioninfo(test, rg) - g.step_integrationruntimes_synccredentials(test, rg) - g.step_integrationruntimes_getmonitoringdata(test, rg) - g.step_integrationruntimes_listauthkeys(test, rg) - g.step_integrationruntimes_upgrade(test, rg) - g.step_integrationruntimes_getstatus(test, rg) - # g.step_integrationruntimes_start(test, rg) - # g.step_integrationruntimes_stop(test, rg) + g.setup_scenario(test, rg) + g.step_create(test, rg) + g.step_update(test, rg) + g.step_linked_service_create(test, rg) + g.step_linked_service_update(test, rg) + g.step_dataset_create(test, rg) + g.step_dataset_update(test, rg) + g.step_pipeline_create(test, rg) + step_pipeline_update(test, rg) + g.step_trigger_create(test, rg) + g.step_trigger_update(test, rg) + g.step_integration_runtime_self_hosted_create(test, rg) + g.step_integration_runtime_update(test, rg) + # g.step_integration_runtime_linked(test, rg) + step_pipeline_create_run(test, rg) + g.step_integration_runtime_show(test, rg) + g.step_linked_service_show(test, rg) + g.step_pipeline_show(test, rg) + g.step_dataset_show(test, rg) + g.step_trigger_show(test, rg) + g.step_integration_runtime_list(test, rg) + g.step_linked_service_list(test, rg) + g.step_pipeline_list(test, rg) + g.step_trigger_list(test, rg) + g.step_dataset_list(test, rg) + g.step_show(test, rg) + g.step_list2(test, rg) + g.step_list(test, rg) + g.step_integration_runtime_regenerate_auth_key(test, rg) + # g.step_integration_runtime_get_connection_info(test, rg) + g.step_integration_runtime_sync_credentials(test, rg) + g.step_integration_runtime_get_monitoring_data(test, rg) + g.step_integration_runtime_list_auth_key(test, rg) + g.step_integration_runtime_remove_link(test, rg) + g.step_integration_runtime_get_status(test, rg) + # g.step_integration_runtime_start(test, rg) + # g.step_integration_runtime_stop(test, rg) # g.step_integrationruntimes_createlinkedintegrationruntime(test, rg) - g.step_triggers_geteventsubscriptionstatus(test, rg) - # g.step_activityruns_querybypipelinerun(test, rg) - g.step_triggers_unsubscribefromevents(test, rg) - g.step_triggers_subscribetoevents(test, rg) - g.step_triggers_start(test, rg) - g.step_triggers_stop(test, rg) - # 
g.step_factories_getgithubaccesstoken(test, rg) - g.step_factories_getdataplaneaccess(test, rg) - # g.step_pipelineruns_querybyfactory(test, rg) - # g.step_pipelineruns_cancel(test, rg) - g.step_triggerruns_querybyfactory(test, rg) - g.step_factories_configurefactoryrepo(test, rg) - g.step_integrationruntimes_delete(test, rg) - g.step_triggers_delete(test, rg) - g.step_pipelines_delete(test, rg) - g.step_datasets_delete(test, rg) - g.step_linkedservices_delete(test, rg) - g.step_factories_delete(test, rg) - g.cleanup(test, rg) + g.step_trigger_get_event_subscription_status(test, rg) + # g.step_activity_run_query_by_pipeline_run(test, rg) + g.step_trigger_unsubscribe_from_event(test, rg) + g.step_trigger_subscribe_to_event(test, rg) + g.step_trigger_start(test, rg) + g.step_trigger_stop(test, rg) + # g.step_get_git_hub_access_token(test, rg) + g.step_get_data_plane_access(test, rg) + # g.step_pipeline_run_query_by_factory(test, rg) + # g.step_pipeline_run_cancel(test, rg) + step_trigger_run_query_by_factory(test, rg) + g.step_configure_factory_repo(test, rg) + g.step_integration_runtime_delete(test, rg) + g.step_trigger_delete(test, rg) + g.step_pipeline_delete(test, rg) + g.step_dataset_delete(test, rg) + g.step_linked_service_delete(test, rg) + g.step_delete(test, rg) + g.cleanup_scenario(test, rg) def call_scenario(test, rg): diff --git a/src/datafactory/azext_datafactory/manual/version.py b/src/datafactory/azext_datafactory/manual/version.py index 4ddd487b20e..c061ba7d594 100644 --- a/src/datafactory/azext_datafactory/manual/version.py +++ b/src/datafactory/azext_datafactory/manual/version.py @@ -8,4 +8,4 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "0.2.1" +VERSION = "0.3.0" diff --git a/src/datafactory/azext_datafactory/tests/__init__.py b/src/datafactory/azext_datafactory/tests/__init__.py index 50e0627daff..70488e93851 100644 --- a/src/datafactory/azext_datafactory/tests/__init__.py +++ b/src/datafactory/azext_datafactory/tests/__init__.py @@ -31,8 +31,8 @@ def try_manual(func): def import_manual_function(origin_func): from importlib import import_module - decorated_path = inspect.getfile(origin_func) - module_path = __path__[0] + decorated_path = inspect.getfile(origin_func).lower() + module_path = __path__[0].lower() if not decorated_path.startswith(module_path): raise Exception("Decorator can only be used in submodules!") manual_path = os.path.join( @@ -46,7 +46,6 @@ def import_manual_function(origin_func): def get_func_to_call(): func_to_call = func try: - func_to_call = import_manual_function(func) func_to_call = import_manual_function(func) logger.info("Found manual override for %s(...)", func.__name__) except (ImportError, AttributeError): @@ -66,6 +65,9 @@ def wrapper(*args, **kwargs): ret = func_to_call(*args, **kwargs) except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, JMESPathCheckAssertionError) as e: + use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE") + if use_exception_cache is None or use_exception_cache.lower() != "true": + raise test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() test_map[func.__name__]["result"] = FAILED test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py new file mode 100644 index 00000000000..42222d4e576 --- /dev/null +++ 
b/src/datafactory/azext_datafactory/tests/latest/example_steps.py @@ -0,0 +1,822 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +# pylint: disable=unused-argument + + +from .. import try_manual + + +# EXAMPLE: /Factories/put/Factories_CreateOrUpdate +@try_manual +def step_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory create ' + '--location "East US" ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/get/Factories_Get +@try_manual +def step_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory show ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/get/Factories_List +@try_manual +def step_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory list ' + '-g ""', + checks=checks) + + +# EXAMPLE: /Factories/get/Factories_ListByResourceGroup +@try_manual +def step_list2(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory list ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/patch/Factories_Update +@try_manual +def step_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory update ' + '--name "{myFactory}" ' + '--tags exampleTag="exampleValue" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/post/Factories_ConfigureFactoryRepo +@try_manual +def step_configure_factory_repo(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory configure-factory-repo ' + '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' + 'ry/factories/{myFactory}" ' + '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' + 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' + '--location "East US"', + checks=checks) + + +# EXAMPLE: /Factories/post/Factories_GetDataPlaneAccess +@try_manual +def step_get_data_plane_access(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory get-data-plane-access ' + '--name "{myFactory}" ' + '--access-resource-path "" ' + '--expire-time "2018-11-10T09:46:20.2659347Z" ' + '--permissions "r" ' + '--profile-name "DefaultProfile" ' + '--start-time "2018-11-10T02:46:20.2659347Z" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/post/Factories_GetGitHubAccessToken +@try_manual +def step_get_git_hub_access_token(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory get-git-hub-access-token ' + '--name "{myFactory}" ' + '--git-hub-access-code "some" ' + '--git-hub-access-token-base-url "some" ' + '--git-hub-client-id "some" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ActivityRuns/post/ActivityRuns_QueryByPipelineRun +@try_manual +def step_activity_run_query_by_pipeline_run(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory activity-run 
query-by-pipeline-run ' + '--factory-name "{myFactory}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=checks) + + +# EXAMPLE: /Datasets/put/Datasets_Create +@try_manual +def step_dataset_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset create ' + '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' + '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' + 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' + '":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' + '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/put/Datasets_Update +@try_manual +def step_dataset_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset update ' + '--description "Example description" ' + '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' + '\\"}}" ' + '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' + '}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/get/Datasets_Get +@try_manual +def step_dataset_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset show ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/get/Datasets_ListByFactory +@try_manual +def step_dataset_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/delete/Datasets_Delete +@try_manual +def step_dataset_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset delete -y ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create +@try_manual +def step_integration_runtime_self_hosted_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime self-hosted create ' + '--factory-name "{myFactory}" ' + '--description "A selfhosted integration runtime" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_Get +@try_manual +def step_integration_runtime_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime show ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_ListByFactory +@try_manual +def step_integration_runtime_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime list ' + 
'--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/patch/IntegrationRuntimes_Update +@try_manual +def step_integration_runtime_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime update ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}" ' + '--auto-update "Off" ' + '--update-delay-offset "\\"PT3H\\""', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_CreateLinkedIntegrationRuntime +@try_manual +def step_integration_runtime_linked(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime linked-integration-runtime create ' + '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' + '--location "West US" ' + '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' + '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetConnectionInfo +@try_manual +def step_integration_runtime_get_connection_info(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime get-connection-info ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetMonitoringData +@try_manual +def step_integration_runtime_get_monitoring_data(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime get-monitoring-data ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetStatus +@try_manual +def step_integration_runtime_get_status(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime get-status ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_ListAuthKeys +@try_manual +def step_integration_runtime_list_auth_key(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime list-auth-key ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_RegenerateAuthKey +@try_manual +def step_integration_runtime_regenerate_auth_key(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime regenerate-auth-key ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--key-name "authKey2" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start +@try_manual +def step_integration_runtime_start(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime start ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop +@try_manual +def step_integration_runtime_stop(test, rg, checks=None): + if checks is None: + 
checks = [] + test.cmd('az datafactory integration-runtime stop ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_SyncCredentials +@try_manual +def step_integration_runtime_sync_credentials(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime sync-credentials ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Upgrade +@try_manual +def step_integration_runtime_remove_link(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime remove-link ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--linked-factory-name "exampleFactoryName-linked" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/get/IntegrationRuntimeNodes_Get +@try_manual +def step_integration_runtime_node_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node show ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/patch/IntegrationRuntimeNodes_Update +@try_manual +def step_integration_runtime_node_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node update ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}" ' + '--concurrent-jobs-limit 2', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/post/IntegrationRuntimeNodes_GetIpAddress +@try_manual +def step_integration_runtime_node_get_ip_address(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node get-ip-address ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/delete/IntegrationRuntimesNodes_Delete +@try_manual +def step_integration_runtime_node_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node delete -y ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/delete/IntegrationRuntimes_Delete +@try_manual +def step_integration_runtime_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/put/LinkedServices_Create +@try_manual +def step_linked_service_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service create ' + '--factory-name "{myFactory}" ' + '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' + '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' + 'tKey=\\"}}}}}}" ' + '--name 
"{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/put/LinkedServices_Update +@try_manual +def step_linked_service_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service update ' + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/get/LinkedServices_Get +@try_manual +def step_linked_service_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service show ' + '--factory-name "{myFactory}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/get/LinkedServices_ListByFactory +@try_manual +def step_linked_service_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/delete/LinkedServices_Delete +@try_manual +def step_linked_service_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PipelineRuns/get/PipelineRuns_Get +@try_manual +def step_pipeline_run_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline-run show ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=checks) + + +# EXAMPLE: /PipelineRuns/post/PipelineRuns_Cancel +@try_manual +def step_pipeline_run_cancel(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline-run cancel ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', + checks=checks) + + +# EXAMPLE: /PipelineRuns/post/PipelineRuns_QueryByFactory +@try_manual +def step_pipeline_run_query_by_factory(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline-run query-by-factory ' + '--factory-name "{myFactory}" ' + '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/put/Pipelines_Create +@try_manual +def step_pipeline_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline create ' + '--factory-name "{myFactory}" ' + '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' + 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' + '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' + '\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' + 'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' + '\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' + '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' + 
'":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' + 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' + 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}},\\"duration\\":\\"0.00:10:00\\"}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/put/Pipelines_Update +@try_manual +def step_pipeline_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline update ' + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' + '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' + ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' + ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' + 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' + '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' + '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' + '"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' + 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' + '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' + '--duration "0.00:10:00" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/get/Pipelines_Get +@try_manual +def step_pipeline_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline show ' + '--factory-name "{myFactory}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/get/Pipelines_ListByFactory +@try_manual +def step_pipeline_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/post/Pipelines_CreateRun +@try_manual +def step_pipeline_create_run(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline create-run ' + '--factory-name "{myFactory}" ' + '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/delete/Pipelines_Delete +@try_manual +def step_pipeline_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Triggers/put/Triggers_Create +@try_manual +def step_trigger_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger create ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--properties 
"{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' + 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' + 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' + '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' + '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/put/Triggers_Update +@try_manual +def step_trigger_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger update ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--description "Example description" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/get/Triggers_Get +@try_manual +def step_trigger_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger show ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/get/Triggers_ListByFactory +@try_manual +def step_trigger_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_GetEventSubscriptionStatus +@try_manual +def step_trigger_get_event_subscription_status(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger get-event-subscription-status ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_QueryByFactory +@try_manual +def step_trigger_query_by_factory(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger query-by-factory ' + '--factory-name "{myFactory}" ' + '--parent-trigger-name "{myTrigger}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_Start +@try_manual +def step_trigger_start(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger start ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_Stop +@try_manual +def step_trigger_stop(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger stop ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_SubscribeToEvents +@try_manual +def step_trigger_subscribe_to_event(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger subscribe-to-event ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_UnsubscribeFromEvents +@try_manual +def step_trigger_unsubscribe_from_event(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger unsubscribe-from-event ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /TriggerRuns/post/TriggerRuns_QueryByFactory +@try_manual +def step_trigger_run_query_by_factory(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory 
trigger-run query-by-factory ' + '--factory-name "{myFactory}" ' + '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /TriggerRuns/post/Triggers_Cancel +@try_manual +def step_trigger_run_cancel(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger-run cancel ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' + '--trigger-name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /TriggerRuns/post/Triggers_Rerun +@try_manual +def step_trigger_run_rerun(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger-run rerun ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' + '--trigger-name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/delete/Triggers_Delete +@try_manual +def step_trigger_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger delete -y ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Factories/delete/Factories_Delete +@try_manual +def step_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory delete -y ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) diff --git a/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_Scenario.yaml b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_Scenario.yaml new file mode 100644 index 00000000000..f3610fa71b6 --- /dev/null +++ b/src/datafactory/azext_datafactory/tests/latest/recordings/test_datafactory_Scenario.yaml @@ -0,0 +1,5146 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - account list + Connection: + - keep-alive + ParameterSetName: + - --query -o + User-Agent: + - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 + azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 + accept-language: + - en-US + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East + US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East + US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South + Central 
US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West + US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia + East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New + South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast + Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North + Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK + South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West + Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central + US","regionalDisplayName":"(US) Central 
US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North + Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West + US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South + Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central + India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East + Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong + Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan + East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, + Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO + India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + 
Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea + Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada + Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France + Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany + West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway + East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland + North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE + North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle + 
East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil + South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South + America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao + Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central + US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East + US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East + US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North + Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South + Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West + US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West + US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia + Pacific","regionalDisplayName":"Asia 
Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United + Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United + States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East + Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast + Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central + US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East + US 2 EUAP","regionalDisplayName":"(US) East US 2 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West + Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West + US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South + Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape + Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia + Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia + Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia + Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan + West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + 
Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO + India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea + South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South + India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West + India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada + East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France + South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany + North","regionalDisplayName":"(Europe) Germany 
North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway + West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland + West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK + West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE + Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle + East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu + Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil + Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East + US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' + headers: + cache-control: + - no-cache + content-length: + - '26993' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:27 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", 
"identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + ParameterSetName: + - --location --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","encryption":{}},"eTag":"\"300010fc-0000-0100-0000-60865a130000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + headers: + cache-control: + - no-cache + content-length: + - '631' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:40 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"tags": {"exampleTag": "exampleValue"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory update + Connection: + - keep-alive + Content-Length: + - '40' + Content-Type: + - application/json + ParameterSetName: + - --name --tags --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","encryption":{}},"eTag":"\"30001ffc-0000-0100-0000-60865a170000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{"exampleTag":"exampleValue"}}' + headers: + cache-control: + - no-cache + content-length: + - '658' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:45 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + 
x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "AzureStorage", "typeProperties": {"connectionString": + {"type": "SecureString", "value": "DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey="}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service create + Connection: + - keep-alive + Content-Length: + - '208' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --properties --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f24a-0000-0100-0000-60865a1c0000"}' + headers: + cache-control: + - no-cache + content-length: + - '757' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:48 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service update + Connection: + - keep-alive + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f24a-0000-0100-0000-60865a1c0000"}' + headers: + cache-control: + - no-cache + content-length: + - '757' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:49 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "AzureStorage", "description": "Example description", + "typeProperties": {"connectionString": {"type": "SecureString", "value": "**********"}, + "encryptedCredential": "ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service update + Connection: + - keep-alive + Content-Length: + - '426' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example + description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f54a-0000-0100-0000-60865a1e0000"}' + headers: + cache-control: + - no-cache + content-length: + - '793' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:50 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 
+ message: OK +- request: + body: '{"properties": {"type": "AzureBlob", "linkedServiceName": {"type": "LinkedServiceReference", + "referenceName": "exampleLin000003"}, "parameters": {"MyFileName": {"type": + "String"}, "MyFolderPath": {"type": "String"}}, "typeProperties": {"folderPath": + {"type": "Expression", "value": "@dataset().MyFolderPath"}, "fileName": {"type": + "Expression", "value": "@dataset().MyFileName"}, "format": {"type": "TextFormat"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset create + Connection: + - keep-alive + Content-Length: + - '419' + Content-Type: + - application/json + ParameterSetName: + - --properties --name --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00f74a-0000-0100-0000-60865a210000"}' + headers: + cache-control: + - no-cache + content-length: + - '746' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:52 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset update + Connection: + - keep-alive + ParameterSetName: + - --description --linked-service-name --parameters --name --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00f74a-0000-0100-0000-60865a210000"}' + headers: + cache-control: + - no-cache + content-length: + - '746' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:54 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "AzureBlob", "description": "Example description", + "linkedServiceName": {"type": "LinkedServiceReference", "referenceName": "exampleLin000003"}, + "parameters": {"MyFileName": {"type": "String"}, "MyFolderPath": {"type": "String"}}, + "typeProperties": {"folderPath": {"type": "Expression", "value": "@dataset().MyFolderPath"}, + "fileName": {"type": "Expression", "value": "@dataset().MyFileName"}, "format": + {"type": "TextFormat"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset update + Connection: + - keep-alive + Content-Length: + - '457' + Content-Type: + - application/json + ParameterSetName: + - --description --linked-service-name --parameters --name --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example + description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00fb4a-0000-0100-0000-60865a240000"}' + headers: + cache-control: + - no-cache + content-length: + - '782' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:55 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: 
+ - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"activities": [{"name": "ExampleForeachActivity", "type": + "ForEach", "typeProperties": {"isSequential": true, "items": {"type": "Expression", + "value": "@pipeline().parameters.OutputBlobNameList"}, "activities": [{"name": + "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": "DatasetReference", + "referenceName": "example000004", "parameters": {"MyFileName": "examplecontainer.csv", + "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": "DatasetReference", + "referenceName": "example000004", "parameters": {"MyFileName": {"type": "Expression", + "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], "typeProperties": + {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, "dataIntegrationUnits": + 32}}]}}], "parameters": {"JobId": {"type": "String"}, "OutputBlobNameList": + {"type": "Array"}}, "variables": {"TestVariableArray": {"type": "Array"}}, "runDimensions": + {"JobId": {"type": "Expression", "value": "@pipeline().parameters.JobId"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline create + Connection: + - keep-alive + Content-Length: + - '982' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --pipeline --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}}},"etag":"1c00fc4a-0000-0100-0000-60865a250000"}' + headers: + cache-control: + - no-cache + content-length: + - '1274' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:56 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - 
application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline update + Connection: + - keep-alive + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2021-04-26T06:13:57Z"},"etag":"1c00fc4a-0000-0100-0000-60865a250000"}' + headers: + cache-control: + - no-cache + content-length: + - '1315' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"description": "Test Update description", "activities": + [{"name": "ExampleForeachActivity", "type": "ForEach", "typeProperties": {"isSequential": + true, "items": {"type": "Expression", "value": "@pipeline().parameters.OutputBlobNameList"}, + "activities": [{"name": "ExampleCopyActivity", "type": "Copy", "inputs": [{"type": + "DatasetReference", "referenceName": "example000004", "parameters": {"MyFileName": + "examplecontainer.csv", "MyFolderPath": "examplecontainer"}}], "outputs": [{"type": + "DatasetReference", "referenceName": "example000004", "parameters": {"MyFileName": + {"type": "Expression", "value": "@item()"}, "MyFolderPath": "examplecontainer"}}], + "typeProperties": {"source": {"type": "BlobSource"}, "sink": {"type": "BlobSink"}, + "dataIntegrationUnits": 32}}]}}], "parameters": {"JobId": {"type": "String"}, + "OutputBlobNameList": {"type": "Array"}}, "variables": {"TestVariableArray": + {"type": "Array"}}, "runDimensions": {"JobId": {"type": "Expression", "value": + "@pipeline().parameters.JobId"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline update + Connection: + - keep-alive + 
Content-Length: + - '1024' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test + Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}}},"etag":"1c00ff4a-0000-0100-0000-60865a270000"}' + headers: + cache-control: + - no-cache + content-length: + - '1314' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:13:59 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "ScheduleTrigger", "pipelines": [{"pipelineReference": + {"type": "PipelineReference", "referenceName": "example000005"}, "parameters": + {"OutputBlobNameList": ["exampleoutput.csv"]}}], "typeProperties": {"recurrence": + {"frequency": "Minute", "interval": 4, "startTime": "2018-06-16T00:39:13.84418Z", + "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": "UTC"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger create + Connection: + - keep-alive + Content-Length: + - '386' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --resource-group --properties --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00004b-0000-0100-0000-60865a290000"}' + headers: + cache-control: + - no-cache + content-length: + - '743' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:01 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger update + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group --description --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00004b-0000-0100-0000-60865a290000"}' + headers: + cache-control: + - no-cache + content-length: + - '743' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:03 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "ScheduleTrigger", "description": "Example description", + "pipelines": [{"pipelineReference": {"type": "PipelineReference", "referenceName": + "example000005"}, "parameters": {"OutputBlobNameList": ["exampleoutput.csv"]}}], + "typeProperties": {"recurrence": {"frequency": "Minute", "interval": 4, "startTime": + "2018-06-16T00:39:13.84418Z", "endTime": "2018-06-16T00:55:13.84418Z", "timeZone": + "UTC"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - 
datafactory trigger update + Connection: + - keep-alive + Content-Length: + - '424' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --resource-group --description --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example + description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00054b-0000-0100-0000-60865a2d0000"}' + headers: + cache-control: + - no-cache + content-length: + - '779' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "SelfHosted", "description": "A selfhosted integration + runtime"}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime self-hosted create + Connection: + - keep-alive + Content-Length: + - '89' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --description --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A + selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}' + headers: + cache-control: + - no-cache + content-length: + - '484' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + 
x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"autoUpdate": "Off", "updateDelayOffset": "\"PT3H\""}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime update + Connection: + - keep-alive + Content-Length: + - '54' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --name --resource-group --auto-update --update-delay-offset + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PATCH + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A + selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}' + headers: + cache-control: + - no-cache + content-length: + - '484' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline create-run + Connection: + - keep-alive + Content-Length: + - '45' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --parameters --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 + response: + body: + string: '{"runId":"9c51f7b2-a656-11eb-9afd-84a93e64b16e"}' + headers: + cache-control: + - no-cache + content-length: + - '48' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:11 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: 
GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A + selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}' + headers: + cache-control: + - no-cache + content-length: + - '484' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:12 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example + description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f54a-0000-0100-0000-60865a1e0000"}' + headers: + cache-control: + - no-cache + content-length: + - '793' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:13 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test + Update description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2021-04-26T06:13:59Z"},"etag":"1c00ff4a-0000-0100-0000-60865a270000"}' + headers: + cache-control: + - no-cache + content-length: + - '1355' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:14 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset show + Connection: + - keep-alive + ParameterSetName: + - --name --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example + description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00fb4a-0000-0100-0000-60865a240000"}' + headers: + cache-control: + - no-cache + content-length: + - '782' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:15 GMT + expires: + - '-1' + pragma: + - 
no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example + description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00054b-0000-0100-0000-60865a2d0000"}' + headers: + cache-control: + - no-cache + content-length: + - '779' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:17 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"SelfHosted","description":"A + selfhosted integration runtime"},"etag":"1c00084b-0000-0100-0000-60865a2f0000"}]}' + headers: + cache-control: + - no-cache + content-length: + - '496' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:18 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + 
x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003","name":"exampleLin000003","type":"Microsoft.DataFactory/factories/linkedservices","properties":{"type":"AzureStorage","description":"Example + description","typeProperties":{"connectionString":{"type":"SecureString","value":"**********"},"encryptedCredential":"ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkVYQU1QTEVGQTVRTkMzREZCM184YWVmOGZkOC0yN2M2LTQ1NDEtOTM0MC01NmU0ZTlkNTBmZTUiDQp9"}},"etag":"1c00f54a-0000-0100-0000-60865a1e0000"}]}' + headers: + cache-control: + - no-cache + content-length: + - '805' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:19 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"description":"Test + Update 
description","activities":[{"name":"ExampleForeachActivity","type":"ForEach","typeProperties":{"isSequential":true,"items":{"type":"Expression","value":"@pipeline().parameters.OutputBlobNameList"},"activities":[{"name":"ExampleCopyActivity","type":"Copy","inputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":"examplecontainer.csv","MyFolderPath":"examplecontainer"}}],"outputs":[{"type":"DatasetReference","referenceName":"example000004","parameters":{"MyFileName":{"type":"Expression","value":"@item()"},"MyFolderPath":"examplecontainer"}}],"typeProperties":{"source":{"type":"BlobSource"},"sink":{"type":"BlobSink"},"dataIntegrationUnits":32}}]}}],"parameters":{"JobId":{"type":"String"},"OutputBlobNameList":{"type":"Array"}},"variables":{"TestVariableArray":{"type":"Array"}},"runDimensions":{"JobId":{"type":"Expression","value":"@pipeline().parameters.JobId"}},"lastPublishTime":"2021-04-26T06:13:59Z"},"etag":"1c00ff4a-0000-0100-0000-60865a270000"}]}' + headers: + cache-control: + - no-cache + content-length: + - '1367' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:20 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"ScheduleTrigger","description":"Example + description","pipelines":[{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"},"parameters":{"OutputBlobNameList":["exampleoutput.csv"]}}],"typeProperties":{"recurrence":{"frequency":"Minute","interval":4,"startTime":"2018-06-16T00:39:13.84418Z","endTime":"2018-06-16T00:55:13.84418Z","timeZone":"UTC"}},"runtimeState":"Stopped"},"etag":"1c00054b-0000-0100-0000-60865a2d0000"}]}' + headers: + cache-control: + - no-cache + content-length: + - '791' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:21 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset list + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 
azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets?api-version=2018-06-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004","name":"example000004","type":"Microsoft.DataFactory/factories/datasets","properties":{"type":"AzureBlob","description":"Example + description","linkedServiceName":{"type":"LinkedServiceReference","referenceName":"exampleLin000003"},"parameters":{"MyFileName":{"type":"String"},"MyFolderPath":{"type":"String"}},"typeProperties":{"folderPath":{"type":"Expression","value":"@dataset().MyFolderPath"},"fileName":{"type":"Expression","value":"@dataset().MyFileName"},"format":{"type":"TextFormat"}}},"etag":"1c00fb4a-0000-0100-0000-60865a240000"}]}' + headers: + cache-control: + - no-cache + content-length: + - '794' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:23 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory show + Connection: + - keep-alive + ParameterSetName: + - --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","factoryStatistics":{"totalResourceCount":0,"maxAllowedResourceCount":0,"factorySizeInGbUnits":0,"maxAllowedFactorySizeInGbUnits":0},"encryption":{}},"eTag":"\"30001ffc-0000-0100-0000-60865a170000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{"exampleTag":"exampleValue"}}' + headers: + cache-control: + - no-cache + content-length: + - '791' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:24 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory list + Connection: + - keep-alive + ParameterSetName: + - --resource-group + User-Agent: + - 
AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 + response: + body: + string: '{"value":[{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T06:13:38.8060441Z","version":"2018-06-01","encryption":{}},"eTag":"\"30001ffc-0000-0100-0000-60865a170000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"06a7c2de-469c-4e53-bbc5-69adf6b29d6b","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{"exampleTag":"exampleValue"}}]}' + headers: + cache-control: + - no-cache + content-length: + - '670' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:24 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory list + Connection: + - keep-alive + ParameterSetName: + - -g + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/factories?api-version=2018-06-01 + response: + body: + string: "{\r\n \"value\": [\r\n {\r\n \"name\": \"exampleFa000001\",\r\n + \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3\",\r\n + \ \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\": + {\r\n \"provisioningState\": \"Succeeded\",\r\n \"createTime\": + \"2021-04-26T06:13:38.8060441Z\",\r\n \"version\": \"2018-06-01\",\r\n + \ \"factoryStatistics\": null,\r\n \"encryption\": {}\r\n },\r\n + \ \"eTag\": \"\\\"30001ffc-0000-0100-0000-60865a170000\\\"\",\r\n \"location\": + \"eastus\",\r\n \"identity\": {\r\n \"type\": \"SystemAssigned\",\r\n + \ \"principalId\": \"06a7c2de-469c-4e53-bbc5-69adf6b29d6b\",\r\n \"tenantId\": + \"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a\",\r\n \"userAssignedIdentities\": + null\r\n },\r\n \"tags\": {\r\n \"exampleTag\": \"exampleValue\"\r\n + \ }\r\n }\r\n ],\r\n \"nextLink\": null\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '975' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:26 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"keyName": "authKey2"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime regenerate-auth-key + Connection: + - 
keep-alive + Content-Length: + - '23' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --name --key-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/regenerateAuthKey?api-version=2018-06-01 + response: + body: + string: '{"authKey2":"IR@a9a706fd-86c1-4717-9b1d-8c451e820276@exampleFa000001@ServiceEndpoint=examplefa5qnc3dfb3.eastus.datafactory.azure.net@ylfb9tUmUb8VxkygSirkWE9RAr5C7EXesqOgJjGsyGU="}' + headers: + cache-control: + - no-cache + content-length: + - '182' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:27 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime sync-credentials + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/syncCredentials?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:29 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime get-monitoring-data + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/monitoringData?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleInteg000002"}' + headers: + cache-control: + - no-cache + content-length: + - '36' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + 
x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime list-auth-key + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/listAuthKeys?api-version=2018-06-01 + response: + body: + string: '{"authKey1":"IR@a9a706fd-86c1-4717-9b1d-8c451e820276@exampleFa000001@ServiceEndpoint=examplefa5qnc3dfb3.eastus.datafactory.azure.net@RaoFzpk8dYtoc+PJqfn0M1Vxak6imWEc6JrR+tHgM74=","authKey2":"IR@a9a706fd-86c1-4717-9b1d-8c451e820276@exampleFa000001@ServiceEndpoint=examplefa5qnc3dfb3.eastus.datafactory.azure.net@ylfb9tUmUb8VxkygSirkWE9RAr5C7EXesqOgJjGsyGU="}' + headers: + cache-control: + - no-cache + content-length: + - '363' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:30 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"factoryName": "exampleFactoryName-linked"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime remove-link + Connection: + - keep-alive + Content-Length: + - '44' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --name --linked-factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/removeLinks?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:31 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime get-status + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/getStatus?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleInteg000002","properties":{"dataFactoryName":"exampleFa000001","state":"NeedRegistration","type":"SelfHosted","typeProperties":{"serviceRegion":"eu","autoUpdate":"Off","internalChannelEncryption":"NotSet","taskQueueId":"a9a706fd-86c1-4717-9b1d-8c451e820276","nodes":[],"updateDelayOffset":"PT3H","serviceUrls":["examplefa5qnc3dfb3.eastus.datafactory.azure.net"],"links":[],"versionStatus":"None","capabilities":{},"latestVersion":"5.5.7762.1","newerVersions":["5.5.7762.1","5.4.7749.1","5.4.7741.1","5.2.7740.4","5.4.7732.1"],"createTime":"2021-04-26T06:14:06.5886459Z"}}}' + headers: + cache-control: + - no-cache + content-length: + - '598' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:32 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger get-event-subscription-status + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/getEventSubscriptionStatus?api-version=2018-06-01 + response: + body: + string: '{"triggerName":"example000006","status":"Enabled"}' + headers: + cache-control: + - no-cache + content-length: + - '51' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:32 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger unsubscribe-from-event + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/unsubscribeFromEvents?api-version=2018-06-01 + response: + body: + string: '{"triggerName":"example000006","status":"Disabled"}' + headers: + cache-control: + - no-cache + content-length: + - '52' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:33 GMT + expires: + - '-1' 
+ pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger subscribe-to-event + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/subscribeToEvents?api-version=2018-06-01 + response: + body: + string: '{"triggerName":"example000006","status":"Enabled"}' + headers: + cache-control: + - no-cache + content-length: + - '51' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:35 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger start + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/start?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger stop + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/stop?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:41 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + 
strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"permissions": "r", "accessResourcePath": "", "profileName": "DefaultProfile", + "startTime": "2018-11-10T02:46:20.2659347Z", "expireTime": "2018-11-10T09:46:20.2659347Z"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory get-data-plane-access + Connection: + - keep-alive + Content-Length: + - '170' + Content-Type: + - application/json + ParameterSetName: + - --name --access-resource-path --expire-time --permissions --profile-name --start-time + --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/getDataPlaneAccess?api-version=2018-06-01 + response: + body: + string: '{"policy":{"permissions":"r","accessResourcePath":"","profileName":"DefaultProfile","startTime":"2018-11-10T02:46:20.2659347Z","expireTime":"2018-11-10T09:46:20.2659347Z"},"dataPlaneUrl":"https://dpeastus.svc.datafactory.azure.com/dataplane","accessToken":"EAAAAOzMyR0nKhAsquydok/3FiewAQAAldolR6fZ/QXURYf8LmzIQhlbZ5KPPrhxUN3NtNVOyPfj0LU16xDxhzhPRzibayyb+le1ujMNo6y6cGvIuZ2pghzrqca4ZBXBuTKEr2luQEMttXb36oGBn3CNGXKEeZBYxOt6QEvYvBsrkalH+LhOD2kbGqxhkoWIj58mwG2oW0YD39cuosVcP5NYPGHJ5/dSfCC6y/x9mVYTdwAlAjgyo7eQd/Sj2tJh+WxPnLyft3l+BnXmBZWDU5qyV8SHlHUlKVG9vAuCnc8YTkdLH3+mLOJFU+lLUDHnjf+9AWN9CqLZZ+HX0Vth9MC0HMYXLtF6Kfm0sb4B10nBO38nVcKx2W+pYl3IN2CTaMxLC7SsbWp0VQ/YE9mUG1hgNOtLJpULJ99kVlZBBdSkYwLBOCNR/8nXK+O9y4QVkgf00pZiZmRhZz+HaQq2IwflWvN/AHgGA3Jx61J2XhMebqIQg2+qx3o6n0TLVuz7GE0lQpX5V/pX4iCePb82o6uxIgIdoztYahDNPcCXvwuL1nVWPOHjWc/Cm9EyvYmk0uDHmz3nQ294jRKciOElQg1LyvdEPRx2IAAAAPKNLZM+2jvM64e+RlV+ZRONovuxThkqtN4FTVuhVXQZ"}' + headers: + cache-control: + - no-cache + content-length: + - '915' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:41 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"lastUpdatedAfter": "2021-04-26T06:13:26.000Z", "lastUpdatedBefore": "2021-04-26T07:13:26.000Z"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger-run query-by-factory + Connection: + - keep-alive + Content-Length: + - '97' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --last-updated-after --last-updated-before --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - 
application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:42 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger-run query-by-factory + Connection: + - keep-alive + ParameterSetName: + - --factory-name --last-updated-after --last-updated-before --resource-group + User-Agent: + - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 + azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 + accept-language: + - en-US + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East + US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East + US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South + Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West + US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia + East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New + South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast + Asia","regionalDisplayName":"(Asia Pacific) Southeast 
Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North + Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK + South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West + Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central + US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North + Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West + US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South + Africa North","regionalDisplayName":"(Africa) South Africa 
North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central + India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East + Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong + Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan + East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, + Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO + India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea + Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada + Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France + Central","regionalDisplayName":"(Europe) France 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany + West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway + East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland + North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE + North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle + East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil + South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South + America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao + Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central + US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East + US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East + US 2 (Stage)","regionalDisplayName":"(US) East US 2 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North + Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South + Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West + US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West + US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia + Pacific","regionalDisplayName":"Asia Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United + Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United + States","regionalDisplayName":"United 
States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East + Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast + Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central + US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East + US 2 EUAP","regionalDisplayName":"(US) East US 2 EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West + Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West + US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South + Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape + Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia + Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + 
Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia + Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia + Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan + West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO + India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea + South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South + India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West + India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + 
Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada + East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France + South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany + North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway + West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland + West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK + West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE + Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle + East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu + 
Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil + Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East + US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' + headers: + cache-control: + - no-cache + content-length: + - '26993' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:44 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"factoryResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001", + "repoConfiguration": {"type": "FactoryVSTSConfiguration", "accountName": "ADF", + "repositoryName": "repo", "collaborationBranch": "master", "rootFolder": "/", + "lastCommitId": "", "projectName": "project", "tenantId": ""}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory configure-factory-repo + Connection: + - keep-alive + Content-Length: + - '449' + Content-Type: + - application/json + ParameterSetName: + - --factory-resource-id --factory-vsts-configuration --location + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.DataFactory/locations/eastus/configureFactoryRepo?api-version=2018-06-01 + response: + body: + string: "{\r\n \"name\": \"exampleFa000001\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefa5qnc3dfb3\",\r\n + \ \"type\": \"Microsoft.DataFactory/factories\",\r\n \"properties\": {\r\n + \ \"provisioningState\": \"Succeeded\",\r\n \"createTime\": \"2021-04-26T06:13:38.8060441Z\",\r\n + \ \"version\": \"2018-06-01\",\r\n \"factoryStatistics\": null,\r\n \"repoConfiguration\": + {\r\n \"type\": \"FactoryVSTSConfiguration\",\r\n \"accountName\": + \"ADF\",\r\n \"repositoryName\": \"repo\",\r\n \"collaborationBranch\": + \"master\",\r\n \"rootFolder\": \"/\",\r\n \"lastCommitId\": \"\",\r\n + \ \"projectName\": \"project\",\r\n \"tenantId\": \"\"\r\n },\r\n + \ \"encryption\": {}\r\n },\r\n \"eTag\": \"\\\"3000bdfc-0000-0100-0000-60865a550000\\\"\",\r\n + \ \"location\": \"eastus\",\r\n \"identity\": {\r\n \"type\": \"SystemAssigned\",\r\n + \ \"principalId\": 
\"06a7c2de-469c-4e53-bbc5-69adf6b29d6b\",\r\n \"tenantId\": + \"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a\",\r\n \"userAssignedIdentities\": + null\r\n },\r\n \"tags\": {\r\n \"exampleTag\": \"exampleValue\"\r\n + \ }\r\n}" + headers: + cache-control: + - no-cache + content-length: + - '1126' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 06:14:45 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:47 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:50 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:55 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory dataset delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --name --factory-name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/datasets/example000004?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:56 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory linked-service delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/linkedservices/exampleLin000003?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 06:14:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 
26 Apr 2021 06:15:04 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - account list + Connection: + - keep-alive + ParameterSetName: + - --query -o + User-Agent: + - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 + azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 + accept-language: + - en-US + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East + US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East + US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South + Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West + US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia + East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New + South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast + Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + 
Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North + Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK + South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West + Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central + US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North + Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West + US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South + Africa North","regionalDisplayName":"(Africa) South Africa 
North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central + India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East + Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong + Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan + East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, + Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO + India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea + Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada + Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France + Central","regionalDisplayName":"(Europe) France 
Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany + West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway + East","regionalDisplayName":"(Europe) Norway East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland + North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE + North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle + East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil + South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South + America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao + Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central + US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East + US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East + US 2 (Stage)","regionalDisplayName":"(US) East US 2 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North + Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South + Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West + US (Stage)","regionalDisplayName":"(US) West US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West + US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia + Pacific","regionalDisplayName":"Asia Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United + Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United + States","regionalDisplayName":"United 
States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East + Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast + Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central + US EUAP","regionalDisplayName":"(US) Central US EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East + US 2 EUAP","regionalDisplayName":"(US) East US 2 EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West + Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West + US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South + Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape + Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia + Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + 
Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia + Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia + Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan + West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO + India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea + South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South + India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West + India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + 
Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada + East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France + South","regionalDisplayName":"(Europe) France South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany + North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway + West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland + West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK + West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE + Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle + East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu + 
Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil + Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East + US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' + headers: + cache-control: + - no-cache + content-length: + - '26993' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:35:43 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + ParameterSetName: + - --location --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefaakcpqyyqy","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T03:35:56.3240034Z","version":"2018-06-01","encryption":{}},"eTag":"\"22000c62-0000-0100-0000-6086351c0000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"0b4983b4-7dc6-426f-b5ff-fc26da9b2cdc","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + headers: + cache-control: + - no-cache + content-length: + - '631' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:35:58 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "Managed", "description": "Managed Integration + Runtime", "typeProperties": {"computeProperties": {"location": "East US 2", + "nodeSize": "Standard_D2_v3", "numberOfNodes": 1, 
"maxParallelExecutionsPerNode": + 2}, "ssisProperties": {"licenseType": "LicenseIncluded", "edition": "Standard"}}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime managed create + Connection: + - keep-alive + Content-Length: + - '311' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --name --resource-group --description --compute-properties + --ssis-properties + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"Managed","description":"Managed + Integration Runtime","typeProperties":{"computeProperties":{"location":"East + US 2","nodeSize":"Standard_D2_v3","numberOfNodes":1,"maxParallelExecutionsPerNode":2},"ssisProperties":{"licenseType":"LicenseIncluded","edition":"Standard"}},"state":"Initial"},"etag":"3100568e-0000-0100-0000-608635200000"}' + headers: + cache-control: + - no-cache + content-length: + - '709' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:36:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationruntimes/exampleInteg000002","name":"exampleInteg000002","type":"Microsoft.DataFactory/factories/integrationruntimes","properties":{"type":"Managed","description":"Managed + Integration Runtime","typeProperties":{"computeProperties":{"location":"East + US 2","nodeSize":"Standard_D2_v3","numberOfNodes":1,"maxParallelExecutionsPerNode":2},"ssisProperties":{"licenseType":"LicenseIncluded","edition":"Standard"}},"state":"Initial"},"etag":"3100568e-0000-0100-0000-608635200000"}' + headers: + cache-control: + - no-cache + content-length: + - '709' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:36:01 GMT + expires: + - '-1' + pragma: + - no-cache + 
server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime start + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start?api-version=2018-06-01 + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 03:36:04 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationresults/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime start + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"4acc0fceb75e485985c62f8f9282673a","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:36:19 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime start + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 
azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"4acc0fceb75e485985c62f8f9282673a","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:36:49 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime start + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationstatuses/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + response: + body: + string: '{"status":"Succeeded","name":"4acc0fceb75e485985c62f8f9282673a","properties":{"name":"exampleInteg000002","properties":{"dataFactoryName":"exampleFa000001","state":"Started","type":"Managed","dataFactoryLocation":null,"resourceUri":null,"typeProperties":{"nodes":[],"otherErrors":[],"createTime":"2021-04-26T03:36:03.6784667Z"},"dataFactoryTags":null,"managedVirtualNetwork":null}},"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '405' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:37:20 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime start + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/start/operationresults/4acc0fceb75e485985c62f8f9282673a?api-version=2018-06-01 + response: + body: + string: 
'{"name":"exampleInteg000002","properties":{"dataFactoryName":"exampleFa000001","state":"Started","type":"Managed","dataFactoryLocation":null,"resourceUri":null,"typeProperties":{"nodes":[],"otherErrors":[],"createTime":"2021-04-26T03:36:03.6784667Z"},"dataFactoryTags":null,"managedVirtualNetwork":null}}' + headers: + cache-control: + - no-cache + content-length: + - '314' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:37:20 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop?api-version=2018-06-01 + response: + body: + string: '' + headers: + azure-asyncoperation: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 03:37:22 GMT + expires: + - '-1' + location: + - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationresults/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:37:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + 
strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:38:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:38:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: 
'{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:39:09 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:39:39 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:40:10 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:40:41 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"InProgress","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '96' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:41:11 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationstatuses/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '{"status":"Succeeded","name":"0cfd1f0e21614269807f6a5f699d32f2","properties":null,"error":null}' + headers: + cache-control: + - no-cache + content-length: + - '95' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 03:41:41 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory 
integration-runtime stop + Connection: + - keep-alive + ParameterSetName: + - --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002/stop/operationresults/0cfd1f0e21614269807f6a5f699d32f2?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 03:41:42 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory integration-runtime delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/integrationRuntimes/exampleInteg000002?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 03:41:43 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 03:41:52 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - account list + Connection: + - keep-alive + ParameterSetName: + - --query -o + User-Agent: + - python/3.8.0 (Windows-10-10.0.19041-SP0) msrest/0.6.21 msrest_azure/0.6.3 + azure-mgmt-resource/12.1.0 Azure-SDK-For-Python AZURECLI/2.22.1 + accept-language: + - en-US + method: GET + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/locations?api-version=2019-11-01 + response: + body: + string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus","name":"eastus","displayName":"East + US","regionalDisplayName":"(US) East US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-79.8164","latitude":"37.3719","physicalLocation":"Virginia","pairedRegion":[{"name":"westus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2","name":"eastus2","displayName":"East + US 2","regionalDisplayName":"(US) East US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","physicalLocation":"Virginia","pairedRegion":[{"name":"centralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus","name":"southcentralus","displayName":"South + Central US","regionalDisplayName":"(US) South Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-98.5","latitude":"29.4167","physicalLocation":"Texas","pairedRegion":[{"name":"northcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2","name":"westus2","displayName":"West + US 2","regionalDisplayName":"(US) West US 2","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-119.852","latitude":"47.233","physicalLocation":"Washington","pairedRegion":[{"name":"westcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast","name":"australiaeast","displayName":"Australia + East","regionalDisplayName":"(Asia Pacific) Australia East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"151.2094","latitude":"-33.86","physicalLocation":"New + South Wales","pairedRegion":[{"name":"australiasoutheast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia","name":"southeastasia","displayName":"Southeast + Asia","regionalDisplayName":"(Asia Pacific) Southeast Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"103.833","latitude":"1.283","physicalLocation":"Singapore","pairedRegion":[{"name":"eastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope","name":"northeurope","displayName":"North + Europe","regionalDisplayName":"(Europe) North Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-6.2597","latitude":"53.3478","physicalLocation":"Ireland","pairedRegion":[{"name":"westeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth","name":"uksouth","displayName":"UK + 
South","regionalDisplayName":"(Europe) UK South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"-0.799","latitude":"50.941","physicalLocation":"London","pairedRegion":[{"name":"ukwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westeurope","name":"westeurope","displayName":"West + Europe","regionalDisplayName":"(Europe) West Europe","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"4.9","latitude":"52.3667","physicalLocation":"Netherlands","pairedRegion":[{"name":"northeurope","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northeurope"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralus","name":"centralus","displayName":"Central + US","regionalDisplayName":"(US) Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","physicalLocation":"Iowa","pairedRegion":[{"name":"eastus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralus","name":"northcentralus","displayName":"North + Central US","regionalDisplayName":"(US) North Central US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-87.6278","latitude":"41.8819","physicalLocation":"Illinois","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus","name":"westus","displayName":"West + US","regionalDisplayName":"(US) West US","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"US","longitude":"-122.417","latitude":"37.783","physicalLocation":"California","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth","name":"southafricanorth","displayName":"South + Africa North","regionalDisplayName":"(Africa) South Africa North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Africa","longitude":"28.218370","latitude":"-25.731340","physicalLocation":"Johannesburg","pairedRegion":[{"name":"southafricawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia","name":"centralindia","displayName":"Central + India","regionalDisplayName":"(Asia Pacific) Central India","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"73.9197","latitude":"18.5822","physicalLocation":"Pune","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasia","name":"eastasia","displayName":"East + Asia","regionalDisplayName":"(Asia Pacific) East Asia","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"114.188","latitude":"22.267","physicalLocation":"Hong + 
Kong","pairedRegion":[{"name":"southeastasia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast","name":"japaneast","displayName":"Japan + East","regionalDisplayName":"(Asia Pacific) Japan East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"139.77","latitude":"35.68","physicalLocation":"Tokyo, + Saitama","pairedRegion":[{"name":"japanwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest","name":"jioindiawest","displayName":"JIO + India West","regionalDisplayName":"(Asia Pacific) JIO India West","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"70.05773","latitude":"22.470701","physicalLocation":"Jamnagar","pairedRegion":[{"name":"jioindiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral","name":"koreacentral","displayName":"Korea + Central","regionalDisplayName":"(Asia Pacific) Korea Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Asia + Pacific","longitude":"126.9780","latitude":"37.5665","physicalLocation":"Seoul","pairedRegion":[{"name":"koreasouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral","name":"canadacentral","displayName":"Canada + Central","regionalDisplayName":"(Canada) Canada Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Canada","longitude":"-79.383","latitude":"43.653","physicalLocation":"Toronto","pairedRegion":[{"name":"canadaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral","name":"francecentral","displayName":"France + Central","regionalDisplayName":"(Europe) France Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"2.3730","latitude":"46.3772","physicalLocation":"Paris","pairedRegion":[{"name":"francesouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral","name":"germanywestcentral","displayName":"Germany + West Central","regionalDisplayName":"(Europe) Germany West Central","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.682127","latitude":"50.110924","physicalLocation":"Frankfurt","pairedRegion":[{"name":"germanynorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast","name":"norwayeast","displayName":"Norway + East","regionalDisplayName":"(Europe) Norway 
East","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"10.752245","latitude":"59.913868","physicalLocation":"Norway","pairedRegion":[{"name":"norwaywest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth","name":"switzerlandnorth","displayName":"Switzerland + North","regionalDisplayName":"(Europe) Switzerland North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Europe","longitude":"8.564572","latitude":"47.451542","physicalLocation":"Zurich","pairedRegion":[{"name":"switzerlandwest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth","name":"uaenorth","displayName":"UAE + North","regionalDisplayName":"(Middle East) UAE North","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"Middle + East","longitude":"55.316666","latitude":"25.266666","physicalLocation":"Dubai","pairedRegion":[{"name":"uaecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth","name":"brazilsouth","displayName":"Brazil + South","regionalDisplayName":"(South America) Brazil South","metadata":{"regionType":"Physical","regionCategory":"Recommended","geographyGroup":"South + America","longitude":"-46.633","latitude":"-23.55","physicalLocation":"Sao + Paulo State","pairedRegion":[{"name":"southcentralus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralusstage","name":"centralusstage","displayName":"Central + US (Stage)","regionalDisplayName":"(US) Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusstage","name":"eastusstage","displayName":"East + US (Stage)","regionalDisplayName":"(US) East US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2stage","name":"eastus2stage","displayName":"East + US 2 (Stage)","regionalDisplayName":"(US) East US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/northcentralusstage","name":"northcentralusstage","displayName":"North + Central US (Stage)","regionalDisplayName":"(US) North Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southcentralusstage","name":"southcentralusstage","displayName":"South + Central US (Stage)","regionalDisplayName":"(US) South Central US (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westusstage","name":"westusstage","displayName":"West + US (Stage)","regionalDisplayName":"(US) West US 
(Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2stage","name":"westus2stage","displayName":"West + US 2 (Stage)","regionalDisplayName":"(US) West US 2 (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"US"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asia","name":"asia","displayName":"Asia","regionalDisplayName":"Asia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/asiapacific","name":"asiapacific","displayName":"Asia + Pacific","regionalDisplayName":"Asia Pacific","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australia","name":"australia","displayName":"Australia","regionalDisplayName":"Australia","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazil","name":"brazil","displayName":"Brazil","regionalDisplayName":"Brazil","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canada","name":"canada","displayName":"Canada","regionalDisplayName":"Canada","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/europe","name":"europe","displayName":"Europe","regionalDisplayName":"Europe","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/global","name":"global","displayName":"Global","regionalDisplayName":"Global","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/india","name":"india","displayName":"India","regionalDisplayName":"India","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japan","name":"japan","displayName":"Japan","regionalDisplayName":"Japan","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uk","name":"uk","displayName":"United + Kingdom","regionalDisplayName":"United Kingdom","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/unitedstates","name":"unitedstates","displayName":"United + States","regionalDisplayName":"United States","metadata":{"regionType":"Logical","regionCategory":"Other"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastasiastage","name":"eastasiastage","displayName":"East + Asia (Stage)","regionalDisplayName":"(Asia Pacific) East Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southeastasiastage","name":"southeastasiastage","displayName":"Southeast + Asia (Stage)","regionalDisplayName":"(Asia Pacific) Southeast Asia (Stage)","metadata":{"regionType":"Logical","regionCategory":"Other","geographyGroup":"Asia + Pacific"}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap","name":"centraluseuap","displayName":"Central + US EUAP","regionalDisplayName":"(US) Central US 
EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-93.6208","latitude":"41.5908","pairedRegion":[{"name":"eastus2euap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus2euap","name":"eastus2euap","displayName":"East + US 2 EUAP","regionalDisplayName":"(US) East US 2 EUAP","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-78.3889","latitude":"36.6681","pairedRegion":[{"name":"centraluseuap","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centraluseuap"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westcentralus","name":"westcentralus","displayName":"West + Central US","regionalDisplayName":"(US) West Central US","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-110.234","latitude":"40.890","physicalLocation":"Wyoming","pairedRegion":[{"name":"westus2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westus3","name":"westus3","displayName":"West + US 3","regionalDisplayName":"(US) West US 3","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"US","longitude":"-112.074036","latitude":"33.448376","physicalLocation":"Phoenix","pairedRegion":[{"name":"eastus","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastus"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricawest","name":"southafricawest","displayName":"South + Africa West","regionalDisplayName":"(Africa) South Africa West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Africa","longitude":"18.843266","latitude":"-34.075691","physicalLocation":"Cape + Town","pairedRegion":[{"name":"southafricanorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southafricanorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral","name":"australiacentral","displayName":"Australia + Central","regionalDisplayName":"(Asia Pacific) Australia Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2","name":"australiacentral2","displayName":"Australia + Central 2","regionalDisplayName":"(Asia Pacific) Australia Central 2","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"149.1244","latitude":"-35.3075","physicalLocation":"Canberra","pairedRegion":[{"name":"australiacentral2","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiacentral2"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiasoutheast","name":"australiasoutheast","displayName":"Australia + Southeast","regionalDisplayName":"(Asia Pacific) Australia Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + 
Pacific","longitude":"144.9631","latitude":"-37.8136","physicalLocation":"Victoria","pairedRegion":[{"name":"australiaeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/australiaeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japanwest","name":"japanwest","displayName":"Japan + West","regionalDisplayName":"(Asia Pacific) Japan West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"135.5022","latitude":"34.6939","physicalLocation":"Osaka","pairedRegion":[{"name":"japaneast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/japaneast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiacentral","name":"jioindiacentral","displayName":"JIO + India Central","regionalDisplayName":"(Asia Pacific) JIO India Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"79.08886","latitude":"21.146633","physicalLocation":"Nagpur","pairedRegion":[{"name":"jioindiawest","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/jioindiawest"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreasouth","name":"koreasouth","displayName":"Korea + South","regionalDisplayName":"(Asia Pacific) Korea South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"129.0756","latitude":"35.1796","physicalLocation":"Busan","pairedRegion":[{"name":"koreacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/koreacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia","name":"southindia","displayName":"South + India","regionalDisplayName":"(Asia Pacific) South India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"80.1636","latitude":"12.9822","physicalLocation":"Chennai","pairedRegion":[{"name":"centralindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/centralindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/westindia","name":"westindia","displayName":"West + India","regionalDisplayName":"(Asia Pacific) West India","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Asia + Pacific","longitude":"72.868","latitude":"19.088","physicalLocation":"Mumbai","pairedRegion":[{"name":"southindia","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/southindia"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadaeast","name":"canadaeast","displayName":"Canada + East","regionalDisplayName":"(Canada) Canada East","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Canada","longitude":"-71.217","latitude":"46.817","physicalLocation":"Quebec","pairedRegion":[{"name":"canadacentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/canadacentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francesouth","name":"francesouth","displayName":"France + South","regionalDisplayName":"(Europe) France 
South","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"2.1972","latitude":"43.8345","physicalLocation":"Marseille","pairedRegion":[{"name":"francecentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/francecentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanynorth","name":"germanynorth","displayName":"Germany + North","regionalDisplayName":"(Europe) Germany North","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"8.806422","latitude":"53.073635","physicalLocation":"Berlin","pairedRegion":[{"name":"germanywestcentral","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/germanywestcentral"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwaywest","name":"norwaywest","displayName":"Norway + West","regionalDisplayName":"(Europe) Norway West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"5.733107","latitude":"58.969975","physicalLocation":"Norway","pairedRegion":[{"name":"norwayeast","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/norwayeast"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandwest","name":"switzerlandwest","displayName":"Switzerland + West","regionalDisplayName":"(Europe) Switzerland West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"6.143158","latitude":"46.204391","physicalLocation":"Geneva","pairedRegion":[{"name":"switzerlandnorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/switzerlandnorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/ukwest","name":"ukwest","displayName":"UK + West","regionalDisplayName":"(Europe) UK West","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Europe","longitude":"-3.084","latitude":"53.427","physicalLocation":"Cardiff","pairedRegion":[{"name":"uksouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uksouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaecentral","name":"uaecentral","displayName":"UAE + Central","regionalDisplayName":"(Middle East) UAE Central","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"Middle + East","longitude":"54.366669","latitude":"24.466667","physicalLocation":"Abu + Dhabi","pairedRegion":[{"name":"uaenorth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/uaenorth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsoutheast","name":"brazilsoutheast","displayName":"Brazil + Southeast","regionalDisplayName":"(South America) Brazil Southeast","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Rio","pairedRegion":[{"name":"brazilsouth","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/brazilsouth"}]}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv","name":"eastusslv","displayName":"East + US SLV","regionalDisplayName":"(South America) East US SLV","metadata":{"regionType":"Physical","regionCategory":"Other","geographyGroup":"South + 
America","longitude":"-43.2075","latitude":"-22.90278","physicalLocation":"Silverstone","pairedRegion":[{"name":"eastusslv","id":"/subscriptions/00000000-0000-0000-0000-000000000000/locations/eastusslv"}]}}]}' + headers: + cache-control: + - no-cache + content-length: + - '26993' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:24 GMT + expires: + - '-1' + pragma: + - no-cache + strict-transport-security: + - max-age=31536000; includeSubDomains + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + status: + code: 200 + message: OK +- request: + body: '{"location": "eastus", "identity": {"type": "SystemAssigned"}, "properties": + {"encryption": {}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory create + Connection: + - keep-alive + Content-Length: + - '96' + Content-Type: + - application/json + ParameterSetName: + - --location --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '{"name":"exampleFa000001","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/examplefataiszwk32","type":"Microsoft.DataFactory/factories","properties":{"provisioningState":"Succeeded","createTime":"2021-04-26T04:35:35.9320014Z","version":"2018-06-01","encryption":{}},"eTag":"\"30000750-0000-0100-0000-608643180000\"","location":"eastus","identity":{"type":"SystemAssigned","principalId":"7d2cdddd-762b-41b6-a3cb-b2798bee84bf","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},"tags":{}}' + headers: + cache-control: + - no-cache + content-length: + - '631' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:38 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1197' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"activities": [{"name": "Wait1", "type": "Wait", "dependsOn": + [], "userProperties": [], "typeProperties": {"waitTimeInSeconds": 5}}], "annotations": + []}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline create + Connection: + - keep-alive + Content-Length: + - '169' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --pipeline --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: 
'{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005","name":"example000005","type":"Microsoft.DataFactory/factories/pipelines","properties":{"activities":[{"name":"Wait1","type":"Wait","dependsOn":[],"userProperties":[],"typeProperties":{"waitTimeInSeconds":5}}],"annotations":[]},"etag":"5b0004ed-0000-0100-0000-6086431d0000"}' + headers: + cache-control: + - no-cache + content-length: + - '514' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:40 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline create-run + Connection: + - keep-alive + Content-Length: + - '45' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --parameters --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 + response: + body: + string: '{"runId":"db9d3cb1-a648-11eb-b950-84a93e64b16e"}' + headers: + cache-control: + - no-cache + content-length: + - '48' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:44 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline-run show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group --run-id + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/db9d3cb1-a648-11eb-b950-84a93e64b16e?api-version=2018-06-01 + response: + body: + string: 
'{"id":"/SUBSCRIPTIONS/00000000-0000-0000-0000-000000000000/RESOURCEGROUPS/CLITESTUREUOWB2FIDALQDHZBZQFN5YX3MAHYIR2OCULQUBA2QXHMFOYTDLJ5C3QD7OFFHGGZI3/PROVIDERS/MICROSOFT.DATAFACTORY/FACTORIES/EXAMPLEFATAISZWK32/pipelineruns/db9d3cb1-a648-11eb-b950-84a93e64b16e","runId":"db9d3cb1-a648-11eb-b950-84a93e64b16e","debugRunId":null,"runGroupId":"db9d3cb1-a648-11eb-b950-84a93e64b16e","pipelineName":"example000005","parameters":{},"invokedBy":{"id":"d5b6b289661d402c8f76621689c77f40","name":"Manual","invokedByType":"Manual"},"runStart":"2021-04-26T04:35:43.0610781Z","runEnd":"2021-04-26T04:35:51.3152593Z","durationInMs":8254,"status":"Succeeded","message":"","lastUpdated":"2021-04-26T04:35:51.3152593Z","annotations":[],"runDimension":{},"isLatest":true}' + headers: + cache-control: + - no-cache + content-length: + - '755' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:51 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"lastUpdatedAfter": "2018-06-16T00:36:44.334575Z", "lastUpdatedBefore": + "2018-06-16T00:49:48.368647Z"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory activity-run query-by-pipeline-run + Connection: + - keep-alive + Content-Length: + - '103' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --last-updated-after --last-updated-before --resource-group + --run-id + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/2f7fdb90-5df1-4b8e-ac2f-064cfa58202b/queryActivityruns?api-version=2018-06-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:51 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"OutputBlobNameList": ["exampleoutput.csv"]}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline create-run + Connection: + - keep-alive + Content-Length: + - '45' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --parameters --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005/createRun?api-version=2018-06-01 + response: + body: + string: '{"runId":"e21653a1-a648-11eb-a1d9-84a93e64b16e"}' + headers: + cache-control: + - no-cache + content-length: + - '48' + content-type: + - application/json; 
charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:52 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline-run cancel + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --run-id + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelineruns/e21653a1-a648-11eb-a1d9-84a93e64b16e/cancel?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:35:54 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"properties": {"type": "TumblingWindowTrigger", "description": "trumblingwindowtrigger", + "annotations": [], "pipeline": {"pipelineReference": {"type": "PipelineReference", + "referenceName": "example000005"}}, "typeProperties": {"frequency": "Minute", + "interval": 5, "startTime": "2021-04-26T04:35:23.000Z", "endTime": "2021-04-26T05:35:23.000Z", + "delay": "00:00:00", "maxConcurrency": 50, "retryPolicy": {"intervalInSeconds": + 30}, "dependsOn": []}}}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger create + Connection: + - keep-alive + Content-Length: + - '451' + Content-Type: + - application/json + ParameterSetName: + - --resource-group --properties --factory-name --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: PUT + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"TumblingWindowTrigger","description":"trumblingwindowtrigger","annotations":[],"pipeline":{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"}},"typeProperties":{"frequency":"Minute","interval":5,"startTime":"2021-04-26T04:35:23Z","endTime":"2021-04-26T05:35:23Z","delay":"00:00:00","maxConcurrency":50,"retryPolicy":{"intervalInSeconds":30},"dependsOn":[]},"runtimeState":"Stopped"},"etag":"5b00f6ee-0000-0100-0000-6086432d0000"}' + headers: + cache-control: + - no-cache + content-length: + - '793' + content-type: + - application/json; 
charset=utf-8 + date: + - Mon, 26 Apr 2021 04:35:56 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1198' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger start + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/start?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:36:00 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger show + Connection: + - keep-alive + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: GET + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006","name":"example000006","type":"Microsoft.DataFactory/factories/triggers","properties":{"type":"TumblingWindowTrigger","description":"trumblingwindowtrigger","annotations":[],"pipeline":{"pipelineReference":{"type":"PipelineReference","referenceName":"example000005"}},"typeProperties":{"frequency":"Minute","interval":5,"startTime":"2021-04-26T04:35:23Z","endTime":"2021-04-26T05:35:23Z","delay":"00:00:00","maxConcurrency":50,"retryPolicy":{"intervalInSeconds":30},"dependsOn":[]},"runtimeState":"Started"},"etag":"5b008aef-0000-0100-0000-608643310000"}' + headers: + cache-control: + - no-cache + content-length: + - '793' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:36:01 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"lastUpdatedAfter": "2021-04-26T04:35:23.000Z", "lastUpdatedBefore": "2021-04-26T05:35:23.000Z"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - 
datafactory trigger-run query-by-factory + Connection: + - keep-alive + Content-Length: + - '97' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --last-updated-after --last-updated-before --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:36:03 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"lastUpdatedAfter": "2021-04-26T04:35:23.000Z", "lastUpdatedBefore": "2021-04-26T05:35:23.000Z"}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger-run query-by-factory + Connection: + - keep-alive + Content-Length: + - '97' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --last-updated-after --last-updated-before --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + response: + body: + string: '{"value":[{"triggerName":"example000006","triggerRunId":"08585821951252995598172155524CU16","triggerType":"TumblingWindowTrigger","triggerRunTimestamp":"2021-04-26T04:40:23.2861718Z","status":"Succeeded","message":null,"properties":{"TriggerTime":"4/26/2021 + 4:40:23 AM","windowStartTime":"4/26/2021 4:35:23 AM","windowEndTime":"4/26/2021 + 4:40:23 AM"},"triggeredPipelines":{"example000005":"904d185a-ceed-43cb-9812-639b5e92ac89"},"groupId":"08585821951252995598172155524CU16","dependencyStatus":{}}]}' + headers: + cache-control: + - no-cache + content-length: + - '502' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:41:05 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger-run rerun + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --trigger-name --run-id + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: 
https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/triggerRuns/08585821951252995598172155524CU16/rerun?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:41:07 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: '{"lastUpdatedAfter": "2018-06-16T00:36:44.334575Z", "lastUpdatedBefore": + "2018-06-16T00:49:48.368647Z", "filters": [{"operand": "TriggerName", "operator": + "Equals", "values": ["example000006"]}]}' + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger-run query-by-factory + Connection: + - keep-alive + Content-Length: + - '196' + Content-Type: + - application/json + ParameterSetName: + - --factory-name --filters --last-updated-after --last-updated-before --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/queryTriggerRuns?api-version=2018-06-01 + response: + body: + string: '{"value":[]}' + headers: + cache-control: + - no-cache + content-length: + - '12' + content-type: + - application/json; charset=utf-8 + date: + - Mon, 26 Apr 2021 04:41:08 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger stop + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: POST + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006/stop?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:41:11 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-writes: + - '1199' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory trigger delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --resource-group --name + User-Agent: + - AZURECLI/2.22.1 
azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/triggers/example000006?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:41:16 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory pipeline delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --factory-name --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001/pipelines/example000005?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:41:20 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - application/json + Accept-Encoding: + - gzip, deflate + CommandName: + - datafactory delete + Connection: + - keep-alive + Content-Length: + - '0' + ParameterSetName: + - -y --name --resource-group + User-Agent: + - AZURECLI/2.22.1 azsdk-python-datafactorymanagementclient/unknown Python/3.8.0 + (Windows-10-10.0.19041-SP0) + method: DELETE + uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest000007/providers/Microsoft.DataFactory/factories/exampleFa000001?api-version=2018-06-01 + response: + body: + string: '' + headers: + cache-control: + - no-cache + content-length: + - '0' + date: + - Mon, 26 Apr 2021 04:41:27 GMT + expires: + - '-1' + pragma: + - no-cache + server: + - Microsoft-IIS/10.0 + strict-transport-security: + - max-age=31536000; includeSubDomains + x-content-type-options: + - nosniff + x-ms-ratelimit-remaining-subscription-deletes: + - '14999' + x-powered-by: + - ASP.NET + status: + code: 200 + message: OK +version: 1 diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index bdc9fd88e23..517a35650f8 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -10,745 +10,158 @@ import os from azure.cli.testsdk import ScenarioTest -from .. 
import try_manual, raise_if, calc_coverage from azure.cli.testsdk import ResourceGroupPreparer +from .example_steps import step_create +from .example_steps import step_update +from .example_steps import step_linked_service_create +from .example_steps import step_linked_service_update +from .example_steps import step_dataset_create +from .example_steps import step_dataset_update +from .example_steps import step_pipeline_create +from .example_steps import step_pipeline_update +from .example_steps import step_trigger_create +from .example_steps import step_trigger_update +from .example_steps import step_integration_runtime_self_hosted_create +from .example_steps import step_integration_runtime_update +from .example_steps import step_integration_runtime_linked +from .example_steps import step_pipeline_create_run +from .example_steps import step_integration_runtime_show +from .example_steps import step_linked_service_show +from .example_steps import step_pipeline_run_show +from .example_steps import step_pipeline_show +from .example_steps import step_dataset_show +from .example_steps import step_trigger_show +from .example_steps import step_integration_runtime_list +from .example_steps import step_linked_service_list +from .example_steps import step_pipeline_list +from .example_steps import step_trigger_list +from .example_steps import step_dataset_list +from .example_steps import step_show +from .example_steps import step_list2 +from .example_steps import step_list +from .example_steps import step_integration_runtime_regenerate_auth_key +from .example_steps import step_integration_runtime_get_connection_info +from .example_steps import step_integration_runtime_sync_credentials +from .example_steps import step_integration_runtime_get_monitoring_data +from .example_steps import step_integration_runtime_list_auth_key +from .example_steps import step_integration_runtime_remove_link +from .example_steps import step_integration_runtime_get_status +from .example_steps import step_integration_runtime_start +from .example_steps import step_integration_runtime_stop +from .example_steps import step_trigger_get_event_subscription_status +from .example_steps import step_activity_run_query_by_pipeline_run +from .example_steps import step_trigger_unsubscribe_from_event +from .example_steps import step_trigger_subscribe_to_event +from .example_steps import step_trigger_start +from .example_steps import step_trigger_stop +from .example_steps import step_get_git_hub_access_token +from .example_steps import step_get_data_plane_access +from .example_steps import step_pipeline_run_query_by_factory +from .example_steps import step_pipeline_run_cancel +from .example_steps import step_trigger_run_query_by_factory +from .example_steps import step_configure_factory_repo +from .example_steps import step_integration_runtime_delete +from .example_steps import step_trigger_delete +from .example_steps import step_pipeline_delete +from .example_steps import step_dataset_delete +from .example_steps import step_linked_service_delete +from .example_steps import step_delete +from .. 
import ( + try_manual, + raise_if, + calc_coverage +) TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) +# Env setup_scenario @try_manual -def setup(test, rg): +def setup_scenario(test, rg): pass -# EXAMPLE: Factories_CreateOrUpdate +# Env cleanup_scenario @try_manual -def step_factories_createorupdate(test, rg): - test.cmd('az datafactory factory create ' - '--location "East US" ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_Update -@try_manual -def step_factories_update(test, rg): - test.cmd('az datafactory factory update ' - '--name "{myFactory}" ' - '--tags exampleTag="exampleValue" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_Create -@try_manual -def step_linkedservices_create(test, rg): - test.cmd('az datafactory linked-service create ' - '--factory-name "{myFactory}" ' - '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' - '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' - 'tKey=\\"}}}}}}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_Update -@try_manual -def step_linkedservices_update(test, rg): - test.cmd('az datafactory linked-service update ' - '--factory-name "{myFactory}" ' - '--description "Example description" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Create -@try_manual -def step_datasets_create(test, rg): - test.cmd('az datafactory dataset create ' - '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' - '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' - 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' - '":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' - '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Update -@try_manual -def step_datasets_update(test, rg): - test.cmd('az datafactory dataset update ' - '--description "Example description" ' - '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' - '\\"}}" ' - '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' - '}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Pipelines_Create -@try_manual -def step_pipelines_create(test, rg): - test.cmd('az datafactory pipeline create ' - '--factory-name "{myFactory}" ' - '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' - 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' - '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' - '\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' - 'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' - 
'\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' - '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' - '":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' - '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' - 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' - 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' - '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}}}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Pipelines_Update -@try_manual -def step_pipelines_update(test, rg): - test.cmd('az datafactory pipeline update ' - '--factory-name "{myFactory}" ' - '--description "Example description" ' - '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' - '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' - ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' - ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' - 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' - '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' - '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' - '"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' - 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' - '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_Create -@try_manual -def step_triggers_create(test, rg): - test.cmd('az datafactory trigger create ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' - 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' - 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' - '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' - '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_Update -@try_manual -def step_triggers_update(test, rg): - test.cmd('az datafactory trigger update ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--description "Example description" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Create -@try_manual -def step_integrationruntimes_create(test, rg): - test.cmd('az datafactory integration-runtime self-hosted create ' - '--factory-name "{myFactory}" ' - '--description "A selfhosted integration runtime" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Update -@try_manual -def step_integrationruntimes_update(test, rg): - test.cmd('az datafactory integration-runtime update 
' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}" ' - '--auto-update "Off" ' - '--update-delay-offset "\\"PT3H\\""', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_CreateLinkedIntegrationRuntime -@try_manual -def step_integrationruntimes_createlinkedintegrationru(test, rg): - test.cmd('az datafactory integration-runtime linked-integration-runtime create ' - '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' - '--location "West US" ' - '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' - '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--resource-group "{rg}" ' - '--subscription-id "12345678-1234-1234-1234-12345678abc"', - checks=[]) - - -# EXAMPLE: Pipelines_CreateRun -@try_manual -def step_pipelines_createrun(test, rg): - test.cmd('az datafactory pipeline create-run ' - '--factory-name "{myFactory}" ' - '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Get -@try_manual -def step_integrationruntimes_get(test, rg): - test.cmd('az datafactory integration-runtime show ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: RerunTriggers_ListByTrigger -@try_manual -def step_reruntriggers_listbytrigger(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: LinkedServices_Get -@try_manual -def step_linkedservices_get(test, rg): - test.cmd('az datafactory linked-service show ' - '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: PipelineRuns_Get -@try_manual -def step_pipelineruns_get(test, rg): - test.cmd('az datafactory pipeline-run show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', - checks=[]) - - -# EXAMPLE: Pipelines_Get -@try_manual -def step_pipelines_get(test, rg): - test.cmd('az datafactory pipeline show ' - '--factory-name "{myFactory}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Get -@try_manual -def step_datasets_get(test, rg): - test.cmd('az datafactory dataset show ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_Get -@try_manual -def step_triggers_get(test, rg): - test.cmd('az datafactory trigger show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_ListByFactory -@try_manual -def step_integrationruntimes_listbyfactory(test, rg): - test.cmd('az datafactory integration-runtime list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_ListByFactory -@try_manual -def step_linkedservices_listbyfactory(test, rg): - test.cmd('az datafactory linked-service list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Pipelines_ListByFactory -@try_manual -def step_pipelines_listbyfactory(test, rg): - test.cmd('az datafactory pipeline list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_ListByFactory -@try_manual -def step_triggers_listbyfactory(test, rg): - test.cmd('az datafactory trigger list ' - '--factory-name 
"{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_ListByFactory -@try_manual -def step_datasets_listbyfactory(test, rg): - test.cmd('az datafactory dataset list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_Get -@try_manual -def step_factories_get(test, rg): - test.cmd('az datafactory factory show ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_ListByResourceGroup -@try_manual -def step_factories_listbyresourcegroup(test, rg): - test.cmd('az datafactory factory list ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_List -@try_manual -def step_factories_list(test, rg): - test.cmd('az datafactory factory list ' - '-g ""', - checks=[]) - - -# EXAMPLE: Operations_List -@try_manual -def step_operations_list(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: RerunTriggers_Cancel -@try_manual -def step_reruntriggers_cancel(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: RerunTriggers_Start -@try_manual -def step_reruntriggers_start(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: RerunTriggers_Stop -@try_manual -def step_reruntriggers_stop(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: IntegrationRuntimes_RegenerateAuthKey -@try_manual -def step_integrationruntimes_regenerateauthkey(test, rg): - test.cmd('az datafactory integration-runtime regenerate-auth-key ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--key-name "authKey2" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: TriggerRuns_Rerun -@try_manual -def step_triggerruns_rerun(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: IntegrationRuntimes_GetConnectionInfo -@try_manual -def step_integrationruntimes_getconnectioninfo(test, rg): - test.cmd('az datafactory integration-runtime get-connection-info ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_SyncCredentials -@try_manual -def step_integrationruntimes_synccredentials(test, rg): - test.cmd('az datafactory integration-runtime sync-credentials ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_GetMonitoringData -@try_manual -def step_integrationruntimes_getmonitoringdata(test, rg): - test.cmd('az datafactory integration-runtime get-monitoring-data ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_ListAuthKeys -@try_manual -def step_integrationruntimes_listauthkeys(test, rg): - test.cmd('az datafactory integration-runtime list-auth-key ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Upgrade -@try_manual -def step_integrationruntimes_upgrade(test, rg): - test.cmd('az datafactory integration-runtime remove-link ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--linked-factory-name "exampleFactoryName-linked" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_GetStatus -@try_manual -def step_integrationruntimes_getstatus(test, rg): - test.cmd('az datafactory integration-runtime get-status ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - 
-# EXAMPLE: IntegrationRuntimes_Start -@try_manual -def step_integrationruntimes_start(test, rg): - test.cmd('az datafactory integration-runtime start ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime2}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Stop -@try_manual -def step_integrationruntimes_stop(test, rg): - test.cmd('az datafactory integration-runtime stop ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime2}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_GetEventSubscriptionStatus -@try_manual -def step_triggers_geteventsubscriptionstatus(test, rg): - test.cmd('az datafactory trigger get-event-subscription-status ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: ActivityRuns_QueryByPipelineRun -@try_manual -def step_activityruns_querybypipelinerun(test, rg): - test.cmd('az datafactory activity-run query-by-pipeline-run ' - '--factory-name "{myFactory}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', - checks=[]) - - -# EXAMPLE: Triggers_UnsubscribeFromEvents -@try_manual -def step_triggers_unsubscribefromevents(test, rg): - test.cmd('az datafactory trigger unsubscribe-from-event ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_SubscribeToEvents -@try_manual -def step_triggers_subscribetoevents(test, rg): - test.cmd('az datafactory trigger subscribe-to-event ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_Start -@try_manual -def step_triggers_start(test, rg): - test.cmd('az datafactory trigger start ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_Stop -@try_manual -def step_triggers_stop(test, rg): - test.cmd('az datafactory trigger stop ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Factories_GetGitHubAccessToken -@try_manual -def step_factories_getgithubaccesstoken(test, rg): - test.cmd('az datafactory factory get-git-hub-access-token ' - '--name "{myFactory}" ' - '--git-hub-access-code "some" ' - '--git-hub-access-token-base-url "some" ' - '--git-hub-client-id "some" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_GetDataPlaneAccess -@try_manual -def step_factories_getdataplaneaccess(test, rg): - test.cmd('az datafactory factory get-data-plane-access ' - '--name "{myFactory}" ' - '--access-resource-path "" ' - '--expire-time "2018-11-10T09:46:20.2659347Z" ' - '--permissions "r" ' - '--profile-name "DefaultProfile" ' - '--start-time "2018-11-10T02:46:20.2659347Z" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: PipelineRuns_QueryByFactory -@try_manual -def step_pipelineruns_querybyfactory(test, rg): - test.cmd('az datafactory pipeline-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: PipelineRuns_Cancel -@try_manual -def step_pipelineruns_cancel(test, rg): - test.cmd('az 
datafactory pipeline-run cancel ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', - checks=[]) - - -# EXAMPLE: TriggerRuns_QueryByFactory -@try_manual -def step_triggerruns_querybyfactory(test, rg): - test.cmd('az datafactory trigger-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_ConfigureFactoryRepo -@try_manual -def step_factories_configurefactoryrepo(test, rg): - test.cmd('az datafactory factory configure-factory-repo ' - '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' - 'ry/factories/{myFactory}" ' - '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' - 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' - '--location "East US"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Delete -@try_manual -def step_integrationruntimes_delete(test, rg): - test.cmd('az datafactory integration-runtime delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_Delete -@try_manual -def step_triggers_delete(test, rg): - test.cmd('az datafactory trigger delete -y ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Pipelines_Delete -@try_manual -def step_pipelines_delete(test, rg): - test.cmd('az datafactory pipeline delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Delete -@try_manual -def step_datasets_delete(test, rg): - test.cmd('az datafactory dataset delete -y ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_Delete -@try_manual -def step_linkedservices_delete(test, rg): - test.cmd('az datafactory linked-service delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_Delete -@try_manual -def step_factories_delete(test, rg): - test.cmd('az datafactory factory delete -y ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -@try_manual -def cleanup(test, rg): +def cleanup_scenario(test, rg): pass +# Testcase: Scenario @try_manual def call_scenario(test, rg): - setup(test, rg) - step_factories_createorupdate(test, rg) - step_factories_update(test, rg) - step_linkedservices_create(test, rg) - step_linkedservices_update(test, rg) - step_datasets_create(test, rg) - step_datasets_update(test, rg) - step_pipelines_create(test, rg) - step_pipelines_update(test, rg) - step_triggers_create(test, rg) - step_triggers_update(test, rg) - step_integrationruntimes_create(test, rg) - step_integrationruntimes_update(test, rg) - step_integrationruntimes_createlinkedintegrationru(test, rg) - step_pipelines_createrun(test, rg) - step_integrationruntimes_get(test, rg) - step_reruntriggers_listbytrigger(test, rg) - step_linkedservices_get(test, rg) - step_pipelineruns_get(test, rg) - step_pipelines_get(test, rg) - step_datasets_get(test, rg) - step_triggers_get(test, rg) - step_integrationruntimes_listbyfactory(test, rg) - 
step_linkedservices_listbyfactory(test, rg) - step_pipelines_listbyfactory(test, rg) - step_triggers_listbyfactory(test, rg) - step_datasets_listbyfactory(test, rg) - step_factories_get(test, rg) - step_factories_listbyresourcegroup(test, rg) - step_factories_list(test, rg) - step_operations_list(test, rg) - step_reruntriggers_cancel(test, rg) - step_reruntriggers_start(test, rg) - step_reruntriggers_stop(test, rg) - step_integrationruntimes_regenerateauthkey(test, rg) - step_triggerruns_rerun(test, rg) - step_integrationruntimes_getconnectioninfo(test, rg) - step_integrationruntimes_synccredentials(test, rg) - step_integrationruntimes_getmonitoringdata(test, rg) - step_integrationruntimes_listauthkeys(test, rg) - step_integrationruntimes_upgrade(test, rg) - step_integrationruntimes_getstatus(test, rg) - step_integrationruntimes_start(test, rg) - step_integrationruntimes_stop(test, rg) - step_triggers_geteventsubscriptionstatus(test, rg) - step_activityruns_querybypipelinerun(test, rg) - step_triggers_unsubscribefromevents(test, rg) - step_triggers_subscribetoevents(test, rg) - step_triggers_start(test, rg) - step_triggers_stop(test, rg) - step_factories_getgithubaccesstoken(test, rg) - step_factories_getdataplaneaccess(test, rg) - step_pipelineruns_querybyfactory(test, rg) - step_pipelineruns_cancel(test, rg) - step_triggerruns_querybyfactory(test, rg) - step_factories_configurefactoryrepo(test, rg) - step_integrationruntimes_delete(test, rg) - step_triggers_delete(test, rg) - step_pipelines_delete(test, rg) - step_datasets_delete(test, rg) - step_linkedservices_delete(test, rg) - step_factories_delete(test, rg) - cleanup(test, rg) - - -@try_manual -class DataFactoryManagementClientScenarioTest(ScenarioTest): - - @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') - def test_datafactory(self, rg): - + setup_scenario(test, rg) + step_create(test, rg, checks=[]) + step_update(test, rg, checks=[]) + step_linked_service_create(test, rg, checks=[]) + step_linked_service_update(test, rg, checks=[]) + step_dataset_create(test, rg, checks=[]) + step_dataset_update(test, rg, checks=[]) + step_pipeline_create(test, rg, checks=[]) + step_pipeline_update(test, rg, checks=[]) + step_trigger_create(test, rg, checks=[]) + step_trigger_update(test, rg, checks=[]) + step_integration_runtime_self_hosted_create(test, rg, checks=[]) + step_integration_runtime_update(test, rg, checks=[]) + step_integration_runtime_linked(test, rg, checks=[]) + step_pipeline_create_run(test, rg, checks=[]) + step_integration_runtime_show(test, rg, checks=[]) + # STEP NOT FOUND: RerunTriggers_ListByTrigger + step_linked_service_show(test, rg, checks=[]) + step_pipeline_run_show(test, rg, checks=[]) + step_pipeline_show(test, rg, checks=[]) + step_dataset_show(test, rg, checks=[]) + step_trigger_show(test, rg, checks=[]) + step_integration_runtime_list(test, rg, checks=[]) + step_linked_service_list(test, rg, checks=[]) + step_pipeline_list(test, rg, checks=[]) + step_trigger_list(test, rg, checks=[]) + step_dataset_list(test, rg, checks=[]) + step_show(test, rg, checks=[]) + step_list2(test, rg, checks=[]) + step_list(test, rg, checks=[]) + # STEP NOT FOUND: Operations_List + # STEP NOT FOUND: RerunTriggers_Cancel + # STEP NOT FOUND: RerunTriggers_Start + # STEP NOT FOUND: RerunTriggers_Stop + step_integration_runtime_regenerate_auth_key(test, rg, checks=[]) + # STEP NOT FOUND: TriggerRuns_Rerun + step_integration_runtime_get_connection_info(test, rg, checks=[]) + 
step_integration_runtime_sync_credentials(test, rg, checks=[]) + step_integration_runtime_get_monitoring_data(test, rg, checks=[]) + step_integration_runtime_list_auth_key(test, rg, checks=[]) + step_integration_runtime_remove_link(test, rg, checks=[]) + step_integration_runtime_get_status(test, rg, checks=[]) + step_integration_runtime_start(test, rg, checks=[]) + step_integration_runtime_stop(test, rg, checks=[]) + step_trigger_get_event_subscription_status(test, rg, checks=[]) + step_activity_run_query_by_pipeline_run(test, rg, checks=[]) + step_trigger_unsubscribe_from_event(test, rg, checks=[]) + step_trigger_subscribe_to_event(test, rg, checks=[]) + step_trigger_start(test, rg, checks=[]) + step_trigger_stop(test, rg, checks=[]) + step_get_git_hub_access_token(test, rg, checks=[]) + step_get_data_plane_access(test, rg, checks=[]) + step_pipeline_run_query_by_factory(test, rg, checks=[]) + step_pipeline_run_cancel(test, rg, checks=[]) + step_trigger_run_query_by_factory(test, rg, checks=[]) + step_configure_factory_repo(test, rg, checks=[]) + step_integration_runtime_delete(test, rg, checks=[]) + step_trigger_delete(test, rg, checks=[]) + step_pipeline_delete(test, rg, checks=[]) + step_dataset_delete(test, rg, checks=[]) + step_linked_service_delete(test, rg, checks=[]) + step_delete(test, rg, checks=[]) + cleanup_scenario(test, rg) + + +# Test class for Scenario +@try_manual +class DatafactoryScenarioTest(ScenarioTest): + + def __init__(self, *args, **kwargs): + super(DatafactoryScenarioTest, self).__init__(*args, **kwargs) self.kwargs.update({ 'subscription_id': self.get_subscription_id() }) @@ -763,6 +176,8 @@ def test_datafactory(self, rg): 'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14), }) + @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') + def test_datafactory_Scenario(self, rg): call_scenario(self, rg) calc_coverage(__file__) raise_if() diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md index 9f0a5f555cf..b7eabe4528a 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md @@ -1,56 +1,48 @@ |Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_factories_createorupdate|successed||||2020-09-09 03:20:36.459026|2020-09-09 03:20:47.009536| -|step_factories_update|successed||||2020-09-09 03:13:09.171999|2020-09-09 03:13:17.158415| -|step_linkedservices_create|successed||||2020-09-09 03:13:17.158521|2020-09-09 03:13:18.391822| -|step_linkedservices_update|successed||||2020-09-09 03:13:18.391936|2020-09-09 03:13:20.086697| -|step_datasets_create|successed||||2020-09-09 03:13:20.086867|2020-09-09 03:13:21.030817| -|step_datasets_update|successed||||2020-09-09 03:13:21.030945|2020-09-09 03:13:23.212312| -|step_pipelines_create|successed||||2020-09-09 03:13:23.212418|2020-09-09 03:13:24.144046| -|step_pipelines_update|successed||||2020-09-09 03:13:24.144222|2020-09-09 03:13:25.633789| -|step_triggers_create|successed||||2020-09-09 03:13:25.633898|2020-09-09 03:13:26.656790| -|step_triggers_update|successed||||2020-09-09 03:13:26.656960|2020-09-09 03:13:28.908454| -|step_integrationruntimes_create|successed||||2020-09-09 03:13:28.908624|2020-09-09 03:13:30.048626| 
-|step_integrationruntimes_update|successed||||2020-09-09 03:13:30.048749|2020-09-09 03:13:30.718810| -|step_pipelines_createrun|successed||||2020-09-09 03:20:58.002857|2020-09-09 03:20:59.633723| -|step_integrationruntimes_get|successed||||2020-09-09 03:14:47.710770|2020-09-09 03:14:48.302708| -|step_reruntriggers_listbytrigger|successed||||2020-09-09 03:13:55.851345|2020-09-09 03:13:55.851351| -|step_linkedservices_get|successed||||2020-09-09 03:13:55.851669|2020-09-09 03:13:56.412982| -|step_pipelines_get|successed||||2020-09-09 03:13:56.413198|2020-09-09 03:13:57.000576| -|step_datasets_get|successed||||2020-09-09 03:13:57.000711|2020-09-09 03:13:57.627004| -|step_triggers_get|successed||||2020-09-09 03:21:05.240434|2020-09-09 03:21:05.843100| -|step_integrationruntimes_listbyfactory|successed||||2020-09-09 03:13:58.229772|2020-09-09 03:13:58.825227| -|step_linkedservices_listbyfactory|successed||||2020-09-09 03:13:58.825356|2020-09-09 03:13:59.474421| -|step_pipelines_listbyfactory|successed||||2020-09-09 03:13:59.474573|2020-09-09 03:14:00.075676| -|step_triggers_listbyfactory|successed||||2020-09-09 03:14:00.075778|2020-09-09 03:14:00.659819| -|step_datasets_listbyfactory|successed||||2020-09-09 03:14:00.659993|2020-09-09 03:14:01.933289| -|step_factories_get|successed||||2020-09-09 03:14:01.933391|2020-09-09 03:14:02.496092| -|step_factories_listbyresourcegroup|successed||||2020-09-09 03:14:02.496256|2020-09-09 03:14:03.065882| -|step_factories_list|successed||||2020-09-09 03:14:03.066110|2020-09-09 03:14:03.652226| -|step_operations_list|successed||||2020-09-09 03:14:03.652474|2020-09-09 03:14:03.652478| -|step_integrationruntimes_regenerateauthkey|successed||||2020-09-09 03:14:03.652652|2020-09-09 03:14:04.485017| -|step_integrationruntimes_synccredentials|successed||||2020-09-09 03:14:04.485147|2020-09-09 03:14:05.267252| -|step_integrationruntimes_getmonitoringdata|successed||||2020-09-09 03:14:05.267586|2020-09-09 03:14:05.885271| -|step_integrationruntimes_listauthkeys|successed||||2020-09-09 03:14:05.885388|2020-09-09 03:14:06.561146| -|step_integrationruntimes_upgrade|successed||||2020-09-09 03:14:06.561314|2020-09-09 03:14:07.329602| -|step_integrationruntimes_getstatus|successed||||2020-09-09 03:14:07.329744|2020-09-09 03:14:07.953859| -|step_triggers_geteventsubscriptionstatus|successed||||2020-09-09 03:14:07.953969|2020-09-09 03:14:08.601782| -|step_triggers_unsubscribefromevents|successed||||2020-09-09 03:14:08.601956|2020-09-09 03:14:09.397017| -|step_triggers_subscribetoevents|successed||||2020-09-09 03:14:09.397108|2020-09-09 03:14:10.182324| -|step_triggers_start|successed||||2020-09-09 03:21:02.232388|2020-09-09 03:21:05.240220| -|step_triggers_stop|successed||||2020-09-09 03:26:10.167791|2020-09-09 03:26:12.927439| -|step_factories_getdataplaneaccess|successed||||2020-09-09 03:14:15.489956|2020-09-09 03:14:16.213205| -|step_triggerruns_querybyfactory|successed||||2020-09-09 03:26:09.523472|2020-09-09 03:26:10.167619| -|step_factories_configurefactoryrepo|successed||||2020-09-09 03:14:16.845877|2020-09-09 03:14:19.093687| -|step_integrationruntimes_delete|successed||||2020-09-09 03:20:29.384647|2020-09-09 03:20:31.116770| -|step_triggers_delete|successed||||2020-09-09 03:26:12.927659|2020-09-09 03:26:17.287524| -|step_pipelines_delete|successed||||2020-09-09 03:26:17.287730|2020-09-09 03:26:21.728041| -|step_datasets_delete|successed||||2020-09-09 03:14:27.023092|2020-09-09 03:14:28.558416| -|step_linkedservices_delete|successed||||2020-09-09 
03:14:28.558520|2020-09-09 03:14:29.608796| -|step_factories_delete|successed||||2020-09-09 03:26:21.728199|2020-09-09 03:26:28.588917| -|step_integrationruntimes_start|successed||||2020-09-09 03:14:48.302813|2020-09-09 03:16:06.820258| -|step_integrationruntimes_stop|successed||||2020-09-09 03:16:06.820543|2020-09-09 03:20:29.384268| -|step_pipelineruns_get|successed||||2020-09-09 03:20:55.257577|2020-09-09 03:20:56.603108| -|step_activityruns_querybypipelinerun|successed||||2020-09-09 03:20:56.603386|2020-09-09 03:20:58.002421| -|step_pipelineruns_cancel|successed||||2020-09-09 03:20:59.634195|2020-09-09 03:21:00.580469| -|step_triggerruns_rerun|successed||||2020-09-09 03:26:07.876793|2020-09-09 03:26:09.523263| -Coverage: 54/54 +|step_create|successed||||2021-04-26 09:05:32.308913|2021-04-26 09:05:32.501033| +|step_update|successed||||2021-04-26 09:05:22.750754|2021-04-26 09:05:22.880707| +|step_linked_service_create|successed||||2021-04-26 09:05:22.880707|2021-04-26 09:05:23.009706| +|step_linked_service_update|successed||||2021-04-26 09:05:23.010706|2021-04-26 09:05:23.174579| +|step_dataset_create|successed||||2021-04-26 09:05:23.174579|2021-04-26 09:05:23.317043| +|step_dataset_update|successed||||2021-04-26 09:05:23.318045|2021-04-26 09:05:23.451047| +|step_pipeline_create|successed||||2021-04-26 09:05:23.452049|2021-04-26 09:05:23.575751| +|step_trigger_create|successed||||2021-04-26 09:05:23.703756|2021-04-26 09:05:23.871057| +|step_trigger_update|successed||||2021-04-26 09:05:23.871057|2021-04-26 09:05:24.019053| +|step_integration_runtime_self_hosted_create|successed||||2021-04-26 09:05:24.019053|2021-04-26 09:05:24.155099| +|step_integration_runtime_update|successed||||2021-04-26 09:05:24.155099|2021-04-26 09:05:24.285096| +|step_integration_runtime_show|successed||||2021-04-26 09:05:29.524820|2021-04-26 09:05:29.675815| +|step_linked_service_show|successed||||2021-04-26 09:05:24.582291|2021-04-26 09:05:24.718292| +|step_pipeline_show|successed||||2021-04-26 09:05:24.719291|2021-04-26 09:05:24.872517| +|step_dataset_show|successed||||2021-04-26 09:05:24.873517|2021-04-26 09:05:25.000030| +|step_trigger_show|successed||||2021-04-26 09:05:33.782136|2021-04-26 09:05:33.927138| +|step_integration_runtime_list|successed||||2021-04-26 09:05:25.115003|2021-04-26 09:05:25.253055| +|step_linked_service_list|successed||||2021-04-26 09:05:25.254059|2021-04-26 09:05:25.409635| +|step_pipeline_list|successed||||2021-04-26 09:05:25.409635|2021-04-26 09:05:25.533704| +|step_trigger_list|successed||||2021-04-26 09:05:25.533704|2021-04-26 09:05:25.676865| +|step_dataset_list|successed||||2021-04-26 09:05:25.676865|2021-04-26 09:05:25.810871| +|step_show|successed||||2021-04-26 09:05:25.810871|2021-04-26 09:05:25.938042| +|step_list2|successed||||2021-04-26 09:05:25.938042|2021-04-26 09:05:26.060042| +|step_list|successed||||2021-04-26 09:05:26.060042|2021-04-26 09:05:26.183196| +|step_integration_runtime_regenerate_auth_key|successed||||2021-04-26 09:05:26.184194|2021-04-26 09:05:26.313194| +|step_integration_runtime_sync_credentials|successed||||2021-04-26 09:05:26.314192|2021-04-26 09:05:26.449307| +|step_integration_runtime_get_monitoring_data|successed||||2021-04-26 09:05:26.449307|2021-04-26 09:05:26.636000| +|step_integration_runtime_list_auth_key|successed||||2021-04-26 09:05:26.636000|2021-04-26 09:05:26.790002| +|step_integration_runtime_remove_link|successed||||2021-04-26 09:05:26.791005|2021-04-26 09:05:26.934513| +|step_integration_runtime_get_status|successed||||2021-04-26 
09:05:26.935512|2021-04-26 09:05:27.069511| +|step_trigger_get_event_subscription_status|successed||||2021-04-26 09:05:27.069511|2021-04-26 09:05:27.211487| +|step_trigger_unsubscribe_from_event|successed||||2021-04-26 09:05:27.212492|2021-04-26 09:05:27.402802| +|step_trigger_subscribe_to_event|successed||||2021-04-26 09:05:27.402802|2021-04-26 09:05:27.532807| +|step_trigger_start|successed||||2021-04-26 09:05:33.632612|2021-04-26 09:05:33.782136| +|step_trigger_stop|successed||||2021-04-26 09:05:34.611518|2021-04-26 09:05:34.768873| +|step_get_data_plane_access|successed||||2021-04-26 09:05:27.837090|2021-04-26 09:05:27.977072| +|step_configure_factory_repo|successed||||2021-04-26 09:05:28.099075|2021-04-26 09:05:28.288426| +|step_integration_runtime_delete|successed||||2021-04-26 09:05:31.965947|2021-04-26 09:05:32.140944| +|step_trigger_delete|successed||||2021-04-26 09:05:34.768873|2021-04-26 09:05:34.900878| +|step_pipeline_delete|successed||||2021-04-26 09:05:34.900878|2021-04-26 09:05:35.030991| +|step_dataset_delete|successed||||2021-04-26 09:05:28.737334|2021-04-26 09:05:28.861337| +|step_linked_service_delete|successed||||2021-04-26 09:05:28.861337|2021-04-26 09:05:28.989612| +|step_delete|successed||||2021-04-26 09:05:35.031990|2021-04-26 09:05:35.197507| +|step_integration_runtime_start|successed||||2021-04-26 09:05:29.676815|2021-04-26 09:05:30.373119| +|step_integration_runtime_stop|successed||||2021-04-26 09:05:30.374118|2021-04-26 09:05:31.964925| +|step_activity_run_query_by_pipeline_run|successed||||2021-04-26 09:05:33.012581|2021-04-26 09:05:33.193579| +Coverage: 46/46 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py index 84eac8676c6..3e3cbab9738 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index ab18f1f6cb4..f272437a3e9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -18,63 +18,72 @@ from azure.core.credentials import TokenCredential from ._configuration import DataFactoryManagementClientConfiguration -from .operations import OperationOperations -from .operations import FactoryOperations +from .operations import Operations +from .operations import FactoriesOperations from .operations import ExposureControlOperations -from .operations import IntegrationRuntimeOperations +from .operations import IntegrationRuntimesOperations from .operations import IntegrationRuntimeObjectMetadataOperations -from .operations import IntegrationRuntimeNodeOperations -from .operations import 
LinkedServiceOperations -from .operations import DatasetOperations -from .operations import PipelineOperations -from .operations import PipelineRunOperations -from .operations import ActivityRunOperations -from .operations import TriggerOperations -from .operations import TriggerRunOperations -from .operations import DataFlowOperations +from .operations import IntegrationRuntimeNodesOperations +from .operations import LinkedServicesOperations +from .operations import DatasetsOperations +from .operations import PipelinesOperations +from .operations import PipelineRunsOperations +from .operations import ActivityRunsOperations +from .operations import TriggersOperations +from .operations import TriggerRunsOperations +from .operations import DataFlowsOperations from .operations import DataFlowDebugSessionOperations -from .operations import ManagedVirtualNetworkOperations -from .operations import ManagedPrivateEndpointOperations +from .operations import ManagedVirtualNetworksOperations +from .operations import ManagedPrivateEndpointsOperations +from .operations import PrivateEndPointConnectionsOperations +from .operations import PrivateEndpointConnectionOperations +from .operations import PrivateLinkResourcesOperations from . import models class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. - :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.operations.OperationOperations - :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.operations.FactoryOperations + :ivar operations: Operations operations + :vartype operations: data_factory_management_client.operations.Operations + :ivar factories: FactoriesOperations operations + :vartype factories: data_factory_management_client.operations.FactoriesOperations :ivar exposure_control: ExposureControlOperations operations :vartype exposure_control: data_factory_management_client.operations.ExposureControlOperations - :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.operations.IntegrationRuntimeOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: data_factory_management_client.operations.IntegrationRuntimesOperations :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations :vartype integration_runtime_object_metadata: data_factory_management_client.operations.IntegrationRuntimeObjectMetadataOperations - :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.operations.IntegrationRuntimeNodeOperations - :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.operations.LinkedServiceOperations - :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.operations.DatasetOperations - :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.operations.PipelineOperations - :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.operations.PipelineRunOperations - :ivar activity_run: ActivityRunOperations operations - :vartype activity_run: 
data_factory_management_client.operations.ActivityRunOperations - :ivar trigger: TriggerOperations operations - :vartype trigger: data_factory_management_client.operations.TriggerOperations - :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.operations.TriggerRunOperations - :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.operations.DataFlowOperations + :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations + :vartype integration_runtime_nodes: data_factory_management_client.operations.IntegrationRuntimeNodesOperations + :ivar linked_services: LinkedServicesOperations operations + :vartype linked_services: data_factory_management_client.operations.LinkedServicesOperations + :ivar datasets: DatasetsOperations operations + :vartype datasets: data_factory_management_client.operations.DatasetsOperations + :ivar pipelines: PipelinesOperations operations + :vartype pipelines: data_factory_management_client.operations.PipelinesOperations + :ivar pipeline_runs: PipelineRunsOperations operations + :vartype pipeline_runs: data_factory_management_client.operations.PipelineRunsOperations + :ivar activity_runs: ActivityRunsOperations operations + :vartype activity_runs: data_factory_management_client.operations.ActivityRunsOperations + :ivar triggers: TriggersOperations operations + :vartype triggers: data_factory_management_client.operations.TriggersOperations + :ivar trigger_runs: TriggerRunsOperations operations + :vartype trigger_runs: data_factory_management_client.operations.TriggerRunsOperations + :ivar data_flows: DataFlowsOperations operations + :vartype data_flows: data_factory_management_client.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations :vartype data_flow_debug_session: data_factory_management_client.operations.DataFlowDebugSessionOperations - :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.operations.ManagedVirtualNetworkOperations - :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.operations.ManagedPrivateEndpointOperations + :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations + :vartype managed_virtual_networks: data_factory_management_client.operations.ManagedVirtualNetworksOperations + :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations + :vartype managed_private_endpoints: data_factory_management_client.operations.ManagedPrivateEndpointsOperations + :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations + :vartype private_end_point_connections: data_factory_management_client.operations.PrivateEndPointConnectionsOperations + :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations + :vartype private_endpoint_connection: data_factory_management_client.operations.PrivateEndpointConnectionOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: data_factory_management_client.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. 
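The pluralized operation-group attributes documented above are what callers now reach for when driving the regenerated client directly. A minimal usage sketch follows, assuming DefaultAzureCredential, placeholder subscription and resource-group values, and that the regenerated FactoriesOperations exposes list_by_resource_group for the Factories_ListByResourceGroup operation; none of these identifiers are taken from this diff.

# Minimal sketch of driving the regenerated client; all identifiers below are placeholders.
from azure.identity import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),                      # assumed credential type
    subscription_id="00000000-0000-0000-0000-000000000000",   # placeholder subscription id
)

# Operation groups are now plural: client.factories, client.pipelines, client.triggers, ...
for factory in client.factories.list_by_resource_group("exampleResourceGroup"):  # assumed method name
    print(factory.name)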
@@ -98,41 +107,48 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - self.operation = OperationOperations( + self.operations = Operations( self._client, self._config, self._serialize, self._deserialize) - self.factory = FactoryOperations( + self.factories = FactoriesOperations( self._client, self._config, self._serialize, self._deserialize) self.exposure_control = ExposureControlOperations( self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime = IntegrationRuntimeOperations( + self.integration_runtimes = IntegrationRuntimesOperations( self._client, self._config, self._serialize, self._deserialize) self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_node = IntegrationRuntimeNodeOperations( + self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( self._client, self._config, self._serialize, self._deserialize) - self.linked_service = LinkedServiceOperations( + self.linked_services = LinkedServicesOperations( self._client, self._config, self._serialize, self._deserialize) - self.dataset = DatasetOperations( + self.datasets = DatasetsOperations( self._client, self._config, self._serialize, self._deserialize) - self.pipeline = PipelineOperations( + self.pipelines = PipelinesOperations( self._client, self._config, self._serialize, self._deserialize) - self.pipeline_run = PipelineRunOperations( + self.pipeline_runs = PipelineRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.activity_run = ActivityRunOperations( + self.activity_runs = ActivityRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.trigger = TriggerOperations( + self.triggers = TriggersOperations( self._client, self._config, self._serialize, self._deserialize) - self.trigger_run = TriggerRunOperations( + self.trigger_runs = TriggerRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.data_flow = DataFlowOperations( + self.data_flows = DataFlowsOperations( self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_network = ManagedVirtualNetworkOperations( + self.managed_virtual_networks = ManagedVirtualNetworksOperations( self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoint = ManagedPrivateEndpointOperations( + self.managed_private_endpoints = ManagedPrivateEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( self._client, self._config, self._serialize, self._deserialize) def close(self): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py index 5cde5bc8d05..571673cab5c 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._data_factory_management_client_async import DataFactoryManagementClient +from ._data_factory_management_client import DataFactoryManagementClient __all__ = ['DataFactoryManagementClient'] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py new file mode 100644 index 00000000000..c88a091bdb9 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +VERSION = "unknown" + +class DataFactoryManagementClientConfiguration(Configuration): + """Configuration for DataFactoryManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The subscription identifier. 
+ :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2018-06-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py new file mode 100644 index 00000000000..255a1839c21 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -0,0 +1,159 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import DataFactoryManagementClientConfiguration +from .operations import Operations +from .operations import FactoriesOperations +from .operations import ExposureControlOperations +from .operations import IntegrationRuntimesOperations +from .operations import IntegrationRuntimeObjectMetadataOperations +from .operations import IntegrationRuntimeNodesOperations +from .operations import LinkedServicesOperations +from .operations import DatasetsOperations +from .operations import PipelinesOperations +from .operations import PipelineRunsOperations +from .operations import ActivityRunsOperations +from .operations import TriggersOperations +from .operations import TriggerRunsOperations +from .operations import DataFlowsOperations +from .operations import DataFlowDebugSessionOperations +from .operations import ManagedVirtualNetworksOperations +from .operations import ManagedPrivateEndpointsOperations +from .operations import PrivateEndPointConnectionsOperations +from .operations import PrivateEndpointConnectionOperations +from .operations import PrivateLinkResourcesOperations +from .. import models + + +class DataFactoryManagementClient(object): + """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. + + :ivar operations: Operations operations + :vartype operations: data_factory_management_client.aio.operations.Operations + :ivar factories: FactoriesOperations operations + :vartype factories: data_factory_management_client.aio.operations.FactoriesOperations + :ivar exposure_control: ExposureControlOperations operations + :vartype exposure_control: data_factory_management_client.aio.operations.ExposureControlOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: data_factory_management_client.aio.operations.IntegrationRuntimesOperations + :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations + :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations.IntegrationRuntimeObjectMetadataOperations + :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations + :vartype integration_runtime_nodes: data_factory_management_client.aio.operations.IntegrationRuntimeNodesOperations + :ivar linked_services: LinkedServicesOperations operations + :vartype linked_services: data_factory_management_client.aio.operations.LinkedServicesOperations + :ivar datasets: DatasetsOperations operations + :vartype datasets: data_factory_management_client.aio.operations.DatasetsOperations + :ivar pipelines: PipelinesOperations operations + :vartype pipelines: data_factory_management_client.aio.operations.PipelinesOperations + :ivar pipeline_runs: PipelineRunsOperations operations + :vartype pipeline_runs: data_factory_management_client.aio.operations.PipelineRunsOperations + :ivar activity_runs: ActivityRunsOperations operations + :vartype activity_runs: data_factory_management_client.aio.operations.ActivityRunsOperations + :ivar triggers: TriggersOperations operations + :vartype triggers: 
data_factory_management_client.aio.operations.TriggersOperations + :ivar trigger_runs: TriggerRunsOperations operations + :vartype trigger_runs: data_factory_management_client.aio.operations.TriggerRunsOperations + :ivar data_flows: DataFlowsOperations operations + :vartype data_flows: data_factory_management_client.aio.operations.DataFlowsOperations + :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations + :vartype data_flow_debug_session: data_factory_management_client.aio.operations.DataFlowDebugSessionOperations + :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations + :vartype managed_virtual_networks: data_factory_management_client.aio.operations.ManagedVirtualNetworksOperations + :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations + :vartype managed_private_endpoints: data_factory_management_client.aio.operations.ManagedPrivateEndpointsOperations + :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations + :vartype private_end_point_connections: data_factory_management_client.aio.operations.PrivateEndPointConnectionsOperations + :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations + :vartype private_endpoint_connection: data_factory_management_client.aio.operations.PrivateEndpointConnectionOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: data_factory_management_client.aio.operations.PrivateLinkResourcesOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The subscription identifier. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.factories = FactoriesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.exposure_control = ExposureControlOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.linked_services = LinkedServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.datasets = DatasetsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.pipelines = PipelinesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.pipeline_runs = PipelineRunsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.activity_runs = ActivityRunsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.triggers = TriggersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.trigger_runs = TriggerRunsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.data_flows = DataFlowsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.data_flow_debug_session = DataFlowDebugSessionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.managed_virtual_networks = ManagedVirtualNetworksOperations( + self._client, self._config, self._serialize, self._deserialize) + self.managed_private_endpoints = ManagedPrivateEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "DataFactoryManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py new file mode 100644 
index 00000000000..c1da8c996a3 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._factories_operations import FactoriesOperations +from ._exposure_control_operations import ExposureControlOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations +from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations +from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from ._linked_services_operations import LinkedServicesOperations +from ._datasets_operations import DatasetsOperations +from ._pipelines_operations import PipelinesOperations +from ._pipeline_runs_operations import PipelineRunsOperations +from ._activity_runs_operations import ActivityRunsOperations +from ._triggers_operations import TriggersOperations +from ._trigger_runs_operations import TriggerRunsOperations +from ._data_flows_operations import DataFlowsOperations +from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations +from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations +from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations +from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations +from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations + +__all__ = [ + 'Operations', + 'FactoriesOperations', + 'ExposureControlOperations', + 'IntegrationRuntimesOperations', + 'IntegrationRuntimeObjectMetadataOperations', + 'IntegrationRuntimeNodesOperations', + 'LinkedServicesOperations', + 'DatasetsOperations', + 'PipelinesOperations', + 'PipelineRunsOperations', + 'ActivityRunsOperations', + 'TriggersOperations', + 'TriggerRunsOperations', + 'DataFlowsOperations', + 'DataFlowDebugSessionOperations', + 'ManagedVirtualNetworksOperations', + 'ManagedPrivateEndpointsOperations', + 'PrivateEndPointConnectionsOperations', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourcesOperations', +] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py new file mode 100644 index 00000000000..39382a45d74 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py @@ -0,0 +1,111 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ActivityRunsOperations: + """ActivityRunsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def query_by_pipeline_run( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + filter_parameters: "models.RunFilterParameters", + **kwargs + ) -> "models.ActivityRunsQueryResponse": + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. 
+ :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ActivityRunsQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_pipeline_run.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py new file mode 100644 index 00000000000..dbb85249ab9 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -0,0 +1,507 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataFlowDebugSessionOperations: + """DataFlowDebugSessionOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _create_initial( + self, + resource_group_name: str, + factory_name: str, + request: "models.CreateDataFlowDebugSessionRequest", + **kwargs + ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') + 
body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) + + if response.status_code == 202: + response_headers['location']=self._deserialize('str', response.headers.get('location')) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + + async def begin_create( + self, + resource_group_name: str, + factory_name: str, + request: "models.CreateDataFlowDebugSessionRequest", + **kwargs + ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]: + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug session definition. + :type request: ~data_factory_management_client.models.CreateDataFlowDebugSessionRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + request=request, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore + + def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.QueryDataFlowDebugSessionsResponse"]: + """Query all active data flow debug sessions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore + + async def add_data_flow( + self, + resource_group_name: str, + factory_name: str, + request: "models.DataFlowDebugPackage", + **kwargs + ) -> "models.AddDataFlowToDebugSessionResponse": + """Add a data flow into debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug session definition with debug content. 
+ :type request: ~data_factory_management_client.models.DataFlowDebugPackage + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.add_data_flow.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(request, 'DataFlowDebugPackage') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + request: "models.DeleteDataFlowDebugSessionRequest", + **kwargs + ) -> None: + """Deletes a data flow debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug session definition for deletion. 
+ :type request: ~data_factory_management_client.models.DeleteDataFlowDebugSessionRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore + + async def _execute_command_initial( + self, + resource_group_name: str, + factory_name: str, + request: "models.DataFlowDebugCommandRequest", + **kwargs + ) -> Optional["models.DataFlowDebugCommandResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._execute_command_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, 
min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) + + if response.status_code == 202: + response_headers['location']=self._deserialize('str', response.headers.get('location')) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore + + async def begin_execute_command( + self, + resource_group_name: str, + factory_name: str, + request: "models.DataFlowDebugCommandRequest", + **kwargs + ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]: + """Execute a data flow debug command. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug command definition. + :type request: ~data_factory_management_client.models.DataFlowDebugCommandRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._execute_command_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + request=request, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py new file mode 100644 index 00000000000..20d1ec288ce --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py @@ -0,0 +1,319 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataFlowsOperations: + """DataFlowsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + data_flow: "models.DataFlowResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.DataFlowResource": + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param data_flow: Data flow resource definition. + :type data_flow: ~data_factory_management_client.models.DataFlowResource + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DataFlowResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(data_flow, 'DataFlowResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.DataFlowResource": + """Gets a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the + ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DataFlowResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + data_flow_name: str, + **kwargs + ) -> None: + """Deletes a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. 
+ :type data_flow_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.DataFlowListResponse"]: + """Lists data flows. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DataFlowListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py new file mode 100644 index 00000000000..23cd39c246d --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py @@ -0,0 +1,321 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DatasetsOperations: + """DatasetsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.DatasetListResponse"]: + """Lists datasets. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DatasetListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DatasetListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + dataset: "models.DatasetResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.DatasetResource": + """Creates or updates a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param dataset: Dataset resource definition. + :type dataset: ~data_factory_management_client.models.DatasetResource + :param if_match: ETag of the dataset entity. 
Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DatasetResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(dataset, 'DatasetResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> Optional["models.DatasetResource"]: + """Gets a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param if_none_match: ETag of the dataset entity. Should only be specified for get. 
If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DatasetResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + dataset_name: str, + **kwargs + ) -> None: + """Deletes a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. 
+ :type dataset_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py new file mode 100644 index 00000000000..df180e52804 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py @@ -0,0 +1,235 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ExposureControlOperations: + """ExposureControlOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get_feature_value( + self, + location_id: str, + exposure_control_request: "models.ExposureControlRequest", + **kwargs + ) -> "models.ExposureControlResponse": + """Get exposure control feature for specific location. + + :param location_id: The location identifier. + :type location_id: str + :param exposure_control_request: The exposure control request. + :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ExposureControlResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get_feature_value.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'locationId': self._serialize.url("location_id", location_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExposureControlResponse', pipeline_response) + + if cls: + return 
cls(pipeline_response, deserialized, {}) + + return deserialized + get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore + + async def get_feature_value_by_factory( + self, + resource_group_name: str, + factory_name: str, + exposure_control_request: "models.ExposureControlRequest", + **kwargs + ) -> "models.ExposureControlResponse": + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param exposure_control_request: The exposure control request. + :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ExposureControlResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get_feature_value_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExposureControlResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore + + async def query_feature_values_by_factory( + self, + resource_group_name: 
str, + factory_name: str, + exposure_control_batch_request: "models.ExposureControlBatchRequest", + **kwargs + ) -> "models.ExposureControlBatchResponse": + """Get list of exposure control features for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param exposure_control_batch_request: The exposure control request for list of features. + :type exposure_control_batch_request: ~data_factory_management_client.models.ExposureControlBatchRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExposureControlBatchResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_feature_values_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_feature_values_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py new file mode 100644 index 00000000000..f8b64723a03 --- 
/dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py @@ -0,0 +1,631 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class FactoriesOperations: + """FactoriesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.FactoryListResponse"]: + """Lists factories under the specified subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FactoryListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('FactoryListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore + + async def configure_factory_repo( + self, + location_id: str, + factory_repo_update: "models.FactoryRepoUpdate", + **kwargs + ) -> "models.Factory": + """Updates a factory's repo information. + + :param location_id: The location identifier. + :type location_id: str + :param factory_repo_update: Update factory repo request definition. 
+ :type factory_repo_update: ~data_factory_management_client.models.FactoryRepoUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.configure_factory_repo.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'locationId': self._serialize.url("location_id", location_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs + ) -> AsyncIterable["models.FactoryListResponse"]: + """Lists factories. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FactoryListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('FactoryListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + factory: "models.Factory", + if_match: Optional[str] = None, + **kwargs + ) -> "models.Factory": + """Creates or updates a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param factory: Factory resource definition. + :type factory: ~data_factory_management_client.models.Factory + :param if_match: ETag of the factory entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(factory, 'Factory') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + factory_name: str, + factory_update_parameters: "models.FactoryUpdateParameters", + **kwargs + ) -> "models.Factory": + """Updates a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param factory_update_parameters: The parameters for updating a factory. 
+ :type factory_update_parameters: ~data_factory_management_client.models.FactoryUpdateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> Optional["models.Factory"]: + """Gets a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> None: + """Deletes a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + async def get_git_hub_access_token( + self, + resource_group_name: str, + factory_name: str, + git_hub_access_token_request: "models.GitHubAccessTokenRequest", + **kwargs + ) -> "models.GitHubAccessTokenResponse": + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request definition. 
+ :type git_hub_access_token_request: ~data_factory_management_client.models.GitHubAccessTokenRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GitHubAccessTokenResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get_git_hub_access_token.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore + + async def get_data_plane_access( + self, + resource_group_name: str, + factory_name: str, + policy: "models.UserAccessPolicy", + **kwargs + ) -> "models.AccessPolicyResponse": + """Get Data Plane access. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param policy: Data Plane user access policy definition. 
+ :type policy: ~data_factory_management_client.models.UserAccessPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessPolicyResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.AccessPolicyResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get_data_plane_access.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(policy, 'UserAccessPolicy') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py new file mode 100644 index 00000000000..098d00bbb3e --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -0,0 +1,311 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
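# A minimal usage sketch for the async factories operations above, assuming the vendored aio
# client is importable as azext_datafactory.vendored_sdks.datafactory.aio.DataFactoryManagementClient
# and that azure-identity is installed; the resource names are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import models
from azext_datafactory.vendored_sdks.datafactory.aio import DataFactoryManagementClient

async def upsert_factory(subscription_id: str) -> "models.Factory":
    credential = DefaultAzureCredential()
    async with DataFactoryManagementClient(credential, subscription_id) as client:
        # PUT .../factories/{factoryName}; if_match is optional and only needed for
        # conditional updates of an existing factory.
        factory = await client.factories.create_or_update(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            factory=models.Factory(location="East US"),
        )
    await credential.close()
    return factory

# asyncio.run(upsert_factory("<subscription-id>"))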
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimeNodesOperations: + """IntegrationRuntimeNodesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + **kwargs + ) -> "models.SelfHostedIntegrationRuntimeNode": + """Gets a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. 
+ :type node_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + **kwargs + ) -> None: + """Deletes a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. 
+ :type node_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + update_integration_runtime_node_request: "models.UpdateIntegrationRuntimeNodeRequest", + **kwargs + ) -> "models.SelfHostedIntegrationRuntimeNode": + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. 
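# A minimal sketch of the node update operation above: patching the concurrency limit of a
# self-hosted integration runtime node. Assumes an aio client as in the earlier sketch, the
# integration_runtime_nodes attribute, and that UpdateIntegrationRuntimeNodeRequest exposes
# concurrent_jobs_limit; all resource names are placeholders.
from azext_datafactory.vendored_sdks.datafactory import models

async def cap_node_concurrency(client, resource_group_name: str, factory_name: str,
                               integration_runtime_name: str, node_name: str):
    # PATCH .../integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}
    node = await client.integration_runtime_nodes.update(
        resource_group_name=resource_group_name,
        factory_name=factory_name,
        integration_runtime_name=integration_runtime_name,
        node_name=node_name,
        update_integration_runtime_node_request=models.UpdateIntegrationRuntimeNodeRequest(
            concurrent_jobs_limit=2,
        ),
    )
    return node  # models.SelfHostedIntegrationRuntimeNode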
+ :type update_integration_runtime_node_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeNodeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + async def get_ip_address( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + node_name: str, + **kwargs + ) -> "models.IntegrationRuntimeNodeIpAddress": + """Get the IP address of self-hosted integration runtime node. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_ip_address.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py new file mode 100644 index 00000000000..a1825a0d1bb --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -0,0 +1,240 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimeObjectMetadataOperations: + """IntegrationRuntimeObjectMetadataOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _refresh_initial( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> Optional["models.SsisObjectMetadataStatusResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._refresh_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, 
header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + + async def begin_refresh( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]: + """Refresh a SSIS integration runtime object metadata. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
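# A minimal sketch of the long-running refresh above: begin_refresh returns an AsyncLROPoller,
# and awaiting poller.result() polls .../refreshObjectMetadata until a terminal state is reached.
# Assumes the operation group is attached to the client as integration_runtime_object_metadata.
async def refresh_ssis_metadata(client, resource_group_name: str, factory_name: str,
                                integration_runtime_name: str):
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name=resource_group_name,
        factory_name=factory_name,
        integration_runtime_name=integration_runtime_name,
    )
    # Waits (asynchronously) for the refresh operation to complete and returns its status.
    status = await poller.result()
    return status  # models.SsisObjectMetadataStatusResponse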
+ :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._refresh_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + get_metadata_request: Optional["models.GetSsisObjectMetadataRequest"] = None, + **kwargs + ) -> "models.SsisObjectMetadataListResponse": + """Get a SSIS integration runtime object metadata by specified path. The return is pageable + metadata list. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. 
+ :type get_metadata_request: ~data_factory_management_client.models.GetSsisObjectMetadataRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SsisObjectMetadataListResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if get_metadata_request is not None: + body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py new file mode 100644 index 00000000000..6b27efc1819 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py @@ -0,0 +1,1213 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimesOperations: + """IntegrationRuntimesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]: + """Lists integration runtimes. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + integration_runtime: "models.IntegrationRuntimeResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param integration_runtime: Integration runtime resource definition. 
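# A minimal sketch of the paged listing above: list_by_factory returns an AsyncItemPaged, so
# results are consumed with "async for" and nextLink paging is followed transparently.
# Assumes an aio client with the integration_runtimes attribute; names are placeholders.
async def print_integration_runtimes(client, resource_group_name: str, factory_name: str):
    async for runtime in client.integration_runtimes.list_by_factory(
        resource_group_name=resource_group_name,
        factory_name=factory_name,
    ):
        # Each item is a models.IntegrationRuntimeResource.
        print(runtime.name)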
+ :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeResource + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> Optional["models.IntegrationRuntimeResource"]: + """Gets an integration runtime. 
+ + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. + If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + update_integration_runtime_request: "models.UpdateIntegrationRuntimeRequest", + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Updates an integration runtime. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + :type update_integration_runtime_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + """Deletes an integration runtime. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def get_status( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeStatusResponse": + """Gets detailed status information for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + + async def get_connection_info( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeConnectionInfo": + """Gets the on-premises integration runtime connection information for encrypting the on-premises + data source credentials. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_connection_info.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore + + async def regenerate_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + regenerate_key_parameters: "models.IntegrationRuntimeRegenerateKeyParameters", + **kwargs + ) -> "models.IntegrationRuntimeAuthKeys": + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. 
+ :type regenerate_key_parameters: ~data_factory_management_client.models.IntegrationRuntimeRegenerateKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_auth_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore + + async def list_auth_keys( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeAuthKeys": + """Retrieves the authentication keys for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.list_auth_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + + async def _start_initial( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> Optional["models.IntegrationRuntimeStatusResponse"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + async def begin_start( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]: + """Starts a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + async def _stop_initial( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", 
integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + async def begin_stop( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Stops a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + async def sync_credentials( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + """Force the integration runtime to synchronize credentials across integration runtime nodes, and + this will override the credentials across all worker nodes with those available on the + dispatcher node. If you already have the latest credential backup file, you should manually + import it (preferred) on any self-hosted integration runtime node rather than using this API directly. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.sync_credentials.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore + + async def get_monitoring_data( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeMonitoringData": + """Get the integration runtime monitoring data, which includes the monitor data for all the nodes + under this integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeMonitoringData, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_monitoring_data.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore + + async def upgrade( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + """Upgrade the self-hosted integration runtime to the latest version if one is available. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.upgrade.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore + + async def remove_links( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + linked_integration_runtime_request: "models.LinkedIntegrationRuntimeRequest", + **kwargs + ) -> None: + """Remove all linked integration runtimes under a specific data factory in a self-hosted integration + runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime.
+ :type linked_integration_runtime_request: ~data_factory_management_client.models.LinkedIntegrationRuntimeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.remove_links.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + remove_links.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + + async def create_linked_integration_runtime( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + create_linked_integration_runtime_request: "models.CreateLinkedIntegrationRuntimeRequest", + **kwargs + ) -> "models.IntegrationRuntimeStatusResponse": + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. 
+ :type create_linked_integration_runtime_request: ~data_factory_management_client.models.CreateLinkedIntegrationRuntimeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_linked_integration_runtime.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py new file mode 100644 index 00000000000..e6444acf5f7 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py @@ -0,0 +1,322 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class LinkedServicesOperations: + """LinkedServicesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.LinkedServiceListResponse"]: + """Lists linked services. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + linked_service: "models.LinkedServiceResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.LinkedServiceResource": + """Creates or updates a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param linked_service: Linked service resource definition. 
+ :type linked_service: ~data_factory_management_client.models.LinkedServiceResource + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.LinkedServiceResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(linked_service, 'LinkedServiceResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> Optional["models.LinkedServiceResource"]: + """Gets a linked service. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param if_none_match: ETag of the linked service entity. Should only be specified for get. If + the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.LinkedServiceResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + linked_service_name: str, + **kwargs + ) -> None: + """Deletes a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. 
+ :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py new file mode 100644 index 00000000000..3a0dfd46129 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -0,0 +1,336 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ManagedPrivateEndpointsOperations: + """ManagedPrivateEndpointsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + **kwargs + ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]: + """Lists managed private endpoints. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. 
+ :type managed_virtual_network_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + managed_private_endpoint: "models.ManagedPrivateEndpointResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedPrivateEndpointResource": + """Creates or updates a 
managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. + :type managed_private_endpoint: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedPrivateEndpointResource": + """Gets a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", 
accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_private_endpoint_name: str, + **kwargs + ) -> None: + """Deletes a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 
204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py new file mode 100644 index 00000000000..908d7b58ffe --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -0,0 +1,261 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ManagedVirtualNetworksOperations: + """ManagedVirtualNetworksOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]: + """Lists managed Virtual Networks. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + managed_virtual_network_name: str, + managed_virtual_network: "models.ManagedVirtualNetworkResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedVirtualNetworkResource": + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. 
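A hedged sketch of driving create_or_update for a managed virtual network follows. The empty ManagedVirtualNetwork properties payload is a guess at the minimal request body rather than a documented contract, the models import path assumes the vendored package layout, and `client` is an assumed authenticated aio DataFactoryManagementClient.

# Sketch only: model construction and attribute names below are assumptions.
from azext_datafactory.vendored_sdks.datafactory import models


async def ensure_managed_vnet(client) -> None:
    resource = models.ManagedVirtualNetworkResource(
        properties=models.ManagedVirtualNetwork()
    )
    created = await client.managed_virtual_networks.create_or_update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        managed_virtual_network_name="default",
        managed_virtual_network=resource,
    )
    # The returned resource carries the server-assigned etag for later If-Match updates.
    print(created.name, created.etag)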
+ :type managed_virtual_network_name: str + :param managed_virtual_network: Managed Virtual Network resource definition. + :type managed_virtual_network: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + 
managed_virtual_network_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.ManagedVirtualNetworkResource": + """Gets a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py new file mode 100644 index 
00000000000..8d96ffc136c --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.OperationListResponse"]: + """Lists the available Azure Data Factory API operations. 
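A brief consumption sketch for the operations pager: the call needs no factory context, and `client` is again an assumed, authenticated aio DataFactoryManagementClient.

# Sketch only: `client` is an assumed, authenticated aio DataFactoryManagementClient.
async def print_provider_operations(client) -> None:
    # Enumerates /providers/Microsoft.DataFactory/operations; no resource group is involved.
    async for op in client.operations.list():
        print(op.name)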
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py new file mode 100644 index 00000000000..8d4b4efdb99 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py @@ -0,0 +1,234 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PipelineRunsOperations: + """PipelineRunsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: "models.RunFilterParameters", + **kwargs + ) -> "models.PipelineRunsQueryResponse": + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. + :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRunsQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + **kwargs + ) -> "models.PipelineRun": + """Get a pipeline run by its run ID. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRun, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineRun + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PipelineRun', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore + + async def cancel( + self, + resource_group_name: str, + factory_name: str, + run_id: str, + is_recursive: Optional[bool] = None, + **kwargs + ) -> None: + """Cancel a pipeline run by its run ID. + + :param resource_group_name: The resource group name. 
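To tie query_by_factory, the run identifier, and cancel together, here is a hedged sketch. `client` is an assumed authenticated aio DataFactoryManagementClient, the models import path assumes the vendored layout, and RunFilterParameters is built with what appear to be its two required time-window arguments.

# Sketch only: client, import path, and filter construction are assumptions.
from datetime import datetime, timedelta, timezone

from azext_datafactory.vendored_sdks.datafactory import models


async def cancel_recent_runs(client) -> None:
    now = datetime.now(timezone.utc)
    filters = models.RunFilterParameters(
        last_updated_after=now - timedelta(days=1),
        last_updated_before=now,
    )
    result = await client.pipeline_runs.query_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        filter_parameters=filters,
    )
    for run in result.value:
        # is_recursive=True also cancels child pipelines triggered by this run.
        await client.pipeline_runs.cancel(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            run_id=run.run_id,
            is_recursive=True,
        )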
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current + pipeline. + :type is_recursive: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.cancel.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if is_recursive is not None: + query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py new file mode 100644 index 00000000000..1c73e154e35 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py @@ -0,0 +1,419 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PipelinesOperations: + """PipelinesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.PipelineListResponse"]: + """Lists pipelines. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PipelineListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PipelineListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + pipeline: "models.PipelineResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.PipelineResource": + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~data_factory_management_client.models.PipelineResource + :param if_match: ETag of the pipeline entity. 
Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(pipeline, 'PipelineResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> Optional["models.PipelineResource"]: + """Gets a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param if_none_match: ETag of the pipeline entity. Should only be specified for get. 
If the + ETag matches the existing entity tag, or if * was provided, then no content will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + **kwargs + ) -> None: + """Deletes a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. 
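The Optional return type of the pipeline get above is worth illustrating: with if_none_match set, a 304 response deserializes nothing and the coroutine yields None. `client` and the names below are assumptions.

# Sketch only: `client` is an assumed, authenticated aio DataFactoryManagementClient.
async def fetch_pipeline_if_changed(client, etag: str):
    # Returns a PipelineResource on 200, or None when the server answers 304 Not Modified.
    return await client.pipelines.get(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        pipeline_name="examplePipeline",
        if_none_match=etag,
    )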
+ :type pipeline_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + async def create_run( + self, + resource_group_name: str, + factory_name: str, + pipeline_name: str, + reference_pipeline_run_id: Optional[str] = None, + is_recovery: Optional[bool] = None, + start_activity_name: Optional[str] = None, + start_from_failure: Optional[bool] = None, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ) -> "models.CreateRunResponse": + """Creates a run of a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the + parameters of the specified run will be used to create a new run. + :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified + referenced pipeline run and the new run will be grouped under the same groupId. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not + specified, all activities will run. + :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed + activities. 
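Finally, a hedged sketch of create_run: the pipeline parameter names and values are illustrative, and `client` is an assumed authenticated aio DataFactoryManagementClient.

# Sketch only: parameter payload and resource names are placeholders.
async def trigger_pipeline(client) -> str:
    run = await client.pipelines.create_run(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        pipeline_name="examplePipeline",
        parameters={"OutputBlobNameList": ["exampleoutput.csv"]},
    )
    # CreateRunResponse carries the identifier consumed by pipeline_runs.get / cancel.
    return run.run_id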
The property will be used only if startActivityName is not specified. + :type start_from_failure: bool + :param parameters: Parameters of the pipeline run. These parameters will be used only if the + runId is not specified. + :type parameters: dict[str, object] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CreateRunResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.CreateRunResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_run.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if reference_pipeline_run_id is not None: + query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + if is_recovery is not None: + query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') + if start_activity_name is not None: + query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') + if start_from_failure is not None: + query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CreateRunResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_run.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py new file mode 100644 index 00000000000..4dabd9932f8 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndPointConnectionsOperations: + """PrivateEndPointConnectionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.PrivateEndpointConnectionListResponse"]: + """Lists Private endpoint connections. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PrivateEndpointConnectionListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py new file mode 100644 index 00000000000..90ee37632ce --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -0,0 +1,245 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
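# Illustrative usage sketch (editorial, not part of the generated diff): iterating the
# AsyncItemPaged returned by `list_by_factory` above. The pager fetches pages lazily as it is
# consumed; the `private_end_point_connections` attribute name is an assumption based on the
# operations class name, and the resource names are placeholders.
async def list_private_endpoint_connection_names(client) -> list:
    names = []
    async for connection in client.private_end_point_connections.list_by_factory(
        "exampleResourceGroup", "exampleFactoryName"
    ):
        names.append(connection.name)
    return names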
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionOperations: + """PrivateEndpointConnectionOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + private_endpoint_wrapper: "models.PrivateLinkConnectionApprovalRequestResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.PrivateEndpointConnectionResource": + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: + :type private_endpoint_wrapper: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequestResource + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. 
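# Illustrative usage sketch (editorial, not part of the generated diff): approving a pending
# private endpoint connection with `create_or_update` above. The nested model shapes
# (PrivateLinkConnectionApprovalRequest / PrivateLinkConnectionState) and the
# `private_endpoint_connection` attribute name are assumptions; resource names are placeholders.
from azext_datafactory.vendored_sdks.datafactory import models


async def approve_connection(client, connection_name: str):
    approval = models.PrivateLinkConnectionApprovalRequestResource(
        properties=models.PrivateLinkConnectionApprovalRequest(
            private_link_service_connection_state=models.PrivateLinkConnectionState(
                status="Approved",
                description="Approved by the factory administrator",
            )
        )
    )
    return await client.private_endpoint_connection.create_or_update(
        "exampleResourceGroup",
        "exampleFactoryName",
        connection_name,
        private_endpoint_wrapper=approval,
    )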
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.PrivateEndpointConnectionResource": + """Gets a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
+ :type private_endpoint_connection_name: str + :param if_none_match: ETag of the private endpoint connection entity. Should only be specified + for get. If the ETag matches the existing entity tag, or if * was provided, then no content + will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + **kwargs + ) -> None: + """Deletes a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py new file mode 100644 index 00000000000..fd47a6c7373 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_private_link_resources_operations.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations: + """PrivateLinkResourcesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> "models.PrivateLinkResourcesWrapper": + """Gets the private link resources. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourcesWrapper, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PrivateLinkResourcesWrapper + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourcesWrapper"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourcesWrapper', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py new file mode 100644 index 00000000000..7fbcbc61f39 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py @@ -0,0 +1,233 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TriggerRunsOperations: + """TriggerRunsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def rerun( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + run_id: str, + **kwargs + ) -> None: + """Rerun single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
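# Illustrative usage sketch (editorial, not part of the generated diff): rerunning a single
# trigger instance by runId with the `rerun` operation above. The `trigger_runs` operation-group
# name matches the client factory wiring in this extension; identifiers are placeholders.
async def rerun_trigger_instance(client, trigger_name: str, run_id: str) -> None:
    # Completes with no payload; a non-200 response raises HttpResponseError.
    await client.trigger_runs.rerun(
        "exampleResourceGroup",
        "exampleFactoryName",
        trigger_name,
        run_id,
    )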
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.rerun.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + + async def cancel( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + run_id: str, + **kwargs + ) -> None: + """Cancel a single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.cancel.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: "models.RunFilterParameters", + **kwargs + ) -> "models.TriggerRunsQueryResponse": + """Query trigger runs. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. 
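# Illustrative usage sketch (editorial, not part of the generated diff): querying the last day
# of trigger runs with `query_by_factory` above. The RunFilterParameters keyword arguments
# (last_updated_after / last_updated_before) are assumed from this SDK's models module; resource
# names are placeholders.
from datetime import datetime, timedelta, timezone

from azext_datafactory.vendored_sdks.datafactory import models


async def recent_trigger_runs(client):
    now = datetime.now(timezone.utc)
    filters = models.RunFilterParameters(
        last_updated_after=now - timedelta(days=1),
        last_updated_before=now,
    )
    response = await client.trigger_runs.query_by_factory(
        "exampleResourceGroup", "exampleFactoryName", filters
    )
    return response.value  # list of TriggerRun objects for the window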
+ :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerRunsQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py new file mode 100644 index 00000000000..a9f7bd54c4d --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py @@ -0,0 +1,927 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TriggersOperations: + """TriggersOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.TriggerListResponse"]: + """Lists triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either TriggerListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('TriggerListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore + + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + filter_parameters: "models.TriggerFilterParameters", + **kwargs + ) -> "models.TriggerQueryResponse": + """Query triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the triggers. 
+ :type filter_parameters: ~data_factory_management_client.models.TriggerFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + trigger: "models.TriggerResource", + if_match: Optional[str] = None, + **kwargs + ) -> "models.TriggerResource": + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param trigger: Trigger resource definition. + :type trigger: ~data_factory_management_client.models.TriggerResource + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(trigger, 'TriggerResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> Optional["models.TriggerResource"]: + """Gets a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + """Deletes a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + async def _subscribe_to_events_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> Optional["models.TriggerSubscriptionOperationStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._subscribe_to_events_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + 
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _subscribe_to_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + async def begin_subscribe_to_events( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: + """Subscribe event trigger to events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
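# Illustrative usage sketch (editorial, not part of the generated diff): driving the
# long-running operation returned by `begin_subscribe_to_events` above. Awaiting `result()`
# polls until the subscription reaches a terminal state; `polling_interval` is the optional
# override described in the docstring, and the resource names are placeholders.
async def subscribe_trigger(client, trigger_name: str) -> str:
    poller = await client.triggers.begin_subscribe_to_events(
        "exampleResourceGroup",
        "exampleFactoryName",
        trigger_name,
        polling_interval=10,  # seconds between polls when no Retry-After header is returned
    )
    status = await poller.result()  # TriggerSubscriptionOperationStatus
    return status.status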
+ :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._subscribe_to_events_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + async def get_event_subscription_status( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> "models.TriggerSubscriptionOperationStatus": + """Get a trigger's event subscription status. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerSubscriptionOperationStatus, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_event_subscription_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore + + async def _unsubscribe_from_events_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> Optional["models.TriggerSubscriptionOperationStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._unsubscribe_from_events_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _unsubscribe_from_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + async def begin_unsubscribe_from_events( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: + """Unsubscribe event trigger from events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._unsubscribe_from_events_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + async def _start_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + async def begin_start( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Starts a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + async def _stop_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: 
Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + + async def begin_stop( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + 
else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 160afca0562..1f1ab102631 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -21,11 +21,15 @@ from ._models_py3 import AmazonRedshiftLinkedService from ._models_py3 import AmazonRedshiftSource from ._models_py3 import AmazonRedshiftTableDataset + from ._models_py3 import AmazonS3CompatibleLinkedService + from ._models_py3 import AmazonS3CompatibleLocation + from ._models_py3 import AmazonS3CompatibleReadSettings from ._models_py3 import AmazonS3Dataset from ._models_py3 import AmazonS3LinkedService from ._models_py3 import AmazonS3Location from ._models_py3 import AmazonS3ReadSettings from ._models_py3 import AppendVariableActivity + from ._models_py3 import ArmIdWrapper from ._models_py3 import AvroDataset from ._models_py3 import AvroFormat from ._models_py3 import AvroSink @@ -58,6 +62,12 @@ from ._models_py3 import AzureDataLakeStoreSink from ._models_py3 import AzureDataLakeStoreSource from ._models_py3 import AzureDataLakeStoreWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from ._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from ._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureFileStorageLinkedService from ._models_py3 import AzureFileStorageLocation @@ -67,35 +77,35 @@ from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService from ._models_py3 import AzureKeyVaultSecretReference - from ._models_py3 import AzureMariaDBLinkedService - from ._models_py3 import AzureMariaDBSource - from ._models_py3 import AzureMariaDBTableDataset + from ._models_py3 import AzureMariaDbLinkedService + from ._models_py3 import AzureMariaDbSource + from ._models_py3 import AzureMariaDbTableDataset from ._models_py3 import AzureMlBatchExecutionActivity from ._models_py3 import AzureMlExecutePipelineActivity from ._models_py3 import AzureMlLinkedService from ._models_py3 import AzureMlServiceLinkedService from ._models_py3 import AzureMlUpdateResourceActivity from ._models_py3 import AzureMlWebServiceFile - from ._models_py3 import AzureMySQLLinkedService - from ._models_py3 import AzureMySQLSink - from ._models_py3 import AzureMySQLSource - from ._models_py3 import AzureMySQLTableDataset - from ._models_py3 import AzurePostgreSQLLinkedService - from ._models_py3 import AzurePostgreSQLSink - from ._models_py3 import AzurePostgreSQLSource - from ._models_py3 import AzurePostgreSQLTableDataset + from ._models_py3 import AzureMySqlLinkedService + from ._models_py3 import AzureMySqlSink + from ._models_py3 import AzureMySqlSource + from ._models_py3 import AzureMySqlTableDataset + from ._models_py3 import AzurePostgreSqlLinkedService + from ._models_py3 import 
AzurePostgreSqlSink + from ._models_py3 import AzurePostgreSqlSource + from ._models_py3 import AzurePostgreSqlTableDataset from ._models_py3 import AzureQueueSink - from ._models_py3 import AzureSQLDWLinkedService - from ._models_py3 import AzureSQLDWTableDataset - from ._models_py3 import AzureSQLDatabaseLinkedService - from ._models_py3 import AzureSQLMiLinkedService - from ._models_py3 import AzureSQLMiTableDataset - from ._models_py3 import AzureSQLSink - from ._models_py3 import AzureSQLSource - from ._models_py3 import AzureSQLTableDataset from ._models_py3 import AzureSearchIndexDataset from ._models_py3 import AzureSearchIndexSink from ._models_py3 import AzureSearchLinkedService + from ._models_py3 import AzureSqlDatabaseLinkedService + from ._models_py3 import AzureSqlDwLinkedService + from ._models_py3 import AzureSqlDwTableDataset + from ._models_py3 import AzureSqlMiLinkedService + from ._models_py3 import AzureSqlMiTableDataset + from ._models_py3 import AzureSqlSink + from ._models_py3 import AzureSqlSource + from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService from ._models_py3 import AzureTableDataset from ._models_py3 import AzureTableSink @@ -115,6 +125,7 @@ from ._models_py3 import ChainingTrigger from ._models_py3 import CloudError from ._models_py3 import CmdkeySetup + from ._models_py3 import CmkIdentityDefinition from ._models_py3 import CommonDataServiceForAppsEntityDataset from ._models_py3 import CommonDataServiceForAppsLinkedService from ._models_py3 import CommonDataServiceForAppsSink @@ -127,17 +138,18 @@ from ._models_py3 import ConnectionStateProperties from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity + from ._models_py3 import CopyActivityLogSettings from ._models_py3 import CopySink from ._models_py3 import CopySource from ._models_py3 import CopyTranslator - from ._models_py3 import CosmosDBLinkedService - from ._models_py3 import CosmosDBMongoDBApiCollectionDataset - from ._models_py3 import CosmosDBMongoDBApiLinkedService - from ._models_py3 import CosmosDBMongoDBApiSink - from ._models_py3 import CosmosDBMongoDBApiSource - from ._models_py3 import CosmosDBSQLApiCollectionDataset - from ._models_py3 import CosmosDBSQLApiSink - from ._models_py3 import CosmosDBSQLApiSource + from ._models_py3 import CosmosDbLinkedService + from ._models_py3 import CosmosDbMongoDbApiCollectionDataset + from ._models_py3 import CosmosDbMongoDbApiLinkedService + from ._models_py3 import CosmosDbMongoDbApiSink + from ._models_py3 import CosmosDbMongoDbApiSource + from ._models_py3 import CosmosDbSqlApiCollectionDataset + from ._models_py3 import CosmosDbSqlApiSink + from ._models_py3 import CosmosDbSqlApiSource from ._models_py3 import CouchbaseLinkedService from ._models_py3 import CouchbaseSource from ._models_py3 import CouchbaseTableDataset @@ -149,14 +161,14 @@ from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService from ._models_py3 import CustomDataset + from ._models_py3 import CustomEventsTrigger from ._models_py3 import CustomSetupBase - from ._models_py3 import DWCopyCommandDefaultValue - from ._models_py3 import DWCopyCommandSettings from ._models_py3 import DataFlow from ._models_py3 import DataFlowDebugCommandPayload from ._models_py3 import DataFlowDebugCommandRequest from ._models_py3 import DataFlowDebugCommandResponse from ._models_py3 import DataFlowDebugPackage + from ._models_py3 import DataFlowDebugPackageDebugSettings from 
._models_py3 import DataFlowDebugResource from ._models_py3 import DataFlowDebugSessionInfo from ._models_py3 import DataFlowFolder @@ -185,6 +197,8 @@ from ._models_py3 import DatasetResource from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -198,12 +212,14 @@ from ._models_py3 import DelimitedTextWriteSettings from ._models_py3 import DependencyReference from ._models_py3 import DistcpSettings - from ._models_py3 import DocumentDBCollectionDataset - from ._models_py3 import DocumentDBCollectionSink - from ._models_py3 import DocumentDBCollectionSource + from ._models_py3 import DocumentDbCollectionDataset + from ._models_py3 import DocumentDbCollectionSink + from ._models_py3 import DocumentDbCollectionSource from ._models_py3 import DrillLinkedService from ._models_py3 import DrillSource from ._models_py3 import DrillTableDataset + from ._models_py3 import DwCopyCommandDefaultValue + from ._models_py3 import DwCopyCommandSettings from ._models_py3 import DynamicsAxLinkedService from ._models_py3 import DynamicsAxResourceDataset from ._models_py3 import DynamicsAxSource @@ -218,6 +234,7 @@ from ._models_py3 import EloquaLinkedService from ._models_py3 import EloquaObjectDataset from ._models_py3 import EloquaSource + from ._models_py3 import EncryptionConfiguration from ._models_py3 import EntityReference from ._models_py3 import EnvironmentVariableSetup from ._models_py3 import ExcelDataset @@ -347,6 +364,8 @@ from ._models_py3 import LinkedServiceListResponse from ._models_py3 import LinkedServiceReference from ._models_py3 import LinkedServiceResource + from ._models_py3 import LogLocationSettings + from ._models_py3 import LogSettings from ._models_py3 import LogStorageSettings from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService @@ -362,11 +381,12 @@ from ._models_py3 import ManagedPrivateEndpointResource from ._models_py3 import ManagedVirtualNetwork from ._models_py3 import ManagedVirtualNetworkListResponse + from ._models_py3 import ManagedVirtualNetworkReference from ._models_py3 import ManagedVirtualNetworkResource from ._models_py3 import MappingDataFlow - from ._models_py3 import MariaDBLinkedService - from ._models_py3 import MariaDBSource - from ._models_py3 import MariaDBTableDataset + from ._models_py3 import MariaDbLinkedService + from ._models_py3 import MariaDbSource + from ._models_py3 import MariaDbTableDataset from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset from ._models_py3 import MarketoSource @@ -374,17 +394,20 @@ from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset - from ._models_py3 import MongoDBCollectionDataset - from ._models_py3 import MongoDBCursorMethodsProperties - from ._models_py3 import MongoDBLinkedService - from ._models_py3 import MongoDBSource - from ._models_py3 import MongoDBV2CollectionDataset - from ._models_py3 import MongoDBV2LinkedService - from ._models_py3 import MongoDBV2Source + from ._models_py3 import MongoDbAtlasCollectionDataset + from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSource + from ._models_py3 import MongoDbCollectionDataset + from 
._models_py3 import MongoDbCursorMethodsProperties + from ._models_py3 import MongoDbLinkedService + from ._models_py3 import MongoDbSource + from ._models_py3 import MongoDbV2CollectionDataset + from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger - from ._models_py3 import MySQLLinkedService - from ._models_py3 import MySQLSource - from ._models_py3 import MySQLTableDataset + from ._models_py3 import MySqlLinkedService + from ._models_py3 import MySqlSource + from ._models_py3 import MySqlTableDataset from ._models_py3 import NetezzaLinkedService from ._models_py3 import NetezzaPartitionSettings from ._models_py3 import NetezzaSource @@ -407,6 +430,9 @@ from ._models_py3 import OperationMetricDimension from ._models_py3 import OperationMetricSpecification from ._models_py3 import OperationServiceSpecification + from ._models_py3 import OracleCloudStorageLinkedService + from ._models_py3 import OracleCloudStorageLocation + from ._models_py3 import OracleCloudStorageReadSettings from ._models_py3 import OracleLinkedService from ._models_py3 import OraclePartitionSettings from ._models_py3 import OracleServiceCloudLinkedService @@ -419,31 +445,44 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource from ._models_py3 import PhoenixLinkedService from ._models_py3 import PhoenixObjectDataset from ._models_py3 import PhoenixSource + from ._models_py3 import PipelineElapsedTimeMetricPolicy + from ._models_py3 import PipelineFolder from ._models_py3 import PipelineListResponse + from ._models_py3 import PipelinePolicy from ._models_py3 import PipelineReference from ._models_py3 import PipelineResource from ._models_py3 import PipelineRun from ._models_py3 import PipelineRunInvokedBy from ._models_py3 import PipelineRunsQueryResponse from ._models_py3 import PolybaseSettings - from ._models_py3 import PostgreSQLLinkedService - from ._models_py3 import PostgreSQLSource - from ._models_py3 import PostgreSQLTableDataset + from ._models_py3 import PostgreSqlLinkedService + from ._models_py3 import PostgreSqlSource + from ._models_py3 import PostgreSqlTableDataset from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset from ._models_py3 import PrestoSource + from ._models_py3 import PrivateEndpointConnectionListResponse + from ._models_py3 import PrivateEndpointConnectionResource + from ._models_py3 import PrivateLinkConnectionApprovalRequest + from ._models_py3 import PrivateLinkConnectionApprovalRequestResource + from ._models_py3 import PrivateLinkConnectionState + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourceProperties + from ._models_py3 import PrivateLinkResourcesWrapper from ._models_py3 import QueryDataFlowDebugSessionsResponse from ._models_py3 import QuickBooksLinkedService from ._models_py3 import QuickBooksObjectDataset @@ -454,6 +493,7 @@ from ._models_py3 import RedshiftUnloadSettings from ._models_py3 import 
RelationalSource from ._models_py3 import RelationalTableDataset + from ._models_py3 import RemotePrivateEndpointConnection from ._models_py3 import RerunTumblingWindowTrigger from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService @@ -467,18 +507,6 @@ from ._models_py3 import RunFilterParameters from ._models_py3 import RunQueryFilter from ._models_py3 import RunQueryOrderBy - from ._models_py3 import SQLDWSink - from ._models_py3 import SQLDWSource - from ._models_py3 import SQLMiSink - from ._models_py3 import SQLMiSource - from ._models_py3 import SQLPartitionSettings - from ._models_py3 import SQLServerLinkedService - from ._models_py3 import SQLServerSink - from ._models_py3 import SQLServerSource - from ._models_py3 import SQLServerStoredProcedureActivity - from ._models_py3 import SQLServerTableDataset - from ._models_py3 import SQLSink - from ._models_py3 import SQLSource from ._models_py3 import SalesforceLinkedService from ._models_py3 import SalesforceMarketingCloudLinkedService from ._models_py3 import SalesforceMarketingCloudObjectDataset @@ -544,6 +572,19 @@ from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource + from ._models_py3 import SqlAlwaysEncryptedProperties + from ._models_py3 import SqlDwSink + from ._models_py3 import SqlDwSource + from ._models_py3 import SqlMiSink + from ._models_py3 import SqlMiSource + from ._models_py3 import SqlPartitionSettings + from ._models_py3 import SqlServerLinkedService + from ._models_py3 import SqlServerSink + from ._models_py3 import SqlServerSource + from ._models_py3 import SqlServerStoredProcedureActivity + from ._models_py3 import SqlServerTableDataset + from ._models_py3 import SqlSink + from ._models_py3 import SqlSource from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset from ._models_py3 import SquareSource @@ -577,6 +618,8 @@ from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -643,11 +686,15 @@ from ._models import AmazonRedshiftLinkedService # type: ignore from ._models import AmazonRedshiftSource # type: ignore from ._models import AmazonRedshiftTableDataset # type: ignore + from ._models import AmazonS3CompatibleLinkedService # type: ignore + from ._models import AmazonS3CompatibleLocation # type: ignore + from ._models import AmazonS3CompatibleReadSettings # type: ignore from ._models import AmazonS3Dataset # type: ignore from ._models import AmazonS3LinkedService # type: ignore from ._models import AmazonS3Location # type: ignore from ._models import AmazonS3ReadSettings # type: ignore from ._models import AppendVariableActivity # type: ignore + from ._models import ArmIdWrapper # type: ignore from ._models import AvroDataset # type: ignore from ._models import AvroFormat # type: ignore from ._models import AvroSink # type: ignore @@ -680,6 +727,12 @@ from ._models import AzureDataLakeStoreSink # type: ignore from ._models import AzureDataLakeStoreSource # type: ignore from ._models import AzureDataLakeStoreWriteSettings # type: ignore + from ._models import AzureDatabricksDeltaLakeDataset # type: ignore + from ._models import AzureDatabricksDeltaLakeExportCommand # type: ignore + 
from ._models import AzureDatabricksDeltaLakeImportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeLinkedService # type: ignore + from ._models import AzureDatabricksDeltaLakeSink # type: ignore + from ._models import AzureDatabricksDeltaLakeSource # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore from ._models import AzureFileStorageLinkedService # type: ignore from ._models import AzureFileStorageLocation # type: ignore @@ -689,35 +742,35 @@ from ._models import AzureFunctionLinkedService # type: ignore from ._models import AzureKeyVaultLinkedService # type: ignore from ._models import AzureKeyVaultSecretReference # type: ignore - from ._models import AzureMariaDBLinkedService # type: ignore - from ._models import AzureMariaDBSource # type: ignore - from ._models import AzureMariaDBTableDataset # type: ignore + from ._models import AzureMariaDbLinkedService # type: ignore + from ._models import AzureMariaDbSource # type: ignore + from ._models import AzureMariaDbTableDataset # type: ignore from ._models import AzureMlBatchExecutionActivity # type: ignore from ._models import AzureMlExecutePipelineActivity # type: ignore from ._models import AzureMlLinkedService # type: ignore from ._models import AzureMlServiceLinkedService # type: ignore from ._models import AzureMlUpdateResourceActivity # type: ignore from ._models import AzureMlWebServiceFile # type: ignore - from ._models import AzureMySQLLinkedService # type: ignore - from ._models import AzureMySQLSink # type: ignore - from ._models import AzureMySQLSource # type: ignore - from ._models import AzureMySQLTableDataset # type: ignore - from ._models import AzurePostgreSQLLinkedService # type: ignore - from ._models import AzurePostgreSQLSink # type: ignore - from ._models import AzurePostgreSQLSource # type: ignore - from ._models import AzurePostgreSQLTableDataset # type: ignore + from ._models import AzureMySqlLinkedService # type: ignore + from ._models import AzureMySqlSink # type: ignore + from ._models import AzureMySqlSource # type: ignore + from ._models import AzureMySqlTableDataset # type: ignore + from ._models import AzurePostgreSqlLinkedService # type: ignore + from ._models import AzurePostgreSqlSink # type: ignore + from ._models import AzurePostgreSqlSource # type: ignore + from ._models import AzurePostgreSqlTableDataset # type: ignore from ._models import AzureQueueSink # type: ignore - from ._models import AzureSQLDWLinkedService # type: ignore - from ._models import AzureSQLDWTableDataset # type: ignore - from ._models import AzureSQLDatabaseLinkedService # type: ignore - from ._models import AzureSQLMiLinkedService # type: ignore - from ._models import AzureSQLMiTableDataset # type: ignore - from ._models import AzureSQLSink # type: ignore - from ._models import AzureSQLSource # type: ignore - from ._models import AzureSQLTableDataset # type: ignore from ._models import AzureSearchIndexDataset # type: ignore from ._models import AzureSearchIndexSink # type: ignore from ._models import AzureSearchLinkedService # type: ignore + from ._models import AzureSqlDatabaseLinkedService # type: ignore + from ._models import AzureSqlDwLinkedService # type: ignore + from ._models import AzureSqlDwTableDataset # type: ignore + from ._models import AzureSqlMiLinkedService # type: ignore + from ._models import AzureSqlMiTableDataset # type: ignore + from ._models import AzureSqlSink # type: ignore + from ._models import AzureSqlSource # type: ignore + from ._models import 
AzureSqlTableDataset # type: ignore from ._models import AzureStorageLinkedService # type: ignore from ._models import AzureTableDataset # type: ignore from ._models import AzureTableSink # type: ignore @@ -737,6 +790,7 @@ from ._models import ChainingTrigger # type: ignore from ._models import CloudError # type: ignore from ._models import CmdkeySetup # type: ignore + from ._models import CmkIdentityDefinition # type: ignore from ._models import CommonDataServiceForAppsEntityDataset # type: ignore from ._models import CommonDataServiceForAppsLinkedService # type: ignore from ._models import CommonDataServiceForAppsSink # type: ignore @@ -749,17 +803,18 @@ from ._models import ConnectionStateProperties # type: ignore from ._models import ControlActivity # type: ignore from ._models import CopyActivity # type: ignore + from ._models import CopyActivityLogSettings # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore from ._models import CopyTranslator # type: ignore - from ._models import CosmosDBLinkedService # type: ignore - from ._models import CosmosDBMongoDBApiCollectionDataset # type: ignore - from ._models import CosmosDBMongoDBApiLinkedService # type: ignore - from ._models import CosmosDBMongoDBApiSink # type: ignore - from ._models import CosmosDBMongoDBApiSource # type: ignore - from ._models import CosmosDBSQLApiCollectionDataset # type: ignore - from ._models import CosmosDBSQLApiSink # type: ignore - from ._models import CosmosDBSQLApiSource # type: ignore + from ._models import CosmosDbLinkedService # type: ignore + from ._models import CosmosDbMongoDbApiCollectionDataset # type: ignore + from ._models import CosmosDbMongoDbApiLinkedService # type: ignore + from ._models import CosmosDbMongoDbApiSink # type: ignore + from ._models import CosmosDbMongoDbApiSource # type: ignore + from ._models import CosmosDbSqlApiCollectionDataset # type: ignore + from ._models import CosmosDbSqlApiSink # type: ignore + from ._models import CosmosDbSqlApiSource # type: ignore from ._models import CouchbaseLinkedService # type: ignore from ._models import CouchbaseSource # type: ignore from ._models import CouchbaseTableDataset # type: ignore @@ -771,14 +826,14 @@ from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore from ._models import CustomDataset # type: ignore + from ._models import CustomEventsTrigger # type: ignore from ._models import CustomSetupBase # type: ignore - from ._models import DWCopyCommandDefaultValue # type: ignore - from ._models import DWCopyCommandSettings # type: ignore from ._models import DataFlow # type: ignore from ._models import DataFlowDebugCommandPayload # type: ignore from ._models import DataFlowDebugCommandRequest # type: ignore from ._models import DataFlowDebugCommandResponse # type: ignore from ._models import DataFlowDebugPackage # type: ignore + from ._models import DataFlowDebugPackageDebugSettings # type: ignore from ._models import DataFlowDebugResource # type: ignore from ._models import DataFlowDebugSessionInfo # type: ignore from ._models import DataFlowFolder # type: ignore @@ -807,6 +862,8 @@ from ._models import DatasetResource # type: ignore from ._models import DatasetSchemaDataElement # type: ignore from ._models import DatasetStorageFormat # type: ignore + from ._models import DatasetTarCompression # type: ignore + from ._models import DatasetTarGZipCompression # type: ignore from ._models import 
DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore from ._models import Db2Source # type: ignore @@ -820,12 +877,14 @@ from ._models import DelimitedTextWriteSettings # type: ignore from ._models import DependencyReference # type: ignore from ._models import DistcpSettings # type: ignore - from ._models import DocumentDBCollectionDataset # type: ignore - from ._models import DocumentDBCollectionSink # type: ignore - from ._models import DocumentDBCollectionSource # type: ignore + from ._models import DocumentDbCollectionDataset # type: ignore + from ._models import DocumentDbCollectionSink # type: ignore + from ._models import DocumentDbCollectionSource # type: ignore from ._models import DrillLinkedService # type: ignore from ._models import DrillSource # type: ignore from ._models import DrillTableDataset # type: ignore + from ._models import DwCopyCommandDefaultValue # type: ignore + from ._models import DwCopyCommandSettings # type: ignore from ._models import DynamicsAxLinkedService # type: ignore from ._models import DynamicsAxResourceDataset # type: ignore from ._models import DynamicsAxSource # type: ignore @@ -840,6 +899,7 @@ from ._models import EloquaLinkedService # type: ignore from ._models import EloquaObjectDataset # type: ignore from ._models import EloquaSource # type: ignore + from ._models import EncryptionConfiguration # type: ignore from ._models import EntityReference # type: ignore from ._models import EnvironmentVariableSetup # type: ignore from ._models import ExcelDataset # type: ignore @@ -969,6 +1029,8 @@ from ._models import LinkedServiceListResponse # type: ignore from ._models import LinkedServiceReference # type: ignore from ._models import LinkedServiceResource # type: ignore + from ._models import LogLocationSettings # type: ignore + from ._models import LogSettings # type: ignore from ._models import LogStorageSettings # type: ignore from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore @@ -984,11 +1046,12 @@ from ._models import ManagedPrivateEndpointResource # type: ignore from ._models import ManagedVirtualNetwork # type: ignore from ._models import ManagedVirtualNetworkListResponse # type: ignore + from ._models import ManagedVirtualNetworkReference # type: ignore from ._models import ManagedVirtualNetworkResource # type: ignore from ._models import MappingDataFlow # type: ignore - from ._models import MariaDBLinkedService # type: ignore - from ._models import MariaDBSource # type: ignore - from ._models import MariaDBTableDataset # type: ignore + from ._models import MariaDbLinkedService # type: ignore + from ._models import MariaDbSource # type: ignore + from ._models import MariaDbTableDataset # type: ignore from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore from ._models import MarketoSource # type: ignore @@ -996,17 +1059,20 @@ from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore - from ._models import MongoDBCollectionDataset # type: ignore - from ._models import MongoDBCursorMethodsProperties # type: ignore - from ._models import MongoDBLinkedService # type: ignore - from ._models import MongoDBSource # type: ignore - from ._models import MongoDBV2CollectionDataset # type: ignore - from ._models import MongoDBV2LinkedService # type: ignore - from ._models import 
MongoDBV2Source # type: ignore + from ._models import MongoDbAtlasCollectionDataset # type: ignore + from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSource # type: ignore + from ._models import MongoDbCollectionDataset # type: ignore + from ._models import MongoDbCursorMethodsProperties # type: ignore + from ._models import MongoDbLinkedService # type: ignore + from ._models import MongoDbSource # type: ignore + from ._models import MongoDbV2CollectionDataset # type: ignore + from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore - from ._models import MySQLLinkedService # type: ignore - from ._models import MySQLSource # type: ignore - from ._models import MySQLTableDataset # type: ignore + from ._models import MySqlLinkedService # type: ignore + from ._models import MySqlSource # type: ignore + from ._models import MySqlTableDataset # type: ignore from ._models import NetezzaLinkedService # type: ignore from ._models import NetezzaPartitionSettings # type: ignore from ._models import NetezzaSource # type: ignore @@ -1029,6 +1095,9 @@ from ._models import OperationMetricDimension # type: ignore from ._models import OperationMetricSpecification # type: ignore from ._models import OperationServiceSpecification # type: ignore + from ._models import OracleCloudStorageLinkedService # type: ignore + from ._models import OracleCloudStorageLocation # type: ignore + from ._models import OracleCloudStorageReadSettings # type: ignore from ._models import OracleLinkedService # type: ignore from ._models import OraclePartitionSettings # type: ignore from ._models import OracleServiceCloudLinkedService # type: ignore @@ -1041,31 +1110,44 @@ from ._models import OrcFormat # type: ignore from ._models import OrcSink # type: ignore from ._models import OrcSource # type: ignore + from ._models import OrcWriteSettings # type: ignore from ._models import PackageStore # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import ParquetDataset # type: ignore from ._models import ParquetFormat # type: ignore from ._models import ParquetSink # type: ignore from ._models import ParquetSource # type: ignore + from ._models import ParquetWriteSettings # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore from ._models import PaypalSource # type: ignore from ._models import PhoenixLinkedService # type: ignore from ._models import PhoenixObjectDataset # type: ignore from ._models import PhoenixSource # type: ignore + from ._models import PipelineElapsedTimeMetricPolicy # type: ignore + from ._models import PipelineFolder # type: ignore from ._models import PipelineListResponse # type: ignore + from ._models import PipelinePolicy # type: ignore from ._models import PipelineReference # type: ignore from ._models import PipelineResource # type: ignore from ._models import PipelineRun # type: ignore from ._models import PipelineRunInvokedBy # type: ignore from ._models import PipelineRunsQueryResponse # type: ignore from ._models import PolybaseSettings # type: ignore - from ._models import PostgreSQLLinkedService # type: ignore - from ._models import PostgreSQLSource # type: ignore - from ._models import PostgreSQLTableDataset # type: ignore + from ._models import PostgreSqlLinkedService # type: ignore + from ._models import PostgreSqlSource # type: 
ignore + from ._models import PostgreSqlTableDataset # type: ignore from ._models import PrestoLinkedService # type: ignore from ._models import PrestoObjectDataset # type: ignore from ._models import PrestoSource # type: ignore + from ._models import PrivateEndpointConnectionListResponse # type: ignore + from ._models import PrivateEndpointConnectionResource # type: ignore + from ._models import PrivateLinkConnectionApprovalRequest # type: ignore + from ._models import PrivateLinkConnectionApprovalRequestResource # type: ignore + from ._models import PrivateLinkConnectionState # type: ignore + from ._models import PrivateLinkResource # type: ignore + from ._models import PrivateLinkResourceProperties # type: ignore + from ._models import PrivateLinkResourcesWrapper # type: ignore from ._models import QueryDataFlowDebugSessionsResponse # type: ignore from ._models import QuickBooksLinkedService # type: ignore from ._models import QuickBooksObjectDataset # type: ignore @@ -1076,6 +1158,7 @@ from ._models import RedshiftUnloadSettings # type: ignore from ._models import RelationalSource # type: ignore from ._models import RelationalTableDataset # type: ignore + from ._models import RemotePrivateEndpointConnection # type: ignore from ._models import RerunTumblingWindowTrigger # type: ignore from ._models import Resource # type: ignore from ._models import ResponsysLinkedService # type: ignore @@ -1089,18 +1172,6 @@ from ._models import RunFilterParameters # type: ignore from ._models import RunQueryFilter # type: ignore from ._models import RunQueryOrderBy # type: ignore - from ._models import SQLDWSink # type: ignore - from ._models import SQLDWSource # type: ignore - from ._models import SQLMiSink # type: ignore - from ._models import SQLMiSource # type: ignore - from ._models import SQLPartitionSettings # type: ignore - from ._models import SQLServerLinkedService # type: ignore - from ._models import SQLServerSink # type: ignore - from ._models import SQLServerSource # type: ignore - from ._models import SQLServerStoredProcedureActivity # type: ignore - from ._models import SQLServerTableDataset # type: ignore - from ._models import SQLSink # type: ignore - from ._models import SQLSource # type: ignore from ._models import SalesforceLinkedService # type: ignore from ._models import SalesforceMarketingCloudLinkedService # type: ignore from ._models import SalesforceMarketingCloudObjectDataset # type: ignore @@ -1166,6 +1237,19 @@ from ._models import SparkLinkedService # type: ignore from ._models import SparkObjectDataset # type: ignore from ._models import SparkSource # type: ignore + from ._models import SqlAlwaysEncryptedProperties # type: ignore + from ._models import SqlDwSink # type: ignore + from ._models import SqlDwSource # type: ignore + from ._models import SqlMiSink # type: ignore + from ._models import SqlMiSource # type: ignore + from ._models import SqlPartitionSettings # type: ignore + from ._models import SqlServerLinkedService # type: ignore + from ._models import SqlServerSink # type: ignore + from ._models import SqlServerSource # type: ignore + from ._models import SqlServerStoredProcedureActivity # type: ignore + from ._models import SqlServerTableDataset # type: ignore + from ._models import SqlSink # type: ignore + from ._models import SqlSource # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore from ._models import SquareSource # type: ignore @@ -1199,6 +1283,8 @@ from ._models import 
SybaseTableDataset # type: ignore from ._models import TabularSource # type: ignore from ._models import TabularTranslator # type: ignore + from ._models import TarGZipReadSettings # type: ignore + from ._models import TarReadSettings # type: ignore from ._models import TeradataLinkedService # type: ignore from ._models import TeradataPartitionSettings # type: ignore from ._models import TeradataSource # type: ignore @@ -1259,6 +1345,8 @@ CassandraSourceReadConsistencyLevels, CompressionCodec, CopyBehaviorType, + CosmosDbConnectionMode, + CosmosDbServicePrincipalCredentialType, DataFlowComputeType, DataFlowDebugCommandType, DatasetCompressionLevel, @@ -1271,6 +1359,7 @@ DynamicsServicePrincipalCredentialType, DynamicsSinkWriteBehavior, EventSubscriptionStatus, + FactoryIdentityType, FtpAuthenticationType, GlobalParameterType, GoogleAdWordsAuthenticationType, @@ -1296,9 +1385,9 @@ JsonFormatFilePattern, JsonWriteFilePattern, ManagedIntegrationRuntimeNodeStatus, - MongoDBAuthenticationType, + MongoDbAuthenticationType, NetezzaPartitionOption, - ODataAADServicePrincipalCredentialType, + ODataAadServicePrincipalCredentialType, ODataAuthenticationType, OraclePartitionOption, OrcCompressionCodec, @@ -1306,13 +1395,13 @@ PhoenixAuthenticationType, PolybaseSettingsRejectType, PrestoAuthenticationType, + PublicNetworkAccess, RecurrenceFrequency, RestServiceAuthenticationType, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrder, RunQueryOrderByField, - SQLPartitionOption, SalesforceSinkWriteBehavior, SalesforceSourceReadBehavior, SapCloudForCustomerSinkWriteBehavior, @@ -1325,6 +1414,8 @@ SparkAuthenticationType, SparkServerType, SparkThriftTransportProtocol, + SqlAlwaysEncryptedAkvAuthType, + SqlPartitionOption, SsisLogLocationType, SsisObjectMetadataType, SsisPackageLocationType, @@ -1356,11 +1447,15 @@ 'AmazonRedshiftLinkedService', 'AmazonRedshiftSource', 'AmazonRedshiftTableDataset', + 'AmazonS3CompatibleLinkedService', + 'AmazonS3CompatibleLocation', + 'AmazonS3CompatibleReadSettings', 'AmazonS3Dataset', 'AmazonS3LinkedService', 'AmazonS3Location', 'AmazonS3ReadSettings', 'AppendVariableActivity', + 'ArmIdWrapper', 'AvroDataset', 'AvroFormat', 'AvroSink', @@ -1393,6 +1488,12 @@ 'AzureDataLakeStoreSink', 'AzureDataLakeStoreSource', 'AzureDataLakeStoreWriteSettings', + 'AzureDatabricksDeltaLakeDataset', + 'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureFileStorageLinkedService', 'AzureFileStorageLocation', @@ -1402,35 +1503,35 @@ 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', 'AzureKeyVaultSecretReference', - 'AzureMariaDBLinkedService', - 'AzureMariaDBSource', - 'AzureMariaDBTableDataset', + 'AzureMariaDbLinkedService', + 'AzureMariaDbSource', + 'AzureMariaDbTableDataset', 'AzureMlBatchExecutionActivity', 'AzureMlExecutePipelineActivity', 'AzureMlLinkedService', 'AzureMlServiceLinkedService', 'AzureMlUpdateResourceActivity', 'AzureMlWebServiceFile', - 'AzureMySQLLinkedService', - 'AzureMySQLSink', - 'AzureMySQLSource', - 'AzureMySQLTableDataset', - 'AzurePostgreSQLLinkedService', - 'AzurePostgreSQLSink', - 'AzurePostgreSQLSource', - 'AzurePostgreSQLTableDataset', + 'AzureMySqlLinkedService', + 'AzureMySqlSink', + 'AzureMySqlSource', + 'AzureMySqlTableDataset', + 'AzurePostgreSqlLinkedService', + 'AzurePostgreSqlSink', + 'AzurePostgreSqlSource', + 'AzurePostgreSqlTableDataset', 
'AzureQueueSink', - 'AzureSQLDWLinkedService', - 'AzureSQLDWTableDataset', - 'AzureSQLDatabaseLinkedService', - 'AzureSQLMiLinkedService', - 'AzureSQLMiTableDataset', - 'AzureSQLSink', - 'AzureSQLSource', - 'AzureSQLTableDataset', 'AzureSearchIndexDataset', 'AzureSearchIndexSink', 'AzureSearchLinkedService', + 'AzureSqlDatabaseLinkedService', + 'AzureSqlDwLinkedService', + 'AzureSqlDwTableDataset', + 'AzureSqlMiLinkedService', + 'AzureSqlMiTableDataset', + 'AzureSqlSink', + 'AzureSqlSource', + 'AzureSqlTableDataset', 'AzureStorageLinkedService', 'AzureTableDataset', 'AzureTableSink', @@ -1450,6 +1551,7 @@ 'ChainingTrigger', 'CloudError', 'CmdkeySetup', + 'CmkIdentityDefinition', 'CommonDataServiceForAppsEntityDataset', 'CommonDataServiceForAppsLinkedService', 'CommonDataServiceForAppsSink', @@ -1462,17 +1564,18 @@ 'ConnectionStateProperties', 'ControlActivity', 'CopyActivity', + 'CopyActivityLogSettings', 'CopySink', 'CopySource', 'CopyTranslator', - 'CosmosDBLinkedService', - 'CosmosDBMongoDBApiCollectionDataset', - 'CosmosDBMongoDBApiLinkedService', - 'CosmosDBMongoDBApiSink', - 'CosmosDBMongoDBApiSource', - 'CosmosDBSQLApiCollectionDataset', - 'CosmosDBSQLApiSink', - 'CosmosDBSQLApiSource', + 'CosmosDbLinkedService', + 'CosmosDbMongoDbApiCollectionDataset', + 'CosmosDbMongoDbApiLinkedService', + 'CosmosDbMongoDbApiSink', + 'CosmosDbMongoDbApiSource', + 'CosmosDbSqlApiCollectionDataset', + 'CosmosDbSqlApiSink', + 'CosmosDbSqlApiSource', 'CouchbaseLinkedService', 'CouchbaseSource', 'CouchbaseTableDataset', @@ -1484,14 +1587,14 @@ 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', 'CustomDataset', + 'CustomEventsTrigger', 'CustomSetupBase', - 'DWCopyCommandDefaultValue', - 'DWCopyCommandSettings', 'DataFlow', 'DataFlowDebugCommandPayload', 'DataFlowDebugCommandRequest', 'DataFlowDebugCommandResponse', 'DataFlowDebugPackage', + 'DataFlowDebugPackageDebugSettings', 'DataFlowDebugResource', 'DataFlowDebugSessionInfo', 'DataFlowFolder', @@ -1520,6 +1623,8 @@ 'DatasetResource', 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1533,12 +1638,14 @@ 'DelimitedTextWriteSettings', 'DependencyReference', 'DistcpSettings', - 'DocumentDBCollectionDataset', - 'DocumentDBCollectionSink', - 'DocumentDBCollectionSource', + 'DocumentDbCollectionDataset', + 'DocumentDbCollectionSink', + 'DocumentDbCollectionSource', 'DrillLinkedService', 'DrillSource', 'DrillTableDataset', + 'DwCopyCommandDefaultValue', + 'DwCopyCommandSettings', 'DynamicsAxLinkedService', 'DynamicsAxResourceDataset', 'DynamicsAxSource', @@ -1553,6 +1660,7 @@ 'EloquaLinkedService', 'EloquaObjectDataset', 'EloquaSource', + 'EncryptionConfiguration', 'EntityReference', 'EnvironmentVariableSetup', 'ExcelDataset', @@ -1682,6 +1790,8 @@ 'LinkedServiceListResponse', 'LinkedServiceReference', 'LinkedServiceResource', + 'LogLocationSettings', + 'LogSettings', 'LogStorageSettings', 'LookupActivity', 'MagentoLinkedService', @@ -1697,11 +1807,12 @@ 'ManagedPrivateEndpointResource', 'ManagedVirtualNetwork', 'ManagedVirtualNetworkListResponse', + 'ManagedVirtualNetworkReference', 'ManagedVirtualNetworkResource', 'MappingDataFlow', - 'MariaDBLinkedService', - 'MariaDBSource', - 'MariaDBTableDataset', + 'MariaDbLinkedService', + 'MariaDbSource', + 'MariaDbTableDataset', 'MarketoLinkedService', 'MarketoObjectDataset', 'MarketoSource', @@ -1709,17 +1820,20 @@ 'MicrosoftAccessSink', 'MicrosoftAccessSource', 
'MicrosoftAccessTableDataset', - 'MongoDBCollectionDataset', - 'MongoDBCursorMethodsProperties', - 'MongoDBLinkedService', - 'MongoDBSource', - 'MongoDBV2CollectionDataset', - 'MongoDBV2LinkedService', - 'MongoDBV2Source', + 'MongoDbAtlasCollectionDataset', + 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSource', + 'MongoDbCollectionDataset', + 'MongoDbCursorMethodsProperties', + 'MongoDbLinkedService', + 'MongoDbSource', + 'MongoDbV2CollectionDataset', + 'MongoDbV2LinkedService', + 'MongoDbV2Source', 'MultiplePipelineTrigger', - 'MySQLLinkedService', - 'MySQLSource', - 'MySQLTableDataset', + 'MySqlLinkedService', + 'MySqlSource', + 'MySqlTableDataset', 'NetezzaLinkedService', 'NetezzaPartitionSettings', 'NetezzaSource', @@ -1742,6 +1856,9 @@ 'OperationMetricDimension', 'OperationMetricSpecification', 'OperationServiceSpecification', + 'OracleCloudStorageLinkedService', + 'OracleCloudStorageLocation', + 'OracleCloudStorageReadSettings', 'OracleLinkedService', 'OraclePartitionSettings', 'OracleServiceCloudLinkedService', @@ -1754,31 +1871,44 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', 'PhoenixLinkedService', 'PhoenixObjectDataset', 'PhoenixSource', + 'PipelineElapsedTimeMetricPolicy', + 'PipelineFolder', 'PipelineListResponse', + 'PipelinePolicy', 'PipelineReference', 'PipelineResource', 'PipelineRun', 'PipelineRunInvokedBy', 'PipelineRunsQueryResponse', 'PolybaseSettings', - 'PostgreSQLLinkedService', - 'PostgreSQLSource', - 'PostgreSQLTableDataset', + 'PostgreSqlLinkedService', + 'PostgreSqlSource', + 'PostgreSqlTableDataset', 'PrestoLinkedService', 'PrestoObjectDataset', 'PrestoSource', + 'PrivateEndpointConnectionListResponse', + 'PrivateEndpointConnectionResource', + 'PrivateLinkConnectionApprovalRequest', + 'PrivateLinkConnectionApprovalRequestResource', + 'PrivateLinkConnectionState', + 'PrivateLinkResource', + 'PrivateLinkResourceProperties', + 'PrivateLinkResourcesWrapper', 'QueryDataFlowDebugSessionsResponse', 'QuickBooksLinkedService', 'QuickBooksObjectDataset', @@ -1789,6 +1919,7 @@ 'RedshiftUnloadSettings', 'RelationalSource', 'RelationalTableDataset', + 'RemotePrivateEndpointConnection', 'RerunTumblingWindowTrigger', 'Resource', 'ResponsysLinkedService', @@ -1802,18 +1933,6 @@ 'RunFilterParameters', 'RunQueryFilter', 'RunQueryOrderBy', - 'SQLDWSink', - 'SQLDWSource', - 'SQLMiSink', - 'SQLMiSource', - 'SQLPartitionSettings', - 'SQLServerLinkedService', - 'SQLServerSink', - 'SQLServerSource', - 'SQLServerStoredProcedureActivity', - 'SQLServerTableDataset', - 'SQLSink', - 'SQLSource', 'SalesforceLinkedService', 'SalesforceMarketingCloudLinkedService', 'SalesforceMarketingCloudObjectDataset', @@ -1879,6 +1998,19 @@ 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', + 'SqlAlwaysEncryptedProperties', + 'SqlDwSink', + 'SqlDwSource', + 'SqlMiSink', + 'SqlMiSource', + 'SqlPartitionSettings', + 'SqlServerLinkedService', + 'SqlServerSink', + 'SqlServerSource', + 'SqlServerStoredProcedureActivity', + 'SqlServerTableDataset', + 'SqlSink', + 'SqlSource', 'SquareLinkedService', 'SquareObjectDataset', 'SquareSource', @@ -1912,6 +2044,8 @@ 'SybaseTableDataset', 'TabularSource', 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -1970,6 +2104,8 @@ 'CassandraSourceReadConsistencyLevels', 
'CompressionCodec', 'CopyBehaviorType', + 'CosmosDbConnectionMode', + 'CosmosDbServicePrincipalCredentialType', 'DataFlowComputeType', 'DataFlowDebugCommandType', 'DatasetCompressionLevel', @@ -1982,6 +2118,7 @@ 'DynamicsServicePrincipalCredentialType', 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', + 'FactoryIdentityType', 'FtpAuthenticationType', 'GlobalParameterType', 'GoogleAdWordsAuthenticationType', @@ -2007,9 +2144,9 @@ 'JsonFormatFilePattern', 'JsonWriteFilePattern', 'ManagedIntegrationRuntimeNodeStatus', - 'MongoDBAuthenticationType', + 'MongoDbAuthenticationType', 'NetezzaPartitionOption', - 'ODataAADServicePrincipalCredentialType', + 'ODataAadServicePrincipalCredentialType', 'ODataAuthenticationType', 'OraclePartitionOption', 'OrcCompressionCodec', @@ -2017,13 +2154,13 @@ 'PhoenixAuthenticationType', 'PolybaseSettingsRejectType', 'PrestoAuthenticationType', + 'PublicNetworkAccess', 'RecurrenceFrequency', 'RestServiceAuthenticationType', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrder', 'RunQueryOrderByField', - 'SQLPartitionOption', 'SalesforceSinkWriteBehavior', 'SalesforceSourceReadBehavior', 'SapCloudForCustomerSinkWriteBehavior', @@ -2036,6 +2173,8 @@ 'SparkAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', + 'SqlAlwaysEncryptedAkvAuthType', + 'SqlPartitionOption', 'SsisLogLocationType', 'SsisObjectMetadataType', 'SsisPackageLocationType', diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index d663167e0f4..1e1c0d92c7d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -86,6 +86,8 @@ class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" LZ4 = "lz4" + TAR = "tar" + TAR_G_ZIP = "tarGZip" class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available types of copy behavior. @@ -95,6 +97,23 @@ class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FLATTEN_HIERARCHY = "FlattenHierarchy" MERGE_FILES = "MergeFiles" +class CosmosDbConnectionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The connection mode used to access CosmosDB account. Type: string (or Expression with + resultType string). + """ + + GATEWAY = "Gateway" + DIRECT = "Direct" + +class CosmosDbServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The service principal credential type to use in Server-To-Server authentication. + 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or + Expression with resultType string). + """ + + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Compute type of the cluster which will execute data flow job. """ @@ -183,7 +202,7 @@ class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnum SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The write behavior for the operation. + """Defines values for DynamicsSinkWriteBehavior. 
""" UPSERT = "Upsert" @@ -198,6 +217,14 @@ class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum DISABLED = "Disabled" UNKNOWN = "Unknown" +class FactoryIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The identity type. + """ + + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + class FtpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ @@ -406,7 +433,7 @@ class ManagedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMet RECYCLING = "Recycling" UNAVAILABLE = "Unavailable" -class MongoDBAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ @@ -421,7 +448,7 @@ class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) DATA_SLICE = "DataSlice" DYNAMIC_RANGE = "DynamicRange" -class ODataAADServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ODataAadServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specify the credential type (key or cert) is used for service principal. """ @@ -451,6 +478,7 @@ class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" ZLIB = "zlib" SNAPPY = "snappy" + LZO = "lzo" class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. @@ -486,6 +514,13 @@ class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu ANONYMOUS = "Anonymous" LDAP = "LDAP" +class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Whether or not public network access is allowed for the data factory. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the schedule trigger. """ @@ -632,6 +667,7 @@ class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) BASIC = "Basic" SSH_PUBLIC_KEY = "SshPublicKey" + MULTI_FACTOR = "MultiFactor" class SparkAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Spark server. @@ -658,7 +694,15 @@ class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, SASL = "SASL" HTTP = "HTTP " -class SQLPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Sql always encrypted AKV authentication type. Type: string (or Expression with resultType + string). + """ + + SERVICE_PRINCIPAL = "ServicePrincipal" + MANAGED_IDENTITY = "ManagedIdentity" + +class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. """ @@ -746,6 +790,7 @@ class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum MINUTE = "Minute" HOUR = "Hour" + MONTH = "Month" class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Variable type. 
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index ec6d27ddb59..e97fd0ab305 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -342,7 +342,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
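Because the _subtype_map in the next hunk keeps the original REST discriminators ('SqlServer', 'CosmosDb', 'MongoDbAtlas', ...) and only the mapped Python class names change, polymorphic deserialization through the base class still resolves to the renamed models. A minimal sketch, not part of this diff, assuming the vendored msrest-based models are importable as below:

from azext_datafactory.vendored_sdks.datafactory.models import (
    LinkedService,
    SqlServerLinkedService,
)

# A hypothetical REST payload; the 'type' discriminator is unchanged on the wire.
payload = {
    "type": "SqlServer",
    "typeProperties": {"connectionString": "Server=example;Database=example;"},
}

# msrest picks the concrete class from the 'type' field via LinkedService._subtype_map.
linked_service = LinkedService.deserialize(payload)
assert isinstance(linked_service, SqlServerLinkedService)  # formerly SQLServerLinkedService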
@@ -375,7 +375,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 
'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -489,7 +489,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, 
ZohoObjectDataset. + sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
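The same holds in the serialize direction: the renamed Dataset subclasses still emit the original discriminator strings, so the ARM payloads produced by the extension do not change. A minimal sketch, not part of this diff, using a placeholder linked service reference:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureSqlTableDataset,
    LinkedServiceReference,
)

# Construct the renamed class (formerly AzureSQLTableDataset); the kwargs-based
# generated models accept all properties as keyword arguments.
dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="exampleLinkedService"),
)

body = dataset.serialize()
assert body["type"] == "AzureSqlTable"  # wire discriminator unchanged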
@@ -535,7 +535,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -616,7 +616,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
@@ -649,7 +649,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -668,7 +668,7 @@ class TabularSource(CopySource): """Copy activity sources of tabular type. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySQLSource, AzurePostgreSQLSource, AzureSQLSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySQLSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSQLSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SQLDWSource, SQLMiSource, SQLServerSource, SQLSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDbSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDbSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDwSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. All required parameters must be populated in order to send to Azure. 
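For hand-written code in the extension, or in callers of the vendored SDK, that imported the all-caps SQL names removed above, a small compatibility shim is enough; class behavior is unchanged. A hedged, hypothetical sketch, not part of this diff:

# Hypothetical caller-side shim: prefer the new Sql* spelling used in this SDK,
# fall back to the older SQL* spelling if an older vendored SDK is installed.
try:
    from azext_datafactory.vendored_sdks.datafactory.models import SqlSource
except ImportError:
    from azext_datafactory.vendored_sdks.datafactory.models import SQLSource as SqlSource

source = SqlSource(sql_reader_query="SELECT 1")
assert source.serialize()["type"] == "SqlSource"  # discriminator value is unchanged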
@@ -709,7 +709,7 @@ class TabularSource(CopySource): } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySQLSource', 'AzurePostgreSqlSource': 'AzurePostgreSQLSource', 'AzureSqlSource': 'AzureSQLSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySQLSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSQLSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SQLDWSource', 'SqlMISource': 'SQLMiSource', 'SqlServerSource': 'SQLServerSource', 'SqlSource': 'SQLSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDbSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDbSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 
'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDwSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} } def __init__( @@ -977,6 +977,294 @@ def __init__( self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) +class AmazonS3CompatibleLinkedService(LinkedService): + """Linked service for Amazon S3 Compatible. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access + Management (IAM) user. + :type secret_access_key: ~data_factory_management_client.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style + access. Default value is false. Type: boolean (or Expression with resultType boolean). + :type force_path_style: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'force_path_style': {'key': 'typeProperties.forcePathStyle', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonS3CompatibleLinkedService, self).__init__(**kwargs) + self.type = 'AmazonS3Compatible' # type: str + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.force_path_style = kwargs.get('force_path_style', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class DatasetLocation(msrest.serialization.Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3CompatibleLocation, AmazonS3Location, AzureBlobFsLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, OracleCloudStorageLocation, SftpLocation. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3CompatibleLocation': 'AmazonS3CompatibleLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFsLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'OracleCloudStorageLocation': 'OracleCloudStorageLocation', 'SftpLocation': 'SftpLocation'} + } + + def __init__( + self, + **kwargs + ): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetLocation' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + + +class AmazonS3CompatibleLocation(DatasetLocation): + """The location of Amazon S3 Compatible dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with + resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonS3CompatibleLocation, self).__init__(**kwargs) + self.type = 'AmazonS3CompatibleLocation' # type: str + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + + +class StoreReadSettings(msrest.serialization.Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3CompatibleReadSettings, AmazonS3ReadSettings, AzureBlobFsReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, OracleCloudStorageReadSettings, SftpReadSettings. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3CompatibleReadSettings': 'AmazonS3CompatibleReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFsReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'OracleCloudStorageReadSettings': 'OracleCloudStorageReadSettings', 'SftpReadSettings': 'SftpReadSettings'} + } + + def __init__( + self, + **kwargs + ): + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'StoreReadSettings' # type: str + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + + +class AmazonS3CompatibleReadSettings(StoreReadSettings): + """Amazon S3 Compatible read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression + with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonS3CompatibleReadSettings, self).__init__(**kwargs) + self.type = 'AmazonS3CompatibleReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. @@ -1088,6 +1376,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). 
:type access_key_id: object @@ -1098,6 +1389,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~data_factory_management_client.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1115,9 +1408,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1127,59 +1422,14 @@ def __init__( ): super(AmazonS3LinkedService, self).__init__(**kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = kwargs.get('authentication_type', None) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) + self.session_token = kwargs.get('session_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DatasetLocation(msrest.serialization.Model): - """Dataset location. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3Location, AzureBlobFsLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, SftpLocation. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). 
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFsLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation'} - } - - def __init__( - self, - **kwargs - ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetLocation' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - - class AmazonS3Location(DatasetLocation): """The location of amazon S3 dataset. @@ -1227,50 +1477,8 @@ def __init__( self.version = kwargs.get('version', None) -class StoreReadSettings(msrest.serialization.Model): - """Connector read setting. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3ReadSettings, AzureBlobFsReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, SftpReadSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFsReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings'} - } - - def __init__( - self, - **kwargs - ): - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'StoreReadSettings' # type: str - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - - class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. + """Amazon S3 read settings. All required parameters must be populated in order to send to Azure. @@ -1402,6 +1610,31 @@ def __init__( self.value = kwargs.get('value', None) +class ArmIdWrapper(msrest.serialization.Model): + """A wrapper for an ARM resource id. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmIdWrapper, self).__init__(**kwargs) + self.id = None + + class AvroDataset(Dataset): """Avro dataset. @@ -1555,7 +1788,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. 
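The new authentication_type and session_token kwargs on AmazonS3LinkedService make the temporary-security-credential flow expressible alongside the classic access-key pair. A minimal construction sketch; the models import path and the SecureString secret type are assumptions about the vendored SDK layout, not part of this diff:

# Sketch only: new optional AmazonS3LinkedService kwargs (import path / SecureString assumed).
from azext_datafactory.vendored_sdks.datafactory import models

s3_ls = models.AmazonS3LinkedService(
    authentication_type='TemporarySecurityCredentials',          # new optional kwarg
    access_key_id='AKIAEXAMPLE',
    secret_access_key=models.SecureString(value='<secret>'),
    session_token=models.SecureString(value='<session-token>'),  # new optional kwarg
)
print(s3_ls.serialize()['type'])   # 'AmazonS3'
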
@@ -1596,7 +1829,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1724,7 +1957,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. 
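The updated _subtype_map is what drives polymorphic deserialization: msrest reads the type discriminator from the payload and instantiates the matching renamed class. A rough illustration under the same assumed import path; from_dict is the stock msrest helper, nothing specific to this extension:

# Sketch only: the base-class deserializer dispatches on the 'type' discriminator.
from azext_datafactory.vendored_sdks.datafactory import models

sink = models.CopySink.from_dict({'type': 'AzureSqlSink', 'writeBatchSize': 10000})
print(type(sink).__name__)   # expected: AzureSqlSink, per the mapping above
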
@@ -1745,7 +1978,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1771,6 +2004,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -1782,6 +2022,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -1792,6 +2034,8 @@ def __init__( self.type = 'AvroWriteSettings' # type: str self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class CustomSetupBase(msrest.serialization.Model): @@ -2529,6 +2773,10 @@ class AzureBlobStorageLinkedService(LinkedService): values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object + :param account_kind: Specify the kind of your storage account. Allowed values are: Storage + (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: + string (or Expression with resultType string). + :type account_kind: str :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
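The two new AvroWriteSettings kwargs map directly onto maxRowsPerFile and fileNamePrefix in the serialized payload, letting a copy activity split its Avro output into capped files. A quick hedged sketch (vendored import path assumed):

# Sketch only: new file-splitting options on AvroWriteSettings.
from azext_datafactory.vendored_sdks.datafactory import models

write_settings = models.AvroWriteSettings(
    record_name='record',
    record_namespace='example.ns',
    max_rows_per_file=1000000,   # new: cap the row count per output file
    file_name_prefix='part',     # new: prefix used for the generated file names
)
print(write_settings.serialize())   # includes maxRowsPerFile / fileNamePrefix
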
@@ -2555,6 +2803,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, + 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2573,6 +2822,7 @@ def __init__( self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) + self.account_kind = kwargs.get('account_kind', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2742,6 +2992,395 @@ def __init__( self.block_size_in_mb = kwargs.get('block_size_in_mb', None) +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType + string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) + + +class ExportSettings(msrest.serialization.Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__( + self, + **kwargs + ): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ExportSettings' # type: str + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs.get('access_token', None) + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -2763,10 +3402,16 @@ class AzureDatabricksLinkedService(LinkedService): :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to + :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~data_factory_management_client.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :type workspace_resource_id: object :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). :type existing_cluster_id: object @@ -2816,12 +3461,14 @@ class AzureDatabricksLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure clusters based on a user + defined set of rules. Type: string (or Expression with resultType string). 
+ :type policy_id: object """ _validation = { 'type': {'required': True}, 'domain': {'required': True}, - 'access_token': {'required': True}, } _attribute_map = { @@ -2833,6 +3480,8 @@ class AzureDatabricksLinkedService(LinkedService): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, @@ -2846,6 +3495,7 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, } def __init__( @@ -2855,7 +3505,9 @@ def __init__( super(AzureDatabricksLinkedService, self).__init__(**kwargs) self.type = 'AzureDatabricks' # type: str self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] + self.access_token = kwargs.get('access_token', None) + self.authentication = kwargs.get('authentication', None) + self.workspace_resource_id = kwargs.get('workspace_resource_id', None) self.existing_cluster_id = kwargs.get('existing_cluster_id', None) self.instance_pool_id = kwargs.get('instance_pool_id', None) self.new_cluster_version = kwargs.get('new_cluster_version', None) @@ -2869,13 +3521,14 @@ def __init__( self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.policy_id = kwargs.get('policy_id', None) class ExecutionActivity(Activity): """Base class for all execution activities. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. 
+ sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. All required parameters must be populated in order to send to Azure. @@ -2915,7 +3568,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( @@ -3009,27 +3662,24 @@ class AzureDataExplorerLinkedService(LinkedService): will be in the format https://:code:``.:code:``.kusto.windows.net. Type: string (or Expression with resultType string). :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). + :param service_principal_id: The ID of the service principal used to authenticate against Azure + Data Explorer. Type: string (or Expression with resultType string). 
:type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. + :param service_principal_key: The key of the service principal used to authenticate against + Kusto. :type service_principal_key: ~data_factory_management_client.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). :type tenant: object """ _validation = { 'type': {'required': True}, 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { @@ -3053,10 +3703,10 @@ def __init__( super(AzureDataExplorerLinkedService, self).__init__(**kwargs) self.type = 'AzureDataExplorer' # type: str self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) self.database = kwargs['database'] - self.tenant = kwargs['tenant'] + self.tenant = kwargs.get('tenant', None) class AzureDataExplorerSink(CopySink): @@ -4254,7 +4904,7 @@ def __init__( self.secret_version = kwargs.get('secret_version', None) -class AzureMariaDBLinkedService(LinkedService): +class AzureMariaDbLinkedService(LinkedService): """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. @@ -4303,14 +4953,14 @@ def __init__( self, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) + super(AzureMariaDbLinkedService, self).__init__(**kwargs) self.type = 'AzureMariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMariaDBSource(TabularSource): +class AzureMariaDbSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. @@ -4359,12 +5009,12 @@ def __init__( self, **kwargs ): - super(AzureMariaDBSource, self).__init__(**kwargs) + super(AzureMariaDbSource, self).__init__(**kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = kwargs.get('query', None) -class AzureMariaDBTableDataset(Dataset): +class AzureMariaDbTableDataset(Dataset): """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -4417,7 +5067,7 @@ def __init__( self, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) + super(AzureMariaDbTableDataset, self).__init__(**kwargs) self.type = 'AzureMariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -4515,9 +5165,15 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~data_factory_management_client.models.ActivityPolicy - :param ml_pipeline_id: Required. 
ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). + :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with + resultType string). :type ml_pipeline_id: object + :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string + (or Expression with resultType string). + :type ml_pipeline_endpoint_id: object + :param version: Version of the published Azure ML pipeline endpoint. Type: string (or + Expression with resultType string). + :type version: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). @@ -4527,6 +5183,10 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object + :param data_path_assignments: Dictionary used for changing data path assignments without + retraining. Values will be passed in the dataPathAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :type data_path_assignments: object :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). @@ -4541,7 +5201,6 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -4554,8 +5213,11 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } @@ -4566,9 +5228,12 @@ def __init__( ): super(AzureMlExecutePipelineActivity, self).__init__(**kwargs) self.type = 'AzureMLExecutePipeline' # type: str - self.ml_pipeline_id = kwargs['ml_pipeline_id'] + self.ml_pipeline_id = kwargs.get('ml_pipeline_id', None) + self.ml_pipeline_endpoint_id = kwargs.get('ml_pipeline_endpoint_id', None) + self.version = kwargs.get('version', None) self.experiment_name = kwargs.get('experiment_name', None) self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) + self.data_path_assignments = kwargs.get('data_path_assignments', None) self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) @@ -4833,7 +5498,7 @@ def __init__( self.linked_service_name = 
kwargs['linked_service_name'] -class AzureMySQLLinkedService(LinkedService): +class AzureMySqlLinkedService(LinkedService): """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. @@ -4883,14 +5548,14 @@ def __init__( self, **kwargs ): - super(AzureMySQLLinkedService, self).__init__(**kwargs) + super(AzureMySqlLinkedService, self).__init__(**kwargs) self.type = 'AzureMySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMySQLSink(CopySink): +class AzureMySqlSink(CopySink): """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -4939,12 +5604,12 @@ def __init__( self, **kwargs ): - super(AzureMySQLSink, self).__init__(**kwargs) + super(AzureMySqlSink, self).__init__(**kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) -class AzureMySQLSource(TabularSource): +class AzureMySqlSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -4992,12 +5657,12 @@ def __init__( self, **kwargs ): - super(AzureMySQLSource, self).__init__(**kwargs) + super(AzureMySqlSource, self).__init__(**kwargs) self.type = 'AzureMySqlSource' # type: str self.query = kwargs.get('query', None) -class AzureMySQLTableDataset(Dataset): +class AzureMySqlTableDataset(Dataset): """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -5055,13 +5720,13 @@ def __init__( self, **kwargs ): - super(AzureMySQLTableDataset, self).__init__(**kwargs) + super(AzureMySqlTableDataset, self).__init__(**kwargs) self.type = 'AzureMySqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) -class AzurePostgreSQLLinkedService(LinkedService): +class AzurePostgreSqlLinkedService(LinkedService): """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -5110,14 +5775,14 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLLinkedService, self).__init__(**kwargs) + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) self.type = 'AzurePostgreSql' # type: str self.connection_string = kwargs.get('connection_string', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzurePostgreSQLSink(CopySink): +class AzurePostgreSqlSink(CopySink): """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. @@ -5166,12 +5831,12 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLSink, self).__init__(**kwargs) + super(AzurePostgreSqlSink, self).__init__(**kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) -class AzurePostgreSQLSource(TabularSource): +class AzurePostgreSqlSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. 
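For context, a minimal usage sketch of the loosened models above (illustrative only, not part of the generated file): it assumes the vendored SDK exposes its models under azext_datafactory.vendored_sdks.datafactory.models, and every literal value is a hypothetical placeholder.

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureDataExplorerLinkedService,
    AzureMlExecutePipelineActivity,
)

# Service principal id/key and tenant are now optional, e.g. when relying on managed identity.
adx_linked_service = AzureDataExplorerLinkedService(
    endpoint="https://exampleadx.westus2.kusto.windows.net",
    database="exampledb",
)

# ml_pipeline_id is no longer required; the activity can target a published pipeline endpoint instead.
run_endpoint = AzureMlExecutePipelineActivity(
    name="RunPublishedEndpoint",
    ml_pipeline_endpoint_id="<published-endpoint-id>",
    version="2",
    experiment_name="exampleExperiment",
    data_path_assignments={"training_data": "<data-path-reference>"},  # hypothetical key/value shape
)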
@@ -5220,12 +5885,12 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLSource, self).__init__(**kwargs) + super(AzurePostgreSqlSource, self).__init__(**kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = kwargs.get('query', None) -class AzurePostgreSQLTableDataset(Dataset): +class AzurePostgreSqlTableDataset(Dataset): """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -5287,7 +5952,7 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLTableDataset, self).__init__(**kwargs) + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) self.type = 'AzurePostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) @@ -5515,7 +6180,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSQLDatabaseLinkedService(LinkedService): +class AzureSqlDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -5555,6 +6220,9 @@ class AzureSQLDatabaseLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~data_factory_management_client.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -5576,13 +6244,14 @@ class AzureSQLDatabaseLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( self, **kwargs ): - super(AzureSQLDatabaseLinkedService, self).__init__(**kwargs) + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlDatabase' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) @@ -5591,9 +6260,10 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) -class AzureSQLDWLinkedService(LinkedService): +class AzureSqlDwLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. @@ -5660,7 +6330,7 @@ def __init__( self, **kwargs ): - super(AzureSQLDWLinkedService, self).__init__(**kwargs) + super(AzureSqlDwLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlDW' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) @@ -5671,7 +6341,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSQLDWTableDataset(Dataset): +class AzureSqlDwTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. 
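A hedged sketch of the new Always Encrypted hook on the Azure SQL linked services (same import assumption as the earlier sketch; SqlAlwaysEncryptedProperties is referenced but not defined in this hunk, so it is left as a commented placeholder).

from azext_datafactory.vendored_sdks.datafactory.models import AzureSqlDatabaseLinkedService

sql_db_linked_service = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:exampleserver.database.windows.net;Database=exampledb;",
    # always_encrypted_settings=SqlAlwaysEncryptedProperties(...)  # new optional property; its fields are not shown in this diff
)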
@@ -5733,14 +6403,14 @@ def __init__( self, **kwargs ): - super(AzureSQLDWTableDataset, self).__init__(**kwargs) + super(AzureSqlDwTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlDWTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class AzureSQLMiLinkedService(LinkedService): +class AzureSqlMiLinkedService(LinkedService): """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. @@ -5780,6 +6450,9 @@ class AzureSQLMiLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~data_factory_management_client.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -5801,13 +6474,14 @@ class AzureSQLMiLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( self, **kwargs ): - super(AzureSQLMiLinkedService, self).__init__(**kwargs) + super(AzureSqlMiLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlMI' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) @@ -5816,9 +6490,10 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) -class AzureSQLMiTableDataset(Dataset): +class AzureSqlMiTableDataset(Dataset): """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. @@ -5880,14 +6555,14 @@ def __init__( self, **kwargs ): - super(AzureSQLMiTableDataset, self).__init__(**kwargs) + super(AzureSqlMiTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlMITable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class AzureSQLSink(CopySink): +class AzureSqlSink(CopySink): """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. @@ -5956,7 +6631,7 @@ def __init__( self, **kwargs ): - super(AzureSQLSink, self).__init__(**kwargs) + super(AzureSqlSink, self).__init__(**kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -5966,7 +6641,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class AzureSQLSource(TabularSource): +class AzureSqlSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. 
@@ -6005,9 +6680,9 @@ class AzureSQLSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -6026,15 +6701,15 @@ class AzureSQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(AzureSQLSource, self).__init__(**kwargs) + super(AzureSqlSource, self).__init__(**kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -6044,7 +6719,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class AzureSQLTableDataset(Dataset): +class AzureSqlTableDataset(Dataset): """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. @@ -6106,7 +6781,7 @@ def __init__( self, **kwargs ): - super(AzureSQLTableDataset, self).__init__(**kwargs) + super(AzureSqlTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -6722,7 +7397,7 @@ class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. Variables are only populated by the server, and will be ignored when sending a request. @@ -6759,7 +7434,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( @@ -7365,6 +8040,26 @@ def __init__( self.password = kwargs['password'] +class CmkIdentityDefinition(msrest.serialization.Model): + """Managed Identity used for CMK. + + :param user_assigned_identity: The resource id of the user assigned identity to authenticate to + customer's key vault. 
+ :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CmkIdentityDefinition, self).__init__(**kwargs) + self.user_assigned_identity = kwargs.get('user_assigned_identity', None) + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -7699,7 +8394,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -7720,7 +8415,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -7750,6 +8445,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. @@ -7786,6 +8484,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -7801,6 +8500,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(**kwargs) self.type = 'Concur' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs['client_id'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -8050,9 +8750,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
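An illustrative sketch of the new connection_properties bag on ConcurLinkedService (same import assumption as above; values are placeholders). Note the docstring marks the bag as mutually exclusive with the individual typeProperties, while client_id and username remain required by the generated constructor.

from azext_datafactory.vendored_sdks.datafactory.models import ConcurLinkedService

concur_linked_service = ConcurLinkedService(
    client_id="<concur-app-client-id>",
    username="exampleUser",
    # New single property bag used to connect to Concur.
    connection_properties={"host": "concur.example.com"},
)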
@@ -8092,6 +8794,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -8116,12 +8819,38 @@ def __init__( self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.log_settings = kwargs.get('log_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) self.validate_data_consistency = kwargs.get('validate_data_consistency', None) self.skip_error_file = kwargs.get('skip_error_file', None) +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -8159,7 +8888,7 @@ def __init__( self.type = 'CopyTranslator' # type: str -class CosmosDBLinkedService(LinkedService): +class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -8188,6 +8917,31 @@ class CosmosDBLinkedService(LinkedService): :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. :type account_key: ~data_factory_management_client.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". 
+ :type service_principal_credential_type: str or + ~data_factory_management_client.models.CosmosDbServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object + :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or + Expression with resultType string). Possible values include: "Gateway", "Direct". + :type connection_mode: str or ~data_factory_management_client.models.CosmosDbConnectionMode :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -8209,6 +8963,12 @@ class CosmosDBLinkedService(LinkedService): 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, + 'connection_mode': {'key': 'typeProperties.connectionMode', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -8216,16 +8976,22 @@ def __init__( self, **kwargs ): - super(CosmosDBLinkedService, self).__init__(**kwargs) + super(CosmosDbLinkedService, self).__init__(**kwargs) self.type = 'CosmosDb' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_endpoint = kwargs.get('account_endpoint', None) self.database = kwargs.get('database', None) self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) + self.service_principal_credential = kwargs.get('service_principal_credential', None) + self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) + self.connection_mode = kwargs.get('connection_mode', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CosmosDBMongoDBApiCollectionDataset(Dataset): +class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. 
All required parameters must be populated in order to send to Azure. @@ -8280,12 +9046,12 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiCollectionDataset, self).__init__(**kwargs) + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = kwargs['collection'] -class CosmosDBMongoDBApiLinkedService(LinkedService): +class CosmosDbMongoDbApiLinkedService(LinkedService): """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -8333,13 +9099,13 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiLinkedService, self).__init__(**kwargs) + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] -class CosmosDBMongoDBApiSink(CopySink): +class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. @@ -8389,12 +9155,12 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiSink, self).__init__(**kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) -class CosmosDBMongoDBApiSource(CopySource): +class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. @@ -8418,7 +9184,7 @@ class CosmosDBMongoDBApiSource(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -8443,7 +9209,7 @@ class CosmosDBMongoDBApiSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -8453,7 +9219,7 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiSource, self).__init__(**kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) @@ -8462,7 +9228,7 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class CosmosDBSQLApiCollectionDataset(Dataset): +class CosmosDbSqlApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. 
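A hedged sketch of the new service-principal options on CosmosDbLinkedService (same import assumption as above; SecureString's value parameter is taken from the SDK and is not shown in this hunk, and all literals are placeholders).

from azext_datafactory.vendored_sdks.datafactory.models import CosmosDbLinkedService, SecureString

cosmos_linked_service = CosmosDbLinkedService(
    account_endpoint="https://examplecosmos.documents.azure.com:443/",
    database="exampledb",
    service_principal_id="<app-client-id>",
    service_principal_credential_type="ServicePrincipalKey",
    service_principal_credential=SecureString(value="<app-secret>"),
    tenant="<tenant-id>",
    connection_mode="Gateway",  # new: "Gateway" or "Direct"
)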
@@ -8517,12 +9283,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSQLApiCollectionDataset, self).__init__(**kwargs) + super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = kwargs['collection_name'] -class CosmosDBSQLApiSink(CopySink): +class CosmosDbSqlApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. @@ -8571,12 +9337,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSQLApiSink, self).__init__(**kwargs) + super(CosmosDbSqlApiSink, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) -class CosmosDBSQLApiSource(CopySource): +class CosmosDbSqlApiSource(CopySource): """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -8632,7 +9398,7 @@ def __init__( self, **kwargs ): - super(CosmosDBSQLApiSource, self).__init__(**kwargs) + super(CosmosDbSqlApiSource, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = kwargs.get('query', None) self.page_size = kwargs.get('page_size', None) @@ -8820,18 +9586,16 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param integration_runtime: Set to use integration runtime setting for data flow debug session. + :type integration_runtime: + ~data_factory_management_client.models.IntegrationRuntimeDebugResource """ _attribute_map = { 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'name': {'key': 'integrationRuntime.name', 'type': 'str'}, - 'properties': {'key': 'integrationRuntime.properties', 'type': 'IntegrationRuntime'}, + 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, } def __init__( @@ -8842,8 +9606,7 @@ def __init__( self.compute_type = kwargs.get('compute_type', None) self.core_count = kwargs.get('core_count', None) self.time_to_live = kwargs.get('time_to_live', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) + self.integration_runtime = kwargs.get('integration_runtime', None) class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): @@ -8967,6 +9730,9 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). 
+ :type auto_user_specification: object """ _validation = { @@ -8990,6 +9756,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__( @@ -9004,6 +9771,7 @@ def __init__( self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + self.auto_user_specification = kwargs.get('auto_user_specification', None) class CustomActivityReferenceObject(msrest.serialization.Model): @@ -9133,6 +9901,71 @@ def __init__( self.type_properties = kwargs['type_properties'] +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern provided for trigger + to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. 
+ :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomEventsTrigger, self).__init__(**kwargs) + self.type = 'CustomEventsTrigger' # type: str + self.subject_begins_with = kwargs.get('subject_begins_with', None) + self.subject_ends_with = kwargs.get('subject_ends_with', None) + self.events = kwargs['events'] + self.scope = kwargs['scope'] + + class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. @@ -9452,60 +10285,33 @@ def __init__( class DataFlowDebugPackage(msrest.serialization.Model): """Request body structure for starting data flow debug session. - Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] :param session_id: The ID of data flow debug session. :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~data_factory_management_client.models.DataFlowDebugResource :param datasets: List of datasets. :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] :param linked_services: List of linked services. :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters_debug_settings_parameters: Data flow parameters. - :type parameters_debug_settings_parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters_staging_linked_service_parameters: Arguments for LinkedService. - :type parameters_staging_linked_service_parameters: dict[str, object] - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param staging: Staging info for debug session. + :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. 
+ :type debug_settings: ~data_factory_management_client.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'source_settings': {'key': 'debugSettings.sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters_debug_settings_parameters': {'key': 'debugSettings.parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'debugSettings.datasetParameters', 'type': 'object'}, - 'folder_path': {'key': 'staging.folderPath', 'type': 'object'}, - 'type': {'key': 'staging.linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'staging.linkedService.referenceName', 'type': 'str'}, - 'parameters_staging_linked_service_parameters': {'key': 'staging.linkedService.parameters', 'type': '{object}'}, - 'name': {'key': 'dataFlow.name', 'type': 'str'}, - 'properties': {'key': 'dataFlow.properties', 'type': 'DataFlow'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } - type = "LinkedServiceReference" - def __init__( self, **kwargs @@ -9513,16 +10319,38 @@ def __init__( super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.session_id = kwargs.get('session_id', None) + self.data_flow = kwargs.get('data_flow', None) self.datasets = kwargs.get('datasets', None) self.linked_services = kwargs.get('linked_services', None) + self.staging = kwargs.get('staging', None) + self.debug_settings = kwargs.get('debug_settings', None) + + +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = kwargs.get('source_settings', None) - self.parameters_debug_settings_parameters = kwargs.get('parameters_debug_settings_parameters', None) + self.parameters = kwargs.get('parameters', None) self.dataset_parameters = kwargs.get('dataset_parameters', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_name = kwargs.get('reference_name', None) - self.parameters_staging_linked_service_parameters = kwargs.get('parameters_staging_linked_service_parameters', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) class SubResourceDebugResource(msrest.serialization.Model): @@ -9939,40 +10767,25 @@ def __init__( class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. 
- Variables are only populated by the server, and will be ignored when sending a request. - + :param linked_service: Staging linked service reference. + :type linked_service: ~data_factory_management_client.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'type': {'key': 'linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'linkedService.referenceName', 'type': 'str'}, - 'parameters': {'key': 'linkedService.parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - def __init__( self, **kwargs ): super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = kwargs.get('linked_service', None) self.folder_path = kwargs.get('folder_path', None) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) class DataLakeAnalyticsUsqlActivity(ExecutionActivity): @@ -10063,7 +10876,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -10084,7 +10897,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -10398,6 +11211,68 @@ def __init__( self.type = kwargs.get('type', None) +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.type = 'TarGZip' # type: str + self.level = kwargs.get('level', None) + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -10778,7 +11653,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". @@ -11005,6 +11880,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -11017,6 +11899,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -11027,6 +11911,8 @@ def __init__( self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs['file_extension'] + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class DependencyReference(msrest.serialization.Model): @@ -11099,7 +11985,7 @@ def __init__( self.distcp_options = kwargs.get('distcp_options', None) -class DocumentDBCollectionDataset(Dataset): +class DocumentDbCollectionDataset(Dataset): """Microsoft Azure Document Database Collection dataset. All required parameters must be populated in order to send to Azure. 
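Short sketches of a few of the models introduced above: the new CustomEventsTrigger, the extended DelimitedTextWriteSettings, and the new TarGZip dataset compression (same import assumption as above; resource ids and values are hypothetical placeholders).

from azext_datafactory.vendored_sdks.datafactory.models import (
    CustomEventsTrigger,
    DatasetTarGZipCompression,
    DelimitedTextWriteSettings,
)

custom_events_trigger = CustomEventsTrigger(
    scope="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.EventGrid/topics/<topic>",
    events=["Example.Orders.Created"],
    subject_begins_with="orders/",  # at least one of subjectBeginsWith/subjectEndsWith must be provided
)

text_write_settings = DelimitedTextWriteSettings(
    file_extension=".csv",        # required
    max_rows_per_file=100000,     # new: cap the row count per written file
    file_name_prefix="part",      # new: file name prefix when copying from non-file-based stores
)

targzip_compression = DatasetTarGZipCompression(level="Optimal")  # new Tar/TarGZip compression types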
@@ -11154,12 +12040,12 @@ def __init__( self, **kwargs ): - super(DocumentDBCollectionDataset, self).__init__(**kwargs) + super(DocumentDbCollectionDataset, self).__init__(**kwargs) self.type = 'DocumentDbCollection' # type: str self.collection_name = kwargs['collection_name'] -class DocumentDBCollectionSink(CopySink): +class DocumentDbCollectionSink(CopySink): """A copy activity Document Database Collection sink. All required parameters must be populated in order to send to Azure. @@ -11212,13 +12098,13 @@ def __init__( self, **kwargs ): - super(DocumentDBCollectionSink, self).__init__(**kwargs) + super(DocumentDbCollectionSink, self).__init__(**kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = kwargs.get('nesting_separator', None) self.write_behavior = kwargs.get('write_behavior', None) -class DocumentDBCollectionSource(CopySource): +class DocumentDbCollectionSource(CopySource): """A copy activity Document Database Collection source. All required parameters must be populated in order to send to Azure. @@ -11270,7 +12156,7 @@ def __init__( self, **kwargs ): - super(DocumentDBCollectionSource, self).__init__(**kwargs) + super(DocumentDbCollectionSource, self).__init__(**kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) @@ -11456,7 +12342,7 @@ def __init__( self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class DWCopyCommandDefaultValue(msrest.serialization.Model): +class DwCopyCommandDefaultValue(msrest.serialization.Model): """Default value. :param column_name: Column name. Type: object (or Expression with resultType string). @@ -11475,19 +12361,19 @@ def __init__( self, **kwargs ): - super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + super(DwCopyCommandDefaultValue, self).__init__(**kwargs) self.column_name = kwargs.get('column_name', None) self.default_value = kwargs.get('default_value', None) -class DWCopyCommandSettings(msrest.serialization.Model): +class DwCopyCommandSettings(msrest.serialization.Model): """DW Copy Command settings. :param default_values: Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~data_factory_management_client.models.DWCopyCommandDefaultValue] + :type default_values: list[~data_factory_management_client.models.DwCopyCommandDefaultValue] :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. 
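A brief sketch of the renamed DW copy command settings (same import assumption as above; the additional_options values mirror the example given in the docstring).

from azext_datafactory.vendored_sdks.datafactory.models import (
    DwCopyCommandDefaultValue,
    DwCopyCommandSettings,
)

copy_command_settings = DwCopyCommandSettings(
    default_values=[
        DwCopyCommandDefaultValue(column_name="LoadDate", default_value="GETDATE()"),
    ],
    additional_options={"MAXERRORS": "1000", "DATEFORMAT": "'ymd'"},
)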
@@ -11495,7 +12381,7 @@ class DWCopyCommandSettings(msrest.serialization.Model): """ _attribute_map = { - 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'default_values': {'key': 'defaultValues', 'type': '[DwCopyCommandDefaultValue]'}, 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, } @@ -11503,7 +12389,7 @@ def __init__( self, **kwargs ): - super(DWCopyCommandSettings, self).__init__(**kwargs) + super(DwCopyCommandSettings, self).__init__(**kwargs) self.default_values = kwargs.get('default_values', None) self.additional_options = kwargs.get('additional_options', None) @@ -12483,6 +13369,47 @@ def __init__( self.query = kwargs.get('query', None) +class EncryptionConfiguration(msrest.serialization.Model): + """Definition of CMK for the factory. + + All required parameters must be populated in order to send to Azure. + + :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + Key. + :type key_name: str + :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :type vault_base_url: str + :param key_version: The version of the key used for CMK. If not provided, latest version will + be used. + :type key_version: str + :param identity: User assigned identity to use to authenticate to customer's key vault. If not + provided Managed Service Identity will be used. + :type identity: ~data_factory_management_client.models.CmkIdentityDefinition + """ + + _validation = { + 'key_name': {'required': True}, + 'vault_base_url': {'required': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'CmkIdentityDefinition'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionConfiguration, self).__init__(**kwargs) + self.key_name = kwargs['key_name'] + self.vault_base_url = kwargs['vault_base_url'] + self.key_version = kwargs.get('key_version', None) + self.identity = kwargs.get('identity', None) + + class EntityReference(msrest.serialization.Model): """The entity reference. @@ -12705,6 +13632,16 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param compute: Compute properties for data flow activity. :type compute: ~data_factory_management_client.models.ExecuteDataFlowActivityTypePropertiesCompute + :param trace_level: Trace level setting used for data flow monitoring output. Supported values + are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :type trace_level: object + :param continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :type continue_on_error: object + :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + the same save order to be processed concurrently. Type: boolean (or Expression with resultType + boolean). 
+ :type run_concurrently: object """ _validation = { @@ -12726,6 +13663,9 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, + 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, + 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, } def __init__( @@ -12738,22 +13678,26 @@ def __init__( self.staging = kwargs.get('staging', None) self.integration_runtime = kwargs.get('integration_runtime', None) self.compute = kwargs.get('compute', None) + self.trace_level = kwargs.get('trace_level', None) + self.continue_on_error = kwargs.get('continue_on_error', None) + self.run_concurrently = kwargs.get('run_concurrently', None) class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :type compute_type: object :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). + :type core_count: object """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'compute_type': {'key': 'computeType', 'type': 'object'}, + 'core_count': {'key': 'coreCount', 'type': 'object'}, } def __init__( @@ -12927,43 +13871,6 @@ def __init__( self.log_location = kwargs.get('log_location', None) -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ExportSettings' # type: str - - class ExposureControlBatchRequest(msrest.serialization.Model): """A list of exposure control features. @@ -13183,6 +14090,11 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. 
:type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param encryption: Properties to enable Customer Managed Key for the factory. + :type encryption: ~data_factory_management_client.models.EncryptionConfiguration + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -13209,6 +14121,8 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -13223,6 +14137,8 @@ def __init__( self.version = None self.repo_configuration = kwargs.get('repo_configuration', None) self.global_parameters = kwargs.get('global_parameters', None) + self.encryption = kwargs.get('encryption', None) + self.public_network_access = kwargs.get('public_network_access', None) class FactoryRepoConfiguration(msrest.serialization.Model): @@ -13336,17 +14252,19 @@ class FactoryIdentity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. - Default value: "SystemAssigned". - :vartype type: str + :param type: Required. The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned". + :type type: str or ~data_factory_management_client.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str + :param user_assigned_identities: List of user assigned identities for the factory. + :type user_assigned_identities: dict[str, object] """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'principal_id': {'readonly': True}, 'tenant_id': {'readonly': True}, } @@ -13355,17 +14273,18 @@ class FactoryIdentity(msrest.serialization.Model): 'type': {'key': 'type', 'type': 'str'}, 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, } - type = "SystemAssigned" - def __init__( self, **kwargs ): super(FactoryIdentity, self).__init__(**kwargs) + self.type = kwargs['type'] self.principal_id = None self.tenant_id = None + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) class FactoryListResponse(msrest.serialization.Model): @@ -16761,6 +17680,9 @@ class HttpLinkedService(LinkedService): :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :type password: ~data_factory_management_client.models.SecretBase + :param auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :type auth_headers: object :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. 
For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression @@ -16796,6 +17718,7 @@ class HttpLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, @@ -16812,6 +17735,7 @@ def __init__( self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.auth_headers = kwargs.get('auth_headers', None) self.embedded_cert_data = kwargs.get('embedded_cert_data', None) self.cert_thumbprint = kwargs.get('cert_thumbprint', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -17446,43 +18370,6 @@ def __init__( self.query = kwargs.get('query', None) -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ImportSettings' # type: str - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -18279,6 +19166,9 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): include: "Basic", "Standard", "Premium", "PremiumRS". :type catalog_pricing_tier: str or ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogPricingTier + :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to + support SSISDB failover. 
+ :type dual_standby_pair_name: str """ _validation = { @@ -18291,6 +19181,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, } def __init__( @@ -18303,6 +19194,7 @@ def __init__( self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) self.catalog_admin_password = kwargs.get('catalog_admin_password', None) self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + self.dual_standby_pair_name = kwargs.get('dual_standby_pair_name', None) class IntegrationRuntimeSsisProperties(msrest.serialization.Model): @@ -19323,8 +20215,74 @@ def __init__( self.properties = kwargs['properties'] +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) + self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) + self.log_location_settings = kwargs['log_location_settings'] + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. 
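A minimal sketch of how the new LogSettings/LogLocationSettings pair added above might be composed in place of the now-deprecated LogStorageSettings. The import path, the LinkedServiceReference constructor, and the "LogStore" name are assumptions for illustration, not part of this change.

from azext_datafactory.vendored_sdks.datafactory import models  # assumed models module

# Illustrative only: enable copy-activity logging and point it at an assumed log store.
log_settings = models.LogSettings(
    enable_copy_activity_log=True,  # typed as object, so a bool or an ADF expression is accepted
    log_location_settings=models.LogLocationSettings(
        linked_service_name=models.LinkedServiceReference(reference_name="LogStore"),
        path="copy-activity-logs",  # folder path inside the log store
    ),
)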
@@ -19634,6 +20592,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :param managed_virtual_network: Managed Virtual Network reference. + :type managed_virtual_network: + ~data_factory_management_client.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. :type compute_properties: ~data_factory_management_client.models.IntegrationRuntimeComputeProperties @@ -19651,6 +20612,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } @@ -19662,6 +20624,7 @@ def __init__( super(ManagedIntegrationRuntime, self).__init__(**kwargs) self.type = 'Managed' # type: str self.state = None + self.managed_virtual_network = kwargs.get('managed_virtual_network', None) self.compute_properties = kwargs.get('compute_properties', None) self.ssis_properties = kwargs.get('ssis_properties', None) @@ -19957,6 +20920,8 @@ class ManagedPrivateEndpointResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -19965,19 +20930,8 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :ivar is_reserved: Denotes whether the managed private endpoint is reserved. - :vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str - :ivar provisioning_state: The managed private endpoint provisioning state. - :vartype provisioning_state: str + :param properties: Required. Managed private endpoint properties. 
+ :type properties: ~data_factory_management_client.models.ManagedPrivateEndpoint """ _validation = { @@ -19985,8 +20939,7 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, - 'is_reserved': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { @@ -19994,12 +20947,7 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'connection_state': {'key': 'properties.connectionState', 'type': 'ConnectionStateProperties'}, - 'fqdns': {'key': 'properties.fqdns', 'type': '[str]'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'is_reserved': {'key': 'properties.isReserved', 'type': 'bool'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'}, } def __init__( @@ -20007,12 +20955,7 @@ def __init__( **kwargs ): super(ManagedPrivateEndpointResource, self).__init__(**kwargs) - self.connection_state = kwargs.get('connection_state', None) - self.fqdns = kwargs.get('fqdns', None) - self.group_id = kwargs.get('group_id', None) - self.is_reserved = None - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.provisioning_state = None + self.properties = kwargs['properties'] class ManagedVirtualNetwork(msrest.serialization.Model): @@ -20079,6 +21022,40 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class ManagedVirtualNetworkReference(msrest.serialization.Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default value: + "ManagedVirtualNetworkReference". + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__( + self, + **kwargs + ): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = kwargs['reference_name'] + + class ManagedVirtualNetworkResource(SubResource): """Managed Virtual Network resource type. @@ -20167,7 +21144,7 @@ def __init__( self.script = kwargs.get('script', None) -class MariaDBLinkedService(LinkedService): +class MariaDbLinkedService(LinkedService): """MariaDB server linked service. All required parameters must be populated in order to send to Azure. @@ -20216,14 +21193,14 @@ def __init__( self, **kwargs ): - super(MariaDBLinkedService, self).__init__(**kwargs) + super(MariaDbLinkedService, self).__init__(**kwargs) self.type = 'MariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MariaDBSource(TabularSource): +class MariaDbSource(TabularSource): """A copy activity MariaDB server source. 
All required parameters must be populated in order to send to Azure. @@ -20272,12 +21249,12 @@ def __init__( self, **kwargs ): - super(MariaDBSource, self).__init__(**kwargs) + super(MariaDbSource, self).__init__(**kwargs) self.type = 'MariaDBSource' # type: str self.query = kwargs.get('query', None) -class MariaDBTableDataset(Dataset): +class MariaDbTableDataset(Dataset): """MariaDB server dataset. All required parameters must be populated in order to send to Azure. @@ -20330,7 +21307,7 @@ def __init__( self, **kwargs ): - super(MariaDBTableDataset, self).__init__(**kwargs) + super(MariaDbTableDataset, self).__init__(**kwargs) self.type = 'MariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -20761,7 +21738,189 @@ def __init__( self.table_name = kwargs.get('table_name', None) -class MongoDBCollectionDataset(Dataset): +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = kwargs['collection'] + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasLinkedService, self).__init__(**kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] + + +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. 
+ :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasSource, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -20816,12 +21975,12 @@ def __init__( self, **kwargs ): - super(MongoDBCollectionDataset, self).__init__(**kwargs) + super(MongoDbCollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbCollection' # type: str self.collection_name = kwargs['collection_name'] -class MongoDBCursorMethodsProperties(msrest.serialization.Model): +class MongoDbCursorMethodsProperties(msrest.serialization.Model): """Cursor methods for Mongodb query. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -20856,7 +22015,7 @@ def __init__( self, **kwargs ): - super(MongoDBCursorMethodsProperties, self).__init__(**kwargs) + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.project = kwargs.get('project', None) self.sort = kwargs.get('sort', None) @@ -20864,7 +22023,7 @@ def __init__( self.limit = kwargs.get('limit', None) -class MongoDBLinkedService(LinkedService): +class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. @@ -20888,7 +22047,7 @@ class MongoDBLinkedService(LinkedService): :param authentication_type: The authentication type to be used to connect to the MongoDB database. 
Possible values include: "Basic", "Anonymous". :type authentication_type: str or - ~data_factory_management_client.models.MongoDBAuthenticationType + ~data_factory_management_client.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). :type database_name: object @@ -20944,7 +22103,7 @@ def __init__( self, **kwargs ): - super(MongoDBLinkedService, self).__init__(**kwargs) + super(MongoDbLinkedService, self).__init__(**kwargs) self.type = 'MongoDb' # type: str self.server = kwargs['server'] self.authentication_type = kwargs.get('authentication_type', None) @@ -20958,7 +22117,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MongoDBSource(CopySource): +class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -21003,13 +22162,13 @@ def __init__( self, **kwargs ): - super(MongoDBSource, self).__init__(**kwargs) + super(MongoDbSource, self).__init__(**kwargs) self.type = 'MongoDbSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) -class MongoDBV2CollectionDataset(Dataset): +class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -21064,12 +22223,12 @@ def __init__( self, **kwargs ): - super(MongoDBV2CollectionDataset, self).__init__(**kwargs) + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbV2Collection' # type: str self.collection = kwargs['collection'] -class MongoDBV2LinkedService(LinkedService): +class MongoDbV2LinkedService(LinkedService): """Linked service for MongoDB data source. All required parameters must be populated in order to send to Azure. @@ -21116,13 +22275,13 @@ def __init__( self, **kwargs ): - super(MongoDBV2LinkedService, self).__init__(**kwargs) + super(MongoDbV2LinkedService, self).__init__(**kwargs) self.type = 'MongoDbV2' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] -class MongoDBV2Source(CopySource): +class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -21146,7 +22305,7 @@ class MongoDBV2Source(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. 
@@ -21171,7 +22330,7 @@ class MongoDBV2Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -21181,7 +22340,7 @@ def __init__( self, **kwargs ): - super(MongoDBV2Source, self).__init__(**kwargs) + super(MongoDbV2Source, self).__init__(**kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) @@ -21190,7 +22349,7 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class MySQLLinkedService(LinkedService): +class MySqlLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -21239,14 +22398,14 @@ def __init__( self, **kwargs ): - super(MySQLLinkedService, self).__init__(**kwargs) + super(MySqlLinkedService, self).__init__(**kwargs) self.type = 'MySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MySQLSource(TabularSource): +class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. @@ -21294,12 +22453,12 @@ def __init__( self, **kwargs ): - super(MySQLSource, self).__init__(**kwargs) + super(MySqlSource, self).__init__(**kwargs) self.type = 'MySqlSource' # type: str self.query = kwargs.get('query', None) -class MySQLTableDataset(Dataset): +class MySqlTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. @@ -21352,7 +22511,7 @@ def __init__( self, **kwargs ): - super(MySQLTableDataset, self).__init__(**kwargs) + super(MySqlTableDataset, self).__init__(**kwargs) self.type = 'MySqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -21475,7 +22634,7 @@ class NetezzaSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.NetezzaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Netezza source partitioning. 
:type partition_settings: ~data_factory_management_client.models.NetezzaPartitionSettings """ @@ -21493,7 +22652,7 @@ class NetezzaSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } @@ -21608,6 +22767,9 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. :type password: ~data_factory_management_client.models.SecretBase + :param auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :type auth_headers: object :param tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). :type tenant: object @@ -21624,7 +22786,7 @@ class ODataLinkedService(LinkedService): :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAADServicePrincipalCredentialType + ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_key: ~data_factory_management_client.models.SecretBase @@ -21659,6 +22821,7 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, @@ -21680,6 +22843,7 @@ def __init__( self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.auth_headers = kwargs.get('auth_headers', None) self.tenant = kwargs.get('tenant', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) @@ -22495,6 +23659,200 @@ def __init__( self.metric_specifications = kwargs.get('metric_specifications', None) +class OracleCloudStorageLinkedService(LinkedService): + """Linked service for Oracle Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. 
+ :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access + Management (IAM) user. + :type secret_access_key: ~data_factory_management_client.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OracleCloudStorageLinkedService, self).__init__(**kwargs) + self.type = 'OracleCloudStorage' # type: str + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class OracleCloudStorageLocation(DatasetLocation): + """The location of Oracle Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Oracle Cloud Storage. 
Type: string (or Expression with + resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OracleCloudStorageLocation, self).__init__(**kwargs) + self.type = 'OracleCloudStorageLocation' # type: str + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + + +class OracleCloudStorageReadSettings(StoreReadSettings): + """Oracle Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OracleCloudStorageReadSettings, self).__init__(**kwargs) + self.type = 'OracleCloudStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + class OracleLinkedService(LinkedService): """Oracle database. @@ -22861,7 +24219,7 @@ class OracleSource(CopySource): :type query_timeout: object :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.OraclePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -22881,7 +24239,7 @@ class OracleSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -22997,7 +24355,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". 
+ :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -23092,6 +24450,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -23107,6 +24467,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -23116,6 +24477,7 @@ def __init__( super(OrcSink, self).__init__(**kwargs) self.type = 'OrcSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class OrcSource(CopySource): @@ -23168,6 +24530,46 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OrcWriteSettings, self).__init__(**kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -23258,7 +24660,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -23353,6 +24755,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. 
+ :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -23368,6 +24772,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -23377,6 +24782,7 @@ def __init__( super(ParquetSink, self).__init__(**kwargs) self.type = 'ParquetSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class ParquetSource(CopySource): @@ -23429,6 +24835,46 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -23847,6 +25293,44 @@ def __init__( self.query = kwargs.get('query', None) +class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): + """Pipeline ElapsedTime Metric Policy. + + :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. + :type duration: object + """ + + _attribute_map = { + 'duration': {'key': 'duration', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + self.duration = kwargs.get('duration', None) + + +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + class PipelineListResponse(msrest.serialization.Model): """A list of pipeline resources. 
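ParquetSink and OrcSink each gained a format_settings field backed by the new ParquetWriteSettings/OrcWriteSettings models above. A small sketch of the Parquet variant, assuming the vendored models module and illustrative values:

from azext_datafactory.vendored_sdks.datafactory import models  # assumed models module

# Illustrative only: cap each written file at 1,000,000 rows and prefix the generated file names.
parquet_sink = models.ParquetSink(
    format_settings=models.ParquetWriteSettings(
        max_rows_per_file=1000000,
        file_name_prefix="part",  # applied when copying from a non-file store without partitionOptions
    ),
)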
@@ -23876,6 +25360,26 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class PipelinePolicy(msrest.serialization.Model): + """Pipeline Policy. + + :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :type elapsed_time_metric: + ~data_factory_management_client.models.PipelineElapsedTimeMetricPolicy + """ + + _attribute_map = { + 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelinePolicy, self).__init__(**kwargs) + self.elapsed_time_metric = kwargs.get('elapsed_time_metric', None) + + class PipelineReference(msrest.serialization.Model): """Pipeline reference type. @@ -23943,8 +25447,11 @@ class PipelineResource(SubResource): :type annotations: list[object] :param run_dimensions: Dimensions emitted by Pipeline. :type run_dimensions: dict[str, object] - :param name_folder_name: The name of the folder that this Pipeline is in. - :type name_folder_name: str + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :type folder: ~data_factory_management_client.models.PipelineFolder + :param policy: Pipeline Policy. + :type policy: ~data_factory_management_client.models.PipelinePolicy """ _validation = { @@ -23968,7 +25475,8 @@ class PipelineResource(SubResource): 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'name_folder_name': {'key': 'folder.name', 'type': 'str'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, } def __init__( @@ -23984,7 +25492,8 @@ def __init__( self.concurrency = kwargs.get('concurrency', None) self.annotations = kwargs.get('annotations', None) self.run_dimensions = kwargs.get('run_dimensions', None) - self.name_folder_name = kwargs.get('name_folder_name', None) + self.folder = kwargs.get('folder', None) + self.policy = kwargs.get('policy', None) class PipelineRun(msrest.serialization.Model): @@ -24184,7 +25693,7 @@ def __init__( self.use_type_default = kwargs.get('use_type_default', None) -class PostgreSQLLinkedService(LinkedService): +class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. @@ -24233,14 +25742,14 @@ def __init__( self, **kwargs ): - super(PostgreSQLLinkedService, self).__init__(**kwargs) + super(PostgreSqlLinkedService, self).__init__(**kwargs) self.type = 'PostgreSql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PostgreSQLSource(TabularSource): +class PostgreSqlSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. @@ -24288,12 +25797,12 @@ def __init__( self, **kwargs ): - super(PostgreSQLSource, self).__init__(**kwargs) + super(PostgreSqlSource, self).__init__(**kwargs) self.type = 'PostgreSqlSource' # type: str self.query = kwargs.get('query', None) -class PostgreSQLTableDataset(Dataset): +class PostgreSqlTableDataset(Dataset): """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. 
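PipelineResource now models the pipeline folder as a properties.folder sub-object (PipelineFolder) instead of the flattened folder.name key, and adds a properties.policy (PipelinePolicy). A hedged sketch of the reshaped fields; the folder name and duration are made-up values:

from azext_datafactory.vendored_sdks.datafactory import models  # assumed models module

# Illustrative only: place the pipeline in an "ingestion" folder and fire an Azure Monitor
# metric if a run exceeds one hour.
pipeline = models.PipelineResource(
    folder=models.PipelineFolder(name="ingestion"),
    policy=models.PipelinePolicy(
        elapsed_time_metric=models.PipelineElapsedTimeMetricPolicy(duration="01:00:00"),
    ),
)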
@@ -24354,7 +25863,7 @@ def __init__( self, **kwargs ): - super(PostgreSQLTableDataset, self).__init__(**kwargs) + super(PostgreSqlTableDataset, self).__init__(**kwargs) self.type = 'PostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) @@ -24598,6 +26107,262 @@ def __init__( self.query = kwargs.get('query', None) +class PrivateEndpointConnectionListResponse(msrest.serialization.Model): + """A list of linked service resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Private Endpoint Connections. + :type value: list[~data_factory_management_client.models.PrivateEndpointConnectionResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class PrivateEndpointConnectionResource(SubResource): + """Private Endpoint Connection ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~data_factory_management_client.models.RemotePrivateEndpointConnection + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): + """A request to approve or reject a private endpoint connection. + + :param private_link_service_connection_state: The state of a private link connection. + :type private_link_service_connection_state: + ~data_factory_management_client.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + + +class PrivateLinkConnectionApprovalRequestResource(SubResource): + """Private Endpoint Connection Approval ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. 
+ :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequest + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkConnectionState(msrest.serialization.Model): + """The state of a private link connection. + + :param status: Status of a private link connection. + :type status: str + :param description: Description of a private link connection. + :type description: str + :param actions_required: ActionsRequired for a private link connection. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = kwargs.get('actions_required', None) + + +class PrivateLinkResource(SubResource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~data_factory_management_client.models.PrivateLinkResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkResourceProperties(msrest.serialization.Model): + """Properties of a private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar group_id: GroupId of a private link resource. + :vartype group_id: str + :ivar required_members: RequiredMembers of a private link resource. + :vartype required_members: list[str] + :ivar required_zone_names: RequiredZoneNames of a private link resource. 
+ :vartype required_zone_names: list[str] + """ + + _validation = { + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + 'required_zone_names': {'readonly': True}, + } + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourceProperties, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = None + + +class PrivateLinkResourcesWrapper(msrest.serialization.Model): + """Wrapper for a collection of private link resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. + :type value: list[~data_factory_management_client.models.PrivateLinkResource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) + self.value = kwargs['value'] + + class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. @@ -25064,6 +26829,40 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class RemotePrivateEndpointConnection(msrest.serialization.Model): + """A remote private endpoint connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: + :vartype provisioning_state: str + :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :type private_endpoint: ~data_factory_management_client.models.ArmIdWrapper + :param private_link_service_connection_state: The state of a private link connection. + :type private_link_service_connection_state: + ~data_factory_management_client.models.PrivateLinkConnectionState + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + **kwargs + ): + super(RemotePrivateEndpointConnection, self).__init__(**kwargs) + self.provisioning_state = None + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + + class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -25433,6 +27232,9 @@ class RestServiceLinkedService(LinkedService): :type user_name: object :param password: The password used in Basic authentication type. :type password: ~data_factory_management_client.models.SecretBase + :param auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :type auth_headers: object :param service_principal_id: The application's client ID used in AadServicePrincipal authentication type. 
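# Illustrative usage sketch for the new private-link approval models defined above
# (PrivateLinkConnectionState, PrivateLinkConnectionApprovalRequest and its ARM
# resource wrapper). Assumes these classes are re-exported from the vendored SDK's
# ``models`` package like the existing generated models.
from azext_datafactory.vendored_sdks.datafactory.models import (
    PrivateLinkConnectionApprovalRequest,
    PrivateLinkConnectionApprovalRequestResource,
    PrivateLinkConnectionState,
)

approval = PrivateLinkConnectionApprovalRequestResource(
    properties=PrivateLinkConnectionApprovalRequest(
        private_link_service_connection_state=PrivateLinkConnectionState(
            status="Approved",
            description="Approved by the factory administrator",
            actions_required="",
        )
    )
)
# serialize() emits the camelCase wire names from the _attribute_map, e.g.
# {"properties": {"privateLinkServiceConnectionState": {"status": "Approved", ...}}}
payload = approval.serialize()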
:type service_principal_id: object @@ -25472,6 +27274,7 @@ class RestServiceLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, @@ -25491,6 +27294,7 @@ def __init__( self.authentication_type = kwargs['authentication_type'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.auth_headers = kwargs.get('auth_headers', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) @@ -25537,13 +27341,9 @@ class RestSink(CopySink): :type http_request_timeout: object :param request_interval: The time to await before sending next request, in milliseconds. :type request_interval: object - :param compression_type: Compression Type to Send data in compressed format with Optimal - Compression Level, Default is None. And The Only Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an Object before calling - the rest endpoint , Default is false. ex: if true request content sample format is { rows:[]} - else the format is []. - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -25562,8 +27362,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__( @@ -25576,8 +27375,7 @@ def __init__( self.additional_headers = kwargs.get('additional_headers', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) - self.compression_type = kwargs.get('compression_type', None) - self.wrap_request_json_in_an_object = kwargs.get('wrap_request_json_in_an_object', None) + self.http_compression_type = kwargs.get('http_compression_type', None) class RestSource(CopySource): @@ -27248,7 +29046,7 @@ class SapHanaSource(TabularSource): :type packet_size: object :param partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SapHanaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. 
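# Illustrative sketch of the reworked RestSink compression setting shown above:
# the former compression_type / wrap_request_json_in_an_object pair is replaced by a
# single http_compression_type ("Gzip" is the only supported value, default is None).
# The import path is assumed, as for the other vendored models.
from azext_datafactory.vendored_sdks.datafactory.models import RestSink

sink = RestSink(
    additional_headers={"x-custom": "value"},
    http_request_timeout="00:01:40",
    request_interval=10,
    http_compression_type="Gzip",
)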
:type partition_settings: ~data_factory_management_client.models.SapHanaPartitionSettings @@ -27268,7 +29066,7 @@ class SapHanaSource(TabularSource): 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } @@ -27858,7 +29656,7 @@ class SapTableSource(TabularSource): :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: str or ~data_factory_management_client.models.SapTablePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP table source partitioning. :type partition_settings: ~data_factory_management_client.models.SapTablePartitionSettings @@ -27883,7 +29681,7 @@ class SapTableSource(TabularSource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -28791,7 +30589,7 @@ class SftpServerLinkedService(LinkedService): Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. :type port: object :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "SshPublicKey". + Possible values include: "Basic", "SshPublicKey", "MultiFactor". :type authentication_type: str or ~data_factory_management_client.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). @@ -29861,7 +31659,45 @@ def __init__( self.query = kwargs.get('query', None) -class SQLDWSink(CopySink): +class SqlAlwaysEncryptedProperties(msrest.serialization.Model): + """Sql always encrypted properties. + + All required parameters must be populated in order to send to Azure. + + :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. + Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipal", "ManagedIdentity". + :type always_encrypted_akv_auth_type: str or + ~data_factory_management_client.models.SqlAlwaysEncryptedAkvAuthType + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Azure Key Vault authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure Key Vault. 
+ :type service_principal_key: ~data_factory_management_client.models.SecretBase + """ + + _validation = { + 'always_encrypted_akv_auth_type': {'required': True}, + } + + _attribute_map = { + 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, + 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + self.always_encrypted_akv_auth_type = kwargs['always_encrypted_akv_auth_type'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + + +class SqlDwSink(CopySink): """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. @@ -29899,7 +31735,7 @@ class SQLDWSink(CopySink): :type allow_copy_command: object :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~data_factory_management_client.models.DWCopyCommandSettings + :type copy_command_settings: ~data_factory_management_client.models.DwCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object @@ -29921,7 +31757,7 @@ class SQLDWSink(CopySink): 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, - 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, } @@ -29929,7 +31765,7 @@ def __init__( self, **kwargs ): - super(SQLDWSink, self).__init__(**kwargs) + super(SqlDwSink, self).__init__(**kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) self.allow_poly_base = kwargs.get('allow_poly_base', None) @@ -29939,7 +31775,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLDWSource(TabularSource): +class SqlDwSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. @@ -29977,9 +31813,9 @@ class SQLDWSource(TabularSource): :type stored_procedure_parameters: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -29997,15 +31833,15 @@ class SQLDWSource(TabularSource): 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLDWSource, self).__init__(**kwargs) + super(SqlDwSource, self).__init__(**kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -30014,7 +31850,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SQLMiSink(CopySink): +class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. @@ -30083,7 +31919,7 @@ def __init__( self, **kwargs ): - super(SQLMiSink, self).__init__(**kwargs) + super(SqlMiSink, self).__init__(**kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -30093,7 +31929,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLMiSource(TabularSource): +class SqlMiSource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -30132,9 +31968,9 @@ class SQLMiSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -30153,15 +31989,15 @@ class SQLMiSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLMiSource, self).__init__(**kwargs) + super(SqlMiSource, self).__init__(**kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -30171,7 +32007,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SQLPartitionSettings(msrest.serialization.Model): +class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be @@ -30200,13 +32036,13 @@ def __init__( self, **kwargs ): - super(SQLPartitionSettings, self).__init__(**kwargs) + super(SqlPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class SQLServerLinkedService(LinkedService): +class SqlServerLinkedService(LinkedService): """SQL Server linked service. All required parameters must be populated in order to send to Azure. @@ -30236,6 +32072,9 @@ class SQLServerLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~data_factory_management_client.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -30254,21 +32093,23 @@ class SQLServerLinkedService(LinkedService): 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( self, **kwargs ): - super(SQLServerLinkedService, self).__init__(**kwargs) + super(SqlServerLinkedService, self).__init__(**kwargs) self.type = 'SqlServer' # type: str self.connection_string = kwargs['connection_string'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) -class SQLServerSink(CopySink): +class SqlServerSink(CopySink): """A copy activity SQL server sink. 
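# Illustrative sketch of the new always_encrypted_settings property on
# SqlServerLinkedService, carrying the SqlAlwaysEncryptedProperties model defined above.
# SecureString (a SecretBase subtype) and the import path are assumptions.
from azext_datafactory.vendored_sdks.datafactory.models import (
    SecureString,
    SqlAlwaysEncryptedProperties,
    SqlServerLinkedService,
)

linked_service = SqlServerLinkedService(
    connection_string="Server=myserver;Database=mydb;Integrated Security=False;",
    user_name="sqladmin",
    password=SecureString(value="<placeholder-password>"),
    always_encrypted_settings=SqlAlwaysEncryptedProperties(
        always_encrypted_akv_auth_type="ServicePrincipal",
        service_principal_id="<app-client-id>",
        service_principal_key=SecureString(value="<app-secret>"),
    ),
)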
All required parameters must be populated in order to send to Azure. @@ -30337,7 +32178,7 @@ def __init__( self, **kwargs ): - super(SQLServerSink, self).__init__(**kwargs) + super(SqlServerSink, self).__init__(**kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -30347,7 +32188,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLServerSource(TabularSource): +class SqlServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -30386,9 +32227,9 @@ class SQLServerSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -30407,15 +32248,15 @@ class SQLServerSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLServerSource, self).__init__(**kwargs) + super(SqlServerSource, self).__init__(**kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -30425,7 +32266,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SQLServerStoredProcedureActivity(ExecutionActivity): +class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -30479,13 +32320,13 @@ def __init__( self, **kwargs ): - super(SQLServerStoredProcedureActivity, self).__init__(**kwargs) + super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = kwargs['stored_procedure_name'] self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) -class SQLServerTableDataset(Dataset): +class SqlServerTableDataset(Dataset): """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. 
@@ -30547,14 +32388,14 @@ def __init__( self, **kwargs ): - super(SQLServerTableDataset, self).__init__(**kwargs) + super(SqlServerTableDataset, self).__init__(**kwargs) self.type = 'SqlServerTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class SQLSink(CopySink): +class SqlSink(CopySink): """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. @@ -30623,7 +32464,7 @@ def __init__( self, **kwargs ): - super(SQLSink, self).__init__(**kwargs) + super(SqlSink, self).__init__(**kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -30633,7 +32474,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLSource(TabularSource): +class SqlSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. @@ -30674,9 +32515,9 @@ class SQLSource(TabularSource): :type isolation_level: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -30695,15 +32536,15 @@ class SQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLSource, self).__init__(**kwargs) + super(SqlSource, self).__init__(**kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -31999,6 +33840,74 @@ def __init__( self.type_conversion_settings = kwargs.get('type_conversion_settings', None) +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarReadSettings, self).__init__(**kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32131,7 +34040,7 @@ class TeradataSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.TeradataPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for teradata source partitioning. :type partition_settings: ~data_factory_management_client.models.TeradataPartitionSettings @@ -32150,7 +34059,7 @@ class TeradataSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } @@ -32688,7 +34597,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. 
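# Illustrative sketch for the new tar compression read settings defined above. Both
# models add only preserve_compression_file_name_as_folder on top of
# CompressionReadSettings; the import path is assumed, as for the other models.
from azext_datafactory.vendored_sdks.datafactory.models import (
    TarGZipReadSettings,
    TarReadSettings,
)

tgz_settings = TarGZipReadSettings(preserve_compression_file_name_as_folder=False)
tar_settings = TarReadSettings(preserve_compression_file_name_as_folder=True)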
@@ -33441,17 +35350,24 @@ class WebActivityAuthentication(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Web activity authentication (Basic/ClientCertificate/MSI). + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str - :param pfx: Base64-encoded contents of a PFX file. + :param pfx: Base64-encoded contents of a PFX file or Certificate when used for + ServicePrincipal. :type pfx: ~data_factory_management_client.models.SecretBase - :param username: Web activity authentication user name for basic authentication. - :type username: str - :param password: Password for the PFX file or basic authentication. + :param username: Web activity authentication user name for basic authentication or ClientID + when used for ServicePrincipal. Type: string (or Expression with resultType string). + :type username: object + :param password: Password for the PFX file or basic authentication / Secret when used for + ServicePrincipal. :type password: ~data_factory_management_client.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI - Authentication. - :type resource: str + Authentication. Type: string (or Expression with resultType string). + :type resource: object + :param user_tenant: TenantId for which Azure Auth token will be requested when using + ServicePrincipal Authentication. Type: string (or Expression with resultType string). + :type user_tenant: object """ _validation = { @@ -33461,9 +35377,10 @@ class WebActivityAuthentication(msrest.serialization.Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'username': {'key': 'username', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, 'password': {'key': 'password', 'type': 'SecretBase'}, - 'resource': {'key': 'resource', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'object'}, + 'user_tenant': {'key': 'userTenant', 'type': 'object'}, } def __init__( @@ -33476,6 +35393,7 @@ def __init__( self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.resource = kwargs.get('resource', None) + self.user_tenant = kwargs.get('user_tenant', None) class WebLinkedServiceTypeProperties(msrest.serialization.Model): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index 060634eb408..f6ebc8328ae 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -378,7 +378,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
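# Illustrative sketch of the ServicePrincipal option added to WebActivityAuthentication
# above: username doubles as the client ID, password holds the client secret, and
# user_tenant carries the AAD tenant. SecureString and the import path are assumptions.
from azext_datafactory.vendored_sdks.datafactory.models import (
    SecureString,
    WebActivityAuthentication,
)

auth = WebActivityAuthentication(
    type="ServicePrincipal",
    username="<app-client-id>",
    password=SecureString(value="<app-secret>"),
    resource="https://management.azure.com/",
    user_tenant="<tenant-id>",
)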
Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AmazonS3CompatibleLinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
@@ -411,7 +411,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 
'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -547,7 +547,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, 
ZohoObjectDataset. + sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
@@ -593,7 +593,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -693,7 +693,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
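Editorial note (not part of the diff): a minimal sketch of how the re-cased subtype map above is exercised. It assumes the generated models are importable from ``azext_datafactory.vendored_sdks.datafactory.models`` and uses an illustrative payload; the wire-level discriminator values are unchanged, only the Python class names were re-cased::

    from azext_datafactory.vendored_sdks.datafactory.models import Dataset, AzureSqlTableDataset

    payload = {
        "type": "AzureSqlTable",  # wire discriminator; untouched by this change
        "linkedServiceName": {"referenceName": "exampleLinkedService", "type": "LinkedServiceReference"},
        "typeProperties": {"tableName": "dbo.MyTable"},
    }

    # msrest resolves the concrete class through Dataset._subtype_map; the result is
    # now AzureSqlTableDataset (previously AzureSQLTableDataset).
    dataset = Dataset.deserialize(payload)
    assert isinstance(dataset, AzureSqlTableDataset)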
@@ -726,7 +726,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -750,7 +750,7 @@ class TabularSource(CopySource): """Copy activity sources of tabular type. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySQLSource, AzurePostgreSQLSource, AzureSQLSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySQLSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSQLSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SQLDWSource, SQLMiSource, SQLServerSource, SQLSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDbSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDbSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDwSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. All required parameters must be populated in order to send to Azure. 
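Editorial note (not part of the diff): the rename is Python-side only, so serialized payloads keep the same ``type`` strings listed in the subtype map. A hedged sketch, under the same import-path assumption as above::

    from azext_datafactory.vendored_sdks.datafactory.models import SqlServerSource  # previously SQLServerSource

    source = SqlServerSource(sql_reader_query="SELECT 1 AS probe")
    body = source.serialize()
    assert body["type"] == "SqlServerSource"  # wire value matches the 'SqlServerSource' key above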
@@ -791,7 +791,7 @@ class TabularSource(CopySource): } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySQLSource', 'AzurePostgreSqlSource': 'AzurePostgreSQLSource', 'AzureSqlSource': 'AzureSQLSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySQLSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSQLSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SQLDWSource', 'SqlMISource': 'SQLMiSource', 'SqlServerSource': 'SQLServerSource', 'SqlSource': 'SQLSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDbSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDbSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 
'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDwSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} } def __init__( @@ -1107,6 +1107,331 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema +class AmazonS3CompatibleLinkedService(LinkedService): + """Linked service for Amazon S3 Compatible. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access + Management (IAM) user. + :type secret_access_key: ~data_factory_management_client.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Amazon S3 Compatible + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style + access. Default value is false. Type: boolean (or Expression with resultType boolean). + :type force_path_style: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'force_path_style': {'key': 'typeProperties.forcePathStyle', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_key_id: Optional[object] = None, + secret_access_key: Optional["SecretBase"] = None, + service_url: Optional[object] = None, + force_path_style: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AmazonS3CompatibleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AmazonS3Compatible' # type: str + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.force_path_style = force_path_style + self.encrypted_credential = encrypted_credential + + +class DatasetLocation(msrest.serialization.Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3CompatibleLocation, AmazonS3Location, AzureBlobFsLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, OracleCloudStorageLocation, SftpLocation. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3CompatibleLocation': 'AmazonS3CompatibleLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFsLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'OracleCloudStorageLocation': 'OracleCloudStorageLocation', 'SftpLocation': 'SftpLocation'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetLocation' # type: str + self.folder_path = folder_path + self.file_name = file_name + + +class AmazonS3CompatibleLocation(DatasetLocation): + """The location of Amazon S3 Compatible dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with + resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + bucket_name: Optional[object] = None, + version: Optional[object] = None, + **kwargs + ): + super(AmazonS3CompatibleLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AmazonS3CompatibleLocation' # type: str + self.bucket_name = bucket_name + self.version = version + + +class StoreReadSettings(msrest.serialization.Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AmazonS3CompatibleReadSettings, AmazonS3ReadSettings, AzureBlobFsReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, OracleCloudStorageReadSettings, SftpReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3CompatibleReadSettings': 'AmazonS3CompatibleReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFsReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'OracleCloudStorageReadSettings': 'OracleCloudStorageReadSettings', 'SftpReadSettings': 'SftpReadSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + **kwargs + ): + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'StoreReadSettings' # type: str + self.max_concurrent_connections = max_concurrent_connections + + +class AmazonS3CompatibleReadSettings(StoreReadSettings): + """Amazon S3 Compatible read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression + with resultType string). 
+ :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + file_list_path: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AmazonS3CompatibleReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) 
object or a set of S3 objects. @@ -1235,6 +1560,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -1245,6 +1573,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~data_factory_management_client.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1262,9 +1592,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1276,71 +1608,24 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, access_key_id: Optional[object] = None, secret_access_key: Optional["SecretBase"] = None, service_url: Optional[object] = None, + session_token: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url + self.session_token = session_token self.encrypted_credential = encrypted_credential -class DatasetLocation(msrest.serialization.Model): - """Dataset location. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3Location, AzureBlobFsLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, SftpLocation. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFsLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - **kwargs - ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'DatasetLocation' # type: str - self.folder_path = folder_path - self.file_name = file_name - - class AmazonS3Location(DatasetLocation): """The location of amazon S3 dataset. @@ -1394,53 +1679,8 @@ def __init__( self.version = version -class StoreReadSettings(msrest.serialization.Model): - """Connector read setting. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonS3ReadSettings, AzureBlobFsReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, SftpReadSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFsReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - **kwargs - ): - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'StoreReadSettings' # type: str - self.max_concurrent_connections = max_concurrent_connections - - class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. + """Amazon S3 read settings. All required parameters must be populated in order to send to Azure. @@ -1593,6 +1833,31 @@ def __init__( self.value = value +class ArmIdWrapper(msrest.serialization.Model): + """A wrapper for an ARM resource id. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmIdWrapper, self).__init__(**kwargs) + self.id = None + + class AvroDataset(Dataset): """Avro dataset. @@ -1766,7 +2031,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. 
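Editorial note (not part of the diff): a minimal sketch of the temporary-security-credential support added to AmazonS3LinkedService above (``authentication_type`` plus ``session_token``). SecureString is used purely for illustration and the values are placeholders::

    from azext_datafactory.vendored_sdks.datafactory.models import AmazonS3LinkedService, SecureString

    s3_ls = AmazonS3LinkedService(
        authentication_type="TemporarySecurityCredentials",  # default is AccessKey
        access_key_id="<access-key-id>",
        secret_access_key=SecureString(value="<secret-access-key>"),
        session_token=SecureString(value="<session-token>"),
    )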
@@ -1807,7 +2072,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1958,7 +2223,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. 
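Editorial note (not part of the diff): OrcWriteSettings and ParquetWriteSettings are now dispatched through the FormatWriteSettings subtype map, so a write-settings payload deserializes straight to the concrete class. A hedged sketch, same import-path assumption as above::

    from azext_datafactory.vendored_sdks.datafactory.models import FormatWriteSettings, ParquetWriteSettings

    settings = FormatWriteSettings.deserialize(
        {"type": "ParquetWriteSettings", "maxRowsPerFile": 1000000}
    )
    assert isinstance(settings, ParquetWriteSettings)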
@@ -1979,7 +2244,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -2007,6 +2272,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -2018,6 +2290,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -2026,12 +2300,16 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AvroWriteSettings' # type: str self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class CustomSetupBase(msrest.serialization.Model): @@ -2867,6 +3145,10 @@ class AzureBlobStorageLinkedService(LinkedService): values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object + :param account_kind: Specify the kind of your storage account. Allowed values are: Storage + (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: + string (or Expression with resultType string). + :type account_kind: str :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
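Editorial note (not part of the diff): a hedged sketch of the new optional ``account_kind`` property on AzureBlobStorageLinkedService; the endpoint and kind below are illustrative placeholders::

    from azext_datafactory.vendored_sdks.datafactory.models import AzureBlobStorageLinkedService

    blob_ls = AzureBlobStorageLinkedService(
        service_endpoint="https://examplestorage.blob.core.windows.net/",
        account_kind="StorageV2",  # Storage, StorageV2, BlobStorage or BlockBlobStorage
    )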
@@ -2893,6 +3175,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, + 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2913,6 +3196,7 @@ def __init__( service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, + account_kind: Optional[str] = None, encrypted_credential: Optional[str] = None, **kwargs ): @@ -2927,6 +3211,7 @@ def __init__( self.service_principal_key = service_principal_key self.tenant = tenant self.azure_cloud_type = azure_cloud_type + self.account_kind = account_kind self.encrypted_credential = encrypted_credential @@ -3119,6 +3404,444 @@ def __init__( self.block_size_in_mb = block_size_in_mb +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType + string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table: Optional[object] = None, + database: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = table + self.database = database + + +class ExportSettings(msrest.serialization.Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ExportSettings' # type: str + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). 
+ :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + domain: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + cluster_id: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = query + self.export_settings = export_settings + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -3140,10 +3863,16 @@ class AzureDatabricksLinkedService(LinkedService): :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to + :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~data_factory_management_client.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :type workspace_resource_id: object :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). :type existing_cluster_id: object @@ -3193,12 +3922,14 @@ class AzureDatabricksLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure clusters based on a user + defined set of rules. Type: string (or Expression with resultType string). 
+ :type policy_id: object """ _validation = { 'type': {'required': True}, 'domain': {'required': True}, - 'access_token': {'required': True}, } _attribute_map = { @@ -3210,6 +3941,8 @@ class AzureDatabricksLinkedService(LinkedService): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, @@ -3223,18 +3956,21 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, } def __init__( self, *, domain: object, - access_token: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + authentication: Optional[object] = None, + workspace_resource_id: Optional[object] = None, existing_cluster_id: Optional[object] = None, instance_pool_id: Optional[object] = None, new_cluster_version: Optional[object] = None, @@ -3248,12 +3984,15 @@ def __init__( new_cluster_init_scripts: Optional[object] = None, new_cluster_enable_elastic_disk: Optional[object] = None, encrypted_credential: Optional[object] = None, + policy_id: Optional[object] = None, **kwargs ): super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDatabricks' # type: str self.domain = domain self.access_token = access_token + self.authentication = authentication + self.workspace_resource_id = workspace_resource_id self.existing_cluster_id = existing_cluster_id self.instance_pool_id = instance_pool_id self.new_cluster_version = new_cluster_version @@ -3267,13 +4006,14 @@ def __init__( self.new_cluster_init_scripts = new_cluster_init_scripts self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential + self.policy_id = policy_id class ExecutionActivity(Activity): """Base class for all execution activities. You probably want to use the sub-classes and not this class directly. 
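# A minimal usage sketch for the relaxed AzureDatabricksLinkedService above: access_token is now
# optional, so a linked service can authenticate with a managed identity via the new
# `authentication` and `workspace_resource_id` properties. The import path and the resource ids
# below are illustrative assumptions, not taken from the diff.
from azext_datafactory.vendored_sdks.datafactory.models import AzureDatabricksLinkedService

databricks_ls = AzureDatabricksLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # still required
    authentication="MSI",                                         # new: managed identity instead of an access token
    workspace_resource_id=(                                       # new: workspace resource id for the REST API
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/exampleResourceGroup"
        "/providers/Microsoft.Databricks/workspaces/exampleWorkspace"
    ),
    existing_cluster_id="0123-456789-abcde123",
    policy_id="ABC123DEF456",                                     # new: optional cluster policy id
)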
Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. All required parameters must be populated in order to send to Azure. @@ -3313,7 +4053,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( @@ -3425,27 
+4165,24 @@ class AzureDataExplorerLinkedService(LinkedService): will be in the format https://:code:``.:code:``.kusto.windows.net. Type: string (or Expression with resultType string). :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). + :param service_principal_id: The ID of the service principal used to authenticate against Azure + Data Explorer. Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. + :param service_principal_key: The key of the service principal used to authenticate against + Kusto. :type service_principal_key: ~data_factory_management_client.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). :type tenant: object """ _validation = { 'type': {'required': True}, 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { @@ -3466,15 +4203,15 @@ def __init__( self, *, endpoint: object, - service_principal_id: object, - service_principal_key: "SecretBase", database: object, - tenant: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, **kwargs ): super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4859,7 +5596,7 @@ def __init__( self.secret_version = secret_version -class AzureMariaDBLinkedService(LinkedService): +class AzureMariaDbLinkedService(LinkedService): """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. @@ -4917,14 +5654,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureMariaDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential -class AzureMariaDBSource(TabularSource): +class AzureMariaDbSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. 
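# A hedged sketch of the loosened AzureDataExplorerLinkedService contract above: only endpoint and
# database remain required, so the service principal fields can be omitted (for example when the
# factory's managed identity authenticates). Import path and cluster URI are illustrative assumptions.
from azext_datafactory.vendored_sdks.datafactory.models import AzureDataExplorerLinkedService

adx_ls = AzureDataExplorerLinkedService(
    endpoint="https://examplecluster.westus.kusto.windows.net",  # still required
    database="exampledb",                                        # still required
    # service_principal_id=..., service_principal_key=..., tenant=... are now optional
)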
@@ -4981,12 +5718,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query -class AzureMariaDBTableDataset(Dataset): +class AzureMariaDbTableDataset(Dataset): """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -5049,7 +5786,7 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureMariaDbTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name @@ -5158,9 +5895,15 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~data_factory_management_client.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). + :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with + resultType string). :type ml_pipeline_id: object + :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string + (or Expression with resultType string). + :type ml_pipeline_endpoint_id: object + :param version: Version of the published Azure ML pipeline endpoint. Type: string (or + Expression with resultType string). + :type version: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). @@ -5170,6 +5913,10 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object + :param data_path_assignments: Dictionary used for changing data path assignments without + retraining. Values will be passed in the dataPathAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :type data_path_assignments: object :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. 
Type: string (or Expression with resultType string). @@ -5184,7 +5931,6 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -5197,8 +5943,11 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } @@ -5207,15 +5956,18 @@ def __init__( self, *, name: str, - ml_pipeline_id: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, + ml_pipeline_id: Optional[object] = None, + ml_pipeline_endpoint_id: Optional[object] = None, + version: Optional[object] = None, experiment_name: Optional[object] = None, ml_pipeline_parameters: Optional[object] = None, + data_path_assignments: Optional[object] = None, ml_parent_run_id: Optional[object] = None, continue_on_step_failure: Optional[object] = None, **kwargs @@ -5223,8 +5975,11 @@ def __init__( super(AzureMlExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = ml_pipeline_id + self.ml_pipeline_endpoint_id = ml_pipeline_endpoint_id + self.version = version self.experiment_name = experiment_name self.ml_pipeline_parameters = ml_pipeline_parameters + self.data_path_assignments = data_path_assignments self.ml_parent_run_id = ml_parent_run_id self.continue_on_step_failure = continue_on_step_failure @@ -5529,7 +6284,7 @@ def __init__( self.linked_service_name = linked_service_name -class AzureMySQLLinkedService(LinkedService): +class AzureMySqlLinkedService(LinkedService): """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. 
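# A hedged sketch of the updated AzureMlExecutePipelineActivity above: ml_pipeline_id is no longer
# required, so the activity can instead target a published pipeline endpoint by id and version and
# re-point data paths without retraining. Import path and ids are illustrative assumptions.
from azext_datafactory.vendored_sdks.datafactory.models import AzureMlExecutePipelineActivity

ml_activity = AzureMlExecutePipelineActivity(
    name="RunTrainingPipeline",
    ml_pipeline_endpoint_id="11111111-2222-3333-4444-555555555555",      # new alternative to ml_pipeline_id
    version="2",                                                         # new: endpoint version
    experiment_name="nightly-training",
    data_path_assignments={"input_data": "datastore/training/2021-01"},  # new: data path overrides
    continue_on_step_failure=False,
)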
@@ -5588,14 +6343,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMySQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AzureMySQLSink(CopySink): +class AzureMySqlSink(CopySink): """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -5652,12 +6407,12 @@ def __init__( pre_copy_script: Optional[object] = None, **kwargs ): - super(AzureMySQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script -class AzureMySQLSource(TabularSource): +class AzureMySqlSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -5713,12 +6468,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzureMySQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query -class AzureMySQLTableDataset(Dataset): +class AzureMySqlTableDataset(Dataset): """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -5787,13 +6542,13 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureMySQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureMySqlTable' # type: str self.table_name = table_name self.table = table -class AzurePostgreSQLLinkedService(LinkedService): +class AzurePostgreSqlLinkedService(LinkedService): """Azure PostgreSQL linked service. 
All required parameters must be populated in order to send to Azure. @@ -5851,14 +6606,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AzurePostgreSQLSink(CopySink): +class AzurePostgreSqlSink(CopySink): """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. @@ -5915,12 +6670,12 @@ def __init__( pre_copy_script: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script -class AzurePostgreSQLSource(TabularSource): +class AzurePostgreSqlSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. @@ -5977,12 +6732,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query -class AzurePostgreSQLTableDataset(Dataset): +class AzurePostgreSqlTableDataset(Dataset): """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. 
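# A small sketch of the casing rename pattern in the surrounding hunks (AzureMySQL* -> AzureMySql*,
# AzurePostgreSQL* -> AzurePostgreSql*): only the Python class names change, while the serialized
# discriminator strings are untouched. Import path assumed; the query is a placeholder.
from azext_datafactory.vendored_sdks.datafactory.models import AzureMySqlSource  # was AzureMySQLSource

source = AzureMySqlSource(query="SELECT * FROM exampletable")
print(source.type)  # -> 'AzureMySqlSource'; the wire-format type string is unchanged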
@@ -6056,7 +6811,7 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzurePostgreSqlTable' # type: str self.table_name = table_name self.table = table @@ -6318,7 +7073,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureSQLDatabaseLinkedService(LinkedService): +class AzureSqlDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -6358,6 +7113,9 @@ class AzureSQLDatabaseLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~data_factory_management_client.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6379,6 +7137,7 @@ class AzureSQLDatabaseLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( @@ -6396,9 +7155,10 @@ def __init__( tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, + always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): - super(AzureSQLDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlDatabase' # type: str self.connection_string = connection_string self.password = password @@ -6407,9 +7167,10 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings -class AzureSQLDWLinkedService(LinkedService): +class AzureSqlDwLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. 
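# A hedged sketch of the new always_encrypted_settings hook on AzureSqlDatabaseLinkedService (and,
# further below, on the managed-instance variant). SqlAlwaysEncryptedProperties is defined elsewhere
# in this models module and is not reproduced here; the import path, the assumption that
# connection_string is required, and the connection string value are illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import AzureSqlDatabaseLinkedService

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:example.database.windows.net;Database=exampledb;",
    azure_cloud_type="AzurePublic",
    # always_encrypted_settings=SqlAlwaysEncryptedProperties(...),  # new optional property
)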
@@ -6489,7 +7250,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSQLDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureSqlDwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlDW' # type: str self.connection_string = connection_string self.password = password @@ -6500,7 +7261,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureSQLDWTableDataset(Dataset): +class AzureSqlDwTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. @@ -6574,14 +7335,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSQLDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureSqlDwTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlDWTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSQLMiLinkedService(LinkedService): +class AzureSqlMiLinkedService(LinkedService): """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. @@ -6621,6 +7382,9 @@ class AzureSQLMiLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: + ~data_factory_management_client.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6642,6 +7406,7 @@ class AzureSQLMiLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( @@ -6659,9 +7424,10 @@ def __init__( tenant: Optional[object] = None, azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, + always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): - super(AzureSQLMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlMI' # type: str self.connection_string = connection_string self.password = password @@ -6670,9 +7436,10 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings -class AzureSQLMiTableDataset(Dataset): +class AzureSqlMiTableDataset(Dataset): """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. @@ -6746,14 +7513,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSQLMiTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureSqlMiTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlMITable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSQLSink(CopySink): +class AzureSqlSink(CopySink): """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. 
@@ -6835,7 +7602,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(AzureSQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -6845,7 +7612,7 @@ def __init__( self.table_option = table_option -class AzureSQLSource(TabularSource): +class AzureSqlSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -6884,9 +7651,9 @@ class AzureSQLSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -6905,8 +7672,8 @@ class AzureSQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -6922,11 +7689,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str 
self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -6936,7 +7703,7 @@ def __init__( self.partition_settings = partition_settings -class AzureSQLTableDataset(Dataset): +class AzureSqlTableDataset(Dataset): """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. @@ -7010,7 +7777,7 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema @@ -7713,7 +8480,7 @@ class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. Variables are only populated by the server, and will be ignored when sending a request. @@ -7750,7 +8517,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( @@ -8446,6 +9213,28 @@ def __init__( self.password = password +class CmkIdentityDefinition(msrest.serialization.Model): + """Managed Identity used for CMK. + + :param user_assigned_identity: The resource id of the user assigned identity to authenticate to + customer's key vault. + :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + user_assigned_identity: Optional[str] = None, + **kwargs + ): + super(CmkIdentityDefinition, self).__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -8828,7 +9617,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. 
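# A minimal sketch of the new CmkIdentityDefinition model added above, which points a
# customer-managed-key configuration at a user-assigned identity. The import path and the
# resource id are illustrative assumptions.
from azext_datafactory.vendored_sdks.datafactory.models import CmkIdentityDefinition

cmk_identity = CmkIdentityDefinition(
    user_assigned_identity=(
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/exampleResourceGroup"
        "/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleIdentity"
    )
)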
@@ -8849,7 +9638,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -8881,6 +9670,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. @@ -8917,6 +9709,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8936,6 +9729,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, password: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -8945,6 +9739,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Concur' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.username = username self.password = password @@ -9218,9 +10013,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
@@ -9260,6 +10057,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -9288,6 +10086,7 @@ def __init__( enable_skip_incompatible_row: Optional[object] = None, redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, log_storage_settings: Optional["LogStorageSettings"] = None, + log_settings: Optional["LogSettings"] = None, preserve_rules: Optional[List[object]] = None, preserve: Optional[List[object]] = None, validate_data_consistency: Optional[object] = None, @@ -9308,12 +10107,41 @@ def __init__( self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.log_storage_settings = log_storage_settings + self.log_settings = log_settings self.preserve_rules = preserve_rules self.preserve = preserve self.validate_data_consistency = validate_data_consistency self.skip_error_file = skip_error_file +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + *, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -9353,7 +10181,7 @@ def __init__( self.type = 'CopyTranslator' # type: str -class CosmosDBLinkedService(LinkedService): +class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -9382,6 +10210,31 @@ class CosmosDBLinkedService(LinkedService): :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. :type account_key: ~data_factory_management_client.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. 
Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~data_factory_management_client.models.CosmosDbServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~data_factory_management_client.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object + :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or + Expression with resultType string). Possible values include: "Gateway", "Direct". + :type connection_mode: str or ~data_factory_management_client.models.CosmosDbConnectionMode :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -9403,6 +10256,12 @@ class CosmosDBLinkedService(LinkedService): 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, + 'connection_mode': {'key': 'typeProperties.connectionMode', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9418,19 +10277,31 @@ def __init__( account_endpoint: Optional[object] = None, database: Optional[object] = None, account_key: Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "CosmosDbServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, + connection_mode: Optional[Union[str, "CosmosDbConnectionMode"]] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CosmosDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDb' # type: str self.connection_string = connection_string self.account_endpoint = account_endpoint self.database = database self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + self.tenant = tenant + self.azure_cloud_type = azure_cloud_type + self.connection_mode = connection_mode self.encrypted_credential = encrypted_credential -class CosmosDBMongoDBApiCollectionDataset(Dataset): +class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. @@ -9495,12 +10366,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(CosmosDBMongoDBApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = collection -class CosmosDBMongoDBApiLinkedService(LinkedService): +class CosmosDbMongoDbApiLinkedService(LinkedService): """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -9556,13 +10427,13 @@ def __init__( annotations: Optional[List[object]] = None, **kwargs ): - super(CosmosDBMongoDBApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = connection_string self.database = database -class CosmosDBMongoDBApiSink(CopySink): +class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. 
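# A hedged sketch of the new service-principal options on CosmosDbLinkedService above. SecureString
# is assumed to be the SecretBase implementation exposed by this models module, as in the public
# azure-mgmt-datafactory SDK; the import path, ids and endpoint are illustrative placeholders.
from azext_datafactory.vendored_sdks.datafactory.models import CosmosDbLinkedService, SecureString

cosmos_ls = CosmosDbLinkedService(
    account_endpoint="https://exampleaccount.documents.azure.com:443/",
    database="exampledb",
    service_principal_id="11111111-2222-3333-4444-555555555555",
    service_principal_credential_type="ServicePrincipalKey",            # or "ServicePrincipalCert"
    service_principal_credential=SecureString(value="<client-secret>"),
    tenant="example.onmicrosoft.com",
    connection_mode="Gateway",                                          # new: "Gateway" or "Direct"
)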
@@ -9620,12 +10491,12 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDBMongoDBApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior -class CosmosDBMongoDBApiSource(CopySource): +class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. @@ -9649,7 +10520,7 @@ class CosmosDBMongoDBApiSource(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -9674,7 +10545,7 @@ class CosmosDBMongoDBApiSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -9688,13 +10559,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, filter: Optional[object] = None, - cursor_methods: Optional["MongoDBCursorMethodsProperties"] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDBMongoDBApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -9703,7 +10574,7 @@ def __init__( self.additional_columns = additional_columns -class CosmosDBSQLApiCollectionDataset(Dataset): +class CosmosDbSqlApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection 
dataset. All required parameters must be populated in order to send to Azure. @@ -9768,12 +10639,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(CosmosDBSQLApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = collection_name -class CosmosDBSQLApiSink(CopySink): +class CosmosDbSqlApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. @@ -9830,12 +10701,12 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDBSQLApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior -class CosmosDBSQLApiSource(CopySource): +class CosmosDbSqlApiSource(CopySource): """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -9901,7 +10772,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDBSQLApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size @@ -10116,18 +10987,16 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param integration_runtime: Set to use integration runtime setting for data flow debug session. 
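CreateDataFlowDebugSessionRequest no longer flattens the integration runtime into dotted name/properties keys; it takes a single integration_runtime wrapper. A rough sketch, under the assumption that IntegrationRuntimeDebugResource wraps a name plus an IntegrationRuntime definition like the other *DebugResource types in this module:

from azext_datafactory.vendored_sdks.datafactory import models

request = models.CreateDataFlowDebugSessionRequest(
    compute_type="General",
    core_count=8,
    time_to_live=60,
    integration_runtime=models.IntegrationRuntimeDebugResource(
        name="exampleDebugIR",                        # assumed wrapper fields
        properties=models.ManagedIntegrationRuntime(),
    ),
)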
+ :type integration_runtime: + ~data_factory_management_client.models.IntegrationRuntimeDebugResource """ _attribute_map = { 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'name': {'key': 'integrationRuntime.name', 'type': 'str'}, - 'properties': {'key': 'integrationRuntime.properties', 'type': 'IntegrationRuntime'}, + 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, } def __init__( @@ -10136,16 +11005,14 @@ def __init__( compute_type: Optional[str] = None, core_count: Optional[int] = None, time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["IntegrationRuntime"] = None, + integration_runtime: Optional["IntegrationRuntimeDebugResource"] = None, **kwargs ): super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live - self.name = name - self.properties = properties + self.integration_runtime = integration_runtime class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): @@ -10279,6 +11146,9 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). + :type auto_user_specification: object """ _validation = { @@ -10302,6 +11172,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__( @@ -10320,6 +11191,7 @@ def __init__( reference_objects: Optional["CustomActivityReferenceObject"] = None, extended_properties: Optional[Dict[str, object]] = None, retention_time_in_days: Optional[object] = None, + auto_user_specification: Optional[object] = None, **kwargs ): super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -10330,6 +11202,7 @@ def __init__( self.reference_objects = reference_objects self.extended_properties = extended_properties self.retention_time_in_days = retention_time_in_days + self.auto_user_specification = auto_user_specification class CustomActivityReferenceObject(msrest.serialization.Model): @@ -10479,6 +11352,80 @@ def __init__( self.type_properties = type_properties +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. 
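CustomActivity gains an autoUserSpecification pass-through. A small illustration, assuming the activity keeps its required name and command parameters; the "NonAdmin" value is only an example of an elevation level:

from azext_datafactory.vendored_sdks.datafactory import models

custom_activity = models.CustomActivity(
    name="exampleCustomActivity",
    command="cmd /c echo hello",
    retention_time_in_days=30,
    auto_user_specification="NonAdmin",  # new in this version: elevation level/scope for the user
)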
+ :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern provided for trigger + to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + *, + events: List[object], + scope: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + subject_begins_with: Optional[str] = None, + subject_ends_with: Optional[str] = None, + **kwargs + ): + super(CustomEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'CustomEventsTrigger' # type: str + self.subject_begins_with = subject_begins_with + self.subject_ends_with = subject_ends_with + self.events = events + self.scope = scope + + class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. @@ -10847,90 +11794,84 @@ def __init__( class DataFlowDebugPackage(msrest.serialization.Model): """Request body structure for starting data flow debug session. - Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] :param session_id: The ID of data flow debug session. :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~data_factory_management_client.models.DataFlowDebugResource :param datasets: List of datasets. 
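The new CustomEventsTrigger (defined above) fires pipelines when custom events arrive on an Event Grid topic. A hedged construction example; the topic resource ID, event type and pipeline name are placeholders:

from azext_datafactory.vendored_sdks.datafactory import models

trigger = models.CustomEventsTrigger(
    scope="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/exampleResourceGroup"
          "/providers/Microsoft.EventGrid/topics/exampleTopic",
    events=["Example.CustomEvent.FileReady"],
    subject_begins_with="factories/example",
    pipelines=[
        models.TriggerPipelineReference(
            pipeline_reference=models.PipelineReference(reference_name="examplePipeline"),
        )
    ],
)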
:type datasets: list[~data_factory_management_client.models.DatasetDebugResource] :param linked_services: List of linked services. :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters_debug_settings_parameters: Data flow parameters. - :type parameters_debug_settings_parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters_staging_linked_service_parameters: Arguments for LinkedService. - :type parameters_staging_linked_service_parameters: dict[str, object] - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param staging: Staging info for debug session. + :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. + :type debug_settings: ~data_factory_management_client.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'source_settings': {'key': 'debugSettings.sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters_debug_settings_parameters': {'key': 'debugSettings.parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'debugSettings.datasetParameters', 'type': 'object'}, - 'folder_path': {'key': 'staging.folderPath', 'type': 'object'}, - 'type': {'key': 'staging.linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'staging.linkedService.referenceName', 'type': 'str'}, - 'parameters_staging_linked_service_parameters': {'key': 'staging.linkedService.parameters', 'type': '{object}'}, - 'name': {'key': 'dataFlow.name', 'type': 'str'}, - 'properties': {'key': 'dataFlow.properties', 'type': 'DataFlow'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } - type = "LinkedServiceReference" - def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, session_id: Optional[str] = None, + data_flow: Optional["DataFlowDebugResource"] = None, datasets: Optional[List["DatasetDebugResource"]] = None, linked_services: Optional[List["LinkedServiceDebugResource"]] = None, - source_settings: Optional[List["DataFlowSourceSetting"]] = None, - parameters_debug_settings_parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - parameters_staging_linked_service_parameters: Optional[Dict[str, object]] = None, - name: Optional[str] = None, - properties: 
Optional["DataFlow"] = None, + staging: Optional["DataFlowStagingInfo"] = None, + debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, **kwargs ): super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = additional_properties self.session_id = session_id + self.data_flow = data_flow self.datasets = datasets self.linked_services = linked_services + self.staging = staging + self.debug_settings = debug_settings + + +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + *, + source_settings: Optional[List["DataFlowSourceSetting"]] = None, + parameters: Optional[Dict[str, object]] = None, + dataset_parameters: Optional[object] = None, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = source_settings - self.parameters_debug_settings_parameters = parameters_debug_settings_parameters + self.parameters = parameters self.dataset_parameters = dataset_parameters - self.folder_path = folder_path - self.reference_name = reference_name - self.parameters_staging_linked_service_parameters = parameters_staging_linked_service_parameters - self.name = name - self.properties = properties class SubResourceDebugResource(msrest.serialization.Model): @@ -11393,44 +12334,28 @@ def __init__( class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. - Variables are only populated by the server, and will be ignored when sending a request. - + :param linked_service: Staging linked service reference. + :type linked_service: ~data_factory_management_client.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. 
- :type parameters: dict[str, object] """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'type': {'key': 'linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'linkedService.referenceName', 'type': 'str'}, - 'parameters': {'key': 'linkedService.parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - def __init__( self, *, + linked_service: Optional["LinkedServiceReference"] = None, folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - parameters: Optional[Dict[str, object]] = None, **kwargs ): super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = linked_service self.folder_path = folder_path - self.reference_name = reference_name - self.parameters = parameters class DataLakeAnalyticsUsqlActivity(ExecutionActivity): @@ -11536,7 +12461,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -11557,7 +12482,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -11901,6 +12826,73 @@ def __init__( self.type = type +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". 
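Tar and TarGZip (defined just below) join the dataset compression subtypes. The objects themselves are trivial to build and plug into the compression property of the file-based datasets in this module:

from azext_datafactory.vendored_sdks.datafactory import models

tar = models.DatasetTarCompression()
tar_gzip = models.DatasetTarGZipCompression(level="Optimal")  # level is optional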
+ :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZip' # type: str + self.level = level + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -12335,7 +13327,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". @@ -12602,6 +13594,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -12614,6 +13613,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -12622,12 +13623,16 @@ def __init__( file_extension: object, additional_properties: Optional[Dict[str, object]] = None, quote_all_text: Optional[object] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class DependencyReference(msrest.serialization.Model): @@ -12704,7 +13709,7 @@ def __init__( self.distcp_options = distcp_options -class DocumentDBCollectionDataset(Dataset): +class DocumentDbCollectionDataset(Dataset): """Microsoft Azure Document Database Collection dataset. All required parameters must be populated in order to send to Azure. 
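DelimitedTextWriteSettings grows maxRowsPerFile and fileNamePrefix for splitting output files. For example (values are illustrative):

from azext_datafactory.vendored_sdks.datafactory import models

write_settings = models.DelimitedTextWriteSettings(
    file_extension=".csv",
    max_rows_per_file=100000,  # cap each written file at 100k rows
    file_name_prefix="part",   # used when copying from a non-file-based store without partitionOptions
)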
@@ -12769,12 +13774,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(DocumentDBCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DocumentDbCollection' # type: str self.collection_name = collection_name -class DocumentDBCollectionSink(CopySink): +class DocumentDbCollectionSink(CopySink): """A copy activity Document Database Collection sink. All required parameters must be populated in order to send to Azure. @@ -12836,13 +13841,13 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(DocumentDBCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior -class DocumentDBCollectionSource(CopySource): +class DocumentDbCollectionSource(CopySource): """A copy activity Document Database Collection source. All required parameters must be populated in order to send to Azure. @@ -12903,7 +13908,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DocumentDBCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator @@ -13118,7 +14123,7 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema -class DWCopyCommandDefaultValue(msrest.serialization.Model): +class DwCopyCommandDefaultValue(msrest.serialization.Model): """Default value. :param column_name: Column name. Type: object (or Expression with resultType string). @@ -13140,19 +14145,19 @@ def __init__( default_value: Optional[object] = None, **kwargs ): - super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + super(DwCopyCommandDefaultValue, self).__init__(**kwargs) self.column_name = column_name self.default_value = default_value -class DWCopyCommandSettings(msrest.serialization.Model): +class DwCopyCommandSettings(msrest.serialization.Model): """DW Copy Command settings. :param default_values: Specifies the default values for each target column in SQL DW. 
The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~data_factory_management_client.models.DWCopyCommandDefaultValue] + :type default_values: list[~data_factory_management_client.models.DwCopyCommandDefaultValue] :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. @@ -13160,18 +14165,18 @@ class DWCopyCommandSettings(msrest.serialization.Model): """ _attribute_map = { - 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'default_values': {'key': 'defaultValues', 'type': '[DwCopyCommandDefaultValue]'}, 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, } def __init__( self, *, - default_values: Optional[List["DWCopyCommandDefaultValue"]] = None, + default_values: Optional[List["DwCopyCommandDefaultValue"]] = None, additional_options: Optional[Dict[str, str]] = None, **kwargs ): - super(DWCopyCommandSettings, self).__init__(**kwargs) + super(DwCopyCommandSettings, self).__init__(**kwargs) self.default_values = default_values self.additional_options = additional_options @@ -14303,6 +15308,52 @@ def __init__( self.query = query +class EncryptionConfiguration(msrest.serialization.Model): + """Definition of CMK for the factory. + + All required parameters must be populated in order to send to Azure. + + :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + Key. + :type key_name: str + :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :type vault_base_url: str + :param key_version: The version of the key used for CMK. If not provided, latest version will + be used. + :type key_version: str + :param identity: User assigned identity to use to authenticate to customer's key vault. If not + provided Managed Service Identity will be used. + :type identity: ~data_factory_management_client.models.CmkIdentityDefinition + """ + + _validation = { + 'key_name': {'required': True}, + 'vault_base_url': {'required': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'CmkIdentityDefinition'}, + } + + def __init__( + self, + *, + key_name: str, + vault_base_url: str, + key_version: Optional[str] = None, + identity: Optional["CmkIdentityDefinition"] = None, + **kwargs + ): + super(EncryptionConfiguration, self).__init__(**kwargs) + self.key_name = key_name + self.vault_base_url = vault_base_url + self.key_version = key_version + self.identity = identity + + class EntityReference(msrest.serialization.Model): """The entity reference. @@ -14553,6 +15604,16 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param compute: Compute properties for data flow activity. :type compute: ~data_factory_management_client.models.ExecuteDataFlowActivityTypePropertiesCompute + :param trace_level: Trace level setting used for data flow monitoring output. Supported values + are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). 
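EncryptionConfiguration is the new customer-managed-key (CMK) definition for a factory. A minimal sketch with placeholder Key Vault details; identity is omitted so the factory's managed identity is used:

from azext_datafactory.vendored_sdks.datafactory import models

encryption = models.EncryptionConfiguration(
    key_name="exampleCmkKey",
    vault_base_url="https://examplekeyvault.vault.azure.net",
    key_version="0123456789abcdef0123456789abcdef",  # optional; latest key version is used when omitted
)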
+ :type trace_level: object + :param continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :type continue_on_error: object + :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + the same save order to be processed concurrently. Type: boolean (or Expression with resultType + boolean). + :type run_concurrently: object """ _validation = { @@ -14574,6 +15635,9 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, + 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, + 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, } def __init__( @@ -14590,6 +15654,9 @@ def __init__( staging: Optional["DataFlowStagingInfo"] = None, integration_runtime: Optional["IntegrationRuntimeReference"] = None, compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, + trace_level: Optional[object] = None, + continue_on_error: Optional[object] = None, + run_concurrently: Optional[object] = None, **kwargs ): super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -14598,29 +15665,33 @@ def __init__( self.staging = staging self.integration_runtime = integration_runtime self.compute = compute + self.trace_level = trace_level + self.continue_on_error = continue_on_error + self.run_concurrently = run_concurrently class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :type compute_type: object :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). + :type core_count: object """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'compute_type': {'key': 'computeType', 'type': 'object'}, + 'core_count': {'key': 'coreCount', 'type': 'object'}, } def __init__( self, *, - compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, - core_count: Optional[int] = None, + compute_type: Optional[object] = None, + core_count: Optional[object] = None, **kwargs ): super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) @@ -14819,45 +15890,6 @@ def __init__( self.log_location = log_location -class ExportSettings(msrest.serialization.Model): - """Export command settings. 
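ExecuteDataFlowActivity adds traceLevel, continueOnError and runConcurrently, and its compute settings become expression-friendly objects. A sketch, assuming the activity's required name and data_flow reference parameters are unchanged from the base definition:

from azext_datafactory.vendored_sdks.datafactory import models

activity = models.ExecuteDataFlowActivity(
    name="exampleExecuteDataFlow",
    data_flow=models.DataFlowReference(reference_name="exampleDataFlow"),  # assumed required reference
    compute=models.ExecuteDataFlowActivityTypePropertiesCompute(
        compute_type="General",  # now typed as object, so expressions are accepted too
        core_count=8,
    ),
    trace_level="fine",
    continue_on_error=False,
    run_concurrently=True,
)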
- - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'ExportSettings' # type: str - - class ExposureControlBatchRequest(msrest.serialization.Model): """A list of exposure control features. @@ -15089,6 +16121,11 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param encryption: Properties to enable Customer Managed Key for the factory. + :type encryption: ~data_factory_management_client.models.EncryptionConfiguration + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -15115,6 +16152,8 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -15126,6 +16165,8 @@ def __init__( identity: Optional["FactoryIdentity"] = None, repo_configuration: Optional["FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "GlobalParameterSpecification"]] = None, + encryption: Optional["EncryptionConfiguration"] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, **kwargs ): super(Factory, self).__init__(location=location, tags=tags, **kwargs) @@ -15136,6 +16177,8 @@ def __init__( self.version = None self.repo_configuration = repo_configuration self.global_parameters = global_parameters + self.encryption = encryption + self.public_network_access = public_network_access class FactoryRepoConfiguration(msrest.serialization.Model): @@ -15262,17 +16305,19 @@ class FactoryIdentity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. - Default value: "SystemAssigned". - :vartype type: str + :param type: Required. The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned". + :type type: str or ~data_factory_management_client.models.FactoryIdentityType :ivar principal_id: The principal id of the identity. 
:vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str + :param user_assigned_identities: List of user assigned identities for the factory. + :type user_assigned_identities: dict[str, object] """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'principal_id': {'readonly': True}, 'tenant_id': {'readonly': True}, } @@ -15281,17 +16326,21 @@ class FactoryIdentity(msrest.serialization.Model): 'type': {'key': 'type', 'type': 'str'}, 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, } - type = "SystemAssigned" - def __init__( self, + *, + type: Union[str, "FactoryIdentityType"], + user_assigned_identities: Optional[Dict[str, object]] = None, **kwargs ): super(FactoryIdentity, self).__init__(**kwargs) + self.type = type self.principal_id = None self.tenant_id = None + self.user_assigned_identities = user_assigned_identities class FactoryListResponse(msrest.serialization.Model): @@ -19233,6 +20282,9 @@ class HttpLinkedService(LinkedService): :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :type password: ~data_factory_management_client.models.SecretBase + :param auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :type auth_headers: object :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression @@ -19268,6 +20320,7 @@ class HttpLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, @@ -19286,6 +20339,7 @@ def __init__( authentication_type: Optional[Union[str, "HttpAuthenticationType"]] = None, user_name: Optional[object] = None, password: Optional["SecretBase"] = None, + auth_headers: Optional[object] = None, embedded_cert_data: Optional[object] = None, cert_thumbprint: Optional[object] = None, encrypted_credential: Optional[object] = None, @@ -19298,6 +20352,7 @@ def __init__( self.authentication_type = authentication_type self.user_name = user_name self.password = password + self.auth_headers = auth_headers self.embedded_cert_data = embedded_cert_data self.cert_thumbprint = cert_thumbprint self.encrypted_credential = encrypted_credential @@ -20030,45 +21085,6 @@ def __init__( self.query = query -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
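FactoryIdentity now takes an explicit identity type plus an optional map of user-assigned identities, and Factory exposes encryption and publicNetworkAccess. An illustrative combination (ARM IDs are placeholders):

from azext_datafactory.vendored_sdks.datafactory import models

identity = models.FactoryIdentity(
    type="SystemAssigned,UserAssigned",
    user_assigned_identities={
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/exampleResourceGroup"
        "/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami": {},
    },
)

factory = models.Factory(
    location="East US",
    identity=identity,
    public_network_access="Disabled",
    # encryption=<EncryptionConfiguration> can also be supplied here
)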
- :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'ImportSettings' # type: str - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -20949,6 +21965,9 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): include: "Basic", "Standard", "Premium", "PremiumRS". :type catalog_pricing_tier: str or ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogPricingTier + :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to + support SSISDB failover. + :type dual_standby_pair_name: str """ _validation = { @@ -20961,6 +21980,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, } def __init__( @@ -20971,6 +21991,7 @@ def __init__( catalog_admin_user_name: Optional[str] = None, catalog_admin_password: Optional["SecureString"] = None, catalog_pricing_tier: Optional[Union[str, "IntegrationRuntimeSsisCatalogPricingTier"]] = None, + dual_standby_pair_name: Optional[str] = None, **kwargs ): super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) @@ -20979,6 +22000,7 @@ def __init__( self.catalog_admin_user_name = catalog_admin_user_name self.catalog_admin_password = catalog_admin_password self.catalog_pricing_tier = catalog_pricing_tier + self.dual_standby_pair_name = dual_standby_pair_name class IntegrationRuntimeSsisProperties(msrest.serialization.Model): @@ -22113,8 +23135,81 @@ def __init__( self.properties = properties +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: Optional[object] = None, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = linked_service_name + self.path = path + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + *, + log_location_settings: "LogLocationSettings", + enable_copy_activity_log: Optional[object] = None, + copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = enable_copy_activity_log + self.copy_activity_log_settings = copy_activity_log_settings + self.log_location_settings = log_location_settings + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -22471,6 +23566,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :param managed_virtual_network: Managed Virtual Network reference. + :type managed_virtual_network: + ~data_factory_management_client.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. 
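LogSettings together with the required LogLocationSettings replaces the now-deprecated LogStorageSettings for copy activity logging. A minimal sketch; the linked service name and path are placeholders:

from azext_datafactory.vendored_sdks.datafactory import models

log_settings = models.LogSettings(
    enable_copy_activity_log=True,
    log_location_settings=models.LogLocationSettings(
        linked_service_name=models.LinkedServiceReference(reference_name="exampleBlobStorage"),
        path="copyactivitylogs",
    ),
)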
:type compute_properties: ~data_factory_management_client.models.IntegrationRuntimeComputeProperties @@ -22488,6 +23586,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } @@ -22497,6 +23596,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + managed_virtual_network: Optional["ManagedVirtualNetworkReference"] = None, compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None, ssis_properties: Optional["IntegrationRuntimeSsisProperties"] = None, **kwargs @@ -22504,6 +23604,7 @@ def __init__( super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) self.type = 'Managed' # type: str self.state = None + self.managed_virtual_network = managed_virtual_network self.compute_properties = compute_properties self.ssis_properties = ssis_properties @@ -22817,6 +23918,8 @@ class ManagedPrivateEndpointResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -22825,19 +23928,8 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :ivar is_reserved: Denotes whether the managed private endpoint is reserved. - :vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str - :ivar provisioning_state: The managed private endpoint provisioning state. - :vartype provisioning_state: str + :param properties: Required. Managed private endpoint properties. 
+ :type properties: ~data_factory_management_client.models.ManagedPrivateEndpoint """ _validation = { @@ -22845,8 +23937,7 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, - 'is_reserved': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { @@ -22854,30 +23945,17 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'connection_state': {'key': 'properties.connectionState', 'type': 'ConnectionStateProperties'}, - 'fqdns': {'key': 'properties.fqdns', 'type': '[str]'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'is_reserved': {'key': 'properties.isReserved', 'type': 'bool'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'}, } def __init__( self, *, - connection_state: Optional["ConnectionStateProperties"] = None, - fqdns: Optional[List[str]] = None, - group_id: Optional[str] = None, - private_link_resource_id: Optional[str] = None, + properties: "ManagedPrivateEndpoint", **kwargs ): super(ManagedPrivateEndpointResource, self).__init__(**kwargs) - self.connection_state = connection_state - self.fqdns = fqdns - self.group_id = group_id - self.is_reserved = None - self.private_link_resource_id = private_link_resource_id - self.provisioning_state = None + self.properties = properties class ManagedVirtualNetwork(msrest.serialization.Model): @@ -22949,6 +24027,42 @@ def __init__( self.next_link = next_link +class ManagedVirtualNetworkReference(msrest.serialization.Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default value: + "ManagedVirtualNetworkReference". + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__( + self, + *, + reference_name: str, + **kwargs + ): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = reference_name + + class ManagedVirtualNetworkResource(SubResource): """Managed Virtual Network resource type. @@ -23047,7 +24161,7 @@ def __init__( self.script = script -class MariaDBLinkedService(LinkedService): +class MariaDbLinkedService(LinkedService): """MariaDB server linked service. All required parameters must be populated in order to send to Azure. 
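Managed integration runtimes can now be placed inside a managed virtual network through the new ManagedVirtualNetworkReference. For instance, assuming IntegrationRuntimeComputeProperties keeps its optional location field:

from azext_datafactory.vendored_sdks.datafactory import models

runtime = models.ManagedIntegrationRuntime(
    description="Azure IR running inside a managed virtual network",
    managed_virtual_network=models.ManagedVirtualNetworkReference(reference_name="default"),
    compute_properties=models.IntegrationRuntimeComputeProperties(location="East US"),
)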
@@ -23105,14 +24219,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MariaDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential -class MariaDBSource(TabularSource): +class MariaDbSource(TabularSource): """A copy activity MariaDB server source. All required parameters must be populated in order to send to Azure. @@ -23169,12 +24283,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query -class MariaDBTableDataset(Dataset): +class MariaDbTableDataset(Dataset): """MariaDB server dataset. All required parameters must be populated in order to send to Azure. @@ -23237,7 +24351,7 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MariaDbTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MariaDBTable' # type: str self.table_name = table_name @@ -23736,7 +24850,217 @@ def __init__( self.table_name = table_name -class MongoDBCollectionDataset(Dataset): +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + collection: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(MongoDbAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = collection + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. 
+ Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + database: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = connection_string + self.database = database + + +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout + self.additional_columns = additional_columns + + +class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -23801,12 +25125,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(MongoDBCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbCollection' # type: str self.collection_name = collection_name -class MongoDBCursorMethodsProperties(msrest.serialization.Model): +class MongoDbCursorMethodsProperties(msrest.serialization.Model): """Cursor methods for Mongodb query. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -23847,7 +25171,7 @@ def __init__( limit: Optional[object] = None, **kwargs ): - super(MongoDBCursorMethodsProperties, self).__init__(**kwargs) + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.project = project self.sort = sort @@ -23855,7 +25179,7 @@ def __init__( self.limit = limit -class MongoDBLinkedService(LinkedService): +class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. 
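For orientation, a minimal sketch of how the new MongoDB Atlas models introduced in the hunks above could be instantiated. The import path via the extension's vendored SDK models module and all literal values are assumptions for illustration, not part of the diff; the constructor keywords themselves come straight from the __init__ signatures shown above.

from azext_datafactory.vendored_sdks.datafactory.models import (
    MongoDbAtlasLinkedService,
    MongoDbAtlasSource,
)

# Linked service for a MongoDB Atlas cluster; connection_string and database
# are the two required type properties (placeholder values shown).
atlas_linked_service = MongoDbAtlasLinkedService(
    connection_string="mongodb+srv://user:password@cluster0.example.mongodb.net",
    database="exampleDatabase",
)

# Copy activity source; every property is optional and serializes to the
# camelCase keys in the _attribute_map above (e.g. batch_size -> batchSize).
atlas_source = MongoDbAtlasSource(
    filter={"status": "active"},
    batch_size=100,
)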
@@ -23879,7 +25203,7 @@ class MongoDBLinkedService(LinkedService): :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". :type authentication_type: str or - ~data_factory_management_client.models.MongoDBAuthenticationType + ~data_factory_management_client.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). :type database_name: object @@ -23941,7 +25265,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "MongoDBAuthenticationType"]] = None, + authentication_type: Optional[Union[str, "MongoDbAuthenticationType"]] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, auth_source: Optional[object] = None, @@ -23951,7 +25275,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MongoDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDb' # type: str self.server = server self.authentication_type = authentication_type @@ -23965,7 +25289,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class MongoDBSource(CopySource): +class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -24017,13 +25341,13 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns -class MongoDBV2CollectionDataset(Dataset): +class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. 
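The hunks above also change the casing of the existing MongoDB models from `MongoDB*` to `MongoDb*`, so any downstream code that imports the old names has to follow suit. A hedged sketch of the affected imports, assuming the vendored SDK models module path:

from azext_datafactory.vendored_sdks.datafactory.models import (
    MongoDbCollectionDataset,        # previously MongoDBCollectionDataset
    MongoDbCursorMethodsProperties,  # previously MongoDBCursorMethodsProperties
    MongoDbLinkedService,            # previously MongoDBLinkedService
    MongoDbSource,                   # previously MongoDBSource
)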
@@ -24088,12 +25412,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(MongoDBV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbV2Collection' # type: str self.collection = collection -class MongoDBV2LinkedService(LinkedService): +class MongoDbV2LinkedService(LinkedService): """Linked service for MongoDB data source. All required parameters must be populated in order to send to Azure. @@ -24148,13 +25472,13 @@ def __init__( annotations: Optional[List[object]] = None, **kwargs ): - super(MongoDBV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDbV2' # type: str self.connection_string = connection_string self.database = database -class MongoDBV2Source(CopySource): +class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -24178,7 +25502,7 @@ class MongoDBV2Source(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. 
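For the renamed MongoDbV2LinkedService above, a small illustrative sketch. The full constructor is only partially visible in this hunk, so the required connection_string/database pair is inferred from the docstring and the assignments shown; import path and values are placeholders.

from azext_datafactory.vendored_sdks.datafactory.models import MongoDbV2LinkedService

# connection_string and database are documented as required type properties.
mongo_v2_linked_service = MongoDbV2LinkedService(
    connection_string="mongodb://user:password@example-host:27017",
    database="exampleDatabase",
)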
@@ -24203,7 +25527,7 @@ class MongoDBV2Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -24217,13 +25541,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, filter: Optional[object] = None, - cursor_methods: Optional["MongoDBCursorMethodsProperties"] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDBV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -24232,7 +25556,7 @@ def __init__( self.additional_columns = additional_columns -class MySQLLinkedService(LinkedService): +class MySqlLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -24290,14 +25614,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MySQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class MySQLSource(TabularSource): +class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. 
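A short sketch of the renamed MongoDbV2Source together with the renamed cursor-methods model, using the keyword arguments visible in the hunks above. Import path and all literal values are assumptions for illustration only.

from azext_datafactory.vendored_sdks.datafactory.models import (
    MongoDbCursorMethodsProperties,
    MongoDbV2Source,
)

# Cursor methods (project/limit are Expression-style objects) feeding the source.
cursor_methods = MongoDbCursorMethodsProperties(
    project="{ name: 1, _id: 0 }",
    limit=1000,
)

mongo_v2_source = MongoDbV2Source(
    filter="{ region: 'westus' }",
    cursor_methods=cursor_methods,
    batch_size=100,
)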
@@ -24353,12 +25677,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(MySQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query -class MySQLTableDataset(Dataset): +class MySqlTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. @@ -24421,7 +25745,7 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MySQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MySqlTable' # type: str self.table_name = table_name @@ -24557,7 +25881,7 @@ class NetezzaSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.NetezzaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Netezza source partitioning. :type partition_settings: ~data_factory_management_client.models.NetezzaPartitionSettings """ @@ -24575,7 +25899,7 @@ class NetezzaSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } @@ -24589,7 +25913,7 @@ def __init__( query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, - partition_option: Optional[Union[str, "NetezzaPartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): @@ -24712,6 +26036,9 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. :type password: ~data_factory_management_client.models.SecretBase + :param auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :type auth_headers: object :param tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). 
:type tenant: object @@ -24728,7 +26055,7 @@ class ODataLinkedService(LinkedService): :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAADServicePrincipalCredentialType + ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_key: ~data_factory_management_client.models.SecretBase @@ -24763,6 +26090,7 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, @@ -24786,11 +26114,12 @@ def __init__( authentication_type: Optional[Union[str, "ODataAuthenticationType"]] = None, user_name: Optional[object] = None, password: Optional["SecretBase"] = None, + auth_headers: Optional[object] = None, tenant: Optional[object] = None, service_principal_id: Optional[object] = None, azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, - aad_service_principal_credential_type: Optional[Union[str, "ODataAADServicePrincipalCredentialType"]] = None, + aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, service_principal_key: Optional["SecretBase"] = None, service_principal_embedded_cert: Optional["SecretBase"] = None, service_principal_embedded_cert_password: Optional["SecretBase"] = None, @@ -24803,6 +26132,7 @@ def __init__( self.authentication_type = authentication_type self.user_name = user_name self.password = password + self.auth_headers = auth_headers self.tenant = tenant self.service_principal_id = service_principal_id self.azure_cloud_type = azure_cloud_type @@ -25745,6 +27075,229 @@ def __init__( self.metric_specifications = metric_specifications +class OracleCloudStorageLinkedService(LinkedService): + """Linked service for Oracle Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access + Management (IAM) user. 
Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access + Management (IAM) user. + :type secret_access_key: ~data_factory_management_client.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Oracle Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_key_id: Optional[object] = None, + secret_access_key: Optional["SecretBase"] = None, + service_url: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(OracleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'OracleCloudStorage' # type: str + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + + +class OracleCloudStorageLocation(DatasetLocation): + """The location of Oracle Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Oracle Cloud Storage. 
Type: string (or Expression with + resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + bucket_name: Optional[object] = None, + version: Optional[object] = None, + **kwargs + ): + super(OracleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'OracleCloudStorageLocation' # type: str + self.bucket_name = bucket_name + self.version = version + + +class OracleCloudStorageReadSettings(StoreReadSettings): + """Oracle Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + file_list_path: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OracleCloudStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + class OracleLinkedService(LinkedService): """Oracle database. @@ -26164,7 +27717,7 @@ class OracleSource(CopySource): :type query_timeout: object :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.OraclePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -26184,7 +27737,7 @@ class OracleSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -26198,7 +27751,7 @@ def __init__( max_concurrent_connections: Optional[object] = None, oracle_reader_query: Optional[object] = None, query_timeout: Optional[object] = None, - partition_option: Optional[Union[str, "OraclePartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["OraclePartitionSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs @@ -26322,7 +27875,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -26432,6 +27985,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -26447,6 +28002,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -26459,11 +28015,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class OrcSource(CopySource): @@ -26523,6 +28081,50 @@ def __init__( self.additional_columns = additional_columns +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. 
Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -26619,7 +28221,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -26729,6 +28331,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -26744,6 +28348,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -26756,11 +28361,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class ParquetSource(CopySource): @@ -26820,6 +28427,50 @@ def __init__( self.additional_columns = additional_columns +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. 
+ :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -27307,6 +28958,48 @@ def __init__( self.query = query +class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): + """Pipeline ElapsedTime Metric Policy. + + :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. + :type duration: object + """ + + _attribute_map = { + 'duration': {'key': 'duration', 'type': 'object'}, + } + + def __init__( + self, + *, + duration: Optional[object] = None, + **kwargs + ): + super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + self.duration = duration + + +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(PipelineFolder, self).__init__(**kwargs) + self.name = name + + class PipelineListResponse(msrest.serialization.Model): """A list of pipeline resources. @@ -27339,6 +29032,28 @@ def __init__( self.next_link = next_link +class PipelinePolicy(msrest.serialization.Model): + """Pipeline Policy. + + :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :type elapsed_time_metric: + ~data_factory_management_client.models.PipelineElapsedTimeMetricPolicy + """ + + _attribute_map = { + 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + } + + def __init__( + self, + *, + elapsed_time_metric: Optional["PipelineElapsedTimeMetricPolicy"] = None, + **kwargs + ): + super(PipelinePolicy, self).__init__(**kwargs) + self.elapsed_time_metric = elapsed_time_metric + + class PipelineReference(msrest.serialization.Model): """Pipeline reference type. @@ -27409,8 +29124,11 @@ class PipelineResource(SubResource): :type annotations: list[object] :param run_dimensions: Dimensions emitted by Pipeline. :type run_dimensions: dict[str, object] - :param name_folder_name: The name of the folder that this Pipeline is in. - :type name_folder_name: str + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. 
+ :type folder: ~data_factory_management_client.models.PipelineFolder + :param policy: Pipeline Policy. + :type policy: ~data_factory_management_client.models.PipelinePolicy """ _validation = { @@ -27434,7 +29152,8 @@ class PipelineResource(SubResource): 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'name_folder_name': {'key': 'folder.name', 'type': 'str'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, } def __init__( @@ -27448,7 +29167,8 @@ def __init__( concurrency: Optional[int] = None, annotations: Optional[List[object]] = None, run_dimensions: Optional[Dict[str, object]] = None, - name_folder_name: Optional[str] = None, + folder: Optional["PipelineFolder"] = None, + policy: Optional["PipelinePolicy"] = None, **kwargs ): super(PipelineResource, self).__init__(**kwargs) @@ -27460,7 +29180,8 @@ def __init__( self.concurrency = concurrency self.annotations = annotations self.run_dimensions = run_dimensions - self.name_folder_name = name_folder_name + self.folder = folder + self.policy = policy class PipelineRun(msrest.serialization.Model): @@ -27671,7 +29392,7 @@ def __init__( self.use_type_default = use_type_default -class PostgreSQLLinkedService(LinkedService): +class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. @@ -27729,14 +29450,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(PostgreSQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'PostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class PostgreSQLSource(TabularSource): +class PostgreSqlSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. @@ -27792,12 +29513,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(PostgreSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query -class PostgreSQLTableDataset(Dataset): +class PostgreSqlTableDataset(Dataset): """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. 
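The preceding hunks introduce several entirely new payload models: Oracle Cloud Storage connectivity, ORC/Parquet write settings attached to the sinks via formatSettings, and pipeline folder/policy metadata. A minimal sketch of how a few of them compose, using only the optional keyword arguments shown above; the import path and every literal value are assumptions, not part of the diff.

from azext_datafactory.vendored_sdks.datafactory.models import (
    OracleCloudStorageLinkedService,
    ParquetSink,
    ParquetWriteSettings,
    PipelineElapsedTimeMetricPolicy,
    PipelineFolder,
    PipelinePolicy,
)

# Oracle Cloud Storage linked service; all type properties are optional.
oracle_storage = OracleCloudStorageLinkedService(
    access_key_id="exampleAccessKeyId",
    service_url="https://objectstorage.example.oraclecloud.com",
)

# Parquet sink carrying the new formatSettings payload
# (max_rows_per_file -> maxRowsPerFile, file_name_prefix -> fileNamePrefix).
parquet_sink = ParquetSink(
    format_settings=ParquetWriteSettings(
        max_rows_per_file=1000000,
        file_name_prefix="part",
    ),
)

# Pipeline-level folder and elapsed-time metric policy (TimeSpan placeholder).
folder = PipelineFolder(name="exampleFolder")
policy = PipelinePolicy(
    elapsed_time_metric=PipelineElapsedTimeMetricPolicy(duration="0.00:10:00"),
)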
@@ -27870,7 +29591,7 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(PostgreSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'PostgreSqlTable' # type: str self.table_name = table_name self.table = table @@ -28154,6 +29875,279 @@ def __init__( self.query = query +class PrivateEndpointConnectionListResponse(msrest.serialization.Model): + """A list of linked service resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Private Endpoint Connections. + :type value: list[~data_factory_management_client.models.PrivateEndpointConnectionResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["PrivateEndpointConnectionResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PrivateEndpointConnectionResource(SubResource): + """Private Endpoint Connection ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~data_factory_management_client.models.RemotePrivateEndpointConnection + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + } + + def __init__( + self, + *, + properties: Optional["RemotePrivateEndpointConnection"] = None, + **kwargs + ): + super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): + """A request to approve or reject a private endpoint connection. + + :param private_link_service_connection_state: The state of a private link connection. 
+ :type private_link_service_connection_state: + ~data_factory_management_client.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + *, + private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + self.private_link_service_connection_state = private_link_service_connection_state + + +class PrivateLinkConnectionApprovalRequestResource(SubResource): + """Private Endpoint Connection Approval ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequest + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + } + + def __init__( + self, + *, + properties: Optional["PrivateLinkConnectionApprovalRequest"] = None, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkConnectionState(msrest.serialization.Model): + """The state of a private link connection. + + :param status: Status of a private link connection. + :type status: str + :param description: Description of a private link connection. + :type description: str + :param actions_required: ActionsRequired for a private link connection. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[str] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + **kwargs + ): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class PrivateLinkResource(SubResource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. 
+ :type properties: ~data_factory_management_client.models.PrivateLinkResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, + } + + def __init__( + self, + *, + properties: Optional["PrivateLinkResourceProperties"] = None, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkResourceProperties(msrest.serialization.Model): + """Properties of a private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar group_id: GroupId of a private link resource. + :vartype group_id: str + :ivar required_members: RequiredMembers of a private link resource. + :vartype required_members: list[str] + :ivar required_zone_names: RequiredZoneNames of a private link resource. + :vartype required_zone_names: list[str] + """ + + _validation = { + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + 'required_zone_names': {'readonly': True}, + } + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourceProperties, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = None + + +class PrivateLinkResourcesWrapper(msrest.serialization.Model): + """Wrapper for a collection of private link resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. + :type value: list[~data_factory_management_client.models.PrivateLinkResource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + *, + value: List["PrivateLinkResource"], + **kwargs + ): + super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) + self.value = value + + class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. @@ -28691,6 +30685,43 @@ def __init__( self.table_name = table_name +class RemotePrivateEndpointConnection(msrest.serialization.Model): + """A remote private endpoint connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: + :vartype provisioning_state: str + :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :type private_endpoint: ~data_factory_management_client.models.ArmIdWrapper + :param private_link_service_connection_state: The state of a private link connection. 
+ :type private_link_service_connection_state: + ~data_factory_management_client.models.PrivateLinkConnectionState + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + *, + private_endpoint: Optional["ArmIdWrapper"] = None, + private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, + **kwargs + ): + super(RemotePrivateEndpointConnection, self).__init__(**kwargs) + self.provisioning_state = None + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + + class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -29113,6 +31144,9 @@ class RestServiceLinkedService(LinkedService): :type user_name: object :param password: The password used in Basic authentication type. :type password: ~data_factory_management_client.models.SecretBase + :param auth_headers: The additional HTTP headers in the request to RESTful API used for + authorization. Type: object (or Expression with resultType object). + :type auth_headers: object :param service_principal_id: The application's client ID used in AadServicePrincipal authentication type. :type service_principal_id: object @@ -29152,6 +31186,7 @@ class RestServiceLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_headers': {'key': 'typeProperties.authHeaders', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, @@ -29173,6 +31208,7 @@ def __init__( enable_server_certificate_validation: Optional[object] = None, user_name: Optional[object] = None, password: Optional["SecretBase"] = None, + auth_headers: Optional[object] = None, service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, @@ -29188,6 +31224,7 @@ def __init__( self.authentication_type = authentication_type self.user_name = user_name self.password = password + self.auth_headers = auth_headers self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant @@ -29234,13 +31271,9 @@ class RestSink(CopySink): :type http_request_timeout: object :param request_interval: The time to await before sending next request, in milliseconds. :type request_interval: object - :param compression_type: Compression Type to Send data in compressed format with Optimal - Compression Level, Default is None. And The Only Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an Object before calling - the rest endpoint , Default is false. ex: if true request content sample format is { rows:[]} - else the format is []. 
- :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -29259,8 +31292,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__( @@ -29276,8 +31308,7 @@ def __init__( additional_headers: Optional[object] = None, http_request_timeout: Optional[object] = None, request_interval: Optional[object] = None, - compression_type: Optional[object] = None, - wrap_request_json_in_an_object: Optional[object] = None, + http_compression_type: Optional[object] = None, **kwargs ): super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -29286,8 +31317,7 @@ def __init__( self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout self.request_interval = request_interval - self.compression_type = compression_type - self.wrap_request_json_in_an_object = wrap_request_json_in_an_object + self.http_compression_type = http_compression_type class RestSource(CopySource): @@ -31209,7 +33239,7 @@ class SapHanaSource(TabularSource): :type packet_size: object :param partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SapHanaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. :type partition_settings: ~data_factory_management_client.models.SapHanaPartitionSettings @@ -31229,7 +33259,7 @@ class SapHanaSource(TabularSource): 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } @@ -31244,7 +33274,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, packet_size: Optional[object] = None, - partition_option: Optional[Union[str, "SapHanaPartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): @@ -31918,7 +33948,7 @@ class SapTableSource(TabularSource): :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". 
- :type partition_option: str or ~data_factory_management_client.models.SapTablePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP table source partitioning. :type partition_settings: ~data_factory_management_client.models.SapTablePartitionSettings @@ -31943,7 +33973,7 @@ class SapTableSource(TabularSource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -31963,7 +33993,7 @@ def __init__( batch_size: Optional[object] = None, custom_rfc_read_table_function_module: Optional[object] = None, sap_data_column_delimiter: Optional[object] = None, - partition_option: Optional[Union[str, "SapTablePartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): @@ -32959,7 +34989,7 @@ class SftpServerLinkedService(LinkedService): Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. :type port: object :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "SshPublicKey". + Possible values include: "Basic", "SshPublicKey", "MultiFactor". :type authentication_type: str or ~data_factory_management_client.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). @@ -34197,7 +36227,49 @@ def __init__( self.query = query -class SQLDWSink(CopySink): +class SqlAlwaysEncryptedProperties(msrest.serialization.Model): + """Sql always encrypted properties. + + All required parameters must be populated in order to send to Azure. + + :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. + Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipal", "ManagedIdentity". + :type always_encrypted_akv_auth_type: str or + ~data_factory_management_client.models.SqlAlwaysEncryptedAkvAuthType + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Azure Key Vault authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure Key Vault. 
+ :type service_principal_key: ~data_factory_management_client.models.SecretBase + """ + + _validation = { + 'always_encrypted_akv_auth_type': {'required': True}, + } + + _attribute_map = { + 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, + 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, + } + + def __init__( + self, + *, + always_encrypted_akv_auth_type: Union[str, "SqlAlwaysEncryptedAkvAuthType"], + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + **kwargs + ): + super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + self.always_encrypted_akv_auth_type = always_encrypted_akv_auth_type + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + + +class SqlDwSink(CopySink): """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. @@ -34235,7 +36307,7 @@ class SQLDWSink(CopySink): :type allow_copy_command: object :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~data_factory_management_client.models.DWCopyCommandSettings + :type copy_command_settings: ~data_factory_management_client.models.DwCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object @@ -34257,7 +36329,7 @@ class SQLDWSink(CopySink): 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, - 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, } @@ -34274,11 +36346,11 @@ def __init__( allow_poly_base: Optional[object] = None, poly_base_settings: Optional["PolybaseSettings"] = None, allow_copy_command: Optional[object] = None, - copy_command_settings: Optional["DWCopyCommandSettings"] = None, + copy_command_settings: Optional["DwCopyCommandSettings"] = None, table_option: Optional[object] = None, **kwargs ): - super(SQLDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base @@ -34288,7 +36360,7 @@ def __init__( self.table_option = table_option -class SQLDWSource(TabularSource): +class SqlDwSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. 
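The new SqlAlwaysEncryptedProperties model above carries the Always Encrypted / Azure Key Vault authentication settings that the SQL linked services can now reference. A minimal construction sketch follows; the import path and the use of SecureString as the concrete SecretBase are assumptions, not part of this diff, while the keyword arguments mirror the constructor shown above.

# Sketch only: import path and SecureString are assumed; constructor arguments follow this diff.
from azext_datafactory.vendored_sdks.datafactory.models import (
    SqlAlwaysEncryptedProperties,
    SecureString,
)

# alwaysEncryptedAkvAuthType is the only required field; the service principal
# fields are optional and only needed for the "ServicePrincipal" auth type.
always_encrypted = SqlAlwaysEncryptedProperties(
    always_encrypted_akv_auth_type="ServicePrincipal",
    service_principal_id="<application-client-id>",
    service_principal_key=SecureString(value="<application-secret>"),
)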
@@ -34326,9 +36398,9 @@ class SQLDWSource(TabularSource): :type stored_procedure_parameters: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -34346,8 +36418,8 @@ class SQLDWSource(TabularSource): 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -34362,11 +36434,11 @@ def __init__( sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -34375,7 +36447,7 @@ def __init__( self.partition_settings = partition_settings -class SQLMiSink(CopySink): +class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. 
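With this regeneration, partition_option on the SQL sources is typed as a plain object rather than the SQLPartitionOption enum, so callers can pass either a literal string or an ADF expression. A hedged sketch of the new shape (import path assumed):

# Sketch only: the import path is an assumption; the keyword arguments mirror the
# SqlDwSource and SqlPartitionSettings signatures shown in this diff.
from azext_datafactory.vendored_sdks.datafactory.models import SqlDwSource, SqlPartitionSettings

source = SqlDwSource(
    sql_reader_query="SELECT * FROM dbo.FactSales",
    # partition_option is now an untyped object: a plain string works, and so would
    # an expression such as {"value": "@pipeline().parameters.opt", "type": "Expression"}.
    partition_option="PhysicalPartitionsOfTable",
    partition_settings=SqlPartitionSettings(partition_column_name="SaleDate"),
)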
@@ -34457,7 +36529,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SQLMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -34467,7 +36539,7 @@ def __init__( self.table_option = table_option -class SQLMiSource(TabularSource): +class SqlMiSource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -34506,9 +36578,9 @@ class SQLMiSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -34527,8 +36599,8 @@ class SQLMiSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -34544,11 +36616,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str 
self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -34558,7 +36630,7 @@ def __init__( self.partition_settings = partition_settings -class SQLPartitionSettings(msrest.serialization.Model): +class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be @@ -34591,13 +36663,13 @@ def __init__( partition_lower_bound: Optional[object] = None, **kwargs ): - super(SQLPartitionSettings, self).__init__(**kwargs) + super(SqlPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class SQLServerLinkedService(LinkedService): +class SqlServerLinkedService(LinkedService): """SQL Server linked service. All required parameters must be populated in order to send to Azure. @@ -34627,6 +36699,9 @@ class SQLServerLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~data_factory_management_client.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -34645,6 +36720,7 @@ class SQLServerLinkedService(LinkedService): 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( @@ -34659,17 +36735,19 @@ def __init__( user_name: Optional[object] = None, password: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, + always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): - super(SQLServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SqlServer' # type: str self.connection_string = connection_string self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings -class SQLServerSink(CopySink): +class SqlServerSink(CopySink): """A copy activity SQL server sink. All required parameters must be populated in order to send to Azure. 
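SqlServerLinkedService gains an always_encrypted_settings type property wired to the new SqlAlwaysEncryptedProperties model. A minimal sketch of attaching it, assuming the usual connection-string based construction (import path and connection string value are placeholders):

# Sketch only: import path and connection string are assumptions; the
# always_encrypted_settings keyword is the new argument added in this diff.
from azext_datafactory.vendored_sdks.datafactory.models import (
    SqlServerLinkedService,
    SqlAlwaysEncryptedProperties,
)

linked_service = SqlServerLinkedService(
    connection_string="Server=myserver;Database=mydb;Integrated Security=True;",
    # New in this version: Always Encrypted settings resolved against Azure Key Vault
    # using the factory's managed identity.
    always_encrypted_settings=SqlAlwaysEncryptedProperties(
        always_encrypted_akv_auth_type="ManagedIdentity",
    ),
)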
@@ -34751,7 +36829,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SQLServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -34761,7 +36839,7 @@ def __init__( self.table_option = table_option -class SQLServerSource(TabularSource): +class SqlServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -34800,9 +36878,9 @@ class SQLServerSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -34821,8 +36899,8 @@ class SQLServerSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -34838,11 +36916,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # 
type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -34852,7 +36930,7 @@ def __init__( self.partition_settings = partition_settings -class SQLServerStoredProcedureActivity(ExecutionActivity): +class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -34916,13 +36994,13 @@ def __init__( stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, **kwargs ): - super(SQLServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters -class SQLServerTableDataset(Dataset): +class SqlServerTableDataset(Dataset): """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. @@ -34996,14 +37074,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(SQLServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SqlServerTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class SQLSink(CopySink): +class SqlSink(CopySink): """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. @@ -35085,7 +37163,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -35095,7 +37173,7 @@ def __init__( self.table_option = table_option -class SQLSource(TabularSource): +class SqlSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. 
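Taken together, this block renames the generated SQL model classes from the old upper-case forms to PascalCase while leaving the wire-level discriminators (the self.type values such as 'SqlDWSink' and 'SqlMISource') untouched, so only Python-side imports and references are affected. A short reference of the renames in this section, with an assumed import path on the last line:

# Old name                            New name (this regeneration)
#   SQLDWSink / SQLDWSource             SqlDwSink / SqlDwSource
#   SQLMiSink / SQLMiSource             SqlMiSink / SqlMiSource
#   SQLPartitionSettings                SqlPartitionSettings
#   SQLServerLinkedService              SqlServerLinkedService
#   SQLServerSink / SQLServerSource     SqlServerSink / SqlServerSource
#   SQLServerStoredProcedureActivity    SqlServerStoredProcedureActivity
#   SQLServerTableDataset               SqlServerTableDataset
#   SQLSink / SQLSource                 SqlSink / SqlSource
# Only the class names changed; callers update their imports, e.g.:
from azext_datafactory.vendored_sdks.datafactory.models import SqlSink, SqlSource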
@@ -35136,9 +37214,9 @@ class SQLSource(TabularSource): :type isolation_level: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -35157,8 +37235,8 @@ class SQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -35174,11 +37252,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, isolation_level: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36665,6 +38743,80 @@ def __init__( self.type_conversion_settings = type_conversion_settings +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -36813,7 +38965,7 @@ class TeradataSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.TeradataPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for teradata source partitioning. 
:type partition_settings: ~data_factory_management_client.models.TeradataPartitionSettings @@ -36832,7 +38984,7 @@ class TeradataSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } @@ -36846,7 +38998,7 @@ def __init__( query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, - partition_option: Optional[Union[str, "TeradataPartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): @@ -37427,7 +39579,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. @@ -38293,17 +40445,24 @@ class WebActivityAuthentication(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Web activity authentication (Basic/ClientCertificate/MSI). + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str - :param pfx: Base64-encoded contents of a PFX file. + :param pfx: Base64-encoded contents of a PFX file or Certificate when used for + ServicePrincipal. :type pfx: ~data_factory_management_client.models.SecretBase - :param username: Web activity authentication user name for basic authentication. - :type username: str - :param password: Password for the PFX file or basic authentication. + :param username: Web activity authentication user name for basic authentication or ClientID + when used for ServicePrincipal. Type: string (or Expression with resultType string). + :type username: object + :param password: Password for the PFX file or basic authentication / Secret when used for + ServicePrincipal. :type password: ~data_factory_management_client.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI - Authentication. - :type resource: str + Authentication. Type: string (or Expression with resultType string). + :type resource: object + :param user_tenant: TenantId for which Azure Auth token will be requested when using + ServicePrincipal Authentication. Type: string (or Expression with resultType string). 
+ :type user_tenant: object """ _validation = { @@ -38313,9 +40472,10 @@ class WebActivityAuthentication(msrest.serialization.Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'username': {'key': 'username', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, 'password': {'key': 'password', 'type': 'SecretBase'}, - 'resource': {'key': 'resource', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'object'}, + 'user_tenant': {'key': 'userTenant', 'type': 'object'}, } def __init__( @@ -38323,9 +40483,10 @@ def __init__( *, type: str, pfx: Optional["SecretBase"] = None, - username: Optional[str] = None, + username: Optional[object] = None, password: Optional["SecretBase"] = None, - resource: Optional[str] = None, + resource: Optional[object] = None, + user_tenant: Optional[object] = None, **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) @@ -38334,6 +40495,7 @@ def __init__( self.username = username self.password = password self.resource = resource + self.user_tenant = user_tenant class WebLinkedServiceTypeProperties(msrest.serialization.Model): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py index 3f6a32ff284..c1da8c996a3 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py @@ -6,40 +6,46 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._operation_operations import OperationOperations -from ._factory_operations import FactoryOperations +from ._operations import Operations +from ._factories_operations import FactoriesOperations from ._exposure_control_operations import ExposureControlOperations -from ._integration_runtime_operations import IntegrationRuntimeOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations import IntegrationRuntimeNodeOperations -from ._linked_service_operations import LinkedServiceOperations -from ._dataset_operations import DatasetOperations -from ._pipeline_operations import PipelineOperations -from ._pipeline_run_operations import PipelineRunOperations -from ._activity_run_operations import ActivityRunOperations -from ._trigger_operations import TriggerOperations -from ._trigger_run_operations import TriggerRunOperations -from ._data_flow_operations import DataFlowOperations +from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from ._linked_services_operations import LinkedServicesOperations +from ._datasets_operations import DatasetsOperations +from ._pipelines_operations import PipelinesOperations +from ._pipeline_runs_operations import PipelineRunsOperations +from ._activity_runs_operations import ActivityRunsOperations +from ._triggers_operations import TriggersOperations +from ._trigger_runs_operations import TriggerRunsOperations +from ._data_flows_operations import DataFlowsOperations from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations import ManagedVirtualNetworkOperations -from 
._managed_private_endpoint_operations import ManagedPrivateEndpointOperations +from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations +from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations +from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations +from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations __all__ = [ - 'OperationOperations', - 'FactoryOperations', + 'Operations', + 'FactoriesOperations', 'ExposureControlOperations', - 'IntegrationRuntimeOperations', + 'IntegrationRuntimesOperations', 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', + 'IntegrationRuntimeNodesOperations', + 'LinkedServicesOperations', + 'DatasetsOperations', + 'PipelinesOperations', + 'PipelineRunsOperations', + 'ActivityRunsOperations', + 'TriggersOperations', + 'TriggerRunsOperations', + 'DataFlowsOperations', 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', + 'ManagedVirtualNetworksOperations', + 'ManagedPrivateEndpointsOperations', + 'PrivateEndPointConnectionsOperations', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourcesOperations', ] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py new file mode 100644 index 00000000000..f51ff306dc7 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ActivityRunsOperations(object): + """ActivityRunsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. 
+ :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def query_by_pipeline_run( + self, + resource_group_name, # type: str + factory_name, # type: str + run_id, # type: str + filter_parameters, # type: "models.RunFilterParameters" + **kwargs # type: Any + ): + # type: (...) -> "models.ActivityRunsQueryResponse" + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. + :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ActivityRunsQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_pipeline_run.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_pipeline_run.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index 446c117302f..976a9653c6e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -20,7 +20,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,21 +51,18 @@ def _create_initial( self, resource_group_name, # type: str factory_name, # type: str - compute_type=None, # type: Optional[str] - core_count=None, # type: Optional[int] - time_to_live=None, # type: Optional[int] - name=None, # type: Optional[str] - properties=None, # type: Optional["models.IntegrationRuntime"] + request, # type: "models.CreateDataFlowDebugSessionRequest" **kwargs # type: Any ): # type: (...) 
-> Optional["models.CreateDataFlowDebugSessionResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore @@ -83,13 +80,12 @@ def _create_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -115,11 +111,7 @@ def begin_create( self, resource_group_name, # type: str factory_name, # type: str - compute_type=None, # type: Optional[str] - core_count=None, # type: Optional[int] - time_to_live=None, # type: Optional[int] - name=None, # type: Optional[str] - properties=None, # type: Optional["models.IntegrationRuntime"] + request, # type: "models.CreateDataFlowDebugSessionRequest" **kwargs # type: Any ): # type: (...) -> LROPoller["models.CreateDataFlowDebugSessionResponse"] @@ -129,18 +121,8 @@ def begin_create( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param request: Data flow debug session definition. + :type request: ~data_factory_management_client.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: True for ARMPolling, False for no polling, or a @@ -162,11 +144,7 @@ def begin_create( raw_result = self._create_initial( resource_group_name=resource_group_name, factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, + request=request, cls=lambda x,y,z: x, **kwargs ) @@ -181,7 +159,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -214,14 +198,17 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -271,16 +258,7 @@ def add_data_flow( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] - datasets=None, # type: Optional[List["models.DatasetDebugResource"]] - linked_services=None, # type: Optional[List["models.LinkedServiceDebugResource"]] - source_settings=None, # type: Optional[List["models.DataFlowSourceSetting"]] - parameters=None, # type: Optional[Dict[str, object]] - dataset_parameters=None, # type: Optional[object] - folder_path=None, # type: Optional[object] - reference_name=None, # type: Optional[str] - name=None, # type: Optional[str] - properties=None, # type: Optional["models.DataFlow"] + request, # type: "models.DataFlowDebugPackage" **kwargs # type: Any ): # type: (...) -> "models.AddDataFlowToDebugSessionResponse" @@ -290,39 +268,21 @@ def add_data_flow( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param datasets: List of datasets. - :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. 
- :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param request: Data flow debug session definition with debug content. + :type request: ~data_factory_management_client.models.DataFlowDebugPackage :keyword callable cls: A custom type or function that will be passed the direct response :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -340,13 +300,12 @@ def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -366,7 +325,7 @@ def delete( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] + request, # type: "models.DeleteDataFlowDebugSessionRequest" **kwargs # type: Any ): # type: (...) -> None @@ -376,20 +335,21 @@ def delete( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str + :param request: Data flow debug session definition for deletion. 
+ :type request: ~data_factory_management_client.models.DeleteDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -407,12 +367,12 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -429,19 +389,18 @@ def _execute_command_initial( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] - command=None, # type: Optional[Union[str, "models.DataFlowDebugCommandType"]] - command_payload=None, # type: Optional["models.DataFlowDebugCommandPayload"] + request, # type: "models.DataFlowDebugCommandRequest" **kwargs # type: Any ): # type: (...) 
-> Optional["models.DataFlowDebugCommandResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -459,13 +418,12 @@ def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -491,9 +449,7 @@ def begin_execute_command( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] - command=None, # type: Optional[Union[str, "models.DataFlowDebugCommandType"]] - command_payload=None, # type: Optional["models.DataFlowDebugCommandPayload"] + request, # type: "models.DataFlowDebugCommandRequest" **kwargs # type: Any ): # type: (...) -> LROPoller["models.DataFlowDebugCommandResponse"] @@ -503,12 +459,8 @@ def begin_execute_command( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. - :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload + :param request: Data flow debug command definition. + :type request: ~data_factory_management_client.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
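begin_execute_command follows the same pattern: the session id, command type, and payload move into a single DataFlowDebugCommandRequest, and the method still returns an LROPoller. A short sketch reusing the `client` and `session_id` from the sketch above; the command value is illustrative:

    cmd = models.DataFlowDebugCommandRequest(
        session_id=session_id,
        command="executePreviewQuery",  # a DataFlowDebugCommandType value; illustrative
    )
    poller = client.data_flow_debug_session.begin_execute_command(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        request=cmd,
    )
    result = poller.result()  # DataFlowDebugCommandResponse once the long-running operation finishes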
:keyword polling: True for ARMPolling, False for no polling, or a @@ -530,9 +482,7 @@ def begin_execute_command( raw_result = self._execute_command_initial( resource_group_name=resource_group_name, factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, + request=request, cls=lambda x,y,z: x, **kwargs ) @@ -547,7 +497,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py new file mode 100644 index 00000000000..41292015b17 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py @@ -0,0 +1,327 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DataFlowsOperations(object): + """DataFlowsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + data_flow_name, # type: str + data_flow, # type: "models.DataFlowResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.DataFlowResource" + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param data_flow: Data flow resource definition. + :type data_flow: ~data_factory_management_client.models.DataFlowResource + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DataFlowResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(data_flow, 'DataFlowResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + data_flow_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.DataFlowResource" + """Gets a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the + ETag matches the existing entity tag, or if * was provided, then no content will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataFlowResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DataFlowResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + data_flow_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.DataFlowListResponse"] + """Lists data flows. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
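For the new DataFlowsOperations group, create_or_update takes the data flow name plus a DataFlowResource body, and list_by_factory returns an ItemPaged that can be iterated directly. A hedged sketch, assuming the operation group is exposed as `client.data_flows` and that DataFlowResource wraps a concrete DataFlow subtype (here MappingDataFlow) in its `properties` field; both are assumptions, since neither appears in this hunk:

    flow = models.DataFlowResource(
        properties=models.MappingDataFlow(description="example mapping data flow"),
    )
    client.data_flows.create_or_update(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        data_flow_name="exampleDataFlow",
        data_flow=flow,
    )
    for item in client.data_flows.list_by_factory("exampleResourceGroup", "exampleFactoryName"):
        print(item.name)  # each page element deserializes to a DataFlowResource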
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DataFlowListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py new file mode 100644 index 00000000000..3ad92c858c9 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py @@ -0,0 +1,329 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DatasetsOperations(object): + """DatasetsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.DatasetListResponse"] + """Lists datasets. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DatasetListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DatasetListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + dataset_name, # type: str + dataset, # type: "models.DatasetResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.DatasetResource" + """Creates or updates a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param dataset: Dataset resource definition. + :type dataset: ~data_factory_management_client.models.DatasetResource + :param if_match: ETag of the dataset entity. 
Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DatasetResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(dataset, 'DatasetResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + dataset_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional["models.DatasetResource"] + """Gets a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param if_none_match: ETag of the dataset entity. Should only be specified for get. 
If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatasetResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.DatasetResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + dataset_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. 
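The dataset operations expose the usual ETag hooks: get accepts if_none_match and may return None on a 304, while create_or_update accepts if_match for optimistic concurrency. A small sketch, assuming the operation group is exposed as `client.datasets` and that the returned resource carries an `etag` attribute (both assumptions here):

    ds = client.datasets.get(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        dataset_name="exampleDataset",
    )
    if ds is not None:
        client.datasets.create_or_update(
            resource_group_name="exampleResourceGroup",
            factory_name="exampleFactoryName",
            dataset_name="exampleDataset",
            dataset=ds,
            if_match=ds.etag,  # only update if the entity has not changed since the get
        )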
+ :type dataset_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index d2667ffac81..b419a713e9f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -17,7 +17,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,8 +47,7 @@ def __init__(self, 
client, config, serializer, deserializer): def get_feature_value( self, location_id, # type: str - feature_name=None, # type: Optional[str] - feature_type=None, # type: Optional[str] + exposure_control_request, # type: "models.ExposureControlRequest" **kwargs # type: Any ): # type: (...) -> "models.ExposureControlResponse" @@ -56,22 +55,21 @@ def get_feature_value( :param location_id: The location identifier. :type location_id: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param exposure_control_request: The exposure control request. + :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value.metadata['url'] # type: ignore @@ -88,13 +86,12 @@ def get_feature_value( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -114,8 +111,7 @@ def get_feature_value_by_factory( self, resource_group_name, # type: str factory_name, # type: str - feature_name=None, # type: Optional[str] - feature_type=None, # type: Optional[str] + exposure_control_request, # type: "models.ExposureControlRequest" **kwargs # type: Any ): # type: (...) -> "models.ExposureControlResponse" @@ -125,22 +121,21 @@ def get_feature_value_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param exposure_control_request: The exposure control request. 
+ :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value_by_factory.metadata['url'] # type: ignore @@ -158,13 +153,12 @@ def get_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -180,11 +174,11 @@ def get_feature_value_by_factory( return deserialized get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore - def query_feature_value_by_factory( + def query_feature_values_by_factory( self, resource_group_name, # type: str factory_name, # type: str - exposure_control_requests, # type: List["models.ExposureControlRequest"] + exposure_control_batch_request, # type: "models.ExposureControlBatchRequest" **kwargs # type: Any ): # type: (...) -> "models.ExposureControlBatchResponse" @@ -194,23 +188,24 @@ def query_feature_value_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param exposure_control_requests: List of exposure control features. - :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] + :param exposure_control_batch_request: The exposure control request for list of features. 
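On the exposure-control side, the renamed query_feature_values_by_factory now takes an ExposureControlBatchRequest built from individual ExposureControlRequest entries rather than a bare list. A sketch, assuming the operation group is exposed as `client.exposure_control` (attribute name assumed) and using an illustrative feature name:

    batch = models.ExposureControlBatchRequest(
        exposure_control_requests=[
            models.ExposureControlRequest(
                feature_name="ADFIntegrationRuntimeSharingRbac",  # illustrative feature
                feature_type="Feature",
            ),
        ],
    )
    response = client.exposure_control.query_feature_values_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName",
        exposure_control_batch_request=batch,
    )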
+ :type exposure_control_batch_request: ~data_factory_management_client.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.query_feature_value_by_factory.metadata['url'] # type: ignore + url = self.query_feature_values_by_factory.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -225,13 +220,12 @@ def query_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -245,4 +239,4 @@ def query_feature_value_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized - query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore + query_feature_values_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py new file mode 100644 index 00000000000..29d7d4af8a9 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py @@ -0,0 +1,644 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class FactoriesOperations(object): + """FactoriesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.FactoryListResponse"] + """Lists factories under the specified subscription. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FactoryListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('FactoryListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = 
prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore + + def configure_factory_repo( + self, + location_id, # type: str + factory_repo_update, # type: "models.FactoryRepoUpdate" + **kwargs # type: Any + ): + # type: (...) -> "models.Factory" + """Updates a factory's repo information. + + :param location_id: The location identifier. + :type location_id: str + :param factory_repo_update: Update factory repo request definition. + :type factory_repo_update: ~data_factory_management_client.models.FactoryRepoUpdate + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.configure_factory_repo.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'locationId': self._serialize.url("location_id", location_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.FactoryListResponse"] + """Lists factories. 
+ + :param resource_group_name: The resource group name. + :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either FactoryListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('FactoryListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + factory, # type: "models.Factory" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Factory" + """Creates or updates a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param factory: Factory resource definition. + :type factory: ~data_factory_management_client.models.Factory + :param if_match: ETag of the factory entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(factory, 'Factory') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + factory_name, # type: str + factory_update_parameters, # type: "models.FactoryUpdateParameters" + **kwargs # type: Any + ): + # type: (...) -> "models.Factory" + """Updates a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param factory_update_parameters: The parameters for updating a factory. 
+ :type factory_update_parameters: ~data_factory_management_client.models.FactoryUpdateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional["models.Factory"] + """Gets a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Factory, or the result of cls(response) + :rtype: ~data_factory_management_client.models.Factory or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Factory', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore + + def get_git_hub_access_token( + self, + resource_group_name, # type: str + factory_name, # type: str + git_hub_access_token_request, # type: "models.GitHubAccessTokenRequest" + **kwargs # type: Any + ): + # type: (...) -> "models.GitHubAccessTokenResponse" + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request definition. 
+ :type git_hub_access_token_request: ~data_factory_management_client.models.GitHubAccessTokenRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GitHubAccessTokenResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get_git_hub_access_token.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore + + def get_data_plane_access( + self, + resource_group_name, # type: str + factory_name, # type: str + policy, # type: "models.UserAccessPolicy" + **kwargs # type: Any + ): + # type: (...) -> "models.AccessPolicyResponse" + """Get Data Plane access. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param policy: Data Plane user access policy definition. 
+ :type policy: ~data_factory_management_client.models.UserAccessPolicy + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessPolicyResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.AccessPolicyResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.get_data_plane_access.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(policy, 'UserAccessPolicy') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py new file mode 100644 index 00000000000..c9623854aa9 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py @@ -0,0 +1,319 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
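# ---------------------------------------------------------------------------
# [Editor's note: illustrative sketch only; this block is not part of the
#  generated file or of the diff itself. It shows how the factory operations
#  defined in the hunks above might be driven through the generated management
#  client. The client class name, import path, credential type and the
#  `.factories` attribute are assumptions; only the method names, parameters
#  and response handling are taken from the code above.]
#
#     from azure.identity import DefaultAzureCredential
#     from data_factory_management_client import DataFactoryManagementClient
#
#     client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
#
#     # list_by_resource_group wraps the paged GET in an ItemPaged, so iterating
#     # transparently follows next_link across pages of FactoryListResponse.
#     for factory in client.factories.list_by_resource_group("exampleResourceGroup"):
#         print(factory.name)
#
#     # get() may return None: the operation also accepts 304 when if_none_match
#     # carries an ETag that still matches the server-side entity.
#     factory = client.factories.get("exampleResourceGroup", "exampleFactoryName")
# ---------------------------------------------------------------------------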
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimeNodesOperations(object): + """IntegrationRuntimeNodesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" + """Gets a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. 
+ :type node_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. 
+ :type node_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + update_integration_runtime_node_request, # type: "models.UpdateIntegrationRuntimeNodeRequest" + **kwargs # type: Any + ): + # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. 
+ :type update_integration_runtime_node_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeNodeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) + :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore + + def get_ip_address( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + node_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeNodeIpAddress" + """Get the IP address of self-hosted integration runtime node. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_ip_address.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index 461ab7b6539..a04018b467e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING 
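# ---------------------------------------------------------------------------
# [Editor's note: illustrative sketch only, not generated code. The new
#  IntegrationRuntimeNodesOperations class above exposes get/update/delete and
#  get_ip_address for nodes of a self-hosted integration runtime. Assuming the
#  same hypothetical `client` as in the earlier note, and assuming the group is
#  exposed as `.integration_runtime_nodes`, a call might look like this;
#  "exampleIntegrationRuntime" and "Node_1" are placeholder names chosen to
#  satisfy the path regexes enforced above.]
#
#     node = client.integration_runtime_nodes.get(
#         "exampleResourceGroup", "exampleFactoryName",
#         "exampleIntegrationRuntime", "Node_1",
#     )
#
#     # get_ip_address POSTs to .../nodes/{nodeName}/ipAddress and deserializes
#     # the response into an IntegrationRuntimeNodeIpAddress model.
#     ip_info = client.integration_runtime_nodes.get_ip_address(
#         "exampleResourceGroup", "exampleFactoryName",
#         "exampleIntegrationRuntime", "Node_1",
#     )
# ---------------------------------------------------------------------------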
import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod @@ -55,9 +55,12 @@ def _refresh_initial( ): # type: (...) -> Optional["models.SsisObjectMetadataStatusResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._refresh_initial.metadata['url'] # type: ignore @@ -75,7 +78,7 @@ def _refresh_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -147,7 +150,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -166,7 +176,7 @@ def get( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - metadata_path=None, # type: Optional[str] + get_metadata_request=None, # type: Optional["models.GetSsisObjectMetadataRequest"] **kwargs # type: Any ): # type: (...) -> "models.SsisObjectMetadataListResponse" @@ -179,20 +189,21 @@ def get( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param metadata_path: Metadata path. - :type metadata_path: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. 
+ :type get_metadata_request: ~data_factory_management_client.models.GetSsisObjectMetadataRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SsisObjectMetadataListResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -211,7 +222,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if get_metadata_request is not None: @@ -220,7 +231,6 @@ def get( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py new file mode 100644 index 00000000000..d0a57313403 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py @@ -0,0 +1,1235 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimesOperations(object): + """IntegrationRuntimesOperations operations. 
+ + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"] + """Lists integration runtimes. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + integration_runtime, # type: "models.IntegrationRuntimeResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param integration_runtime: Integration runtime resource definition. + :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeResource + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, 
header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional["models.IntegrationRuntimeResource"] + """Gets an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. + If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = 
pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + update_integration_runtime_request, # type: "models.UpdateIntegrationRuntimeRequest" + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + :type update_integration_runtime_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = 
self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def get_status( + self, + 
resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeStatusResponse" + """Gets detailed status information for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + + def get_connection_info( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeConnectionInfo" + """Gets the on-premises integration runtime connection information for encrypting the on-premises + data source credentials. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_connection_info.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore + + def regenerate_auth_key( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + regenerate_key_parameters, # type: "models.IntegrationRuntimeRegenerateKeyParameters" + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeAuthKeys" + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. 
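# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): reading runtime
# state with the get_status operation above, reusing the `client` from the
# earlier sketch. The `properties.state` attribute path on the returned
# IntegrationRuntimeStatusResponse is an assumption about the generated model.
status = client.integration_runtimes.get_status(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
)
print(status.name, status.properties.state)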
+ :type regenerate_key_parameters: ~data_factory_management_client.models.IntegrationRuntimeRegenerateKeyParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.regenerate_auth_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore + + def list_auth_keys( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeAuthKeys" + """Retrieves the authentication keys for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeAuthKeys, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.list_auth_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + + def _start_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
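# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): rotating and then
# reading integration runtime authentication keys with the two operations
# above, reusing `client` and `models` from the earlier sketch. The key name
# "authKey2" and the `auth_key2` attribute on IntegrationRuntimeAuthKeys are
# assumptions about the generated models.
client.integration_runtimes.regenerate_auth_key(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    regenerate_key_parameters=models.IntegrationRuntimeRegenerateKeyParameters(key_name="authKey2"),
)
keys = client.integration_runtimes.list_auth_keys(
    "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
)
print(keys.auth_key2)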
-> Optional["models.IntegrationRuntimeStatusResponse"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + def begin_start( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"] + """Starts a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + def _stop_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
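# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): begin_start above
# returns an azure.core LROPoller, so a caller blocks on .result() (or
# .wait()) for the long-running start to finish. Reuses `client` from the
# earlier sketch; the analogous begin_stop call is shown with .wait() because
# it returns no body.
poller = client.integration_runtimes.begin_start(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
)
status = poller.result()  # IntegrationRuntimeStatusResponse once the LRO completes
print(status.name)
# client.integration_runtimes.begin_stop(
#     "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime").wait()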
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + def begin_stop( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Stops a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + def sync_credentials( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Force the integration runtime to synchronize credentials across integration runtime nodes, and + this will override the credentials across all worker nodes with those available on the + dispatcher node. If you already have the latest credential backup file, you should manually + import it (preferred) on any self-hosted integration runtime node than using this API directly. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.sync_credentials.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore + + def get_monitoring_data( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeMonitoringData" + """Get the integration runtime monitoring data, which includes the monitor data for all the nodes + under this integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeMonitoringData, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_monitoring_data.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore + + def upgrade( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Upgrade self-hosted integration runtime to latest version if availability. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.upgrade.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore + + def remove_links( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + linked_integration_runtime_request, # type: "models.LinkedIntegrationRuntimeRequest" + **kwargs # type: Any + ): + # type: (...) -> None + """Remove all linked integration runtimes under specific data factory in a self-hosted integration + runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. 
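# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): pulling node-level
# monitoring data and triggering an upgrade of a self-hosted integration
# runtime with the two operations above, reusing `client` from the earlier
# sketch. The `nodes`, `node_name` and `cpu_utilization` attributes are
# assumptions about the IntegrationRuntimeMonitoringData model.
data = client.integration_runtimes.get_monitoring_data(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
)
for node in data.nodes or []:
    print(node.node_name, node.cpu_utilization)
client.integration_runtimes.upgrade(
    "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
)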
+ :type linked_integration_runtime_request: ~data_factory_management_client.models.LinkedIntegrationRuntimeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.remove_links.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + remove_links.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + + def create_linked_integration_runtime( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + create_linked_integration_runtime_request, # type: "models.CreateLinkedIntegrationRuntimeRequest" + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeStatusResponse" + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. 
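# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): removing linked
# integration runtimes with remove_links above, reusing `client` and `models`
# from the earlier sketch. The `linked_factory_name` keyword on
# LinkedIntegrationRuntimeRequest is an assumption about the generated model.
client.integration_runtimes.remove_links(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleSharedIntegrationRuntime",
    linked_integration_runtime_request=models.LinkedIntegrationRuntimeRequest(
        linked_factory_name="exampleLinkedFactoryName",
    ),
)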
+ :type create_linked_integration_runtime_request: ~data_factory_management_client.models.CreateLinkedIntegrationRuntimeRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_linked_integration_runtime.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py new file mode 100644 index 00000000000..ffb243da168 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py @@ -0,0 +1,330 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class LinkedServicesOperations(object): + """LinkedServicesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.LinkedServiceListResponse"] + """Lists linked services. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + linked_service_name, # type: str + linked_service, # type: "models.LinkedServiceResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.LinkedServiceResource" + """Creates or updates a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param linked_service: Linked service resource definition. 
+ :type linked_service: ~data_factory_management_client.models.LinkedServiceResource + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.LinkedServiceResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(linked_service, 'LinkedServiceResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + linked_service_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional["models.LinkedServiceResource"] + """Gets a linked service. + + :param resource_group_name: The resource group name. 
+ :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param if_none_match: ETag of the linked service entity. Should only be specified for get. If + the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedServiceResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.LinkedServiceResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. 
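# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated module): creating and then
# listing linked services with the operations above, reusing `client` and
# `models` from the earlier sketch. The AzureStorageLinkedService model and
# the SecureString-shaped connection string are assumptions about the
# generated models; only the create_or_update/list_by_factory signatures are
# taken from the code above.
created = client.linked_services.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    linked_service_name="exampleLinkedService",
    linked_service=models.LinkedServiceResource(
        properties=models.AzureStorageLinkedService(
            connection_string={"type": "SecureString", "value": "<storage-connection-string>"},
        ),
    ),
)
print(created.name)
for item in client.linked_services.list_by_factory("exampleResourceGroup", "exampleFactoryName"):
    print(item.name)  # ItemPaged follows the next_link paging shown above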
+ :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py new file mode 100644 index 00000000000..d1c7c89531f --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py @@ -0,0 +1,344 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ManagedPrivateEndpointsOperations(object): + """ManagedPrivateEndpointsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"] + """Lists managed private endpoints. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. 
+ :type managed_virtual_network_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + managed_private_endpoint, # type: "models.ManagedPrivateEndpointResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> "models.ManagedPrivateEndpointResource" + """Creates or updates a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. + :type managed_private_endpoint: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :param if_match: ETag of the managed private endpoint entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.ManagedPrivateEndpointResource" + """Gets a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedPrivateEndpointResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + 
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_private_endpoint_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a managed private endpoint. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param managed_private_endpoint_name: Managed private endpoint name. + :type managed_private_endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, 
query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py new file mode 100644 index 00000000000..8f81cdf0c80 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py @@ -0,0 +1,268 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ManagedVirtualNetworksOperations(object): + """ManagedVirtualNetworksOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ManagedVirtualNetworkListResponse"] + """Lists managed Virtual Networks. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + managed_virtual_network_name, # type: str + managed_virtual_network, # type: "models.ManagedVirtualNetworkResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.ManagedVirtualNetworkResource" + """Creates or updates a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. 
+ :type managed_virtual_network_name: str + :param managed_virtual_network: Managed Virtual Network resource definition. + :type managed_virtual_network: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :param if_match: ETag of the managed Virtual Network entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + 
managed_virtual_network_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.ManagedVirtualNetworkResource" + """Gets a managed Virtual Network. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param managed_virtual_network_name: Managed virtual network name. + :type managed_virtual_network_name: str + :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for + get. If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ManagedVirtualNetworkResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py new 
file mode 100644 index 00000000000..9795a6e8c4e --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.OperationListResponse"] + """Lists the available Azure Data Factory API operations. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py new file mode 100644 index 00000000000..be684c71f0a --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py @@ -0,0 +1,241 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PipelineRunsOperations(object): + """PipelineRunsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def query_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + filter_parameters, # type: "models.RunFilterParameters" + **kwargs # type: Any + ): + # type: (...) -> "models.PipelineRunsQueryResponse" + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. + :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRunsQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, 
query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + run_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.PipelineRun" + """Get a pipeline run by its run ID. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineRun, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineRun + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PipelineRun', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore + + def cancel( + self, + resource_group_name, # type: str + factory_name, # type: str + run_id, # type: str + 
is_recursive=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> None + """Cancel a pipeline run by its run ID. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current + pipeline. + :type is_recursive: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.cancel.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if is_recursive is not None: + query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py new file mode 100644 index 00000000000..d4a5594d606 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py @@ -0,0 +1,428 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PipelinesOperations(object): + """PipelinesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.PipelineListResponse"] + """Lists pipelines. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PipelineListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PipelineListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + pipeline, # type: "models.PipelineResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.PipelineResource" + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~data_factory_management_client.models.PipelineResource + :param if_match: ETag of the pipeline entity. 
Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(pipeline, 'PipelineResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional["models.PipelineResource"] + """Gets a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param if_none_match: ETag of the pipeline entity. 
Should only be specified for get. If the + ETag matches the existing entity tag, or if * was provided, then no content will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PipelineResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PipelineResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. 
+ :type pipeline_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore + + def create_run( + self, + resource_group_name, # type: str + factory_name, # type: str + pipeline_name, # type: str + reference_pipeline_run_id=None, # type: Optional[str] + is_recovery=None, # type: Optional[bool] + start_activity_name=None, # type: Optional[str] + start_from_failure=None, # type: Optional[bool] + parameters=None, # type: Optional[Dict[str, object]] + **kwargs # type: Any + ): + # type: (...) -> "models.CreateRunResponse" + """Creates a run of a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the + parameters of the specified run will be used to create a new run. + :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified + referenced pipeline run and the new run will be grouped under the same groupId. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not + specified, all activities will run. 
+ :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed + activities. The property will be used only if startActivityName is not specified. + :type start_from_failure: bool + :param parameters: Parameters of the pipeline run. These parameters will be used only if the + runId is not specified. + :type parameters: dict[str, object] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CreateRunResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.CreateRunResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_run.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if reference_pipeline_run_id is not None: + query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + if is_recovery is not None: + query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') + if start_activity_name is not None: + query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') + if start_from_failure is not None: + query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if parameters is not None: + body_content = self._serialize.body(parameters, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CreateRunResponse', pipeline_response) + + if cls: + return cls(pipeline_response, 
deserialized, {}) + + return deserialized + create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py new file mode 100644 index 00000000000..11471ac9d41 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_end_point_connections_operations.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndPointConnectionsOperations(object): + """PrivateEndPointConnectionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.PrivateEndpointConnectionListResponse"] + """Lists Private endpoint connections. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
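# --- Illustrative sketch; not part of the generated code above -----------------------------
# The create_run operation shown above POSTs to .../pipelines/{pipelineName}/createRun and
# deserializes the 200 response into CreateRunResponse. A minimal sketch of calling it through
# the vendored client follows; the client class/constructor, the credential, and the
# `pipelines` attribute name are assumptions for illustration -- only the operation's
# signature and parameters come from the code above.
from azure.identity import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
run = client.pipelines.create_run(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    pipeline_name="examplePipeline",
    parameters={"OutputBlobNameList": ["exampleoutput.csv"]},  # used only when no referencePipelineRunId is given
)
print(run.run_id)  # CreateRunResponse carries the identifier of the new pipeline run
# --------------------------------------------------------------------------------------------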
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PrivateEndpointConnectionListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py new file mode 100644 index 00000000000..60bd6a37157 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_endpoint_connection_operations.py @@ -0,0 +1,252 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionOperations(object): + """PrivateEndpointConnectionOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + private_endpoint_wrapper, # type: "models.PrivateLinkConnectionApprovalRequestResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateEndpointConnectionResource" + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: + :type private_endpoint_wrapper: ~data_factory_management_client.models.PrivateLinkConnectionApprovalRequestResource + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateEndpointConnectionResource" + """Gets a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
+ :type private_endpoint_connection_name: str + :param if_none_match: ETag of the private endpoint connection entity. Should only be specified + for get. If the ETag matches the existing entity tag, or if * was provided, then no content + will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py new file mode 100644 index 00000000000..89847585015 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_private_link_resources_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations(object): + """PrivateLinkResourcesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateLinkResourcesWrapper" + """Gets the private link resources. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourcesWrapper, or the result of cls(response) + :rtype: ~data_factory_management_client.models.PrivateLinkResourcesWrapper + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourcesWrapper"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourcesWrapper', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py new file mode 100644 index 00000000000..ca2b12d4a29 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py @@ -0,0 +1,240 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TriggerRunsOperations(object): + """TriggerRunsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def rerun( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Rerun single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.rerun.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + + def cancel( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Cancel a single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.cancel.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + + def query_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + filter_parameters, # type: "models.RunFilterParameters" + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerRunsQueryResponse" + """Query trigger runs. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. 
+ :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerRunsQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py new file mode 100644 index 00000000000..f85d33b9c68 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py @@ -0,0 +1,945 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
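# --- Illustrative sketch; not part of the generated code above -----------------------------
# query_by_factory above serializes a RunFilterParameters body, POSTs it to
# .../factories/{factoryName}/queryTriggerRuns, and deserializes the result into
# TriggerRunsQueryResponse. A minimal sketch, reusing the client constructed in the earlier
# sketch; the `trigger_runs` attribute name and the RunFilterParameters fields
# (last_updated_after / last_updated_before) are assumptions about the vendored models, not
# taken from the code above.
import datetime

from azext_datafactory.vendored_sdks.datafactory import models

filters = models.RunFilterParameters(
    last_updated_after=datetime.datetime(2018, 6, 1, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2018, 6, 16, tzinfo=datetime.timezone.utc),
)
runs = client.trigger_runs.query_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    filter_parameters=filters,
)
for trigger_run in runs.value:  # TriggerRunsQueryResponse.value lists the matching runs
    print(trigger_run.trigger_name, trigger_run.status)
# --------------------------------------------------------------------------------------------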
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TriggersOperations(object): + """TriggersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~data_factory_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.TriggerListResponse"] + """Lists triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either TriggerListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('TriggerListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore + + def query_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + filter_parameters, # type: "models.TriggerFilterParameters" + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerQueryResponse" + """Query triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the triggers. 
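# --- Illustrative sketch; not part of the generated code above -----------------------------
# list_by_factory above returns an ItemPaged built from prepare_request / extract_data /
# get_next: each page is one GET against .../factories/{factoryName}/triggers (or the
# next_link of the previous page), and extract_data yields TriggerListResponse.value, so
# callers simply iterate the pager. Reuses the client from the earlier sketch; the `triggers`
# attribute name is an assumption.
for trigger in client.triggers.list_by_factory(
        resource_group_name="exampleResourceGroup",
        factory_name="exampleFactoryName"):
    print(trigger.name)  # each item is a TriggerResource from the current page
# --------------------------------------------------------------------------------------------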
+ :type filter_parameters: ~data_factory_management_client.models.TriggerFilterParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + trigger, # type: "models.TriggerResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerResource" + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param trigger: Trigger resource definition. + :type trigger: ~data_factory_management_client.models.TriggerResource + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(trigger, 'TriggerResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional["models.TriggerResource"] + """Gets a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + def _subscribe_to_events_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._subscribe_to_events_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _subscribe_to_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + def begin_subscribe_to_events( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] + """Subscribe event trigger to events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._subscribe_to_events_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + def get_event_subscription_status( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerSubscriptionOperationStatus" + """Get a trigger's event subscription status. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerSubscriptionOperationStatus, or the result of cls(response) + :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get_event_subscription_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore + + def _unsubscribe_from_events_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._unsubscribe_from_events_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _unsubscribe_from_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + def begin_unsubscribe_from_events( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] + """Unsubscribe event trigger from events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._unsubscribe_from_events_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + def _start_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + def begin_start( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Starts a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + def _stop_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + + def begin_stop( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/gen.zip b/src/datafactory/gen.zip new file mode 100644 index 00000000000..296cd2dfd07 Binary files /dev/null and b/src/datafactory/gen.zip differ diff --git a/src/datafactory/linter_exclusions.yml b/src/datafactory/linter_exclusions.yml new file mode 100644 index 00000000000..cdfa831be54 --- /dev/null +++ b/src/datafactory/linter_exclusions.yml @@ -0,0 +1,5 @@ +datafactory get-git-hub-access-token: + parameters: + git_hub_access_token_base_url: + rule_exclusions: + - option_length_too_long diff --git a/src/datafactory/report.md b/src/datafactory/report.md index fb50389d775..1d9bdfb4cf1 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -1,161 +1,221 @@ # Azure CLI Module Creation Report -### datafactory activity-run query-by-pipeline-run - -query-by-pipeline-run a datafactory activity-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory activity-run|ActivityRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-pipeline-run|QueryByPipelineRun| - -#### Parameters +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az datafactory|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az datafactory` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az datafactory|Factories|[commands](#CommandsInFactories)| +|az datafactory integration-runtime|IntegrationRuntimes|[commands](#CommandsInIntegrationRuntimes)| +|az datafactory integration-runtime-node|IntegrationRuntimeNodes|[commands](#CommandsInIntegrationRuntimeNodes)| +|az datafactory linked-service|LinkedServices|[commands](#CommandsInLinkedServices)| +|az datafactory dataset|Datasets|[commands](#CommandsInDatasets)| +|az datafactory pipeline|Pipelines|[commands](#CommandsInPipelines)| +|az datafactory pipeline-run|PipelineRuns|[commands](#CommandsInPipelineRuns)| +|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| +|az datafactory trigger|Triggers|[commands](#CommandsInTriggers)| +|az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| + +## COMMANDS +### Commands in `az datafactory` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory list](#FactoriesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersFactoriesListByResourceGroup)|[Example](#ExamplesFactoriesListByResourceGroup)| +|[az datafactory list](#FactoriesList)|List|[Parameters](#ParametersFactoriesList)|[Example](#ExamplesFactoriesList)| +|[az datafactory show](#FactoriesGet)|Get|[Parameters](#ParametersFactoriesGet)|[Example](#ExamplesFactoriesGet)| +|[az datafactory create](#FactoriesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersFactoriesCreateOrUpdate#Create)|[Example](#ExamplesFactoriesCreateOrUpdate#Create)| +|[az datafactory update](#FactoriesUpdate)|Update|[Parameters](#ParametersFactoriesUpdate)|[Example](#ExamplesFactoriesUpdate)| +|[az datafactory delete](#FactoriesDelete)|Delete|[Parameters](#ParametersFactoriesDelete)|[Example](#ExamplesFactoriesDelete)| +|[az datafactory configure-factory-repo](#FactoriesConfigureFactoryRepo)|ConfigureFactoryRepo|[Parameters](#ParametersFactoriesConfigureFactoryRepo)|[Example](#ExamplesFactoriesConfigureFactoryRepo)| +|[az datafactory get-data-plane-access](#FactoriesGetDataPlaneAccess)|GetDataPlaneAccess|[Parameters](#ParametersFactoriesGetDataPlaneAccess)|[Example](#ExamplesFactoriesGetDataPlaneAccess)| +|[az datafactory get-git-hub-access-token](#FactoriesGetGitHubAccessToken)|GetGitHubAccessToken|[Parameters](#ParametersFactoriesGetGitHubAccessToken)|[Example](#ExamplesFactoriesGetGitHubAccessToken)| + +### Commands in `az datafactory activity-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory activity-run query-by-pipeline-run](#ActivityRunsQueryByPipelineRun)|QueryByPipelineRun|[Parameters](#ParametersActivityRunsQueryByPipelineRun)|[Example](#ExamplesActivityRunsQueryByPipelineRun)| + +### Commands in `az datafactory dataset` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory dataset 
list](#DatasetsListByFactory)|ListByFactory|[Parameters](#ParametersDatasetsListByFactory)|[Example](#ExamplesDatasetsListByFactory)| +|[az datafactory dataset show](#DatasetsGet)|Get|[Parameters](#ParametersDatasetsGet)|[Example](#ExamplesDatasetsGet)| +|[az datafactory dataset create](#DatasetsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatasetsCreateOrUpdate#Create)|[Example](#ExamplesDatasetsCreateOrUpdate#Create)| +|[az datafactory dataset update](#DatasetsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatasetsCreateOrUpdate#Update)|[Example](#ExamplesDatasetsCreateOrUpdate#Update)| +|[az datafactory dataset delete](#DatasetsDelete)|Delete|[Parameters](#ParametersDatasetsDelete)|[Example](#ExamplesDatasetsDelete)| + +### Commands in `az datafactory integration-runtime` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime list](#IntegrationRuntimesListByFactory)|ListByFactory|[Parameters](#ParametersIntegrationRuntimesListByFactory)|[Example](#ExamplesIntegrationRuntimesListByFactory)| +|[az datafactory integration-runtime show](#IntegrationRuntimesGet)|Get|[Parameters](#ParametersIntegrationRuntimesGet)|[Example](#ExamplesIntegrationRuntimesGet)| +|[az datafactory integration-runtime linked-integration-runtime create](#IntegrationRuntimesCreateLinkedIntegrationRuntime)|CreateLinkedIntegrationRuntime|[Parameters](#ParametersIntegrationRuntimesCreateLinkedIntegrationRuntime)|[Example](#ExamplesIntegrationRuntimesCreateLinkedIntegrationRuntime)| +|[az datafactory integration-runtime managed create](#IntegrationRuntimesCreateOrUpdate#Create#Managed)|CreateOrUpdate#Create#Managed|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#Managed)|Not Found| +|[az datafactory integration-runtime self-hosted create](#IntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|CreateOrUpdate#Create#SelfHosted|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|[Example](#ExamplesIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)| +|[az datafactory integration-runtime update](#IntegrationRuntimesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimesUpdate)|[Example](#ExamplesIntegrationRuntimesUpdate)| +|[az datafactory integration-runtime delete](#IntegrationRuntimesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimesDelete)|[Example](#ExamplesIntegrationRuntimesDelete)| +|[az datafactory integration-runtime get-connection-info](#IntegrationRuntimesGetConnectionInfo)|GetConnectionInfo|[Parameters](#ParametersIntegrationRuntimesGetConnectionInfo)|[Example](#ExamplesIntegrationRuntimesGetConnectionInfo)| +|[az datafactory integration-runtime get-monitoring-data](#IntegrationRuntimesGetMonitoringData)|GetMonitoringData|[Parameters](#ParametersIntegrationRuntimesGetMonitoringData)|[Example](#ExamplesIntegrationRuntimesGetMonitoringData)| +|[az datafactory integration-runtime get-status](#IntegrationRuntimesGetStatus)|GetStatus|[Parameters](#ParametersIntegrationRuntimesGetStatus)|[Example](#ExamplesIntegrationRuntimesGetStatus)| +|[az datafactory integration-runtime list-auth-key](#IntegrationRuntimesListAuthKeys)|ListAuthKeys|[Parameters](#ParametersIntegrationRuntimesListAuthKeys)|[Example](#ExamplesIntegrationRuntimesListAuthKeys)| +|[az datafactory integration-runtime 
regenerate-auth-key](#IntegrationRuntimesRegenerateAuthKey)|RegenerateAuthKey|[Parameters](#ParametersIntegrationRuntimesRegenerateAuthKey)|[Example](#ExamplesIntegrationRuntimesRegenerateAuthKey)| +|[az datafactory integration-runtime remove-link](#IntegrationRuntimesRemoveLinks)|RemoveLinks|[Parameters](#ParametersIntegrationRuntimesRemoveLinks)|[Example](#ExamplesIntegrationRuntimesRemoveLinks)| +|[az datafactory integration-runtime start](#IntegrationRuntimesStart)|Start|[Parameters](#ParametersIntegrationRuntimesStart)|[Example](#ExamplesIntegrationRuntimesStart)| +|[az datafactory integration-runtime stop](#IntegrationRuntimesStop)|Stop|[Parameters](#ParametersIntegrationRuntimesStop)|[Example](#ExamplesIntegrationRuntimesStop)| +|[az datafactory integration-runtime sync-credentials](#IntegrationRuntimesSyncCredentials)|SyncCredentials|[Parameters](#ParametersIntegrationRuntimesSyncCredentials)|[Example](#ExamplesIntegrationRuntimesSyncCredentials)| +|[az datafactory integration-runtime upgrade](#IntegrationRuntimesUpgrade)|Upgrade|[Parameters](#ParametersIntegrationRuntimesUpgrade)|[Example](#ExamplesIntegrationRuntimesUpgrade)| + +### Commands in `az datafactory integration-runtime-node` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime-node show](#IntegrationRuntimeNodesGet)|Get|[Parameters](#ParametersIntegrationRuntimeNodesGet)|[Example](#ExamplesIntegrationRuntimeNodesGet)| +|[az datafactory integration-runtime-node update](#IntegrationRuntimeNodesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimeNodesUpdate)|[Example](#ExamplesIntegrationRuntimeNodesUpdate)| +|[az datafactory integration-runtime-node delete](#IntegrationRuntimeNodesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimeNodesDelete)|[Example](#ExamplesIntegrationRuntimeNodesDelete)| +|[az datafactory integration-runtime-node get-ip-address](#IntegrationRuntimeNodesGetIpAddress)|GetIpAddress|[Parameters](#ParametersIntegrationRuntimeNodesGetIpAddress)|[Example](#ExamplesIntegrationRuntimeNodesGetIpAddress)| + +### Commands in `az datafactory linked-service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory linked-service list](#LinkedServicesListByFactory)|ListByFactory|[Parameters](#ParametersLinkedServicesListByFactory)|[Example](#ExamplesLinkedServicesListByFactory)| +|[az datafactory linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)| +|[az datafactory linked-service create](#LinkedServicesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Create)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Create)| +|[az datafactory linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Update)| +|[az datafactory linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| + +### Commands in `az datafactory pipeline` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline list](#PipelinesListByFactory)|ListByFactory|[Parameters](#ParametersPipelinesListByFactory)|[Example](#ExamplesPipelinesListByFactory)| +|[az datafactory 
pipeline show](#PipelinesGet)|Get|[Parameters](#ParametersPipelinesGet)|[Example](#ExamplesPipelinesGet)| +|[az datafactory pipeline create](#PipelinesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPipelinesCreateOrUpdate#Create)|[Example](#ExamplesPipelinesCreateOrUpdate#Create)| +|[az datafactory pipeline update](#PipelinesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPipelinesCreateOrUpdate#Update)|[Example](#ExamplesPipelinesCreateOrUpdate#Update)| +|[az datafactory pipeline delete](#PipelinesDelete)|Delete|[Parameters](#ParametersPipelinesDelete)|[Example](#ExamplesPipelinesDelete)| +|[az datafactory pipeline create-run](#PipelinesCreateRun)|CreateRun|[Parameters](#ParametersPipelinesCreateRun)|[Example](#ExamplesPipelinesCreateRun)| + +### Commands in `az datafactory pipeline-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline-run show](#PipelineRunsGet)|Get|[Parameters](#ParametersPipelineRunsGet)|[Example](#ExamplesPipelineRunsGet)| +|[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)| +|[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| + +### Commands in `az datafactory trigger` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger list](#TriggersListByFactory)|ListByFactory|[Parameters](#ParametersTriggersListByFactory)|[Example](#ExamplesTriggersListByFactory)| +|[az datafactory trigger show](#TriggersGet)|Get|[Parameters](#ParametersTriggersGet)|[Example](#ExamplesTriggersGet)| +|[az datafactory trigger create](#TriggersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersTriggersCreateOrUpdate#Create)|[Example](#ExamplesTriggersCreateOrUpdate#Create)| +|[az datafactory trigger update](#TriggersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersTriggersCreateOrUpdate#Update)|[Example](#ExamplesTriggersCreateOrUpdate#Update)| +|[az datafactory trigger delete](#TriggersDelete)|Delete|[Parameters](#ParametersTriggersDelete)|[Example](#ExamplesTriggersDelete)| +|[az datafactory trigger get-event-subscription-status](#TriggersGetEventSubscriptionStatus)|GetEventSubscriptionStatus|[Parameters](#ParametersTriggersGetEventSubscriptionStatus)|[Example](#ExamplesTriggersGetEventSubscriptionStatus)| +|[az datafactory trigger query-by-factory](#TriggersQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggersQueryByFactory)|[Example](#ExamplesTriggersQueryByFactory)| +|[az datafactory trigger start](#TriggersStart)|Start|[Parameters](#ParametersTriggersStart)|[Example](#ExamplesTriggersStart)| +|[az datafactory trigger stop](#TriggersStop)|Stop|[Parameters](#ParametersTriggersStop)|[Example](#ExamplesTriggersStop)| +|[az datafactory trigger subscribe-to-event](#TriggersSubscribeToEvents)|SubscribeToEvents|[Parameters](#ParametersTriggersSubscribeToEvents)|[Example](#ExamplesTriggersSubscribeToEvents)| +|[az datafactory trigger unsubscribe-from-event](#TriggersUnsubscribeFromEvents)|UnsubscribeFromEvents|[Parameters](#ParametersTriggersUnsubscribeFromEvents)|[Example](#ExamplesTriggersUnsubscribeFromEvents)| + +### Commands in `az datafactory trigger-run` group +|CLI Command|Operation Swagger 
name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger-run cancel](#TriggerRunsCancel)|Cancel|[Parameters](#ParametersTriggerRunsCancel)|[Example](#ExamplesTriggerRunsCancel)| +|[az datafactory trigger-run query-by-factory](#TriggerRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggerRunsQueryByFactory)|[Example](#ExamplesTriggerRunsQueryByFactory)| +|[az datafactory trigger-run rerun](#TriggerRunsRerun)|Rerun|[Parameters](#ParametersTriggerRunsRerun)|[Example](#ExamplesTriggerRunsRerun)| + + +## COMMAND DETAILS + +### group `az datafactory` +#### Command `az datafactory list` + +##### Example +``` +az datafactory list --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--run-id**|string|The pipeline run identifier.|run_id|runId| -|**--last-updated-after**|date-time|The time at or after which the run event was updated in 'ISO 8601' format.|last_updated_after|lastUpdatedAfter| -|**--last-updated-before**|date-time|The time at or before which the run event was updated in 'ISO 8601' format.|last_updated_before|lastUpdatedBefore| -|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| -|**--filters**|array|List of filters.|filters|filters| -|**--order-by**|array|List of OrderBy option.|order_by|orderBy| - -### datafactory dataset create - -create a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory list` -#### Parameters +##### Example +``` +az datafactory list +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--properties**|object|Dataset properties.|properties|properties| -|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory dataset delete +#### Command `az datafactory show` -delete a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| - -### datafactory dataset list - -list a datafactory dataset. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| +|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| +#### Command `az datafactory create` -#### Parameters +##### Example +``` +az datafactory create --location "East US" --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--if-match**|string|ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--location**|string|The resource location.|location|location| +|**--tags**|dictionary|The resource tags.|tags|tags| +|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| +|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| +|**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| -### datafactory dataset show - -show a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory update` -#### Parameters +##### Example +``` +az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory dataset update - -update a datafactory dataset. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| +|**--tags**|dictionary|The resource tags.|tags|tags| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +#### Command `az datafactory delete` -#### Parameters +##### Example +``` +az datafactory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| -|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Dataset description.|description|description| -|**--structure**|any|Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement.|structure|structure| -|**--schema**|any|Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement.|schema|schema| -|**--parameters**|dictionary|Parameters for dataset.|parameters|parameters| -|**--annotations**|array|List of tags that can be used for describing the Dataset.|annotations|annotations| -|**--folder**|object|The folder that this Dataset is in. If not specified, Dataset will appear at the root level.|folder|folder| - -### datafactory factory configure-factory-repo - -configure-factory-repo a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|configure-factory-repo|ConfigureFactoryRepo| +#### Command `az datafactory configure-factory-repo` -#### Parameters +##### Example +``` +az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourc\ +eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ +--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ +repository-name="repo" root-folder="/" tenant-id="" --location "East US" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--location**|string|The location identifier.|location|locationId| @@ -163,67 +223,15 @@ configure-factory-repo a datafactory factory. |**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| |**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| -### datafactory factory create - -create a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--if-match**|string|ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--location**|string|The resource location.|location|location| -|**--tags**|dictionary|The resource tags.|tags|tags| -|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| -|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| -|**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| - -### datafactory factory delete - -delete a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory factory get-data-plane-access - -get-data-plane-access a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-data-plane-access|GetDataPlaneAccess| +#### Command `az datafactory get-data-plane-access` -#### Parameters +##### Example +``` +az datafactory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" --expire-time \ +"2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ +"2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -234,21 +242,14 @@ get-data-plane-access a datafactory factory. |**--start-time**|string|Start time for the token. If not specified the current time will be used.|start_time|startTime| |**--expire-time**|string|Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire in eight hours.|expire_time|expireTime| -### datafactory factory get-git-hub-access-token +#### Command `az datafactory get-git-hub-access-token` -get-git-hub-access-token a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-git-hub-access-token|GetGitHubAccessToken| - -#### Parameters +##### Example +``` +az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ +--git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -257,167 +258,151 @@ get-git-hub-access-token a datafactory factory. |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId| -### datafactory factory list - -list a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| +### group `az datafactory activity-run` +#### Command `az datafactory activity-run query-by-pipeline-run` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByResourceGroup| -|list|List| - -#### Parameters +##### Example +``` +az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactoryName" --last-updated-after \ +"2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| +|**--last-updated-after**|date-time|The time at or after which the run event was updated in 'ISO 8601' format.|last_updated_after|lastUpdatedAfter| +|**--last-updated-before**|date-time|The time at or before which the run event was updated in 'ISO 8601' format.|last_updated_before|lastUpdatedBefore| +|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| +|**--filters**|array|List of filters.|filters|filters| +|**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory factory show - -show a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +### group `az datafactory dataset` +#### Command `az datafactory dataset list` -#### Parameters +##### Example +``` +az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory factory update +#### Command `az datafactory dataset show` -update a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters +##### Example +``` +az datafactory dataset show --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--tags**|dictionary|The resource tags.|tags|tags| - -### datafactory integration-runtime delete - -delete a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory dataset create` + +##### Example +``` +az datafactory dataset create --properties "{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{\\"type\\":\\"LinkedSe\ +rviceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Str\ +ing\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"format\\":{\\"type\\":\\"TextFormat\\"},\ +\\"fileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"},\\"folderPath\\":{\\"type\\":\\"Ex\ +pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| - -### datafactory integration-runtime get-connection-info - -get-connection-info a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--properties**|object|Dataset properties.|properties|properties| +|**--if-match**|string|ETag of the dataset entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-connection-info|GetConnectionInfo| +#### Command `az datafactory dataset update` -#### Parameters +##### Example +``` +az datafactory dataset update --description "Example description" --linked-service-name "{\\"type\\":\\"LinkedServiceRe\ +ference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters "{\\"MyFileName\\":{\\"type\\":\\"String\\"},\ +\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| - -### datafactory integration-runtime get-monitoring-data - -get-monitoring-data a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| +|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Dataset description.|description|description| +|**--structure**|any|Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement.|structure|structure| +|**--schema**|any|Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement.|schema|schema| +|**--parameters**|dictionary|Parameters for dataset.|parameters|parameters| +|**--annotations**|array|List of tags that can be used for describing the Dataset.|annotations|annotations| +|**--folder**|object|The folder that this Dataset is in. If not specified, Dataset will appear at the root level.|folder|folder| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-monitoring-data|GetMonitoringData| +#### Command `az datafactory dataset delete` -#### Parameters +##### Example +``` +az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| - -### datafactory integration-runtime get-status +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -get-status a datafactory integration-runtime. 
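The `list` commands in this report return standard ARM resource objects, so the generic az CLI output options work with them. A minimal sketch for printing only the dataset names from the list command above (`--query` and `--output` are core CLI flags, not options defined by this extension):
```
# Print only the name of each dataset in the factory.
az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \
--query "[].name" --output tsv
```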
+### group `az datafactory integration-runtime` +#### Command `az datafactory integration-runtime list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +##### Example +``` +az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-status|GetStatus| +#### Command `az datafactory integration-runtime show` -#### Parameters +##### Example +``` +az datafactory integration-runtime show --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory integration-runtime linked-integration-runtime create - -linked-integration-runtime create a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|linked-integration-runtime create|CreateLinkedIntegrationRuntime| +#### Command `az datafactory integration-runtime linked-integration-runtime create` -#### Parameters +##### Example +``` +az datafactory integration-runtime linked-integration-runtime create --name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" \ +--location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" --subscription-id \ +"061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -428,328 +413,217 @@ linked-integration-runtime create a datafactory integration-runtime. |**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name|dataFactoryName| |**--location**|string|The location of the data factory that the linked integration runtime belongs to.|location|dataFactoryLocation| -### datafactory integration-runtime list +#### Command `az datafactory integration-runtime managed create` -list a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|managed_description|description| +|**--compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| +|**--ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| -### datafactory integration-runtime list-auth-key - -list-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list-auth-key|ListAuthKeys| +#### Command `az datafactory integration-runtime self-hosted create` -#### Parameters +##### Example +``` +az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description "A selfhosted \ +integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|self_hosted_description|description| +|**--linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| -### datafactory integration-runtime managed create - -managed create a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|managed create|CreateOrUpdate#Create#Managed| +#### Command `az datafactory integration-runtime update` -#### Parameters +##### Example +``` +az datafactory integration-runtime update --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|managed_description|description| -|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| -|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| - -### datafactory integration-runtime regenerate-auth-key - -regenerate-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| +|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|regenerate-auth-key|RegenerateAuthKey| +#### Command `az datafactory integration-runtime delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| - -### datafactory integration-runtime remove-link -remove-link a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +#### Command `az datafactory integration-runtime get-connection-info` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|remove-link|RemoveLinks| - -#### Parameters +##### Example +``` +az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| - -### datafactory integration-runtime self-hosted create - -self-hosted create a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|self-hosted create|CreateOrUpdate#Create#SelfHosted| +#### Command `az datafactory integration-runtime get-monitoring-data` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|self_hosted_description|description| -|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| - -### datafactory integration-runtime show -show a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory integration-runtime get-status` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-none-match**|string|ETag of the integration runtime entity. 
Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory integration-runtime start - -start a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| +#### Command `az datafactory integration-runtime list-auth-key` -#### Parameters +##### Example +``` +az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime stop +#### Command `az datafactory integration-runtime regenerate-auth-key` -stop a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| - -#### Parameters +##### Example +``` +az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| -### datafactory integration-runtime sync-credentials - -sync-credentials a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +#### Command `az datafactory integration-runtime remove-link` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|sync-credentials|SyncCredentials| - -#### Parameters +##### Example +``` +az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| -### datafactory integration-runtime update - -update a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| +#### Command `az datafactory integration-runtime start` -#### Parameters +##### Example +``` +az datafactory integration-runtime start --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| -|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| - -### datafactory integration-runtime upgrade - -upgrade a datafactory integration-runtime. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|upgrade|Upgrade| +#### Command `az datafactory integration-runtime stop` -#### Parameters +##### Example +``` +az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime-node delete - -delete a datafactory integration-runtime-node. +#### Command `az datafactory integration-runtime sync-credentials` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--node-name**|string|The integration runtime node name.|node_name|nodeName| - -### datafactory integration-runtime-node get-ip-address -get-ip-address a datafactory integration-runtime-node. 
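`start` and `stop` on a managed runtime are long-running operations; once they return, `get-status` (documented earlier in this group) reports the resulting state. A minimal follow-up sketch, assuming the status payload exposes `properties.state` as in the REST API:
```
# Check the state of the runtime after a start or stop operation.
az datafactory integration-runtime get-status --factory-name "exampleFactoryName" \
--name "exampleManagedIntegrationRuntime" --resource-group "exampleResourceGroup" \
--query "properties.state" --output tsv
```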
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| +#### Command `az datafactory integration-runtime upgrade` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-ip-address|GetIpAddress| - -#### Parameters +##### Example +``` +az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--node-name**|string|The integration runtime node name.|node_name|nodeName| - -### datafactory integration-runtime-node show -show a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +### group `az datafactory integration-runtime-node` +#### Command `az datafactory integration-runtime-node show` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node show --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -757,21 +631,14 @@ show a datafactory integration-runtime-node. |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -### datafactory integration-runtime-node update +#### Command `az datafactory integration-runtime-node update` -update a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| - -#### Parameters +##### Example +``` +az datafactory integration-runtime-node update --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" --concurrent-jobs-limit 2 +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -780,85 +647,57 @@ update a datafactory integration-runtime-node. |**--node-name**|string|The integration runtime node name.|node_name|nodeName| |**--concurrent-jobs-limit**|integer|The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.|concurrent_jobs_limit|concurrentJobsLimit| -### datafactory linked-service create - -create a datafactory linked-service. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory integration-runtime-node delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node delete --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -|**--properties**|object|Properties of linked service.|properties|properties| -|**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory linked-service delete - -delete a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--node-name**|string|The integration runtime node name.|node_name|nodeName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory integration-runtime-node get-ip-address` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node get-ip-address --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| - -### datafactory linked-service list - -list a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--node-name**|string|The integration runtime node name.|node_name|nodeName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| +### group `az datafactory linked-service` +#### Command `az datafactory linked-service list` -#### Parameters +##### Example +``` +az datafactory linked-service list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -### datafactory linked-service show - -show a datafactory linked-service. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +#### Command `az datafactory linked-service show` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory linked-service show --factory-name "exampleFactoryName" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -866,21 +705,32 @@ show a datafactory linked-service. |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| |**--if-none-match**|string|ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory linked-service update +#### Command `az datafactory linked-service create` -update a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +##### Example +``` +az datafactory linked-service create --factory-name "exampleFactoryName" --properties "{\\"type\\":\\"AzureStorage\\",\ +\\"typeProperties\\":{\\"connectionString\\":{\\"type\\":\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=htt\ +ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| +|**--properties**|object|Properties of linked service.|properties|properties| +|**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +#### Command `az datafactory linked-service update` -#### Parameters +##### Example +``` +az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example description" --name \ +"exampleLinkedService" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -892,133 +742,90 @@ update a datafactory linked-service. |**--parameters**|dictionary|Parameters for linked service.|parameters|parameters| |**--annotations**|array|List of tags that can be used for describing the linked service.|annotations|annotations| -### datafactory pipeline create - -create a datafactory pipeline. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory linked-service delete` -#### Parameters +##### Example +``` +az datafactory linked-service delete --factory-name "exampleFactoryName" --name "exampleLinkedService" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| -|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory pipeline create-run - -create-run a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create-run|CreateRun| +### group `az datafactory pipeline` +#### Command `az datafactory pipeline list` -#### Parameters +##### Example +``` +az datafactory pipeline list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| -|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| -|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| -|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| -|**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| - -### datafactory pipeline delete -delete a datafactory pipeline. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| +#### Command `az datafactory pipeline show` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory pipeline show --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| +|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory pipeline list - -list a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory pipeline show - -show a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +#### Command `az datafactory pipeline create` + +##### Example +``` +az datafactory pipeline create --factory-name "exampleFactoryName" --pipeline "{\\"activities\\":[{\\"name\\":\\"Exampl\ +eForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\":\\"ExampleCopyActivity\ +\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":\\"exampl\ +econtainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"outputs\\":[\ +{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@item\ +()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"typeProperties\\":{\\"d\ +ataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"BlobSource\\"}}}],\\"isSeq\ +uential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}\ +],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}},\\"variabl\ +es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\"JobId\\":{\\"type\\":\\"Expression\\",\ +\\"value\\":\\"@pipeline().parameters.JobId\\"}},\\"duration\\":\\"0.00:10:00\\"}" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory 
name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory pipeline update - -update a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| +|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Parameters +#### Command `az datafactory pipeline update` + +##### Example +``` +az datafactory pipeline update --factory-name "exampleFactoryName" --description "Example description" --activities \ +"[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\"\ +:\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"\ +MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDatas\ +et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\ +\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\\ +"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\ +obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\ +OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \ +--name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1032,45 +839,81 @@ update a datafactory pipeline. 
|**--concurrency**|integer|The max number of concurrent runs for the pipeline.|concurrency|concurrency| |**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations| |**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions| -|**--folder-name**|string|The name of the folder that this Pipeline is in.|name|name| +|**--duration**|any|TimeSpan value, after which an Azure Monitoring Metric is fired.|duration|duration| +|**--folder-name**|string|The name of the folder that this Pipeline is in.|folder_name|name| -### datafactory pipeline-run cancel +#### Command `az datafactory pipeline delete` + +##### Example +``` +az datafactory pipeline delete --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -cancel a datafactory pipeline-run. +#### Command `az datafactory pipeline create-run` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +##### Example +``` +az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters "{\\"OutputBlobNameList\\":[\\"exam\ +pleoutput.csv\\"]}" --name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| +|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| +|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| +|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| +|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| +|**--parameters**|dictionary|Parameters of the pipeline run. 
These parameters will be used only if the runId is not specified.|parameters|parameters| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|cancel|Cancel| +### group `az datafactory pipeline-run` +#### Command `az datafactory pipeline-run show` -#### Parameters +##### Example +``` +az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--run-id**|string|The pipeline run identifier.|run_id|runId| -|**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| -### datafactory pipeline-run query-by-factory +#### Command `az datafactory pipeline-run cancel` -query-by-factory a datafactory pipeline-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +##### Example +``` +az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| +|**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory pipeline-run query-by-factory` -#### Parameters +##### Example +``` +az datafactory pipeline-run query-by-factory --factory-name "exampleFactoryName" --filters operand="PipelineName" \ +operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1081,279 +924,178 @@ query-by-factory a datafactory pipeline-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory pipeline-run show - -show a datafactory pipeline-run. 
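`az datafactory pipeline create-run` returns the identifier of the new run, which is the value the `pipeline-run` commands above expect for `--run-id`. A minimal shell sketch chaining the two, assuming the response exposes a `runId` field as in the REST API:
```
# Trigger a pipeline run and immediately look it up by its returned run ID.
run_id=$(az datafactory pipeline create-run --factory-name "exampleFactoryName" --name "examplePipeline" \
--resource-group "exampleResourceGroup" --query "runId" --output tsv)
az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \
--run-id "$run_id"
```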
+### group `az datafactory trigger` +#### Command `az datafactory trigger list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--run-id**|string|The pipeline run identifier.|run_id|runId| - -### datafactory trigger create - -create a datafactory trigger. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory trigger show` -#### Parameters +##### Example +``` +az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--properties**|object|Properties of the trigger.|properties|properties| -|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory trigger delete - -delete a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory trigger create` + +##### Example +``` +az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --properties \ +"{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\\ +"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\"}}],\\"typePro\ +perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\ +\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--properties**|object|Properties of the trigger.|properties|properties| +|**--if-match**|string|ETag of the trigger entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory trigger get-event-subscription-status - -get-event-subscription-status a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-event-subscription-status|GetEventSubscriptionStatus| +#### Command `az datafactory trigger update` -#### Parameters +##### Example +``` +az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--description "Example description" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Trigger description.|description|description| +|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations| -### datafactory trigger list - -list a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory trigger query-by-factory - -query-by-factory a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory trigger delete` -#### Parameters +##### Example +``` +az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| -|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName| - -### datafactory trigger show - -show a datafactory trigger. 
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger|Triggers|
+|**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|show|Get|
+#### Command `az datafactory trigger get-event-subscription-status`
-#### Parameters
+##### Example
+```
+az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup" --name "exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match|
-### datafactory trigger start
+#### Command `az datafactory trigger query-by-factory`
-start a datafactory trigger.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger|Triggers|
-
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|start|Start|
-
-#### Parameters
+##### Example
+```
+az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name "exampleTrigger" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
-|**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-
-### datafactory trigger stop
-
-stop a datafactory trigger.
+|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken|
+|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName|
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger|Triggers|
+#### Command `az datafactory trigger start`
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|stop|Stop|
-
-#### Parameters
+##### Example
+```
+az datafactory trigger start --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \
+"exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-### datafactory trigger subscribe-to-event
-
-subscribe-to-event a datafactory trigger.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger|Triggers|
-
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|subscribe-to-event|SubscribeToEvents|
+#### Command `az datafactory trigger stop`
-#### Parameters
+##### Example
+```
+az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \
+"exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-### datafactory trigger unsubscribe-from-event
-
-unsubscribe-from-event a datafactory trigger.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger|Triggers|
-
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|unsubscribe-from-event|UnsubscribeFromEvents|
+#### Command `az datafactory trigger subscribe-to-event`
-#### Parameters
+##### Example
+```
+az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \
+--name "exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-### datafactory trigger update
+#### Command `az datafactory trigger unsubscribe-from-event`
-update a datafactory trigger.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger|Triggers|
-
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|update|CreateOrUpdate#Update|
-
-#### Parameters
+##### Example
+```
+az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup" --name "exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
 |**--factory-name**|string|The factory name.|factory_name|factoryName|
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
-|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
-|**--description**|string|Trigger description.|description|description|
-|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations|
-### datafactory trigger-run cancel
+### group `az datafactory trigger-run`
+#### Command `az datafactory trigger-run cancel`
-cancel a datafactory trigger-run.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger-run|TriggerRuns|
-
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|cancel|Cancel|
-
-#### Parameters
+##### Example
+```
+az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \
+"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
@@ -1361,21 +1103,15 @@ cancel a datafactory trigger-run.
 |**--trigger-name**|string|The trigger name.|trigger_name|triggerName|
 |**--run-id**|string|The pipeline run identifier.|run_id|runId|
-### datafactory trigger-run query-by-factory
-
-query-by-factory a datafactory trigger-run.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger-run|TriggerRuns|
+#### Command `az datafactory trigger-run query-by-factory`
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|query-by-factory|QueryByFactory|
-
-#### Parameters
+##### Example
+```
+az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --filters operand="TriggerName" \
+operator="Equals" values="exampleTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \
+"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
@@ -1386,21 +1122,14 @@ query-by-factory a datafactory trigger-run.
 |**--filters**|array|List of filters.|filters|filters|
 |**--order-by**|array|List of OrderBy option.|order_by|orderBy|
-### datafactory trigger-run rerun
-
-rerun a datafactory trigger-run.
-
-#### Command group
-|Name (az)|Swagger name|
-|---------|------------|
-|datafactory trigger-run|TriggerRuns|
-
-#### Methods
-|Name (az)|Swagger name|
-|---------|------------|
-|rerun|Rerun|
+#### Command `az datafactory trigger-run rerun`
-#### Parameters
+##### Example
+```
+az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \
+"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger"
+```
+##### Parameters
 |Option|Type|Description|Path (SDK)|Swagger name|
 |------|----|-----------|----------|------------|
 |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
diff --git a/src/datafactory/setup.py b/src/datafactory/setup.py
index 116e5e2b8cc..a26a3db9b84 100644
--- a/src/datafactory/setup.py
+++ b/src/datafactory/setup.py
@@ -10,7 +10,7 @@
 from setuptools import setup, find_packages
 
 # HISTORY.rst entry.
-VERSION = '0.2.0'
+VERSION = '0.1.0'
 try:
     from azext_datafactory.manual.version import VERSION
 except ImportError:
@@ -33,7 +33,7 @@
 
 DEPENDENCIES = []
 
 try:
-    from .manual.dependency import DEPENDENCIES
+    from azext_datafactory.manual.dependency import DEPENDENCIES
 except ImportError:
     pass
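
For orientation, the renamed trigger commands documented in the report above compose into a simple lifecycle. The sketch below is illustrative only: every resource name is a placeholder carried over from the report's examples, the schedule JSON is a trimmed-down assumption rather than a required shape, and it presumes the factory `exampleFactoryName` and pipeline `examplePipeline` already exist.

```
# Illustrative sketch only: placeholder names, factory and pipeline assumed to exist.
# Create a schedule trigger that runs examplePipeline every 15 minutes.
az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \
    --name "exampleTrigger" \
    --properties "{\"type\":\"ScheduleTrigger\",\"pipelines\":[{\"pipelineReference\":{\"type\":\"PipelineReference\",\"referenceName\":\"examplePipeline\"}}],\"typeProperties\":{\"recurrence\":{\"frequency\":\"Minute\",\"interval\":15,\"startTime\":\"2018-06-16T00:39:13Z\",\"timeZone\":\"UTC\"}}}"

# Start the trigger, then list its recent runs in a time window.
az datafactory trigger start --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name "exampleTrigger"
az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \
    --last-updated-after "2018-06-16T00:36:44Z" --last-updated-before "2018-06-16T00:49:48Z" \
    --filters operand="TriggerName" operator="Equals" values="exampleTrigger"

# Stop and remove the trigger when it is no longer needed.
az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name "exampleTrigger"
az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name "exampleTrigger"
```

The `--filters operand= operator= values=` shorthand mirrors the `trigger-run query-by-factory` example documented above; only flags listed in the report's parameter tables are used.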