diff --git a/src/test/scenarios/datafactory/input/datafactory.json b/src/test/scenarios/datafactory/input/datafactory.json
index 785a370b3..4e1217871 100644
--- a/src/test/scenarios/datafactory/input/datafactory.json
+++ b/src/test/scenarios/datafactory/input/datafactory.json
@@ -1279,7 +1279,7 @@
           "200": {
             "description": "OK.",
             "schema": {
-              "$ref": "./definitions/IntegrationRuntimeConnectionInfo"
+              "$ref": "#/definitions/IntegrationRuntimeConnectionInfo"
             }
           },
           "default": {
@@ -1325,7 +1325,7 @@
             "in": "body",
             "required": true,
             "schema": {
-              "$ref": "./definitions/IntegrationRuntimeRegenerateKeyParameters"
+              "$ref": "#/definitions/IntegrationRuntimeRegenerateKeyParameters"
             }
           }
         ],
@@ -1333,7 +1333,7 @@
           "200": {
             "description": "OK.",
             "schema": {
-              "$ref": "./definitions/IntegrationRuntimeAuthKeys"
+              "$ref": "#/definitions/IntegrationRuntimeAuthKeys"
             }
           },
           "default": {
@@ -1378,7 +1378,7 @@
           "200": {
             "description": "OK.",
             "schema": {
-              "$ref": "./definitions/IntegrationRuntimeAuthKeys"
+              "$ref": "#/definitions/IntegrationRuntimeAuthKeys"
             }
           },
           "default": {
@@ -1560,7 +1560,7 @@
           "200": {
             "description": "OK.",
             "schema": {
-              "$ref": "./definitions/IntegrationRuntimeMonitoringData"
+              "$ref": "#/definitions/IntegrationRuntimeMonitoringData"
             }
           },
           "default": {
@@ -2220,7 +2220,7 @@
       ],
       "properties": {
         "properties": {
-          "$ref": "./definitions/Trigger",
+          "$ref": "#/definitions/Trigger",
           "description": "Properties of the trigger."
         }
       },
@@ -2300,12 +2300,49 @@
         "pipelines": {
           "type": "array",
           "items": {
-            "$ref": "./definitions/TriggerPipelineReference"
+            "$ref": "#/definitions/TriggerPipelineReference"
           },
           "description": "Pipelines that need to be started."
         }
       }
     },
+    "TriggerPipelineReference": {
+      "description": "Pipeline that needs to be triggered with the given parameters.",
+      "properties": {
+        "pipelineReference": {
+          "description": "Pipeline reference.",
+          "$ref": "#/definitions/PipelineReference"
+        },
+        "parameters": {
+          "description": "Pipeline parameters.",
+          "$ref": "#/definitions/ParameterValueSpecification"
+        }
+      }
+    },
+    "PipelineReference": {
+      "description": "Pipeline reference type.",
+      "properties": {
+        "type": {
+          "type": "string",
+          "description": "Pipeline reference type.",
+          "enum": [
+            "PipelineReference"
+          ]
+        },
+        "referenceName": {
+          "type": "string",
+          "description": "Reference pipeline name."
+        },
+        "name": {
+          "type": "string",
+          "description": "Reference name."
+        }
+      },
+      "required": [
+        "type",
+        "referenceName"
+      ]
+    },
     "ScheduleTrigger": {
       "description": "Trigger that creates pipeline runs periodically, on schedule.",
       "type": "object",
@@ -2504,7 +2541,7 @@
         },
         "linkedService": {
           "description": "The Azure Storage linked service reference.",
-          "$ref": "./definitions/LinkedServiceReference"
+          "$ref": "#/definitions/LinkedServiceReference"
         }
       },
       "required": [
@@ -2518,6 +2555,30 @@
         "typeProperties"
       ]
     },
+    "LinkedServiceReference": {
+      "description": "Linked service reference type.",
+      "properties": {
+        "type": {
+          "type": "string",
+          "description": "Linked service reference type.",
+          "enum": [
+            "LinkedServiceReference"
+          ]
+        },
+        "referenceName": {
+          "type": "string",
+          "description": "Reference LinkedService name."
+        },
+        "parameters": {
+          "$ref": "#/definitions/ParameterValueSpecification",
+          "description": "Arguments for LinkedService."
+ } + }, + "required": [ + "type", + "referenceName" + ] + }, "BlobEventsTrigger": { "description": "Trigger that runs every time a Blob event occurs.", "type": "object", @@ -2587,7 +2648,7 @@ ], "properties": { "pipeline": { - "$ref": "./definitions/TriggerPipelineReference", + "$ref": "#/definitions/TriggerPipelineReference", "description": "Pipeline for which runs are created when an event is fired for trigger window that is ready." }, "typeProperties": { @@ -2837,7 +2898,7 @@ ], "properties": { "pipeline": { - "$ref": "./definitions/TriggerPipelineReference", + "$ref": "#/definitions/TriggerPipelineReference", "description": "Pipeline for which runs are created when all upstream pipelines complete successfully." }, "typeProperties": { @@ -2847,7 +2908,7 @@ "dependsOn": { "type": "array", "items": { - "$ref": "./definitions/PipelineReference" + "$ref": "#/definitions/PipelineReference" }, "description": "Upstream Pipelines." }, @@ -2902,7 +2963,7 @@ "description": "Reference integration runtime name." }, "parameters": { - "$ref": "./definitions/ParameterValueSpecification", + "$ref": "#/definitions/ParameterValueSpecification", "description": "Arguments for integration runtime." } }, @@ -2927,7 +2988,7 @@ ], "properties": { "properties": { - "$ref": "./definitions/IntegrationRuntime", + "$ref": "#/definitions/IntegrationRuntime", "description": "Integration runtime properties." } }, @@ -2944,7 +3005,7 @@ "readOnly": true }, "properties": { - "$ref": "./definitions/IntegrationRuntimeStatus", + "$ref": "#/definitions/IntegrationRuntimeStatus", "description": "Integration runtime properties." } }, @@ -2978,7 +3039,7 @@ "properties": { "autoUpdate": { "description": "Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.", - "$ref": "./definitions/IntegrationRuntimeAutoUpdate" + "$ref": "#/definitions/IntegrationRuntimeAutoUpdate" }, "updateDelayOffset": { "description": "The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.", @@ -3257,7 +3318,7 @@ }, "catalogAdminPassword": { "description": "The password of the administrator user account of the catalog database.", - "$ref": "./definitions/SecureString" + "$ref": "#/definitions/SecureString" }, "catalogPricingTier": { "description": "The pricing tier for the catalog database. 
The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/", @@ -3288,7 +3349,7 @@ }, "sasToken": { "description": "The SAS token of the Azure blob container.", - "$ref": "./definitions/SecureString" + "$ref": "#/definitions/SecureString" } } }, @@ -3375,7 +3436,7 @@ }, "password": { "description": "The password of data source access.", - "$ref": "./definitions/SecretBase" + "$ref": "#/definitions/SecretBase" } }, "required": [ @@ -3450,7 +3511,7 @@ }, "licenseKey": { "description": "The license key to activate the component.", - "$ref": "./definitions/SecretBase" + "$ref": "#/definitions/SecretBase" } }, "required": [ @@ -3531,7 +3592,7 @@ "properties": { "key": { "description": "The key used for authorization.", - "$ref": "./definitions/SecureString" + "$ref": "#/definitions/SecureString" } }, "required": [ diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py index 96709f5b0..e38c0c4f7 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py @@ -333,17 +333,6 @@ ntime" --resource-group "exampleResourceGroup" """ -helps['datafactory integration-runtime create'] = """ - type: command - short-summary: Creates or updates an integration runtime. - examples: - - name: IntegrationRuntimes_Create - text: |- - az datafactory integration-runtime create --factory-name "exampleFactoryName" --properties "{\\"type\\":\ -\\"SelfHosted\\",\\"description\\":\\"A selfhosted integration runtime\\"}" --name "exampleIntegrationRuntime" --resour\ -ce-group "exampleResourceGroup" -""" - helps['datafactory integration-runtime linked-integration-runtime'] = """ type: group short-summary: datafactory integration-runtime sub group linked-integration-runtime @@ -361,6 +350,36 @@ ationRuntime" --resource-group "exampleResourceGroup" --subscription-id "12345678-1234-1234-1234-12345678abc" """ +helps['datafactory integration-runtime managed'] = """ + type: group + short-summary: datafactory integration-runtime sub group managed +""" + +helps['datafactory integration-runtime managed create'] = """ + type: command + short-summary: Creates or updates an integration runtime. + examples: + - name: IntegrationRuntimes_Create + text: |- + az datafactory integration-runtime managed create --factory-name "exampleFactoryName" --description "A s\ +elfhosted integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime self-hosted'] = """ + type: group + short-summary: datafactory integration-runtime sub group self-hosted +""" + +helps['datafactory integration-runtime self-hosted create'] = """ + type: command + short-summary: Creates or updates an integration runtime. + examples: + - name: IntegrationRuntimes_Create + text: |- + az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description \ +"A selfhosted integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +""" + helps['datafactory integration-runtime update'] = """ type: command short-summary: Updates an integration runtime. 
@@ -430,8 +449,7 @@
       - name: IntegrationRuntimes_RegenerateAuthKey
         text: |-
               az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name "examp\
-leIntegrationRuntime" --regenerate-key-parameters "{\\"keyName\\":\\"authKey2\\"}" --resource-group "exampleResourceGro\
-up"
+leIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup"
 """
 
 helps['datafactory integration-runtime remove-link'] = """
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py
index c490f5665..7ea62b3b7 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py
@@ -13,6 +13,7 @@
 from knack.arguments import CLIArgumentType
 from azure.cli.core.commands.parameters import (
     tags_type,
+    get_enum_type,
     resource_group_name_type,
     get_location_type
 )
@@ -173,15 +174,6 @@ def load_arguments(self, _):
         c.argument('if_none_match', help='ETag of the integration runtime entity. Should only be specified for get. If '
                    'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.')
 
-    with self.argument_context('datafactory integration-runtime create') as c:
-        c.argument('resource_group_name', resource_group_name_type)
-        c.argument('factory_name', help='The factory name.')
-        c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.')
-        c.argument('if_match', help='ETag of the integration runtime entity. Should only be specified for update, for w'
-                   'hich it should match existing entity or can be * for unconditional update.')
-        c.argument('properties', arg_type=CLIArgumentType(options_list=['--properties'], help='Integration runtime prop'
-                   'erties. Expected value: json-string/@json-file.'))
-
     with self.argument_context('datafactory integration-runtime linked-integration-runtime create') as c:
         c.argument('resource_group_name', resource_group_name_type)
         c.argument('factory_name', help='The factory name.')
@@ -194,14 +186,38 @@ def load_arguments(self, _):
         c.argument('data_factory_location', help='The location of the data factory that the linked integration runtime '
                    'belongs to.')
 
+    with self.argument_context('datafactory integration-runtime managed create') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('factory_name', help='The factory name.')
+        c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.')
+        c.argument('if_match', help='ETag of the integration runtime entity. Should only be specified for update, for w'
+                   'hich it should match existing entity or can be * for unconditional update.')
+        c.argument('description', help='Integration runtime description.')
+        c.argument('type_properties_compute_properties', arg_type=CLIArgumentType(options_list=['--type-properties-comp'
+                   'ute-properties'], help='The compute resource for managed integration runtime. Expected value: json-'
+                   'string/@json-file.'))
+        c.argument('type_properties_ssis_properties', arg_type=CLIArgumentType(options_list=['--type-properties-ssis-pr'
+                   'operties'], help='SSIS properties for managed integration runtime. Expected value: json-string/@jso'
+                   'n-file.'))
+
+    with self.argument_context('datafactory integration-runtime self-hosted create') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('factory_name', help='The factory name.')
+        c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.')
+        c.argument('if_match', help='ETag of the integration runtime entity. Should only be specified for update, for w'
+                   'hich it should match existing entity or can be * for unconditional update.')
+        c.argument('description', help='Integration runtime description.')
+        c.argument('type_properties_linked_info', arg_type=CLIArgumentType(options_list=['--type-properties-linked-info'
+                   ''], help='The base definition of a linked integration runtime. Expected value: json-string/@json-fi'
+                   'le.'))
+
     with self.argument_context('datafactory integration-runtime update') as c:
         c.argument('resource_group_name', resource_group_name_type)
         c.argument('factory_name', help='The factory name.', id_part='name')
         c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.',
                    id_part='child_name_1')
-        c.argument('auto_update', arg_type=CLIArgumentType(options_list=['--auto-update'], help='Enables or disables th'
-                   'e auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?'
-                   'linkid=854189. Expected value: json-string/@json-file.'))
+        c.argument('auto_update', arg_type=get_enum_type(['On', 'Off']), help='Enables or disables the auto-update feat'
+                   'ure of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.')
         c.argument('update_delay_offset', help='The time offset (in hours) in the day, e.g., PT03H is 3 hours. The inte'
                    'gration runtime auto update will happen on that time.')
 
@@ -239,9 +255,8 @@ def load_arguments(self, _):
         c.argument('factory_name', help='The factory name.', id_part='name')
         c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.',
                    id_part='child_name_1')
-        c.argument('regenerate_key_parameters', arg_type=CLIArgumentType(options_list=['--regenerate-key-parameters'],
-                   help='The parameters for regenerating integration runtime authentication key. Expected value: json-s'
-                   'tring/@json-file.'))
+        c.argument('key_name', arg_type=get_enum_type(['authKey1', 'authKey2']), help='The name of the authentication k'
+                   'ey to regenerate.')
 
     with self.argument_context('datafactory integration-runtime remove-link') as c:
         c.argument('resource_group_name', resource_group_name_type)
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py
index 9e817806a..47ca5ab6a 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py
@@ -59,9 +59,10 @@ def load_command_table(self, _):
                             client_factory=cf_integration_runtime, is_experimental=True) as g:
         g.custom_command('list', 'datafactory_integration_runtime_list')
         g.custom_show_command('show', 'datafactory_integration_runtime_show')
-        g.custom_command('create', 'datafactory_integration_runtime_create')
         g.custom_command('linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runti'
                          'me_create')
+        g.custom_command('managed create', 'datafactory_integration_runtime_managed_create')
+        g.custom_command('self-hosted create', 'datafactory_integration_runtime_self_hosted_create')
         g.custom_command('update', 'datafactory_integration_runtime_update')
         g.custom_command('delete', 'datafactory_integration_runtime_delete')
         g.custom_command('get-connection-info', 'datafactory_integration_runtime_get_connection_info')
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py
index 93343fccf..db4a9dfd3 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py
@@ -261,21 +261,6 @@ def datafactory_integration_runtime_show(cmd, client,
                       if_none_match=if_none_match)
 
 
-def datafactory_integration_runtime_create(cmd, client,
-                                           resource_group_name,
-                                           factory_name,
-                                           integration_runtime_name,
-                                           properties,
-                                           if_match=None):
-    if isinstance(properties, str):
-        properties = json.loads(properties)
-    return client.create_or_update(resource_group_name=resource_group_name,
-                                   factory_name=factory_name,
-                                   integration_runtime_name=integration_runtime_name,
-                                   if_match=if_match,
-                                   properties=properties)
-
-
 def datafactory_integration_runtime_linked_integration_runtime_create(cmd, client,
                                                                       resource_group_name,
                                                                       factory_name,
@@ -293,14 +278,56 @@ def datafactory_integration_runtime_linked_integration_runtime_create(cmd, clien
                                                 data_factory_location=data_factory_location)
 
 
+def datafactory_integration_runtime_managed_create(cmd, client,
+                                                   resource_group_name,
+                                                   factory_name,
+                                                   integration_runtime_name,
+                                                   if_match=None,
+                                                   description=None,
+                                                   type_properties_compute_properties=None,
+                                                   type_properties_ssis_properties=None):
+    if isinstance(type_properties_compute_properties, str):
+        type_properties_compute_properties = json.loads(type_properties_compute_properties)
+    if isinstance(type_properties_ssis_properties, str):
+        type_properties_ssis_properties = json.loads(type_properties_ssis_properties)
+    properties = {}
+    properties['type'] = 'Managed'
+    properties['description'] = description
+    properties['compute_properties'] = type_properties_compute_properties
+    properties['ssis_properties'] = type_properties_ssis_properties
+    return client.create_or_update(resource_group_name=resource_group_name,
+                                   factory_name=factory_name,
+                                   integration_runtime_name=integration_runtime_name,
+                                   if_match=if_match,
+                                   properties=properties)
+
+
+def datafactory_integration_runtime_self_hosted_create(cmd, client,
+                                                       resource_group_name,
+                                                       factory_name,
+                                                       integration_runtime_name,
+                                                       if_match=None,
+                                                       description=None,
+                                                       type_properties_linked_info=None):
+    if isinstance(type_properties_linked_info, str):
+        type_properties_linked_info = json.loads(type_properties_linked_info)
+    properties = {}
+    properties['type'] = 'SelfHosted'
+    properties['description'] = description
+    properties['linked_info'] = type_properties_linked_info
+    return client.create_or_update(resource_group_name=resource_group_name,
+                                   factory_name=factory_name,
+                                   integration_runtime_name=integration_runtime_name,
+                                   if_match=if_match,
+                                   properties=properties)
+
+
 def datafactory_integration_runtime_update(cmd, client,
                                            resource_group_name,
                                            factory_name,
                                            integration_runtime_name,
                                            auto_update=None,
                                            update_delay_offset=None):
-    if isinstance(auto_update, str):
-        auto_update = json.loads(auto_update)
     return client.update(resource_group_name=resource_group_name,
                          factory_name=factory_name,
                          integration_runtime_name=integration_runtime_name,
@@ -357,13 +384,11 @@ def datafactory_integration_runtime_regenerate_auth_key(cmd, client,
                                                         resource_group_name,
                                                         factory_name,
                                                         integration_runtime_name,
-                                                        regenerate_key_parameters):
-    if isinstance(regenerate_key_parameters, str):
-        regenerate_key_parameters = json.loads(regenerate_key_parameters)
+                                                        key_name=None):
     return client.regenerate_auth_key(resource_group_name=resource_group_name,
                                       factory_name=factory_name,
                                       integration_runtime_name=integration_runtime_name,
-                                      regenerate_key_parameters=regenerate_key_parameters)
+                                      key_name=key_name)
 
 
 def datafactory_integration_runtime_remove_link(cmd, client,
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
index 39dbc4b11..5dc310a06 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
@@ -108,9 +108,20 @@ def step__factories_get_factories_get(test, rg):
 # EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create
 @try_manual
 def step__integrationruntimes_put_integrationruntimes_create(test, rg):
-    test.cmd('az datafactory integration-runtime create '
+    test.cmd('az datafactory integration-runtime managed create '
              '--factory-name "{exampleFactoryName}" '
-             '--properties "{{\\"type\\":\\"SelfHosted\\",\\"description\\":\\"A selfhosted integration runtime\\"}}" '
+             '--description "A selfhosted integration runtime" '
              '--name "{exampleIntegrationRuntime}" '
              '--resource-group "{rg}"',
              checks=[])
+
+
+# EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create
+@try_manual
+def step__integrationruntimes_put_integrationruntimes_create(test, rg):
+    test.cmd('az datafactory integration-runtime managed create '
+             '--factory-name "{exampleFactoryName}" '
+             '--description "A selfhosted integration runtime" '
+             '--name "{exampleIntegrationRuntime}" '
+             '--resource-group "{rg}"',
+             checks=[])
@@ -186,17 +197,7 @@
 def step__integrationruntimes_post_integrationruntimes_regenerateauthkey(test, rg):
     test.cmd('az datafactory integration-runtime regenerate-auth-key '
              '--factory-name "{exampleFactoryName}" '
              '--name "{exampleIntegrationRuntime}" '
-             '--regenerate-key-parameters "{{\\"keyName\\":\\"authKey2\\"}}" '
-             '--resource-group "{rg}"',
-             checks=[])
-
-
-# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start
-@try_manual
-def step__integrationruntimes_post_integrationruntimes_start(test, rg):
-    test.cmd('az datafactory integration-runtime start '
-             '--factory-name "{exampleFactoryName}" '
-             '--name "{IntegrationRuntimes_2}" '
+             '--key-name "authKey2" '
              '--resource-group "{rg}"',
              checks=[])
@@ -216,6 +217,16 @@ def step__triggers_put_triggers_create(test, rg):
              checks=[])
 
 
+# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop
+@try_manual
+def step__integrationruntimes_post_integrationruntimes_stop(test, rg):
+    test.cmd('az datafactory integration-runtime stop '
+             '--factory-name "{exampleFactoryName}" '
+             '--name "{IntegrationRuntimes_2}" '
+             '--resource-group "{rg}"',
+             checks=[])
+
+
 # EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_SyncCredentials
 @try_manual
 def step__integrationruntimes_post_integrationruntimes_synccredentials(test, rg):
@@ -372,10 +383,10 @@ def step__integrationruntimes_get_integrationruntimes_get(test, rg):
              checks=[])
 
 
-# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop
+# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start
 @try_manual
-def step__integrationruntimes_post_integrationruntimes_stop(test, rg):
-    test.cmd('az datafactory integration-runtime stop '
+def step__integrationruntimes_post_integrationruntimes_start(test, rg):
+    test.cmd('az datafactory integration-runtime start '
              '--factory-name "{exampleFactoryName}" '
              '--name "{IntegrationRuntimes_2}" '
              '--resource-group "{rg}"',
              checks=[])
@@ -418,6 +429,7 @@ def call_scenario(test, rg):
     step__factories_patch_factories_update(test, rg)
     step__factories_get_factories_get(test, rg)
     step__integrationruntimes_put_integrationruntimes_create(test, rg)
+    step__integrationruntimes_put_integrationruntimes_create(test, rg)
     step__integrationruntimes_get_integrationruntimes_listbyfactory(test, rg)
     step__integrationruntimes_post_integrationruntimes_createlinkedintegrationruntime(test, rg)
     step__integrationruntimes_post_integrationruntimes_getconnectioninfo(test, rg)
@@ -425,8 +437,8 @@ def call_scenario(test, rg):
     step__integrationruntimes_post_integrationruntimes_getstatus(test, rg)
     step__integrationruntimes_post_integrationruntimes_listauthkeys(test, rg)
     step__integrationruntimes_post_integrationruntimes_regenerateauthkey(test, rg)
-    step__integrationruntimes_post_integrationruntimes_start(test, rg)
     step__triggers_put_triggers_create(test, rg)
+    step__integrationruntimes_post_integrationruntimes_stop(test, rg)
     step__integrationruntimes_post_integrationruntimes_synccredentials(test, rg)
     step__integrationruntimes_post_integrationruntimes_upgrade(test, rg)
     step__integrationruntimes_post_integrationruntimes_upgrade(test, rg)
@@ -442,7 +454,7 @@ def call_scenario(test, rg):
     step__triggers_post_triggers_unsubscribefromevents(test, rg)
     step__triggers_delete_triggers_delete(test, rg)
     step__integrationruntimes_get_integrationruntimes_get(test, rg)
-    step__integrationruntimes_post_integrationruntimes_stop(test, rg)
+    step__integrationruntimes_post_integrationruntimes_start(test, rg)
     step__integrationruntimes_delete_integrationruntimes_delete(test, rg)
     step__factories_delete_factories_delete(test, rg)
     cleanup(test, rg)
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
index 5f0c7d951..8e01fb3f2 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
@@ -119,7 +119,7 @@ async def create_or_update(
         resource_group_name: str,
         factory_name: str,
         integration_runtime_name: str,
-        properties: object,
+        properties: "models.IntegrationRuntime",
         if_match: Optional[str] = None,
         **kwargs
     ) -> "models.IntegrationRuntimeResource":
@@ -132,7 +132,7 @@
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :param properties: Integration runtime properties.
-        :type properties: object
+        :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime
         :param if_match: ETag of the integration runtime entity. Should only be specified for update,
         for which it should match existing entity or can be * for unconditional update.
         :type if_match: str
@@ -265,7 +265,7 @@
         resource_group_name: str,
         factory_name: str,
         integration_runtime_name: str,
-        auto_update: Optional[object] = None,
+        auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None,
         update_delay_offset: Optional[str] = None,
         **kwargs
     ) -> "models.IntegrationRuntimeResource":
@@ -279,7 +279,7 @@
         :type integration_runtime_name: str
         :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
         runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
-        :type auto_update: object
+        :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
         :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The
         integration runtime auto update will happen on that time.
         :type update_delay_offset: str
@@ -458,7 +458,7 @@ async def get_connection_info(
         factory_name: str,
         integration_runtime_name: str,
         **kwargs
-    ) -> object:
+    ) -> "models.IntegrationRuntimeConnectionInfo":
        """Gets the on-premises integration runtime connection information for encrypting the on-premises
        data source credentials.

        :param resource_group_name: The resource group name.
@@ -468,11 +468,11 @@
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: object or the result of cls(response)
-        :rtype: object
+        :return: IntegrationRuntimeConnectionInfo or the result of cls(response)
+        :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[object]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.IntegrationRuntimeConnectionInfo"]
         error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
         error_map.update(kwargs.pop('error_map', {}))
         api_version = "2018-06-01"
@@ -504,7 +504,7 @@
             map_error(status_code=response.status_code, response=response, error_map=error_map)
             raise HttpResponseError(response=response, error_format=ARMErrorFormat)
 
-        deserialized = self._deserialize('object', pipeline_response)
+        deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response)
 
         if cls:
             return cls(pipeline_response, deserialized, {})
@@ -517,9 +517,9 @@
         resource_group_name: str,
         factory_name: str,
         integration_runtime_name: str,
-        regenerate_key_parameters: object,
+        key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None,
         **kwargs
-    ) -> object:
+    ) -> "models.IntegrationRuntimeAuthKeys":
        """Regenerates the authentication key for an integration runtime.

        :param resource_group_name: The resource group name.
@@ -528,17 +528,18 @@
         :type factory_name: str
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
-        :param regenerate_key_parameters: The parameters for regenerating integration runtime
-         authentication key.
-        :type regenerate_key_parameters: object
+        :param key_name: The name of the authentication key to regenerate.
+        :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: object or the result of cls(response)
-        :rtype: object
+        :return: IntegrationRuntimeAuthKeys or the result of cls(response)
+        :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[object]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.IntegrationRuntimeAuthKeys"]
         error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
         error_map.update(kwargs.pop('error_map', {}))
+
+        _regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name)
         api_version = "2018-06-01"
         content_type = kwargs.pop("content_type", "application/json")
@@ -563,7 +564,7 @@
 
         # Construct and send request
         body_content_kwargs = {}  # type: Dict[str, Any]
-        body_content = self._serialize.body(regenerate_key_parameters, 'object')
+        body_content = self._serialize.body(_regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
@@ -574,7 +575,7 @@
             map_error(status_code=response.status_code, response=response, error_map=error_map)
             raise HttpResponseError(response=response, error_format=ARMErrorFormat)
 
-        deserialized = self._deserialize('object', pipeline_response)
+        deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
 
         if cls:
             return cls(pipeline_response, deserialized, {})
@@ -588,7 +589,7 @@
         factory_name: str,
         integration_runtime_name: str,
         **kwargs
-    ) -> object:
+    ) -> "models.IntegrationRuntimeAuthKeys":
        """Retrieves the authentication keys for an integration runtime.

        :param resource_group_name: The resource group name.
@@ -598,11 +599,11 @@
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: object or the result of cls(response)
-        :rtype: object
+        :return: IntegrationRuntimeAuthKeys or the result of cls(response)
+        :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[object]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.IntegrationRuntimeAuthKeys"]
         error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
         error_map.update(kwargs.pop('error_map', {}))
         api_version = "2018-06-01"
@@ -634,7 +635,7 @@
             map_error(status_code=response.status_code, response=response, error_map=error_map)
             raise HttpResponseError(response=response, error_format=ARMErrorFormat)
 
-        deserialized = self._deserialize('object', pipeline_response)
+        deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
 
         if cls:
             return cls(pipeline_response, deserialized, {})
@@ -895,7 +896,7 @@
         factory_name: str,
         integration_runtime_name: str,
         **kwargs
-    ) -> object:
+    ) -> "models.IntegrationRuntimeMonitoringData":
        """Get the integration runtime monitoring data, which includes the monitor data for all the nodes
        under this integration runtime.
        :param resource_group_name: The resource group name.
@@ -905,11 +906,11 @@
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: object or the result of cls(response)
-        :rtype: object
+        :return: IntegrationRuntimeMonitoringData or the result of cls(response)
+        :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData
         :raises: ~azure.core.exceptions.HttpResponseError
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[object]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.IntegrationRuntimeMonitoringData"]
         error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
         error_map.update(kwargs.pop('error_map', {}))
         api_version = "2018-06-01"
@@ -941,7 +942,7 @@
             map_error(status_code=response.status_code, response=response, error_map=error_map)
             raise HttpResponseError(response=response, error_format=ARMErrorFormat)
 
-        deserialized = self._deserialize('object', pipeline_response)
+        deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
 
         if cls:
             return cls(pipeline_response, deserialized, {})
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
index 29b673b79..6ced928b5 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
@@ -191,7 +191,7 @@ async def create_or_update(
         resource_group_name: str,
         factory_name: str,
         trigger_name: str,
-        properties: object,
+        properties: "models.Trigger",
         if_match: Optional[str] = None,
         **kwargs
     ) -> "models.TriggerResource":
@@ -204,7 +204,7 @@
         :param trigger_name: The trigger name.
         :type trigger_name: str
         :param properties: Properties of the trigger.
-        :type properties: object
+        :type properties: ~azure.mgmt.datafactory.models.Trigger
         :param if_match: ETag of the trigger entity. Should only be specified for update, for which it
         should match existing entity or can be * for unconditional update.
         :type if_match: str
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
index 6aab9e028..cbbfb90fc 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
@@ -55,6 +55,7 @@
     from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization
     from ._models_py3 import LinkedIntegrationRuntimeRequest
     from ._models_py3 import LinkedIntegrationRuntimeType
+    from ._models_py3 import LinkedServiceReference
     from ._models_py3 import ManagedIntegrationRuntime
     from ._models_py3 import ManagedIntegrationRuntimeError
     from ._models_py3 import ManagedIntegrationRuntimeNode
@@ -62,6 +63,7 @@
     from ._models_py3 import ManagedIntegrationRuntimeStatus
     from ._models_py3 import MultiplePipelineTrigger
     from ._models_py3 import PackageStore
+    from ._models_py3 import PipelineReference
     from ._models_py3 import RecurrenceSchedule
     from ._models_py3 import RecurrenceScheduleOccurrence
     from ._models_py3 import RerunTumblingWindowTrigger
@@ -89,6 +91,7 @@
     from ._models_py3 import TriggerDependencyReference
     from ._models_py3 import TriggerFilterParameters
     from ._models_py3 import TriggerListResponse
+    from ._models_py3 import TriggerPipelineReference
     from ._models_py3 import TriggerQueryResponse
     from ._models_py3 import TriggerReference
     from ._models_py3 import TriggerResource
@@ -146,6 +149,7 @@
     from ._models import LinkedIntegrationRuntimeRbacAuthorization  # type: ignore
     from ._models import LinkedIntegrationRuntimeRequest  # type: ignore
     from ._models import LinkedIntegrationRuntimeType  # type: ignore
+    from ._models import LinkedServiceReference  # type: ignore
     from ._models import ManagedIntegrationRuntime  # type: ignore
     from ._models import ManagedIntegrationRuntimeError  # type: ignore
     from ._models import ManagedIntegrationRuntimeNode  # type: ignore
@@ -153,6 +157,7 @@
     from ._models import ManagedIntegrationRuntimeStatus  # type: ignore
     from ._models import MultiplePipelineTrigger  # type: ignore
     from ._models import PackageStore  # type: ignore
+    from ._models import PipelineReference  # type: ignore
     from ._models import RecurrenceSchedule  # type: ignore
     from ._models import RecurrenceScheduleOccurrence  # type: ignore
     from ._models import RerunTumblingWindowTrigger  # type: ignore
@@ -180,6 +185,7 @@
     from ._models import TriggerDependencyReference  # type: ignore
     from ._models import TriggerFilterParameters  # type: ignore
     from ._models import TriggerListResponse  # type: ignore
+    from ._models import TriggerPipelineReference  # type: ignore
     from ._models import TriggerQueryResponse  # type: ignore
     from ._models import TriggerReference  # type: ignore
     from ._models import TriggerResource  # type: ignore
@@ -262,6 +268,7 @@
     'LinkedIntegrationRuntimeRbacAuthorization',
     'LinkedIntegrationRuntimeRequest',
     'LinkedIntegrationRuntimeType',
+    'LinkedServiceReference',
     'ManagedIntegrationRuntime',
     'ManagedIntegrationRuntimeError',
     'ManagedIntegrationRuntimeNode',
@@ -269,6 +276,7 @@
     'ManagedIntegrationRuntimeStatus',
     'MultiplePipelineTrigger',
     'PackageStore',
+    'PipelineReference',
     'RecurrenceSchedule',
     'RecurrenceScheduleOccurrence',
     'RerunTumblingWindowTrigger',
@@ -296,6 +304,7 @@
     'TriggerDependencyReference',
     'TriggerFilterParameters',
     'TriggerListResponse',
+    'TriggerPipelineReference',
     'TriggerQueryResponse',
     'TriggerReference',
     'TriggerResource',
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
index 8ad7b4a3e..016566c63 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
@@ -112,7 +112,7 @@ class MultiplePipelineTrigger(Trigger):
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipelines: Pipelines that need to be started.
-    :type pipelines: list[object]
+    :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
     """

     _validation = {
@@ -126,7 +126,7 @@ class MultiplePipelineTrigger(Trigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipelines': {'key': 'pipelines', 'type': '[object]'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
     }

     _subtype_map = {
@@ -162,7 +162,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger):
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipelines: Pipelines that need to be started.
-    :type pipelines: list[object]
+    :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
     :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to
     fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the
     december folder under the records container. At least one of these must be provided:
@@ -193,7 +193,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipelines': {'key': 'pipelines', 'type': '[object]'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
         'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'},
         'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'},
         'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'},
@@ -234,14 +234,14 @@ class BlobTrigger(MultiplePipelineTrigger):
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipelines: Pipelines that need to be started.
-    :type pipelines: list[object]
+    :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
     :param folder_path: Required. The path of the container/folder that will trigger the pipeline.
     :type folder_path: str
     :param max_concurrency: Required. The max number of parallel files to handle when it is
     triggered.
     :type max_concurrency: int
     :param linked_service: Required. The Azure Storage linked service reference.
-    :type linked_service: object
+    :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference
     """

     _validation = {
@@ -258,10 +258,10 @@ class BlobTrigger(MultiplePipelineTrigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipelines': {'key': 'pipelines', 'type': '[object]'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
         'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'},
         'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'},
-        'linked_service': {'key': 'typeProperties.linkedService', 'type': 'object'},
+        'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'},
     }

     def __init__(
@@ -296,9 +296,9 @@ class ChainingTrigger(Trigger):
     :type annotations: list[object]
     :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines
     complete successfully.
-    :type pipeline: object
+    :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference
     :param depends_on: Required. Upstream Pipelines.
-    :type depends_on: list[object]
+    :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference]
     :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream
     pipelines.
     :type run_dimension: str
@@ -318,8 +318,8 @@ class ChainingTrigger(Trigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipeline': {'key': 'pipeline', 'type': 'object'},
-        'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[object]'},
+        'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'},
+        'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'},
         'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'},
     }
@@ -416,7 +416,7 @@ class CmdkeySetup(CustomSetupBase):
     :param user_name: Required. The user name of data source access.
     :type user_name: object
     :param password: Required. The password of data source access.
-    :type password: object
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
     """

     _validation = {
@@ -430,7 +430,7 @@ class CmdkeySetup(CustomSetupBase):
         'type': {'key': 'type', 'type': 'str'},
         'target_name': {'key': 'typeProperties.targetName', 'type': 'object'},
         'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
     }

     def __init__(
@@ -454,7 +454,7 @@ class ComponentSetup(CustomSetupBase):
     :param component_name: Required. The name of the 3rd party component.
     :type component_name: str
     :param license_key: The license key to activate the component.
-    :type license_key: object
+    :type license_key: ~azure.mgmt.datafactory.models.SecretBase
     """

     _validation = {
@@ -465,7 +465,7 @@ class ComponentSetup(CustomSetupBase):
     _attribute_map = {
         'type': {'key': 'type', 'type': 'str'},
         'component_name': {'key': 'typeProperties.componentName', 'type': 'str'},
-        'license_key': {'key': 'typeProperties.licenseKey', 'type': 'object'},
+        'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'},
     }

     def __init__(
@@ -1264,12 +1264,12 @@ class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model):
     script.
     :type blob_container_uri: str
     :param sas_token: The SAS token of the Azure blob container.
-    :type sas_token: object
+    :type sas_token: ~azure.mgmt.datafactory.models.SecureString
     """

     _attribute_map = {
         'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'},
-        'sas_token': {'key': 'sasToken', 'type': 'object'},
+        'sas_token': {'key': 'sasToken', 'type': 'SecureString'},
     }

     def __init__(
@@ -1503,7 +1503,7 @@ class IntegrationRuntimeReference(msrest.serialization.Model):
     :param reference_name: Required. Reference integration runtime name.
     :type reference_name: str
     :param parameters: Arguments for integration runtime.
-    :type parameters: object
+    :type parameters: dict[str, object]
     """

     _validation = {
@@ -1514,7 +1514,7 @@ class IntegrationRuntimeReference(msrest.serialization.Model):
     _attribute_map = {
         'type': {'key': 'type', 'type': 'str'},
         'reference_name': {'key': 'referenceName', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': 'object'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
     }

     type = "IntegrationRuntimeReference"
@@ -1604,7 +1604,7 @@ class IntegrationRuntimeResource(SubResource):
     :ivar etag: Etag identifies change in the resource.
     :vartype etag: str
     :param properties: Required. Integration runtime properties.
-    :type properties: object
+    :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime
     """

     _validation = {
@@ -1620,7 +1620,7 @@ class IntegrationRuntimeResource(SubResource):
         'name': {'key': 'name', 'type': 'str'},
         'type': {'key': 'type', 'type': 'str'},
         'etag': {'key': 'etag', 'type': 'str'},
-        'properties': {'key': 'properties', 'type': 'object'},
+        'properties': {'key': 'properties', 'type': 'IntegrationRuntime'},
     }

     def __init__(
@@ -1643,7 +1643,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model):
     :type catalog_admin_user_name: str
     :param catalog_admin_password: The password of the administrator user account of the catalog
     database.
-    :type catalog_admin_password: object
+    :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString
     :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could
     be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values
     include: "Basic", "Standard", "Premium", "PremiumRS".
@@ -1659,7 +1659,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model):
         'additional_properties': {'key': '', 'type': '{object}'},
         'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'},
         'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'},
-        'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'object'},
+        'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'},
         'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'},
     }
@@ -1821,7 +1821,7 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model):
     :ivar name: The integration runtime name.
     :vartype name: str
     :param properties: Required. Integration runtime properties.
-    :type properties: object
+    :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus
     """

     _validation = {
@@ -1831,7 +1831,7 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
-        'properties': {'key': 'properties', 'type': 'object'},
+        'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'},
     }

     def __init__(
@@ -1965,7 +1965,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
     sharing.Constant filled by server.
     :type authorization_type: str
     :param key: Required. The key used for authorization.
-    :type key: object
+    :type key: ~azure.mgmt.datafactory.models.SecureString
     """

     _validation = {
@@ -1975,7 +1975,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
     _attribute_map = {
         'authorization_type': {'key': 'authorizationType', 'type': 'str'},
-        'key': {'key': 'key', 'type': 'object'},
+        'key': {'key': 'key', 'type': 'SecureString'},
     }

     def __init__(
@@ -2043,6 +2043,43 @@ def __init__(
         self.linked_factory_name = kwargs['linked_factory_name']
 
 
+class LinkedServiceReference(msrest.serialization.Model):
+    """Linked service reference type.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference".
+    :vartype type: str
+    :param reference_name: Required. Reference LinkedService name.
+    :type reference_name: str
+    :param parameters: Arguments for LinkedService.
+    :type parameters: dict[str, object]
+    """
+
+    _validation = {
+        'type': {'required': True, 'constant': True},
+        'reference_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_name': {'key': 'referenceName', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+    }
+
+    type = "LinkedServiceReference"
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedServiceReference, self).__init__(**kwargs)
+        self.reference_name = kwargs['reference_name']
+        self.parameters = kwargs.get('parameters', None)
+
+
 class ManagedIntegrationRuntime(IntegrationRuntime):
     """Managed integration runtime, including managed elastic and managed dedicated integration
     runtimes.
@@ -2326,6 +2363,43 @@ def __init__(
         self.package_store_linked_service = kwargs['package_store_linked_service']
 
 
+class PipelineReference(msrest.serialization.Model):
+    """Pipeline reference type.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar type: Required. Pipeline reference type. Default value: "PipelineReference".
+    :vartype type: str
+    :param reference_name: Required. Reference pipeline name.
+    :type reference_name: str
+    :param name: Reference name.
+    :type name: str
+    """
+
+    _validation = {
+        'type': {'required': True, 'constant': True},
+        'reference_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_name': {'key': 'referenceName', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+    }
+
+    type = "PipelineReference"
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(PipelineReference, self).__init__(**kwargs)
+        self.reference_name = kwargs['reference_name']
+        self.name = kwargs.get('name', None)
+
+
 class RecurrenceSchedule(msrest.serialization.Model):
     """The recurrence schedule.
@@ -2508,7 +2582,7 @@ class ScheduleTrigger(MultiplePipelineTrigger):
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipelines: Pipelines that need to be started.
-    :type pipelines: list[object]
+    :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
     :param recurrence: Required. Recurrence schedule configuration.
     :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence
     """
@@ -2525,7 +2599,7 @@ class ScheduleTrigger(MultiplePipelineTrigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipelines': {'key': 'pipelines', 'type': '[object]'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
         'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'},
     }
@@ -3445,6 +3519,29 @@ def __init__(
         self.next_link = kwargs.get('next_link', None)
 
 
+class TriggerPipelineReference(msrest.serialization.Model):
+    """Pipeline that needs to be triggered with the given parameters.
+
+    :param pipeline_reference: Pipeline reference.
+    :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference
+    :param parameters: Pipeline parameters.
+    :type parameters: dict[str, object]
+    """
+
+    _attribute_map = {
+        'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(TriggerPipelineReference, self).__init__(**kwargs)
+        self.pipeline_reference = kwargs.get('pipeline_reference', None)
+        self.parameters = kwargs.get('parameters', None)
+
+
 class TriggerQueryResponse(msrest.serialization.Model):
     """A query of triggers.
@@ -3524,7 +3621,7 @@ class TriggerResource(SubResource):
     :ivar etag: Etag identifies change in the resource.
     :vartype etag: str
     :param properties: Required. Properties of the trigger.
-    :type properties: object
+    :type properties: ~azure.mgmt.datafactory.models.Trigger
     """

     _validation = {
@@ -3540,7 +3637,7 @@ class TriggerResource(SubResource):
         'name': {'key': 'name', 'type': 'str'},
         'type': {'key': 'type', 'type': 'str'},
         'etag': {'key': 'etag', 'type': 'str'},
-        'properties': {'key': 'properties', 'type': 'object'},
+        'properties': {'key': 'properties', 'type': 'Trigger'},
     }

     def __init__(
@@ -3603,7 +3700,7 @@ class TumblingWindowTrigger(Trigger):
     :type annotations: list[object]
     :param pipeline: Required. Pipeline for which runs are created when an event is fired for
     trigger window that is ready.
-    :type pipeline: object
+    :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference
     :param frequency: Required. The frequency of the time windows. Possible values include:
     "Minute", "Hour".
     :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency
@@ -3646,7 +3743,7 @@ class TumblingWindowTrigger(Trigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipeline': {'key': 'pipeline', 'type': 'object'},
+        'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'},
         'frequency': {'key': 'typeProperties.frequency', 'type': 'str'},
         'interval': {'key': 'typeProperties.interval', 'type': 'int'},
         'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'},
@@ -3719,15 +3816,16 @@ class UpdateIntegrationRuntimeRequest(msrest.serialization.Model):
     """Update integration runtime request.

     :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
-    runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
-    :type auto_update: object
+    runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On",
+    "Off".
+    :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
     :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The
     integration runtime auto update will happen on that time.
     :type update_delay_offset: str
     """

     _attribute_map = {
-        'auto_update': {'key': 'autoUpdate', 'type': 'object'},
+        'auto_update': {'key': 'autoUpdate', 'type': 'str'},
         'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'},
     }
diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
index d98f9c0ee..9a88cf941 100644
--- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
+++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
@@ -125,7 +125,7 @@ class MultiplePipelineTrigger(Trigger):
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipelines: Pipelines that need to be started.
-    :type pipelines: list[object]
+    :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
     """

     _validation = {
@@ -139,7 +139,7 @@ class MultiplePipelineTrigger(Trigger):
         'description': {'key': 'description', 'type': 'str'},
         'runtime_state': {'key': 'runtimeState', 'type': 'str'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'pipelines': {'key': 'pipelines', 'type': '[object]'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
     }

     _subtype_map = {
@@ -152,7 +152,7 @@ def __init__(
         self,
         *,
         additional_properties: Optional[Dict[str, object]] = None,
         description: Optional[str] = None,
         annotations: Optional[List[object]] = None,
-        pipelines: Optional[List[object]] = None,
+        pipelines: Optional[List["TriggerPipelineReference"]] = None,
         **kwargs
     ):
         super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
@@ -180,7 +180,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger):
     :param annotations: List of tags that can be used for describing the trigger.
     :type annotations: list[object]
     :param pipelines: Pipelines that need to be started.
- :type pipelines: list[object] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: @@ -211,7 +211,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, @@ -227,7 +227,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, annotations: Optional[List[object]] = None, - pipelines: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, blob_path_begins_with: Optional[str] = None, blob_path_ends_with: Optional[str] = None, ignore_empty_blobs: Optional[bool] = None, @@ -262,14 +262,14 @@ class BlobTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[object] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param folder_path: Required. The path of the container/folder that will trigger the pipeline. :type folder_path: str :param max_concurrency: Required. The max number of parallel files to handle when it is triggered. :type max_concurrency: int :param linked_service: Required. The Azure Storage linked service reference. 
- :type linked_service: object + :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -286,10 +286,10 @@ class BlobTrigger(MultiplePipelineTrigger): 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'object'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } def __init__( @@ -297,11 +297,11 @@ def __init__( *, folder_path: str, max_concurrency: int, - linked_service: object, + linked_service: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, annotations: Optional[List[object]] = None, - pipelines: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) @@ -332,9 +332,9 @@ class ChainingTrigger(Trigger): :type annotations: list[object] :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. - :type pipeline: object + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param depends_on: Required. Upstream Pipelines. - :type depends_on: list[object] + :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream pipelines. :type run_dimension: str @@ -354,16 +354,16 @@ class ChainingTrigger(Trigger): 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipeline': {'key': 'pipeline', 'type': 'object'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__( self, *, - pipeline: object, - depends_on: List[object], + pipeline: "TriggerPipelineReference", + depends_on: List["PipelineReference"], run_dimension: str, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, @@ -464,7 +464,7 @@ class CmdkeySetup(CustomSetupBase): :param user_name: Required. The user name of data source access. :type user_name: object :param password: Required. The password of data source access. 
- :type password: object + :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -478,7 +478,7 @@ class CmdkeySetup(CustomSetupBase): 'type': {'key': 'type', 'type': 'str'}, 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, } def __init__( @@ -486,7 +486,7 @@ def __init__( *, target_name: object, user_name: object, - password: object, + password: "SecretBase", **kwargs ): super(CmdkeySetup, self).__init__(**kwargs) @@ -506,7 +506,7 @@ class ComponentSetup(CustomSetupBase): :param component_name: Required. The name of the 3rd party component. :type component_name: str :param license_key: The license key to activate the component. - :type license_key: object + :type license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { @@ -517,14 +517,14 @@ class ComponentSetup(CustomSetupBase): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, - 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'object'}, + 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'}, } def __init__( self, *, component_name: str, - license_key: Optional[object] = None, + license_key: Optional["SecretBase"] = None, **kwargs ): super(ComponentSetup, self).__init__(**kwargs) @@ -1396,19 +1396,19 @@ class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): script. :type blob_container_uri: str :param sas_token: The SAS token of the Azure blob container. - :type sas_token: object + :type sas_token: ~azure.mgmt.datafactory.models.SecureString """ _attribute_map = { 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'object'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, } def __init__( self, *, blob_container_uri: Optional[str] = None, - sas_token: Optional[object] = None, + sas_token: Optional["SecureString"] = None, **kwargs ): super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) @@ -1655,7 +1655,7 @@ class IntegrationRuntimeReference(msrest.serialization.Model): :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. - :type parameters: object + :type parameters: dict[str, object] """ _validation = { @@ -1666,7 +1666,7 @@ class IntegrationRuntimeReference(msrest.serialization.Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, } type = "IntegrationRuntimeReference" @@ -1675,7 +1675,7 @@ def __init__( self, *, reference_name: str, - parameters: Optional[object] = None, + parameters: Optional[Dict[str, object]] = None, **kwargs ): super(IntegrationRuntimeReference, self).__init__(**kwargs) @@ -1761,7 +1761,7 @@ class IntegrationRuntimeResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Integration runtime properties. 
- :type properties: object + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { @@ -1777,13 +1777,13 @@ class IntegrationRuntimeResource(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, } def __init__( self, *, - properties: object, + properties: "IntegrationRuntime", **kwargs ): super(IntegrationRuntimeResource, self).__init__(**kwargs) @@ -1802,7 +1802,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): :type catalog_admin_user_name: str :param catalog_admin_password: The password of the administrator user account of the catalog database. - :type catalog_admin_password: object + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values include: "Basic", "Standard", "Premium", "PremiumRS". @@ -1818,7 +1818,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'object'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, } @@ -1828,7 +1828,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, catalog_server_endpoint: Optional[str] = None, catalog_admin_user_name: Optional[str] = None, - catalog_admin_password: Optional[object] = None, + catalog_admin_password: Optional["SecureString"] = None, catalog_pricing_tier: Optional[Union[str, "IntegrationRuntimeSsisCatalogPricingTier"]] = None, **kwargs ): @@ -2000,7 +2000,7 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): :ivar name: The integration runtime name. :vartype name: str :param properties: Required. Integration runtime properties. - :type properties: object + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { @@ -2010,13 +2010,13 @@ class IntegrationRuntimeStatusResponse(msrest.serialization.Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, } def __init__( self, *, - properties: object, + properties: "IntegrationRuntimeStatus", **kwargs ): super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) @@ -2151,7 +2151,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): sharing.Constant filled by server. :type authorization_type: str :param key: Required. The key used for authorization. 
- :type key: object + :type key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { @@ -2161,13 +2161,13 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): _attribute_map = { 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'object'}, + 'key': {'key': 'key', 'type': 'SecureString'}, } def __init__( self, *, - key: object, + key: "SecureString", **kwargs ): super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) @@ -2235,6 +2235,46 @@ def __init__( self.linked_factory_name = linked_factory_name +class LinkedServiceReference(msrest.serialization.Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__( + self, + *, + reference_name: str, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters + + class ManagedIntegrationRuntime(IntegrationRuntime): """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. @@ -2535,6 +2575,46 @@ def __init__( self.package_store_linked_service = package_store_linked_service +class PipelineReference(msrest.serialization.Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. + :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__( + self, + *, + reference_name: str, + name: Optional[str] = None, + **kwargs + ): + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.name = name + + class RecurrenceSchedule(msrest.serialization.Model): """The recurrence schedule. @@ -2739,7 +2819,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param pipelines: Pipelines that need to be started. - :type pipelines: list[object] + :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param recurrence: Required. Recurrence schedule configuration. 
:type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ @@ -2756,7 +2836,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } @@ -2767,7 +2847,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, annotations: Optional[List[object]] = None, - pipelines: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) @@ -3772,6 +3852,32 @@ def __init__( self.next_link = next_link +class TriggerPipelineReference(msrest.serialization.Model): + """Pipeline that needs to be triggered with the given parameters. + + :param pipeline_reference: Pipeline reference. + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + pipeline_reference: Optional["PipelineReference"] = None, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = pipeline_reference + self.parameters = parameters + + class TriggerQueryResponse(msrest.serialization.Model): """A query of triggers. @@ -3856,7 +3962,7 @@ class TriggerResource(SubResource): :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Properties of the trigger. - :type properties: object + :type properties: ~azure.mgmt.datafactory.models.Trigger """ _validation = { @@ -3872,13 +3978,13 @@ class TriggerResource(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, } def __init__( self, *, - properties: object, + properties: "Trigger", **kwargs ): super(TriggerResource, self).__init__(**kwargs) @@ -3937,7 +4043,7 @@ class TumblingWindowTrigger(Trigger): :type annotations: list[object] :param pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. - :type pipeline: object + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: "Minute", "Hour". 
:type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency @@ -3980,7 +4086,7 @@ class TumblingWindowTrigger(Trigger): 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipeline': {'key': 'pipeline', 'type': 'object'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, @@ -3994,7 +4100,7 @@ class TumblingWindowTrigger(Trigger): def __init__( self, *, - pipeline: object, + pipeline: "TriggerPipelineReference", frequency: Union[str, "TumblingWindowFrequency"], interval: int, start_time: datetime.datetime, @@ -4070,22 +4176,23 @@ class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): """Update integration runtime request. :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: object + runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: "On", + "Off". + :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. :type update_delay_offset: str """ _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'object'}, + 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, } def __init__( self, *, - auto_update: Optional[object] = None, + auto_update: Optional[Union[str, "IntegrationRuntimeAutoUpdate"]] = None, update_delay_offset: Optional[str] = None, **kwargs ): diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py index 51273f9cd..dd19a9932 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py @@ -124,7 +124,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - properties, # type: object + properties, # type: "models.IntegrationRuntime" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -138,7 +138,7 @@ def create_or_update( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param properties: Integration runtime properties. - :type properties: object + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str @@ -272,7 +272,7 @@ def update( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - auto_update=None, # type: Optional[object] + auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] update_delay_offset=None, # type: Optional[str] **kwargs # type: Any ): @@ -287,7 +287,7 @@ def update( :type integration_runtime_name: str :param auto_update: Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: object + :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. :type update_delay_offset: str @@ -469,7 +469,7 @@ def get_connection_info( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> object + # type: (...) -> "models.IntegrationRuntimeConnectionInfo" """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. :param resource_group_name: The resource group name. @@ -479,11 +479,11 @@ def get_connection_info( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: object or the result of cls(response) - :rtype: object + :return: IntegrationRuntimeConnectionInfo or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[object] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -515,7 +515,7 @@ def get_connection_info( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('object', pipeline_response) + deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) @@ -528,10 +528,10 @@ def regenerate_auth_key( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - regenerate_key_parameters, # type: object + key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] **kwargs # type: Any ): - # type: (...) -> object + # type: (...) -> "models.IntegrationRuntimeAuthKeys" """Regenerates the authentication key for an integration runtime. :param resource_group_name: The resource group name. @@ -540,17 +540,18 @@ def regenerate_auth_key( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param regenerate_key_parameters: The parameters for regenerating integration runtime - authentication key. - :type regenerate_key_parameters: object + :param key_name: The name of the authentication key to regenerate. 
+ :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName :keyword callable cls: A custom type or function that will be passed the direct response - :return: object or the result of cls(response) - :rtype: object + :return: IntegrationRuntimeAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[object] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + + _regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") @@ -575,7 +576,7 @@ def regenerate_auth_key( # Construct and send request body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'object') + body_content = self._serialize.body(_regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) @@ -586,7 +587,7 @@ def regenerate_auth_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('object', pipeline_response) + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) @@ -601,7 +602,7 @@ def list_auth_key( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> object + # type: (...) -> "models.IntegrationRuntimeAuthKeys" """Retrieves the authentication keys for an integration runtime. :param resource_group_name: The resource group name. @@ -611,11 +612,11 @@ def list_auth_key( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: object or the result of cls(response) - :rtype: object + :return: IntegrationRuntimeAuthKeys or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[object] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -647,7 +648,7 @@ def list_auth_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('object', pipeline_response) + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) @@ -914,7 +915,7 @@ def get_monitoring_data( integration_runtime_name, # type: str **kwargs # type: Any ): - # type: (...) -> object + # type: (...) -> "models.IntegrationRuntimeMonitoringData" """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. 
:param resource_group_name: The resource group name. @@ -924,11 +925,11 @@ def get_monitoring_data( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: object or the result of cls(response) - :rtype: object + :return: IntegrationRuntimeMonitoringData or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[object] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" @@ -960,7 +961,7 @@ def get_monitoring_data( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize('object', pipeline_response) + deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py index c135de638..5c953b799 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py @@ -197,7 +197,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str trigger_name, # type: str - properties, # type: object + properties, # type: "models.Trigger" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -211,7 +211,7 @@ def create_or_update( :param trigger_name: The trigger name. :type trigger_name: str :param properties: Properties of the trigger. - :type properties: object + :type properties: ~azure.mgmt.datafactory.models.Trigger :param if_match: ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str diff --git a/src/test/scenarios/datafactory/output/src/datafactory/report.md b/src/test/scenarios/datafactory/output/src/datafactory/report.md index 61e7c10ff..891d099c9 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/report.md +++ b/src/test/scenarios/datafactory/output/src/datafactory/report.md @@ -57,17 +57,6 @@ get-git-hub-access-token a datafactory. |**--git-hub-access-code**|string|GitHub access code.|git_hub_access_code| |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id| -### datafactory integration-runtime create - -create a datafactory integration-runtime. 
- -|Option|Type|Description|Path (SDK)|Path (swagger)| -|------|----|-----------|----------|--------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name| -|**--factory-name**|string|The factory name.|factory_name| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| -|**--properties**|any|Integration runtime properties.|properties| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| ### datafactory integration-runtime delete delete a datafactory integration-runtime. @@ -134,6 +123,19 @@ list-auth-key a datafactory integration-runtime. |**--resource-group-name**|string|The resource group name.|resource_group_name| |**--factory-name**|string|The factory name.|factory_name| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime managed create + +managed create a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| +|**--description**|string|Integration runtime description.|managed_description| +|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties| +|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties| ### datafactory integration-runtime regenerate-auth-key regenerate-auth-key a datafactory integration-runtime. @@ -143,7 +145,7 @@ regenerate-auth-key a datafactory integration-runtime. |**--resource-group-name**|string|The resource group name.|resource_group_name| |**--factory-name**|string|The factory name.|factory_name| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| -|**--regenerate-key-parameters**|any|The parameters for regenerating integration runtime authentication key.|regenerate_key_parameters| +|**--key-name**|choice|The name of the authentication key to regenerate.|key_name| ### datafactory integration-runtime remove-link remove-link a datafactory integration-runtime. @@ -154,6 +156,18 @@ remove-link a datafactory integration-runtime. |**--factory-name**|string|The factory name.|factory_name| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| |**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name| +### datafactory integration-runtime self-hosted create + +self-hosted create a datafactory integration-runtime. 
+ +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| +|**--description**|string|Integration runtime description.|self_hosted_description| +|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info| ### datafactory integration-runtime show show a datafactory integration-runtime. @@ -200,7 +214,7 @@ update a datafactory integration-runtime. |**--resource-group-name**|string|The resource group name.|resource_group_name| |**--factory-name**|string|The factory name.|factory_name| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| -|**--auto-update**|any|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update| +|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update| |**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset| ### datafactory integration-runtime upgrade @@ -236,7 +250,7 @@ create a datafactory trigger. |**--resource-group-name**|string|The resource group name.|resource_group_name| |**--factory-name**|string|The factory name.|factory_name| |**--trigger-name**|string|The trigger name.|trigger_name| -|**--properties**|any|Properties of the trigger.|properties| +|**--properties**|object|Properties of the trigger.|properties| |**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| ### datafactory trigger delete @@ -329,7 +343,7 @@ create a datafactory trigger. |**--resource-group-name**|string|The resource group name.|resource_group_name| |**--factory-name**|string|The factory name.|factory_name| |**--trigger-name**|string|The trigger name.|trigger_name| -|**--properties**|any|Properties of the trigger.|properties| +|**--properties**|object|Properties of the trigger.|properties| |**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| ### datafactory update
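
---

The model hunks above retype `PipelineReference` and `TriggerPipelineReference` from bare `object` fields into concrete msrest models. Below is a minimal sketch of what that buys at the call site; it is illustrative only and assumes the vendored models are importable under the `azure.mgmt.datafactory.models` namespace that the generated docstrings reference (the pipeline name and parameter payload are made-up values):

```python
from azure.mgmt.datafactory.models import (
    PipelineReference,
    TriggerPipelineReference,
)

# Typed construction: reference_name is validated as required, and the
# constant discriminator type = "PipelineReference" is supplied by the class.
ref = PipelineReference(reference_name="examplePipeline")

trigger_pipeline = TriggerPipelineReference(
    pipeline_reference=ref,
    parameters={"OutputBlobNameList": ["exampleoutput.csv"]},  # '{object}' map
)

# msrest.serialization.Model.serialize() applies each _attribute_map, so the
# wire payload uses the camelCase keys from the swagger definitions.
print(trigger_pipeline.serialize())
# Expected shape (key order may differ):
# {'pipelineReference': {'type': 'PipelineReference',
#                        'referenceName': 'examplePipeline'},
#  'parameters': {'OutputBlobNameList': ['exampleoutput.csv']}}
```

With `'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}` in the attribute maps, responses now deserialize into typed `TriggerPipelineReference` instances instead of raw dicts.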
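The same applies to the trigger subclasses: `BlobTrigger.__init__` now takes a `LinkedServiceReference` and a typed pipeline list, exactly as its updated signature shows. A sketch with placeholder resource names, under the same namespace assumption as above:

```python
from azure.mgmt.datafactory.models import (
    BlobTrigger,
    LinkedServiceReference,
    PipelineReference,
    TriggerPipelineReference,
)

# LinkedServiceReference carries its own constant type discriminator,
# mirroring the new #/definitions/LinkedServiceReference swagger entry.
storage = LinkedServiceReference(reference_name="exampleLinkedService")

trigger = BlobTrigger(
    folder_path="exampleContainer/exampleFolder",
    max_concurrency=2,
    linked_service=storage,  # previously typed as bare `object`
    pipelines=[
        TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name="examplePipeline"),
        )
    ],
)
```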
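On the operations side, `regenerate_auth_key` now builds `IntegrationRuntimeRegenerateKeyParameters` internally from a flattened `key_name` argument, and the auth-key operations deserialize into `IntegrationRuntimeAuthKeys`. An illustrative call, assuming a vendored management client instance `client` whose operation group is exposed as `client.integration_runtime` (matching `_integration_runtime_operations.py`) and the usual `auth_key1`/`auth_key2` properties on the response model; all resource names are placeholders:

```python
keys = client.integration_runtime.regenerate_auth_key(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    key_name="authKey2",  # str or models.IntegrationRuntimeAuthKeyName
)

# The method now returns IntegrationRuntimeAuthKeys rather than a bare
# `object`, so callers read attributes instead of doing dict lookups.
print(keys.auth_key2)
```

This flattening is what turns the CLI's former `--regenerate-key-parameters|any` option into the `--key-name|choice` option shown in the report.md hunk.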
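The `update` operation follows the same pattern: `auto_update` is typed as `Union[str, IntegrationRuntimeAutoUpdate]` rather than `object`, which is what lets report.md surface it as a `choice` option. A sketch under the same assumptions as above (placeholder names, assumed `client.integration_runtime` attribute):

```python
status = client.integration_runtime.update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    auto_update="Off",            # one of the documented "On"/"Off" values
    update_delay_offset="PT03H",  # offset format used in the descriptions
)
```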