[AutoPR datafactory/resource-manager] [Datafactory] Adding script actions support for HDI on demand linked service (#3451)

* Generated from 9dec6b08742c433d0429a8737a65fdc690863d6b

Adding parameters

* Packaging update of azure-mgmt-datafactory
AutorestCI authored Sep 28, 2018
1 parent eb0767e commit fa39544
Showing 11 changed files with 125 additions and 67 deletions.
1 change: 0 additions & 1 deletion azure-mgmt-datafactory/MANIFEST.in
@@ -1,2 +1 @@
include *.rst
-include azure_bdist_wheel.py
2 changes: 1 addition & 1 deletion azure-mgmt-datafactory/azure/__init__.py
@@ -1 +1 @@
-__import__('pkg_resources').declare_namespace(__name__)
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
2 changes: 1 addition & 1 deletion azure-mgmt-datafactory/azure/mgmt/__init__.py
@@ -1 +1 @@
-__import__('pkg_resources').declare_namespace(__name__)
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
3 changes: 3 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -73,6 +73,7 @@
from .responsys_linked_service_py3 import ResponsysLinkedService
from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService
from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService
+from .script_action_py3 import ScriptAction
from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService
from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService
from .netezza_linked_service_py3 import NetezzaLinkedService
@@ -418,6 +419,7 @@
from .responsys_linked_service import ResponsysLinkedService
from .azure_databricks_linked_service import AzureDatabricksLinkedService
from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
+from .script_action import ScriptAction
from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService
from .netezza_linked_service import NetezzaLinkedService
@@ -831,6 +833,7 @@
    'ResponsysLinkedService',
    'AzureDatabricksLinkedService',
    'AzureDataLakeAnalyticsLinkedService',
+    'ScriptAction',
    'HDInsightOnDemandLinkedService',
    'SalesforceMarketingCloudLinkedService',
    'NetezzaLinkedService',
6 changes: 6 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
@@ -132,6 +132,10 @@ class HDInsightOnDemandLinkedService(LinkedService):
    :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for
     the HDInsight cluster.
    :type zookeeper_node_size: object
+    :param script_actions: Custom script actions to run on HDI ondemand
+     cluster once it's up. Please refer to
+     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
+    :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction]
    """

    _validation = {
@@ -182,6 +186,7 @@ class HDInsightOnDemandLinkedService(LinkedService):
        'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'},
        'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'},
        'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'},
+        'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
    }

    def __init__(self, **kwargs):
@@ -216,4 +221,5 @@ def __init__(self, **kwargs):
        self.head_node_size = kwargs.get('head_node_size', None)
        self.data_node_size = kwargs.get('data_node_size', None)
        self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None)
+        self.script_actions = kwargs.get('script_actions', None)
        self.type = 'HDInsightOnDemand'
8 changes: 7 additions & 1 deletion azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py
@@ -132,6 +132,10 @@ class HDInsightOnDemandLinkedService(LinkedService):
    :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for
     the HDInsight cluster.
    :type zookeeper_node_size: object
+    :param script_actions: Custom script actions to run on HDI ondemand
+     cluster once it's up. Please refer to
+     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
+    :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction]
    """

    _validation = {
@@ -182,9 +186,10 @@ class HDInsightOnDemandLinkedService(LinkedService):
        'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'},
        'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'},
        'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'},
+        'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
    }

-    def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, **kwargs) -> None:
+    def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, **kwargs) -> None:
        super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.cluster_size = cluster_size
        self.time_to_live = time_to_live
@@ -216,4 +221,5 @@ def __init__(self, *, cluster_size, time_to_live, version, linked_service_name,
        self.head_node_size = head_node_size
        self.data_node_size = data_node_size
        self.zookeeper_node_size = zookeeper_node_size
+        self.script_actions = script_actions
        self.type = 'HDInsightOnDemand'
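
Together, the two model changes above let callers attach script actions when defining an on-demand HDInsight linked service. The following is an illustrative sketch only, not part of this diff: the linked service reference name, script name, URI, roles value, and the angle-bracket placeholders are all made up.

from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService,
    LinkedServiceReference,
    ScriptAction,
    SecureString,
)

# Hypothetical script action; roles is typed as object, so a simple string such
# as "headnode" or "workernode" can be passed through to the service.
install_packages = ScriptAction(
    name="InstallExtraPackages",
    uri="https://example.com/scripts/install.sh",
    roles="headnode",
    parameters="--quiet",
)

linked_service = HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live="00:15:00",
    version="3.6",
    linked_service_name=LinkedServiceReference(reference_name="AzureStorageLinkedService"),
    host_subscription_id="<subscription-id>",
    tenant="<tenant-id>",
    cluster_resource_group="<resource-group>",
    service_principal_id="<service-principal-id>",
    service_principal_key=SecureString(value="<service-principal-key>"),
    # New in this commit: custom script actions run once the cluster is up.
    script_actions=[install_packages],
)
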
49 changes: 49 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py
@@ -0,0 +1,49 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class ScriptAction(Model):
    """Custom script action to run on HDI ondemand cluster once it's up.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. The user provided name of the script action.
    :type name: str
    :param uri: Required. The URI for the script action.
    :type uri: str
    :param roles: Required. The node types on which the script action should
     be executed.
    :type roles: object
    :param parameters: The parameters for the script action.
    :type parameters: str
    """

    _validation = {
        'name': {'required': True},
        'uri': {'required': True},
        'roles': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'roles': {'key': 'roles', 'type': 'object'},
        'parameters': {'key': 'parameters', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ScriptAction, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.uri = kwargs.get('uri', None)
        self.roles = kwargs.get('roles', None)
        self.parameters = kwargs.get('parameters', None)
49 changes: 49 additions & 0 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py
@@ -0,0 +1,49 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class ScriptAction(Model):
    """Custom script action to run on HDI ondemand cluster once it's up.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. The user provided name of the script action.
    :type name: str
    :param uri: Required. The URI for the script action.
    :type uri: str
    :param roles: Required. The node types on which the script action should
     be executed.
    :type roles: object
    :param parameters: The parameters for the script action.
    :type parameters: str
    """

    _validation = {
        'name': {'required': True},
        'uri': {'required': True},
        'roles': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'roles': {'key': 'roles', 'type': 'object'},
        'parameters': {'key': 'parameters', 'type': 'str'},
    }

    def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None:
        super(ScriptAction, self).__init__(**kwargs)
        self.name = name
        self.uri = uri
        self.roles = roles
        self.parameters = parameters
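
As a quick illustration (again not part of the diff, with made-up values), the _attribute_map above means a ScriptAction serializes to the flat name/uri/roles/parameters shape the Data Factory REST API expects under typeProperties.scriptActions. The sketch below mirrors roughly how the generated client builds its msrest serializer.

from msrest import Serializer

from azure.mgmt.datafactory import models
from azure.mgmt.datafactory.models import ScriptAction

# Hypothetical script action; only name, uri and roles are required per _validation.
action = ScriptAction(
    name="InstallR",
    uri="https://example.com/scripts/install-r.sh",
    roles="workernode",
)

# Build a serializer over the package's model classes, as the generated
# operations classes do, and serialize the model via its attribute map.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
serializer = Serializer(client_models)
print(serializer.body(action, 'ScriptAction'))
# Roughly: {'name': 'InstallR', 'uri': 'https://example.com/scripts/install-r.sh',
#           'roles': 'workernode'}
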
54 changes: 0 additions & 54 deletions azure-mgmt-datafactory/azure_bdist_wheel.py

This file was deleted.

1 change: 0 additions & 1 deletion azure-mgmt-datafactory/setup.cfg
@@ -1,3 +1,2 @@
[bdist_wheel]
universal=1
-azure-namespace-package=azure-mgmt-nspkg
17 changes: 9 additions & 8 deletions azure-mgmt-datafactory/setup.py
@@ -10,12 +10,6 @@
import os.path
from io import open
from setuptools import find_packages, setup
-try:
-    from azure_bdist_wheel import cmdclass
-except ImportError:
-    from distutils import log as logger
-    logger.warn("Wheel is not available, disabling bdist_wheel hook")
-    cmdclass = {}

# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-datafactory"
@@ -76,11 +70,18 @@
        'License :: OSI Approved :: MIT License',
    ],
    zip_safe=False,
-    packages=find_packages(exclude=["tests"]),
+    packages=find_packages(exclude=[
+        'tests',
+        # Exclude packages that will be covered by PEP420 or nspkg
+        'azure',
+        'azure.mgmt',
+    ]),
    install_requires=[
        'msrest>=0.5.0',
        'msrestazure>=0.4.32,<2.0.0',
        'azure-common~=1.1',
    ],
-    cmdclass=cmdclass
+    extras_require={
+        ":python_version<'3.0'": ['azure-mgmt-nspkg'],
+    }
)
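
The packaging changes above drop the azure_bdist_wheel/cmdclass hook and stop installing azure-mgmt-nspkg unconditionally: the ":python_version<'3.0'" key in extras_require is a PEP 508 environment marker, so the namespace-package shim is only pulled in on Python 2, while Python 3 relies on the pkgutil-style namespace __init__.py files changed earlier in this commit. A minimal sketch of how such a marker evaluates, assuming the third-party packaging library is available:

from packaging.markers import Marker

# The same condition used as the extras_require key in setup.py above.
marker = Marker("python_version < '3.0'")

# True on a Python 2 interpreter (azure-mgmt-nspkg would be installed),
# False on Python 3 (namespace packages are handled by pkgutil/PEP 420).
print(marker.evaluate())
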
