diff --git a/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md b/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md index c9609e6fc1e0..a84b16da6e5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md +++ b/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md @@ -1,5 +1,23 @@ # Release History +## 0.14.0 (2020-10-23) + +**Features** + + - Model OrcSink has a new parameter format_settings + - Model DelimitedTextWriteSettings has a new parameter max_rows_per_file + - Model DelimitedTextWriteSettings has a new parameter file_name_prefix + - Model RestSink has a new parameter http_compression_type + - Model ParquetSink has a new parameter format_settings + - Model AvroWriteSettings has a new parameter max_rows_per_file + - Model AvroWriteSettings has a new parameter file_name_prefix + +**Breaking changes** + + - Model RestSink no longer has parameter wrap_request_json_in_an_object + - Model RestSink no longer has parameter compression_type + + ## 0.13.0 (2020-08-25) **Features** diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 95dc22d97aad..7cc4b3b52518 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -48,6 +48,12 @@ from ._models_py3 import AzureBlobStorageLocation from ._models_py3 import AzureBlobStorageReadSettings from ._models_py3 import AzureBlobStorageWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from ._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from ._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureDataExplorerCommandActivity from ._models_py3 import AzureDataExplorerLinkedService @@ -129,8 +135,10 @@ from ._models_py3 import ConnectionStateProperties from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity + from ._models_py3 import CopyActivityLogSettings from ._models_py3 import CopySink from ._models_py3 import CopySource + from ._models_py3 import CopyTranslator from ._models_py3 import CosmosDbLinkedService from ._models_py3 import CosmosDbMongoDbApiCollectionDataset from ._models_py3 import CosmosDbMongoDbApiLinkedService @@ -173,6 +181,7 @@ from ._models_py3 import Dataset from ._models_py3 import DatasetBZip2Compression from ._models_py3 import DatasetCompression + from ._models_py3 import DatasetDataElement from ._models_py3 import DatasetDebugResource from ._models_py3 import DatasetDeflateCompression from ._models_py3 import DatasetFolder @@ -180,7 +189,10 @@ from ._models_py3 import DatasetLocation from ._models_py3 import DatasetReference from ._models_py3 import DatasetResource + from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -342,6 +354,8 @@ from ._models_py3 import LinkedServiceDebugResource from ._models_py3 import LinkedServiceReference from ._models_py3 import 
LinkedServiceResource + from ._models_py3 import LogLocationSettings + from ._models_py3 import LogSettings from ._models_py3 import LogStorageSettings from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService @@ -367,6 +381,9 @@ from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset + from ._models_py3 import MongoDbAtlasCollectionDataset + from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties from ._models_py3 import MongoDbLinkedService @@ -411,12 +428,14 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource @@ -567,6 +586,9 @@ from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource + from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -585,6 +607,7 @@ from ._models_py3 import TriggerSubscriptionOperationStatus from ._models_py3 import TumblingWindowTrigger from ._models_py3 import TumblingWindowTriggerDependencyReference + from ._models_py3 import TypeConversionSettings from ._models_py3 import UntilActivity from ._models_py3 import UpdateIntegrationRuntimeNodeRequest from ._models_py3 import UpdateIntegrationRuntimeRequest @@ -655,6 +678,12 @@ from ._models import AzureBlobStorageLocation from ._models import AzureBlobStorageReadSettings from ._models import AzureBlobStorageWriteSettings + from ._models import AzureDatabricksDeltaLakeDataset + from ._models import AzureDatabricksDeltaLakeExportCommand + from ._models import AzureDatabricksDeltaLakeImportCommand + from ._models import AzureDatabricksDeltaLakeLinkedService + from ._models import AzureDatabricksDeltaLakeSink + from ._models import AzureDatabricksDeltaLakeSource from ._models import AzureDatabricksLinkedService from ._models import AzureDataExplorerCommandActivity from ._models import AzureDataExplorerLinkedService @@ -736,8 +765,10 @@ from ._models import ConnectionStateProperties from ._models import ControlActivity from ._models import CopyActivity + from ._models import CopyActivityLogSettings from ._models import CopySink from ._models import CopySource + from ._models import CopyTranslator from ._models import CosmosDbLinkedService from ._models import CosmosDbMongoDbApiCollectionDataset from ._models import CosmosDbMongoDbApiLinkedService @@ -780,6 +811,7 @@ from ._models import Dataset from ._models import DatasetBZip2Compression from ._models import DatasetCompression + from ._models import DatasetDataElement from ._models import DatasetDebugResource from ._models import DatasetDeflateCompression from ._models import DatasetFolder @@ -787,7 +819,10 @@ from ._models 
import DatasetLocation from ._models import DatasetReference from ._models import DatasetResource + from ._models import DatasetSchemaDataElement from ._models import DatasetStorageFormat + from ._models import DatasetTarCompression + from ._models import DatasetTarGZipCompression from ._models import DatasetZipDeflateCompression from ._models import Db2LinkedService from ._models import Db2Source @@ -949,6 +984,8 @@ from ._models import LinkedServiceDebugResource from ._models import LinkedServiceReference from ._models import LinkedServiceResource + from ._models import LogLocationSettings + from ._models import LogSettings from ._models import LogStorageSettings from ._models import LookupActivity from ._models import MagentoLinkedService @@ -974,6 +1011,9 @@ from ._models import MicrosoftAccessSink from ._models import MicrosoftAccessSource from ._models import MicrosoftAccessTableDataset + from ._models import MongoDbAtlasCollectionDataset + from ._models import MongoDbAtlasLinkedService + from ._models import MongoDbAtlasSource from ._models import MongoDbCollectionDataset from ._models import MongoDbCursorMethodsProperties from ._models import MongoDbLinkedService @@ -1018,12 +1058,14 @@ from ._models import OrcFormat from ._models import OrcSink from ._models import OrcSource + from ._models import OrcWriteSettings from ._models import PackageStore from ._models import ParameterSpecification from ._models import ParquetDataset from ._models import ParquetFormat from ._models import ParquetSink from ._models import ParquetSource + from ._models import ParquetWriteSettings from ._models import PaypalLinkedService from ._models import PaypalObjectDataset from ._models import PaypalSource @@ -1174,6 +1216,9 @@ from ._models import SybaseSource from ._models import SybaseTableDataset from ._models import TabularSource + from ._models import TabularTranslator + from ._models import TarGZipReadSettings + from ._models import TarReadSettings from ._models import TeradataLinkedService from ._models import TeradataPartitionSettings from ._models import TeradataSource @@ -1192,6 +1237,7 @@ from ._models import TriggerSubscriptionOperationStatus from ._models import TumblingWindowTrigger from ._models import TumblingWindowTriggerDependencyReference + from ._models import TypeConversionSettings from ._models import UntilActivity from ._models import UpdateIntegrationRuntimeNodeRequest from ._models import UpdateIntegrationRuntimeRequest @@ -1236,6 +1282,7 @@ from ._paged_models import TriggerResourcePaged from ._data_factory_management_client_enums import ( GlobalParameterType, + PublicNetworkAccess, IntegrationRuntimeState, IntegrationRuntimeAutoUpdate, ParameterType, @@ -1297,6 +1344,7 @@ SsisPackageLocationType, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, + DynamicsSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, PolybaseSettingsRejectType, JsonWriteFilePattern, @@ -1313,6 +1361,7 @@ IntegrationRuntimeEdition, SsisObjectMetadataType, IntegrationRuntimeAuthKeyName, + CopyBehaviorType, ) __all__ = [ @@ -1354,6 +1403,12 @@ 'AzureBlobStorageLocation', 'AzureBlobStorageReadSettings', 'AzureBlobStorageWriteSettings', + 'AzureDatabricksDeltaLakeDataset', + 'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureDataExplorerCommandActivity', 'AzureDataExplorerLinkedService', @@ -1435,8 +1490,10 @@ 
'ConnectionStateProperties', 'ControlActivity', 'CopyActivity', + 'CopyActivityLogSettings', 'CopySink', 'CopySource', + 'CopyTranslator', 'CosmosDbLinkedService', 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbMongoDbApiLinkedService', @@ -1479,6 +1536,7 @@ 'Dataset', 'DatasetBZip2Compression', 'DatasetCompression', + 'DatasetDataElement', 'DatasetDebugResource', 'DatasetDeflateCompression', 'DatasetFolder', @@ -1486,7 +1544,10 @@ 'DatasetLocation', 'DatasetReference', 'DatasetResource', + 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1648,6 +1709,8 @@ 'LinkedServiceDebugResource', 'LinkedServiceReference', 'LinkedServiceResource', + 'LogLocationSettings', + 'LogSettings', 'LogStorageSettings', 'LookupActivity', 'MagentoLinkedService', @@ -1673,6 +1736,9 @@ 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', + 'MongoDbAtlasCollectionDataset', + 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', 'MongoDbLinkedService', @@ -1717,12 +1783,14 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', @@ -1873,6 +1941,9 @@ 'SybaseSource', 'SybaseTableDataset', 'TabularSource', + 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -1891,6 +1962,7 @@ 'TriggerSubscriptionOperationStatus', 'TumblingWindowTrigger', 'TumblingWindowTriggerDependencyReference', + 'TypeConversionSettings', 'UntilActivity', 'UpdateIntegrationRuntimeNodeRequest', 'UpdateIntegrationRuntimeRequest', @@ -1934,6 +2006,7 @@ 'ManagedVirtualNetworkResourcePaged', 'ManagedPrivateEndpointResourcePaged', 'GlobalParameterType', + 'PublicNetworkAccess', 'IntegrationRuntimeState', 'IntegrationRuntimeAutoUpdate', 'ParameterType', @@ -1995,6 +2068,7 @@ 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', 'SalesforceSinkWriteBehavior', + 'DynamicsSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', 'PolybaseSettingsRejectType', 'JsonWriteFilePattern', @@ -2011,4 +2085,5 @@ 'IntegrationRuntimeEdition', 'SsisObjectMetadataType', 'IntegrationRuntimeAuthKeyName', + 'CopyBehaviorType', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 4eb33cce9961..7ada06846cbb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -22,6 +22,12 @@ class GlobalParameterType(str, Enum): array = "Array" +class PublicNetworkAccess(str, Enum): + + enabled = "Enabled" + disabled = "Disabled" + + class IntegrationRuntimeState(str, Enum): initial = "Initial" @@ -320,6 +326,7 @@ class OrcCompressionCodec(str, Enum): none = "none" zlib = "zlib" snappy = "snappy" + lzo = "lzo" class AvroCompressionCodec(str, Enum): @@ -335,6 +342,7 @@ class TumblingWindowFrequency(str, Enum): minute = "Minute" hour = "Hour" + month = "Month" class BlobEventTypes(str, Enum): @@ -499,6 +507,11 @@ 
class SalesforceSinkWriteBehavior(str, Enum): upsert = "Upsert" +class DynamicsSinkWriteBehavior(str, Enum): + + upsert = "Upsert" + + class AzureSearchIndexWriteBehaviorType(str, Enum): merge = "Merge" @@ -605,3 +618,10 @@ class IntegrationRuntimeAuthKeyName(str, Enum): auth_key1 = "authKey1" auth_key2 = "authKey2" + + +class CopyBehaviorType(str, Enum): + + preserve_hierarchy = "PreserveHierarchy" + flatten_hierarchy = "FlattenHierarchy" + merge_files = "MergeFiles" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index d712ea8c161c..26c9212d4308 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -334,40 +334,41 @@ class LinkedService(Model): AzureFunctionLinkedService, AzureDataExplorerLinkedService, SapTableLinkedService, GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - AzureMariaDBLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, + ResponsysLinkedService, AzureDatabricksDeltaLakeLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - 
WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLServiceLinkedService, AzureMLLinkedService, TeradataLinkedService, - Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, - MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, - GoogleCloudStorageLinkedService, AzureFileStorageLinkedService, - FileServerLinkedService, HDInsightLinkedService, - CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlMILinkedService, - AzureSqlDatabaseLinkedService, SqlServerLinkedService, - AzureSqlDWLinkedService, AzureTableStorageLinkedService, - AzureBlobStorageLinkedService, AzureStorageLinkedService + MongoDbV2LinkedService, MongoDbAtlasLinkedService, MongoDbLinkedService, + CassandraLinkedService, WebLinkedService, ODataLinkedService, + HdfsLinkedService, MicrosoftAccessLinkedService, InformixLinkedService, + OdbcLinkedService, AzureMLServiceLinkedService, AzureMLLinkedService, + TeradataLinkedService, Db2LinkedService, SybaseLinkedService, + PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, + OracleLinkedService, GoogleCloudStorageLinkedService, + AzureFileStorageLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService All required parameters must be populated in order to send to Azure. 
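# --- Editor's note (not part of the upstream diff): a minimal sketch of constructing
# the two linked services newly registered in the hierarchy above. Every value below is
# a hypothetical placeholder; only the class names come from this changeset, and the
# MongoDbAtlas parameters are assumed to mirror MongoDbV2LinkedService.
from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeLinkedService,
    MongoDbAtlasLinkedService,
    SecureString,
)

# The 'type' discriminator ('AzureDatabricksDeltaLake') is a constant filled by the
# server, so callers pass only the typeProperties-backed parameters.
delta_lake_ls = AzureDatabricksDeltaLakeLinkedService(
    domain="adb-1234567890123456.7.azuredatabricks.net",  # placeholder workspace domain
    access_token=SecureString(value="<databricks-pat>"),  # placeholder secret
    cluster_id="0000-000000-abcdefgh",                    # placeholder interactive cluster id
)

mongo_atlas_ls = MongoDbAtlasLinkedService(
    connection_string="mongodb+srv://user:pass@cluster0.example.mongodb.net",  # placeholder
    database="sales",  # placeholder database name
)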
@@ -403,7 +404,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 
'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 
'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): @@ -515,7 +516,8 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SharePointOnlineListResourceDataset, SnowflakeDataset, + sub-classes are: AzureDatabricksDeltaLakeDataset, + SharePointOnlineListResourceDataset, SnowflakeDataset, GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, @@ -539,8 +541,8 @@ class Dataset(Model): Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, + MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CosmosDbSqlApiCollectionDataset, CustomDataset, CassandraTableDataset, @@ -597,7 +599,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 
'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 
'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): @@ -677,16 +679,17 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: SharePointOnlineListSource, SnowflakeSource, HttpSource, + sub-classes are: SharePointOnlineListSource, + AzureDatabricksDeltaLakeSource, SnowflakeSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource, - OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource, - RestSource, SalesforceServiceCloudSource, ODataSource, - MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, - DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, - DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, - OrcSource, XmlSource, JsonSource, DelimitedTextSource, ParquetSource, - ExcelSource, AvroSource + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbAtlasSource, + MongoDbSource, WebSource, OracleSource, AzureDataExplorerSource, + HdfsSource, FileSystemSource, RestSource, SalesforceServiceCloudSource, + ODataSource, MicrosoftAccessSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + CosmosDbSqlApiSource, DocumentDbCollectionSource, BlobSource, + TabularSource, BinarySource, OrcSource, XmlSource, JsonSource, + DelimitedTextSource, ParquetSource, ExcelSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -721,7 +724,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} + 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 
'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): @@ -1186,6 +1189,10 @@ class AmazonS3LinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param authentication_type: The authentication type of S3. Allowed value: + AccessKey (default) or TemporarySecurityCredentials. Type: string (or + Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). @@ -1198,6 +1205,9 @@ class AmazonS3LinkedService(LinkedService): try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security + credential. + :type session_token: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
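# --- Editor's note (not part of the upstream diff): how the two AmazonS3LinkedService
# parameters documented above fit together. The parameter names and the
# 'TemporarySecurityCredentials' value come from this changeset; the credential values
# are placeholders.
from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

s3_ls = AmazonS3LinkedService(
    authentication_type="TemporarySecurityCredentials",       # default is 'AccessKey'
    access_key_id="AKIAIOSFODNN7EXAMPLE",                     # placeholder IAM key id
    secret_access_key=SecureString(value="<secret-key>"),     # placeholder secret
    session_token=SecureString(value="<sts-session-token>"),  # used only with temporary credentials
)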
@@ -1215,17 +1225,21 @@ class AmazonS3LinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, **kwargs): super(AmazonS3LinkedService, self).__init__(**kwargs) + self.authentication_type = kwargs.get('authentication_type', None) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) + self.session_token = kwargs.get('session_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonS3' @@ -1708,8 +1722,8 @@ class CopySink(Model): SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, RestSink, - OrcSink, JsonSink, DelimitedTextSink + SapCloudForCustomerSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, + AzurePostgreSqlSink, RestSink, OrcSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
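# --- Editor's note (not part of the upstream diff): a sketch of how the CopySink
# discriminator resolves once 'AzureDatabricksDeltaLakeSink' is registered in
# _subtype_map (next hunk). The setup mirrors the Serializer/Deserializer wiring in the
# generated client; the payload is invented for illustration.
from msrest import Deserializer
from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

# msrest pops the 'type' key and looks the value up in CopySink._subtype_map to pick
# the concrete model class.
sink = deserialize('CopySink', {
    'type': 'AzureDatabricksDeltaLakeSink',
    'preCopyScript': 'VACUUM staging_table',  # placeholder pre-copy script
})
assert isinstance(sink, models.AzureDatabricksDeltaLakeSink)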
@@ -1753,7 +1767,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -1880,7 +1894,7 @@ class FormatWriteSettings(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, - AvroWriteSettings + OrcWriteSettings, AvroWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to Azure. 
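# --- Editor's note (not part of the upstream diff): the changelog pairs
# max_rows_per_file with file_name_prefix on each write-settings model (the
# AvroWriteSettings hunk below adds both). A sketch using DelimitedTextWriteSettings,
# whose required file_extension parameter is unchanged from earlier releases; the
# values are placeholders.
from azure.mgmt.datafactory.models import DelimitedTextSink, DelimitedTextWriteSettings

sink = DelimitedTextSink(
    format_settings=DelimitedTextWriteSettings(
        file_extension=".csv",
        max_rows_per_file=1000000,  # roll to a new file every million rows
        file_name_prefix="export",  # output files become export_<fileIndex>.csv
    ),
)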
@@ -1901,7 +1915,7 @@ } _subtype_map = { - 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} + 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__(self, **kwargs): @@ -1925,6 +1939,15 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -1936,12 +1959,16 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__(self, **kwargs): super(AvroWriteSettings, self).__init__(**kwargs) self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) self.type = 'AvroWriteSettings' @@ -2896,6 +2923,391 @@ def __init__(self, **kwargs): self.type = 'AzureBlobStorageWriteSettings' +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The name of delta table. Type: string (or Expression with + resultType string). + :type table: object + :param database: The database name of delta table.
Type: string (or + Expression with resultType string). + :type database: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) + self.type = 'AzureDatabricksDeltaLakeDataset' + + +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand, + AzureDatabricksDeltaLakeExportCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand', 'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand'} + } + + def __init__(self, **kwargs): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + self.type = 'AzureDatabricksDeltaLakeExportCommand' + + +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, + SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, **kwargs): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks + Delta Lake Copy. Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + self.type = 'AzureDatabricksDeltaLakeImportCommand' + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. <REGION>.azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.access_token = kwargs.get('access_token', None) + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDatabricksDeltaLake' + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + self.type = 'AzureDatabricksDeltaLakeSink' + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Azure Databricks Delta Lake Sql query. Type: string (or + Expression with resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + self.type = 'AzureDatabricksDeltaLakeSource' + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -7998,7 +8410,8 @@ class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings + sub-classes are: TarGZipReadSettings, TarReadSettings, + ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. @@ -8019,7 +8432,7 @@ class CompressionReadSettings(Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, **kwargs): @@ -8049,6 +8462,10 @@ class ConcurLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_properties: Properties used to connect to Concur. It is + mutually exclusive with any other properties in the linked service. Type: + object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object @@ -8087,6 +8504,7 @@ class ConcurLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8098,6 +8516,7 @@ class ConcurLinkedService(LinkedService): def __init__(self, **kwargs): super(ConcurLinkedService, self).__init__(**kwargs) + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs.get('client_id', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -8311,10 +8730,13 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide - when enabling session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log + storage settings customer need to provide when enabling session log. 
:type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :param log_settings: Log settings customer needs to provide when enabling + log. + :type log_settings: ~azure.mgmt.datafactory.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. @@ -8356,6 +8778,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -8376,6 +8799,7 @@ def __init__(self, **kwargs): self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.log_settings = kwargs.get('log_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) self.validate_data_consistency = kwargs.get('validate_data_consistency', None) @@ -8385,6 +8809,62 @@ def __init__(self, **kwargs): self.type = 'Copy' +class CopyActivityLogSettings(Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. + Type: string (or Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable + logging. Type: boolean (or Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) + + +class CopyTranslator(Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__(self, **kwargs): + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. 
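Taken together, the classes added above give the copy activity a Delta Lake path on both ends (export/import commands for the csv staging step) plus the new structured session-log settings. The following is a minimal usage sketch, not part of this diff: the query, script, and format values are hypothetical, and only the class and parameter names come from the code added above.

```python
# Sketch only: compose the new Delta Lake copy models from this release.
from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeExportCommand,
    AzureDatabricksDeltaLakeImportCommand,
    AzureDatabricksDeltaLakeSink,
    AzureDatabricksDeltaLakeSource,
    CopyActivityLogSettings,
)

# Source side: the export command controls how the staged csv serializes
# temporal columns when reading out of Delta Lake.
source = AzureDatabricksDeltaLakeSource(
    query="SELECT * FROM events",  # hypothetical Delta Lake SQL query
    export_settings=AzureDatabricksDeltaLakeExportCommand(
        date_format="yyyy-MM-dd",
        timestamp_format="yyyy-MM-dd HH:mm:ss",
    ),
)

# Sink side: the import command mirrors the same staging step, and
# pre_copy_script runs against the cluster before the copy begins.
sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="TRUNCATE TABLE staging.events",  # hypothetical
    import_settings=AzureDatabricksDeltaLakeImportCommand(
        timestamp_format="yyyy-MM-dd HH:mm:ss",
    ),
)

# The constructors stamp the polymorphic discriminator automatically.
assert source.type == 'AzureDatabricksDeltaLakeSource'
assert sink.import_settings.type == 'AzureDatabricksDeltaLakeImportCommand'

# The new per-activity log knobs defined in CopyActivityLogSettings above.
log_settings = CopyActivityLogSettings(log_level='Warning',
                                       enable_reliable_logging=True)
```

These would then be passed as the `source`, `sink`, and (via `LogSettings.copy_activity_log_settings`) logging configuration of a `CopyActivity`; the `LogSettings` wrapper itself is defined further down in this diff.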
@@ -10220,7 +10700,8 @@ class DatasetCompression(Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + sub-classes are: DatasetTarGZipCompression, DatasetTarCompression, + DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression All required parameters must be populated in order to send to Azure. @@ -10242,7 +10723,7 @@ class DatasetCompression(Model): } _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + 'type': {'TarGZip': 'DatasetTarGZipCompression', 'Tar': 'DatasetTarCompression', 'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } def __init__(self, **kwargs): @@ -10277,6 +10758,28 @@ def __init__(self, **kwargs): self.type = 'BZip2' +class DatasetDataElement(Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetDataElement, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + class DatasetDebugResource(SubResourceDebugResource): """Dataset debug resource. @@ -10456,6 +10959,89 @@ def __init__(self, **kwargs): self.properties = kwargs.get('properties', None) +class DatasetSchemaDataElement(Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the schema column. Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'TarGZip' + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -11081,6 +11667,15 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -11093,12 +11688,16 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__(self, **kwargs): super(DelimitedTextWriteSettings, self).__init__(**kwargs) self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs.get('file_extension', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) self.type = 'DelimitedTextWriteSettings' @@ -13047,40 +13646,6 @@ def __init__(self, **kwargs): self.type = 'ExecuteSSISPackage' -class ExportSettings(Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__(self, **kwargs): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - - class ExposureControlBatchRequest(Model): """A list of exposure control features. @@ -13289,6 +13854,11 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is + allowed for the data factory. Possible values include: 'Enabled', + 'Disabled' + :type public_network_access: str or + ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { @@ -13315,6 +13885,7 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__(self, **kwargs): @@ -13326,6 +13897,7 @@ def __init__(self, **kwargs): self.version = None self.repo_configuration = kwargs.get('repo_configuration', None) self.global_parameters = kwargs.get('global_parameters', None) + self.public_network_access = kwargs.get('public_network_access', None) class FactoryRepoConfiguration(Model): @@ -17577,40 +18149,6 @@ def __init__(self, **kwargs): self.type = 'ImpalaSource' -class ImportSettings(Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__(self, **kwargs): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -19361,8 +19899,72 @@ def __init__(self, **kwargs): self.properties = kwargs.get('properties', None) +class LogLocationSettings(Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) + + +class LogSettings(Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity + log. Type: boolean (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity + log. + :type copy_activity_log_settings: + ~azure.mgmt.datafactory.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer + needs to provide when enabling log. + :type log_location_settings: + ~azure.mgmt.datafactory.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__(self, **kwargs): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) + self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) + self.log_location_settings = kwargs.get('log_location_settings', None) + + class LogStorageSettings(Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -20706,14 +21308,75 @@ class MicrosoftAccessTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The Microsoft Access table name. Type: string (or - Expression with resultType string). - :type table_name: object + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MicrosoftAccessTable' + + +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB Atlas + database. Type: string (or Expression with resultType string). + :type collection: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -20726,13 +21389,139 @@ class MicrosoftAccessTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__(self, **kwargs): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MicrosoftAccessTable' + super(MongoDbAtlasCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbAtlasCollection' + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The MongoDB Atlas connection string. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you + want to access. 
Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbAtlasLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'MongoDbAtlas' + + +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB Atlas instance. In most cases, + modifying the batch size will not affect the user or the application. This + property's main purpose is to avoid hitting the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__(self, **kwargs): + super(MongoDbAtlasSource, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) + self.type = 'MongoDbAtlasSource' class MongoDbCollectionDataset(Dataset): @@ -22997,7 +23786,7 @@ class OrcDataset(Dataset): :param location: Required. The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param orc_compression_codec: Possible values include: 'none', 'zlib', - 'snappy' + 'snappy', 'lzo' :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec """ @@ -23093,6 +23882,8 @@ class OrcSink(CopySink): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -23108,11 +23899,13 @@ class OrcSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__(self, **kwargs): super(OrcSink, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'OrcSink' @@ -23167,6 +23960,45 @@ def __init__(self, **kwargs): self.type = 'OrcSource' +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). 
+ :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OrcWriteSettings, self).__init__(**kwargs) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + self.type = 'OrcWriteSettings' + + class PackageStore(Model): """Package store for the SSIS integration runtime. @@ -23351,6 +24183,8 @@ class ParquetSink(CopySink): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -23366,11 +24200,13 @@ class ParquetSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__(self, **kwargs): super(ParquetSink, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'ParquetSink' @@ -23425,6 +24261,45 @@ def __init__(self, **kwargs): self.type = 'ParquetSource' +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + self.type = 'ParquetWriteSettings' + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -25546,14 +26421,10 @@ class RestSink(CopySink): :param request_interval: The time to await before sending next request, in milliseconds :type request_interval: object - :param compression_type: Compression Type to Send data in compressed - format with Optimal Compression Level, Default is None. And The Only - Supported option is Gzip. 
- :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an - Object before calling the rest endpoint , Default is false. ex: if true - request content sample format is { rows:[]} else the format is [] - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to send data in + compressed format with Optimal Compression Level. Default is None; the + only supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -25572,8 +26443,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25582,8 +26452,7 @@ def __init__(self, **kwargs): self.additional_headers = kwargs.get('additional_headers', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) - self.compression_type = kwargs.get('compression_type', None) - self.wrap_request_json_in_an_object = kwargs.get('wrap_request_json_in_an_object', None) + self.http_compression_type = kwargs.get('http_compression_type', None) self.type = 'RestSink' @@ -32041,6 +32910,142 @@ def __init__(self, **kwargs): self.type = 'SybaseTable' +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, + Group: MyGroup, Name: MyName" Type: string (or Expression with resultType + string). This property will be retired. Please use mappings property. + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and + hierarchical data. Example: {"Column1": "$.Column1", "Column2": + "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or + Expression with resultType object). This property will be retired. Please + use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and + object) values to simple strings in json format. Type: boolean (or + Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular + example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. 
+ Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + :param type_conversion: Whether to enable the advanced type conversion + feature in the Copy activity. Type: boolean (or Expression with resultType + boolean). + :type type_conversion: object + :param type_conversion_settings: Type conversion settings + :type type_conversion_settings: + ~azure.mgmt.datafactory.models.TypeConversionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, + 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, + 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, + } + + def __init__(self, **kwargs): + super(TabularTranslator, self).__init__(**kwargs) + self.column_mappings = kwargs.get('column_mappings', None) + self.schema_mapping = kwargs.get('schema_mapping', None) + self.collection_reference = kwargs.get('collection_reference', None) + self.map_complex_values_to_string = kwargs.get('map_complex_values_to_string', None) + self.mappings = kwargs.get('mappings', None) + self.type_conversion = kwargs.get('type_conversion', None) + self.type_conversion_settings = kwargs.get('type_conversion_settings', None) + self.type = 'TabularTranslator' + + +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + self.type = 'TarGZipReadSettings' + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TarReadSettings, self).__init__(**kwargs) + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + self.type = 'TarReadSettings' + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32702,7 +33707,7 @@ class TumblingWindowTrigger(Trigger): event is fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' + values include: 'Minute', 'Hour', 'Month' :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum @@ -32811,6 +33816,49 @@ def __init__(self, **kwargs): self.type = 'TumblingWindowTriggerDependencyReference' +class TypeConversionSettings(Model): + """Type conversion settings. + + :param allow_data_truncation: Whether to allow data truncation when + converting the data. Type: boolean (or Expression with resultType + boolean). + :type allow_data_truncation: object + :param treat_boolean_as_number: Whether to treat boolean values as + numbers. Type: boolean (or Expression with resultType boolean). + :type treat_boolean_as_number: object + :param date_time_format: The format for DateTime values. Type: string (or + Expression with resultType string). + :type date_time_format: object + :param date_time_offset_format: The format for DateTimeOffset values. + Type: string (or Expression with resultType string). + :type date_time_offset_format: object + :param time_span_format: The format for TimeSpan values. Type: string (or + Expression with resultType string). + :type time_span_format: object + :param culture: The culture used to convert data from/to string. Type: + string (or Expression with resultType string). 
+ :type culture: object + """ + + _attribute_map = { + 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, + 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, + 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, + 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, + 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, + 'culture': {'key': 'culture', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TypeConversionSettings, self).__init__(**kwargs) + self.allow_data_truncation = kwargs.get('allow_data_truncation', None) + self.treat_boolean_as_number = kwargs.get('treat_boolean_as_number', None) + self.date_time_format = kwargs.get('date_time_format', None) + self.date_time_offset_format = kwargs.get('date_time_offset_format', None) + self.time_span_format = kwargs.get('time_span_format', None) + self.culture = kwargs.get('culture', None) + + class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 8723c634a3e4..ece04cb31e05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -334,40 +334,41 @@ class LinkedService(Model): AzureFunctionLinkedService, AzureDataExplorerLinkedService, SapTableLinkedService, GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - AzureMariaDBLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, + ResponsysLinkedService, AzureDatabricksDeltaLakeLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, 
CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLServiceLinkedService, AzureMLLinkedService, TeradataLinkedService, - Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, - MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, - GoogleCloudStorageLinkedService, AzureFileStorageLinkedService, - FileServerLinkedService, HDInsightLinkedService, - CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlMILinkedService, - AzureSqlDatabaseLinkedService, SqlServerLinkedService, - AzureSqlDWLinkedService, AzureTableStorageLinkedService, - AzureBlobStorageLinkedService, AzureStorageLinkedService + MongoDbV2LinkedService, MongoDbAtlasLinkedService, MongoDbLinkedService, + CassandraLinkedService, WebLinkedService, ODataLinkedService, + HdfsLinkedService, MicrosoftAccessLinkedService, InformixLinkedService, + OdbcLinkedService, AzureMLServiceLinkedService, AzureMLLinkedService, + TeradataLinkedService, Db2LinkedService, SybaseLinkedService, + PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, + OracleLinkedService, GoogleCloudStorageLinkedService, + AzureFileStorageLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService All required parameters must be populated in order to send to Azure. 
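The docstring list above and the _subtype_map hunk that follows register the new 'AzureDatabricksDeltaLake' and 'MongoDbAtlas' discriminators on the LinkedService base class. A rough sketch of what that registration enables is below; it assumes msrest's usual dict-accepting Deserializer entry point, built the same way the generated client builds its own, and the payload values are hypothetical.

```python
# Sketch only: polymorphic deserialization driven by LinkedService._subtype_map.
from msrest import Deserializer

from azure.mgmt.datafactory import models

# Same model registry the generated client constructs for its Deserializer.
client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {
    "type": "MongoDbAtlas",  # discriminator looked up in the subtype map below
    "typeProperties": {
        "connectionString": "mongodb+srv://user:pass@cluster0.example.net",  # hypothetical
        "database": "telemetry",
    },
}

linked_service = deserialize("LinkedService", payload)
assert isinstance(linked_service, models.MongoDbAtlasLinkedService)
assert linked_service.database == "telemetry"  # flattened from typeProperties.database
```

Without the new map entry, the same payload would deserialize as the plain LinkedService base model, so the registration in the hunk below is what makes the new types round-trip.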
@@ -403,7 +404,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 
'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 
'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: @@ -515,7 +516,8 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SharePointOnlineListResourceDataset, SnowflakeDataset, + sub-classes are: AzureDatabricksDeltaLakeDataset, + SharePointOnlineListResourceDataset, SnowflakeDataset, GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, @@ -539,8 +541,8 @@ class Dataset(Model): Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, + MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CosmosDbSqlApiCollectionDataset, CustomDataset, CassandraTableDataset, @@ -597,7 +599,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 
'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 
'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: @@ -677,16 
+679,17 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SharePointOnlineListSource, SnowflakeSource, HttpSource, + sub-classes are: SharePointOnlineListSource, + AzureDatabricksDeltaLakeSource, SnowflakeSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource, - OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource, - RestSource, SalesforceServiceCloudSource, ODataSource, - MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, - DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, - DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, - OrcSource, XmlSource, JsonSource, DelimitedTextSource, ParquetSource, - ExcelSource, AvroSource + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbAtlasSource, + MongoDbSource, WebSource, OracleSource, AzureDataExplorerSource, + HdfsSource, FileSystemSource, RestSource, SalesforceServiceCloudSource, + ODataSource, MicrosoftAccessSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + CosmosDbSqlApiSource, DocumentDbCollectionSource, BlobSource, + TabularSource, BinarySource, OrcSource, XmlSource, JsonSource, + DelimitedTextSource, ParquetSource, ExcelSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -721,7 +724,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} + 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 
'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1186,6 +1189,10 @@ class AmazonS3LinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param authentication_type: The authentication type of S3. Allowed value: + AccessKey (default) or TemporarySecurityCredentials. Type: string (or + Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). @@ -1198,6 +1205,9 @@ class AmazonS3LinkedService(LinkedService): try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security + credential. + :type session_token: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
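The new S3 authentication parameters documented above can be combined; a minimal sketch (key and token values are placeholders, and `SecureString` is the usual concrete `SecretBase` type in this package):

```python
from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

# TemporarySecurityCredentials pairs the access key/secret with a session token.
s3_linked_service = AmazonS3LinkedService(
    authentication_type="TemporarySecurityCredentials",
    access_key_id="AKIAEXAMPLE",
    secret_access_key=SecureString(value="example-secret"),
    session_token=SecureString(value="example-session-token"),
)
```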
@@ -1215,17 +1225,21 @@ class AmazonS3LinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, access_key_id=None, secret_access_key=None, service_url=None, session_token=None, encrypted_credential=None, **kwargs) -> None: super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url + self.session_token = session_token self.encrypted_credential = encrypted_credential self.type = 'AmazonS3' @@ -1708,8 +1722,8 @@ class CopySink(Model): SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, RestSink, - OrcSink, JsonSink, DelimitedTextSink + SapCloudForCustomerSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, + AzurePostgreSqlSink, RestSink, OrcSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
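`AzureDatabricksDeltaLakeSink` joins this sink hierarchy (its definition appears further down in this diff). A sketch of configuring it with the new import command; the SQL statement and date/timestamp formats are illustrative:

```python
from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeImportCommand,
    AzureDatabricksDeltaLakeSink,
)

# The import command tells Delta Lake how to parse date and timestamp
# columns in the csv staged by the copy activity.
delta_sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="DELETE FROM events WHERE ds = '2020-10-01'",
    import_settings=AzureDatabricksDeltaLakeImportCommand(
        date_format="yyyy-MM-dd",
        timestamp_format="yyyy-MM-dd HH:mm:ss",
    ),
)
```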
@@ -1753,7 +1767,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1880,7 +1894,7 @@ class FormatWriteSettings(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, - AvroWriteSettings + OrcWriteSettings, AvroWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to Azure. 
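The write-settings subclasses named above gain file-rollover controls in this release (the `AvroWriteSettings` hunk follows). A minimal sketch, assuming `AvroSink`'s existing `format_settings` parameter from earlier releases:

```python
from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

# Roll over to a new output file every 1,000,000 rows; when the source is
# not file-based, files are named <fileNamePrefix>_<fileIndex>.<fileExtension>.
avro_sink = AvroSink(
    format_settings=AvroWriteSettings(
        max_rows_per_file=1000000,
        file_name_prefix="events",
    ),
)
```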
@@ -1901,7 +1915,7 @@ class FormatWriteSettings(Model): } _subtype_map = { - 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} + 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -1925,6 +1939,15 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -1936,12 +1959,16 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix self.type = 'AvroWriteSettings' @@ -2842,40 +2869,422 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.prefix = prefix - self.file_list_path = file_list_path - self.enable_partition_discovery = enable_partition_discovery - self.partition_root_path = partition_root_path - self.delete_files_after_completion = delete_files_after_completion - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.type = 'AzureBlobStorageReadSettings' + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None,
partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.type = 'AzureBlobStorageReadSettings' + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). + :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobStorageWriteSettings' + + +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The name of delta table. Type: string (or Expression with + resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or + Expression with resultType string). + :type database: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, database=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.database = database + self.type = 'AzureDatabricksDeltaLakeDataset' + + +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand, + AzureDatabricksDeltaLakeExportCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand', 'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, date_format=None, timestamp_format=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.date_format = date_format + self.timestamp_format = timestamp_format + self.type = 'AzureDatabricksDeltaLakeExportCommand' + + +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, + SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks + Delta Lake Copy. Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, date_format=None, timestamp_format=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.date_format = date_format + self.timestamp_format = timestamp_format + self.type = 'AzureDatabricksDeltaLakeImportCommand' + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. <REGION>.azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, domain, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, cluster_id=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential + self.type = 'AzureDatabricksDeltaLake' + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, import_settings=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + self.type = 'AzureDatabricksDeltaLakeSink' -class AzureBlobStorageWriteSettings(StoreWriteSettings): - """Azure blob write settings. +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str - :param block_size_in_mb: Indicates the block size(MB) when writing data to - blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or + Expression with resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ _validation = { @@ -2884,16 +3293,19 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.block_size_in_mb = block_size_in_mb - self.type = 'AzureBlobStorageWriteSettings' + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, export_settings=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.export_settings = export_settings + self.type = 'AzureDatabricksDeltaLakeSource' class AzureDatabricksLinkedService(LinkedService): @@ -7998,7 +8410,8 @@ class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings + sub-classes are: TarGZipReadSettings, TarReadSettings, + ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. @@ -8019,7 +8432,7 @@ class CompressionReadSettings(Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -8049,6 +8462,10 @@ class ConcurLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_properties: Properties used to connect to Concur. It is + mutually exclusive with any other properties in the linked service. Type: + object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. 
:type client_id: object @@ -8087,6 +8504,7 @@ class ConcurLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8096,8 +8514,9 @@ class ConcurLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_properties=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_properties = connection_properties self.client_id = client_id self.username = username self.password = password @@ -8311,10 +8730,13 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide - when enabling session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log + storage settings customer need to provide when enabling session log. :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling + log. + :type log_settings: ~azure.mgmt.datafactory.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
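Per the docstring hunk above, `log_settings` supersedes the deprecated `log_storage_settings` on `CopyActivity`. The `LogSettings` and `LogLocationSettings` constructors are not shown in this excerpt, so the sketch below assumes the generated signatures (a required `log_location_settings` taking a linked service reference plus an optional folder path); the reference name is a placeholder:

```python
from azure.mgmt.datafactory.models import (
    CopyActivityLogSettings,
    LinkedServiceReference,
    LogLocationSettings,
    LogSettings,
)

# Session logs are written to the referenced storage linked service.
log_settings = LogSettings(
    enable_copy_activity_log=True,
    copy_activity_log_settings=CopyActivityLogSettings(
        log_level="Warning",
        enable_reliable_logging=True,
    ),
    log_location_settings=LogLocationSettings(
        linked_service_name=LinkedServiceReference(reference_name="LogStorage"),
        path="copy-activity-logs",
    ),
)
```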
@@ -8356,6 +8778,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -8364,7 +8787,7 @@ class CopyActivity(ExecutionActivity): 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, log_storage_settings=None, preserve_rules=None, preserve=None, validate_data_consistency=None, skip_error_file=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, log_storage_settings=None, log_settings=None, preserve_rules=None, preserve=None, validate_data_consistency=None, skip_error_file=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -8376,6 +8799,7 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.log_storage_settings = log_storage_settings + self.log_settings = log_settings self.preserve_rules = preserve_rules self.preserve = preserve self.validate_data_consistency = validate_data_consistency @@ -8385,6 +8809,62 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.type = 'Copy' +class CopyActivityLogSettings(Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. + Type: string (or Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable + logging. Type: boolean (or Expression with resultType boolean). 
+ :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__(self, *, log_level=None, enable_reliable_logging=None, **kwargs) -> None: + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging + + +class CopyTranslator(Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. @@ -10220,7 +10700,8 @@ class DatasetCompression(Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + sub-classes are: DatasetTarGZipCompression, DatasetTarCompression, + DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression All required parameters must be populated in order to send to Azure. @@ -10242,7 +10723,7 @@ class DatasetCompression(Model): } _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + 'type': {'TarGZip': 'DatasetTarGZipCompression', 'Tar': 'DatasetTarCompression', 'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -10277,6 +10758,28 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.type = 'BZip2' +class DatasetDataElement(Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, *, name=None, type=None, **kwargs) -> None: + super(DatasetDataElement, self).__init__(**kwargs) + self.name = name + self.type = type + + class DatasetDebugResource(SubResourceDebugResource): """Dataset debug resource. @@ -10456,6 +10959,89 @@ def __init__(self, *, properties, **kwargs) -> None: self.properties = properties +class DatasetSchemaDataElement(Model): + """Columns that define the physical type schema of the dataset. 
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Name of the schema column. Type: string (or Expression with
+     resultType string).
+    :type name: object
+    :param type: Type of the schema column. Type: string (or Expression with
+     resultType string).
+    :type type: object
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, name=None, type=None, **kwargs) -> None:
+        super(DatasetSchemaDataElement, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.name = name
+        self.type = type
+
+
+class DatasetTarCompression(DatasetCompression):
+    """The Tar archive method used on a dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, **kwargs) -> None:
+        super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.type = 'Tar'
+
+
+class DatasetTarGZipCompression(DatasetCompression):
+    """The TarGZip compression method used on a dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param level: The TarGZip compression level.
+    :type level: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'level': {'key': 'level', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None:
+        super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.level = level
+        self.type = 'TarGZip'
+
+
 class DatasetZipDeflateCompression(DatasetCompression):
     """The ZipDeflate compression method used on a dataset.
 
@@ -11081,6 +11667,15 @@ class DelimitedTextWriteSettings(FormatWriteSettings):
     :param file_extension: Required. The file extension used to create the
      files. Type: string (or Expression with resultType string).
     :type file_extension: object
+    :param max_rows_per_file: Limit the written file's row count to be smaller
+     than or equal to the specified count. Type: integer (or Expression with
+     resultType integer).
+    :type max_rows_per_file: object
+    :param file_name_prefix: Specifies the file name pattern
+     <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based
+     store without partitionOptions. Type: string (or Expression with
+     resultType string).
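# --- Editor's sketch (not part of the diff) ---------------------------------
# Constructing the new Tar/TarGZip compression models defined above. The type
# discriminators 'Tar' and 'TarGZip' are set by the constructors; 'Optimal' is
# an illustrative compression level.

from azure.mgmt.datafactory.models import (
    DatasetTarCompression, DatasetTarGZipCompression)

tar = DatasetTarCompression()                       # plain .tar, no level knob
targz = DatasetTarGZipCompression(level='Optimal')  # .tar.gz with a chosen level
# Either object can be assigned wherever a DatasetCompression is accepted,
# e.g. a dataset's compression property.
# -----------------------------------------------------------------------------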
+ :type file_name_prefix: object """ _validation = { @@ -11093,12 +11688,16 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } - def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix self.type = 'DelimitedTextWriteSettings' @@ -13047,40 +13646,6 @@ def __init__(self, *, name: str, package_location, connect_via, additional_prope self.type = 'ExecuteSSISPackage' -class ExportSettings(Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None - - class ExposureControlBatchRequest(Model): """A list of exposure control features. @@ -13289,6 +13854,11 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + :param public_network_access: Whether or not public network access is + allowed for the data factory. 
Possible values include: 'Enabled', + 'Disabled' + :type public_network_access: str or + ~azure.mgmt.datafactory.models.PublicNetworkAccess """ _validation = { @@ -13315,9 +13885,10 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } - def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, global_parameters=None, **kwargs) -> None: + def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, global_parameters=None, public_network_access=None, **kwargs) -> None: super(Factory, self).__init__(location=location, tags=tags, **kwargs) self.additional_properties = additional_properties self.identity = identity @@ -13326,6 +13897,7 @@ def __init__(self, *, location: str=None, tags=None, additional_properties=None, self.version = None self.repo_configuration = repo_configuration self.global_parameters = global_parameters + self.public_network_access = public_network_access class FactoryRepoConfiguration(Model): @@ -17568,47 +18140,13 @@ class ImpalaSource(TabularSource): 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) - self.query = query - self.type = 'ImpalaSource' - - -class ImportSettings(Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. 
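# --- Editor's sketch (not part of the diff) ---------------------------------
# Using the new public_network_access property on Factory from the hunks above.
# Resource names are hypothetical; the management-client call is shown only as
# a comment since it lies outside this diff.

from azure.mgmt.datafactory.models import Factory

factory = Factory(location='eastus', public_network_access='Disabled')
# client.factories.create_or_update('my-rg', 'my-factory', factory)
# -----------------------------------------------------------------------------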
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.query = query + self.type = 'ImpalaSource' class InformixLinkedService(LinkedService): @@ -19361,8 +19899,72 @@ def __init__(self, *, properties, **kwargs) -> None: self.properties = properties +class LogLocationSettings(Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path=None, **kwargs) -> None: + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = linked_service_name + self.path = path + + +class LogSettings(Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity + log. Type: boolean (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity + log. + :type copy_activity_log_settings: + ~azure.mgmt.datafactory.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer + needs to provide when enabling log. 
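# --- Editor's sketch (not part of the diff) ---------------------------------
# Assembling the new LogSettings hierarchy defined here. Only
# log_location_settings is required per _validation; the linked service name
# and path are hypothetical.

from azure.mgmt.datafactory.models import (
    CopyActivityLogSettings, LinkedServiceReference, LogLocationSettings,
    LogSettings)

log = LogSettings(
    log_location_settings=LogLocationSettings(   # required
        linked_service_name=LinkedServiceReference(reference_name='LS_Blob'),
        path='copy-activity-logs'),
    enable_copy_activity_log=True,
    copy_activity_log_settings=CopyActivityLogSettings(log_level='Warning'))
# -----------------------------------------------------------------------------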
+ :type log_location_settings: + ~azure.mgmt.datafactory.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__(self, *, log_location_settings, enable_copy_activity_log=None, copy_activity_log_settings=None, **kwargs) -> None: + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = enable_copy_activity_log + self.copy_activity_log_settings = copy_activity_log_settings + self.log_location_settings = log_location_settings + + class LogStorageSettings(Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -20597,34 +21199,261 @@ class MicrosoftAccessSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
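# --- Editor's note (not part of the diff) -----------------------------------
# The docstring above marks LogStorageSettings as deprecated in favour of
# LogSettings. A hedged migration sketch, assuming the copy activity in this
# version accepts the new log_settings property alongside the older
# log_storage_settings:
#
#   before: CopyActivity(..., log_storage_settings=LogStorageSettings(
#               linked_service_name=ls_ref, path='logs'))
#   after:  CopyActivity(..., log_settings=LogSettings(
#               log_location_settings=LogLocationSettings(
#                   linked_service_name=ls_ref, path='logs'),
#               enable_copy_activity_log=True))
# -----------------------------------------------------------------------------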
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.additional_columns = additional_columns + self.type = 'MicrosoftAccessSource' + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
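# --- Editor's sketch (not part of the diff) ---------------------------------
# Minimal use of the regenerated MicrosoftAccessSource above, including the
# additional_columns list its docstring documents. Query and column values are
# illustrative.

from azure.mgmt.datafactory.models import AdditionalColumns, MicrosoftAccessSource

source = MicrosoftAccessSource(
    query='SELECT * FROM Orders',
    additional_columns=[AdditionalColumns(name='ingested_at', value='@utcnow()')])
# -----------------------------------------------------------------------------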
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' + + +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB Atlas + database. Type: string (or Expression with resultType string). 
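# --- Editor's sketch (not part of the diff) ---------------------------------
# Minimal construction of the new MongoDbAtlasCollectionDataset defined here;
# both arguments below are required per _validation. Names are hypothetical.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MongoDbAtlasCollectionDataset)

dataset = MongoDbAtlasCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='LS_MongoAtlas'),
    collection='customers')
# -----------------------------------------------------------------------------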
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(MongoDbAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'MongoDbAtlasCollection' + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The MongoDB Atlas connection string. + Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you + want to access. Type: string (or Expression with resultType string). 
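# --- Editor's sketch (not part of the diff) ---------------------------------
# Wiring the new MongoDB Atlas support together: the linked service defined
# here plus the MongoDbAtlasSource that appears further down in this diff.
# Connection string, database, and filter values are hypothetical.

from azure.mgmt.datafactory.models import (
    MongoDbAtlasLinkedService, MongoDbAtlasSource,
    MongoDbCursorMethodsProperties, SecureString)

linked_service = MongoDbAtlasLinkedService(
    connection_string=SecureString(
        value='mongodb+srv://user:password@cluster0.example.net'),
    database='sales')

source = MongoDbAtlasSource(
    filter='{"status": "active"}',  # omit (or pass '{}') to return all documents
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,
    query_timeout='02:00:00')       # timespan-pattern string
# -----------------------------------------------------------------------------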
+ :type database: object """ _validation = { 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'MicrosoftAccessSink' + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'MongoDbAtlas' -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. All required parameters must be populated in order to send to Azure. @@ -20644,9 +21473,23 @@ class MicrosoftAccessSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB Atlas instance. In most cases, + modifying the batch size will not affect the user or the application. This + property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). @@ -20664,75 +21507,21 @@ class MicrosoftAccessSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout self.additional_columns = additional_columns - self.type = 'MicrosoftAccessSource' - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Microsoft Access table name. 
Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MicrosoftAccessTable' + self.type = 'MongoDbAtlasSource' class MongoDbCollectionDataset(Dataset): @@ -22997,7 +23786,7 @@ class OrcDataset(Dataset): :param location: Required. The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param orc_compression_codec: Possible values include: 'none', 'zlib', - 'snappy' + 'snappy', 'lzo' :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec """ @@ -23093,6 +23882,8 @@ class OrcSink(CopySink): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -23108,11 +23899,13 @@ class OrcSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.type = 'OrcSink' @@ -23167,6 +23960,45 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'OrcSource' +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. 
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param max_rows_per_file: Limit the written file's row count to be smaller
+     than or equal to the specified count. Type: integer (or Expression with
+     resultType integer).
+    :type max_rows_per_file: object
+    :param file_name_prefix: Specifies the file name pattern
+     <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based
+     store without partitionOptions. Type: string (or Expression with
+     resultType string).
+    :type file_name_prefix: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'},
+        'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None:
+        super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.max_rows_per_file = max_rows_per_file
+        self.file_name_prefix = file_name_prefix
+        self.type = 'OrcWriteSettings'
+
+
 class PackageStore(Model):
     """Package store for the SSIS integration runtime.
 
@@ -23351,6 +24183,8 @@ class ParquetSink(CopySink):
     :type type: str
     :param store_settings: Parquet store settings.
     :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
+    :param format_settings: Parquet format settings.
+    :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings
     """
 
     _validation = {
@@ -23366,11 +24200,13 @@ class ParquetSink(CopySink):
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
+        'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
         super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
         self.store_settings = store_settings
+        self.format_settings = format_settings
         self.type = 'ParquetSink'
 
 
@@ -23425,6 +24261,45 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc
         self.type = 'ParquetSource'
 
 
+class ParquetWriteSettings(FormatWriteSettings):
+    """Parquet write settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Constant filled by server.
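# --- Editor's sketch (not part of the diff) ---------------------------------
# The new format_settings hook on OrcSink (and, just below, ParquetSink) takes
# the matching write-settings model. Per the docstrings, output files are named
# <fileNamePrefix>_<fileIndex>.<fileExtension>; values here are illustrative.

from azure.mgmt.datafactory.models import OrcSink, OrcWriteSettings

sink = OrcSink(
    format_settings=OrcWriteSettings(
        max_rows_per_file=1000000,   # split output every million rows
        file_name_prefix='orders'))
# ParquetSink accepts a ParquetWriteSettings instance the same way.
# -----------------------------------------------------------------------------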
+    :type type: str
+    :param max_rows_per_file: Limit the written file's row count to be smaller
+     than or equal to the specified count. Type: integer (or Expression with
+     resultType integer).
+    :type max_rows_per_file: object
+    :param file_name_prefix: Specifies the file name pattern
+     <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based
+     store without partitionOptions. Type: string (or Expression with
+     resultType string).
+    :type file_name_prefix: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'},
+        'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None:
+        super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.max_rows_per_file = max_rows_per_file
+        self.file_name_prefix = file_name_prefix
+        self.type = 'ParquetWriteSettings'
+
+
 class PaypalLinkedService(LinkedService):
     """Paypal Service linked service.
 
@@ -25546,14 +26421,10 @@ class RestSink(CopySink):
     :param request_interval: The time to await before sending next request, in
      milliseconds
     :type request_interval: object
-    :param compression_type: Compression Type to Send data in compressed
-     format with Optimal Compression Level, Default is None. And The Only
-     Supported option is Gzip.
-    :type compression_type: object
-    :param wrap_request_json_in_an_object: Wraps Request Array Json into an
-     Object before calling the rest endpoint , Default is false. ex: if true
-     request content sample format is { rows:[]} else the format is []
-    :type wrap_request_json_in_an_object: object
+    :param http_compression_type: Http Compression Type to Send data in
+     compressed format with Optimal Compression Level, Default is None. And The
+     Only Supported option is Gzip.
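# --- Editor's note (not part of the diff) -----------------------------------
# This hunk renames RestSink's compression knob and drops the JSON-wrapping
# flag. A hedged before/after sketch for the 0.14.0 break, with illustrative
# values:
#
#   before: RestSink(request_method='POST', compression_type='Gzip',
#                    wrap_request_json_in_an_object=True)
#   after:  RestSink(request_method='POST', http_compression_type='Gzip')
#
# wrap_request_json_in_an_object has no replacement in this model version.
# -----------------------------------------------------------------------------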
+ :type http_compression_type: object """ _validation = { @@ -25572,18 +26443,16 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, request_method=None, additional_headers=None, http_request_timeout=None, request_interval=None, compression_type=None, wrap_request_json_in_an_object=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, request_method=None, additional_headers=None, http_request_timeout=None, request_interval=None, http_compression_type=None, **kwargs) -> None: super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout self.request_interval = request_interval - self.compression_type = compression_type - self.wrap_request_json_in_an_object = wrap_request_json_in_an_object + self.http_compression_type = http_compression_type self.type = 'RestSink' @@ -32041,6 +32910,142 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'SybaseTable' +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, + Group: MyGroup, Name: MyName" Type: string (or Expression with resultType + string). This property will be retired. Please use mappings property. + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and + hierarchical data. Example: {"Column1": "$.Column1", "Column2": + "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or + Expression with resultType object). This property will be retired. Please + use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and + object) values to simple strings in json format. Type: boolean (or + Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. 
Tabular->tabular + example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + :param type_conversion: Whether to enable the advanced type conversion + feature in the Copy activity. Type: boolean (or Expression with resultType + boolean). + :type type_conversion: object + :param type_conversion_settings: Type conversion settings + :type type_conversion_settings: + ~azure.mgmt.datafactory.models.TypeConversionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, + 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, + 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, + } + + def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, map_complex_values_to_string=None, mappings=None, type_conversion=None, type_conversion_settings=None, **kwargs) -> None: + super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) + self.column_mappings = column_mappings + self.schema_mapping = schema_mapping + self.collection_reference = collection_reference + self.map_complex_values_to_string = map_complex_values_to_string + self.mappings = mappings + self.type_conversion = type_conversion + self.type_conversion_settings = type_conversion_settings + self.type = 'TabularTranslator' + + +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). 
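# --- Editor's sketch (not part of the diff) ---------------------------------
# A tabular->tabular mapping built with the new TabularTranslator, mirroring
# the docstring example above. mappings is typed as object, so a plain list of
# dicts (or an ADF expression) passes through as-is.

from azure.mgmt.datafactory.models import TabularTranslator

translator = TabularTranslator(
    mappings=[
        {'source': {'name': 'CustomerName', 'type': 'String'},
         'sink': {'name': 'ClientName', 'type': 'String'}}],
    type_conversion=True)
# -----------------------------------------------------------------------------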
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, preserve_compression_file_name_as_folder=None, **kwargs) -> None: + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + self.type = 'TarGZipReadSettings' + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, preserve_compression_file_name_as_folder=None, **kwargs) -> None: + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + self.type = 'TarReadSettings' + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32702,7 +33707,7 @@ class TumblingWindowTrigger(Trigger): event is fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' + values include: 'Minute', 'Hour', 'Month' :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum @@ -32811,6 +33816,49 @@ def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwa self.type = 'TumblingWindowTriggerDependencyReference' +class TypeConversionSettings(Model): + """Type conversion settings. + + :param allow_data_truncation: Whether to allow data truncation when + converting the data. Type: boolean (or Expression with resultType + boolean). + :type allow_data_truncation: object + :param treat_boolean_as_number: Whether to treat boolean values as + numbers. Type: boolean (or Expression with resultType boolean). + :type treat_boolean_as_number: object + :param date_time_format: The format for DateTime values. Type: string (or + Expression with resultType string). + :type date_time_format: object + :param date_time_offset_format: The format for DateTimeOffset values. + Type: string (or Expression with resultType string). + :type date_time_offset_format: object + :param time_span_format: The format for TimeSpan values. Type: string (or + Expression with resultType string). 
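# --- Editor's sketch (not part of the diff) ---------------------------------
# Constructing the TarReadSettings/TarGZipReadSettings models defined above.
# Attaching them to a format's compression_properties is shown only as a
# comment, since that property lies outside this excerpt.

from azure.mgmt.datafactory.models import TarGZipReadSettings, TarReadSettings

tgz = TarGZipReadSettings(preserve_compression_file_name_as_folder=False)
tar = TarReadSettings(preserve_compression_file_name_as_folder=True)
# e.g. DelimitedTextReadSettings(compression_properties=tgz)  # assumed property
# -----------------------------------------------------------------------------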
+ :type time_span_format: object + :param culture: The culture used to convert data from/to string. Type: + string (or Expression with resultType string). + :type culture: object + """ + + _attribute_map = { + 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, + 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, + 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, + 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, + 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, + 'culture': {'key': 'culture', 'type': 'object'}, + } + + def __init__(self, *, allow_data_truncation=None, treat_boolean_as_number=None, date_time_format=None, date_time_offset_format=None, time_span_format=None, culture=None, **kwargs) -> None: + super(TypeConversionSettings, self).__init__(**kwargs) + self.allow_data_truncation = allow_data_truncation + self.treat_boolean_as_number = treat_boolean_as_number + self.date_time_format = date_time_format + self.date_time_offset_format = date_time_offset_format + self.time_span_format = time_span_format + self.culture = culture + + class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py index aeae719acfc3..eba31a2798cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -9,5 +9,5 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "0.13.0" +VERSION = "0.14.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py index 2287f0c260af..3b3fd5c68d7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/setup.py +++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py @@ -36,7 +36,7 @@ pass # Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, 'version.py') +with open(os.path.join(package_folder_path, 'version.py') if os.path.exists(os.path.join(package_folder_path, 'version.py')) else os.path.join(package_folder_path, '_version.py'), 'r') as fd: version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
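# --- Editor's sketch (not part of the diff) ---------------------------------
# Pairing the new TypeConversionSettings with TabularTranslator's
# type_conversion switch, both added in this regeneration. The format strings
# below are illustrative .NET-style formats.

from azure.mgmt.datafactory.models import TabularTranslator, TypeConversionSettings

translator = TabularTranslator(
    type_conversion=True,
    type_conversion_settings=TypeConversionSettings(
        allow_data_truncation=False,
        treat_boolean_as_number=False,
        date_time_format='yyyy-MM-dd HH:mm:ss',
        culture='en-US'))
# -----------------------------------------------------------------------------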