diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl index 3af08530e..6b3c9f7ce 100644 --- a/.codegen/__init__.py.tmpl +++ b/.codegen/__init__.py.tmpl @@ -1,14 +1,21 @@ -{{range .Packages}} -import databricks.sdk.service.{{.Name}} as {{.Name}}{{end}} import databricks.sdk.core as client import databricks.sdk.dbutils as dbutils -import databricks.sdk.mixins.dbfs as dbfs_mixin -import databricks.sdk.mixins.compute as compute_mixin + +from databricks.sdk.mixins.dbfs import DbfsExt +from databricks.sdk.mixins.compute import ClustersExt +{{- range .Services}} +from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}} {{$args := list "host" "account_id" "username" "password" "client_id" "client_secret" "token" "profile" "config_file" "azure_workspace_resource_id" "azure_client_secret" "azure_client_id" "azure_tenant_id" "azure_environment" "auth_type" "cluster_id"}} +{{- define "api" -}} + {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" -}} + {{- $genApi := concat .PascalName "API" -}} + {{- getOrDefault $mixins $genApi $genApi -}} +{{- end -}} + class WorkspaceClient: def __init__(self, *{{range $args}}, {{.}}: str = None{{end}}, debug_truncate_bytes: int = None, @@ -28,7 +35,7 @@ class WorkspaceClient: self.dbutils = dbutils.RemoteDbUtils(self.config) self.api_client = client.ApiClient(self.config) {{- range .Services}}{{if not .IsAccounts}} - self.{{.SnakeName}} = {{if eq .SnakeName "dbfs"}}dbfs_mixin.DbfsExt{{else if eq .SnakeName "clusters"}}compute_mixin.ClustersExt{{else}}{{.Package.Name}}.{{.Name}}API{{end}}(self.api_client){{end -}}{{end}} + self.{{.SnakeName}} = {{template "api" .}}(self.api_client){{end -}}{{end}} class AccountClient: def __init__(self, *{{range $args}}, {{.}}: str = None{{end}}, @@ -48,4 +55,4 @@ class AccountClient: self.config = config self.api_client = client.ApiClient(self.config) {{- range .Services}}{{if .IsAccounts}} - self.{{.SnakeName}} = 
{{.Package.Name}}.{{.Name}}API(self.api_client){{end -}}{{end}} + self.{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self.api_client){{end -}}{{end}} diff --git a/.gitattributes b/.gitattributes index 52a4595c8..df786ad5c 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1,27 +1,16 @@ databricks/sdk/__init__.py linguist-generated=true databricks/sdk/service/billing.py linguist-generated=true -databricks/sdk/service/clusterpolicies.py linguist-generated=true -databricks/sdk/service/clusters.py linguist-generated=true -databricks/sdk/service/commands.py linguist-generated=true -databricks/sdk/service/dbfs.py linguist-generated=true -databricks/sdk/service/deployment.py linguist-generated=true -databricks/sdk/service/endpoints.py linguist-generated=true -databricks/sdk/service/gitcredentials.py linguist-generated=true -databricks/sdk/service/globalinitscripts.py linguist-generated=true -databricks/sdk/service/instancepools.py linguist-generated=true -databricks/sdk/service/ipaccesslists.py linguist-generated=true +databricks/sdk/service/catalog.py linguist-generated=true +databricks/sdk/service/compute.py linguist-generated=true +databricks/sdk/service/files.py linguist-generated=true +databricks/sdk/service/iam.py linguist-generated=true databricks/sdk/service/jobs.py linguist-generated=true -databricks/sdk/service/libraries.py linguist-generated=true -databricks/sdk/service/mlflow.py linguist-generated=true +databricks/sdk/service/ml.py linguist-generated=true databricks/sdk/service/oauth2.py linguist-generated=true -databricks/sdk/service/permissions.py linguist-generated=true databricks/sdk/service/pipelines.py linguist-generated=true -databricks/sdk/service/repos.py linguist-generated=true -databricks/sdk/service/scim.py linguist-generated=true -databricks/sdk/service/secrets.py linguist-generated=true +databricks/sdk/service/provisioning.py linguist-generated=true +databricks/sdk/service/serving.py linguist-generated=true 
+databricks/sdk/service/settings.py linguist-generated=true +databricks/sdk/service/sharing.py linguist-generated=true databricks/sdk/service/sql.py linguist-generated=true -databricks/sdk/service/tokenmanagement.py linguist-generated=true -databricks/sdk/service/tokens.py linguist-generated=true -databricks/sdk/service/unitycatalog.py linguist-generated=true databricks/sdk/service/workspace.py linguist-generated=true -databricks/sdk/service/workspaceconf.py linguist-generated=true diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py old mode 100644 new mode 100755 index 6726dd4d7..d7e9abb52 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -1,33 +1,56 @@ import databricks.sdk.core as client import databricks.sdk.dbutils as dbutils -import databricks.sdk.mixins.compute as compute_mixin -import databricks.sdk.mixins.dbfs as dbfs_mixin -import databricks.sdk.service.billing as billing -import databricks.sdk.service.clusterpolicies as clusterpolicies -import databricks.sdk.service.clusters as clusters -import databricks.sdk.service.commands as commands -import databricks.sdk.service.dbfs as dbfs -import databricks.sdk.service.deployment as deployment -import databricks.sdk.service.endpoints as endpoints -import databricks.sdk.service.gitcredentials as gitcredentials -import databricks.sdk.service.globalinitscripts as globalinitscripts -import databricks.sdk.service.instancepools as instancepools -import databricks.sdk.service.ipaccesslists as ipaccesslists -import databricks.sdk.service.jobs as jobs -import databricks.sdk.service.libraries as libraries -import databricks.sdk.service.mlflow as mlflow -import databricks.sdk.service.oauth2 as oauth2 -import databricks.sdk.service.permissions as permissions -import databricks.sdk.service.pipelines as pipelines -import databricks.sdk.service.repos as repos -import databricks.sdk.service.scim as scim -import databricks.sdk.service.secrets as secrets -import databricks.sdk.service.sql as 
sql -import databricks.sdk.service.tokenmanagement as tokenmanagement -import databricks.sdk.service.tokens as tokens -import databricks.sdk.service.unitycatalog as unitycatalog -import databricks.sdk.service.workspace as workspace -import databricks.sdk.service.workspaceconf as workspaceconf +from databricks.sdk.mixins.compute import ClustersExt +from databricks.sdk.mixins.dbfs import DbfsExt +from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI, + LogDeliveryAPI) +from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, + AccountMetastoresAPI, + AccountStorageCredentialsAPI, + CatalogsAPI, ExternalLocationsAPI, + FunctionsAPI, GrantsAPI, + MetastoresAPI, SchemasAPI, + StorageCredentialsAPI, + TableConstraintsAPI, TablesAPI, + VolumesAPI) +from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI, + CommandExecutionAPI, + GlobalInitScriptsAPI, + InstancePoolsAPI, + InstanceProfilesAPI, LibrariesAPI, + PolicyFamiliesAPI) +from databricks.sdk.service.files import DbfsAPI +from databricks.sdk.service.iam import (AccountGroupsAPI, + AccountServicePrincipalsAPI, + AccountUsersAPI, CurrentUserAPI, + GroupsAPI, PermissionsAPI, + ServicePrincipalsAPI, UsersAPI, + WorkspaceAssignmentAPI) +from databricks.sdk.service.jobs import JobsAPI +from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI +from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI, + OAuthEnrollmentAPI, + PublishedAppIntegrationAPI) +from databricks.sdk.service.pipelines import PipelinesAPI +from databricks.sdk.service.provisioning import (CredentialsAPI, + EncryptionKeysAPI, + NetworksAPI, PrivateAccessAPI, + StorageAPI, VpcEndpointsAPI, + WorkspacesAPI) +from databricks.sdk.service.serving import ServingEndpointsAPI +from databricks.sdk.service.settings import (AccountIpAccessListsAPI, + IpAccessListsAPI, + TokenManagementAPI, TokensAPI, + WorkspaceConfAPI) +from databricks.sdk.service.sharing import (ProvidersAPI, + 
RecipientActivationAPI, + RecipientsAPI, SharesAPI) +from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI, + DataSourcesAPI, DbsqlPermissionsAPI, + QueriesAPI, QueryHistoryAPI, + StatementExecutionAPI, WarehousesAPI) +from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI, + SecretsAPI, WorkspaceAPI) class WorkspaceClient: @@ -81,62 +104,55 @@ def __init__(self, self.config = config self.dbutils = dbutils.RemoteDbUtils(self.config) self.api_client = client.ApiClient(self.config) - self.alerts = sql.AlertsAPI(self.api_client) - self.catalogs = unitycatalog.CatalogsAPI(self.api_client) - self.cluster_policies = clusterpolicies.ClusterPoliciesAPI(self.api_client) - self.clusters = compute_mixin.ClustersExt(self.api_client) - self.command_execution = commands.CommandExecutionAPI(self.api_client) - self.current_user = scim.CurrentUserAPI(self.api_client) - self.dashboards = sql.DashboardsAPI(self.api_client) - self.data_sources = sql.DataSourcesAPI(self.api_client) - self.dbfs = dbfs_mixin.DbfsExt(self.api_client) - self.dbsql_permissions = sql.DbsqlPermissionsAPI(self.api_client) - self.experiments = mlflow.ExperimentsAPI(self.api_client) - self.external_locations = unitycatalog.ExternalLocationsAPI(self.api_client) - self.functions = unitycatalog.FunctionsAPI(self.api_client) - self.git_credentials = gitcredentials.GitCredentialsAPI(self.api_client) - self.global_init_scripts = globalinitscripts.GlobalInitScriptsAPI(self.api_client) - self.grants = unitycatalog.GrantsAPI(self.api_client) - self.groups = scim.GroupsAPI(self.api_client) - self.instance_pools = instancepools.InstancePoolsAPI(self.api_client) - self.instance_profiles = clusters.InstanceProfilesAPI(self.api_client) - self.ip_access_lists = ipaccesslists.IpAccessListsAPI(self.api_client) - self.jobs = jobs.JobsAPI(self.api_client) - self.libraries = libraries.LibrariesAPI(self.api_client) - self.m_lflow_artifacts = mlflow.MLflowArtifactsAPI(self.api_client) - 
self.m_lflow_databricks = mlflow.MLflowDatabricksAPI(self.api_client) - self.m_lflow_metrics = mlflow.MLflowMetricsAPI(self.api_client) - self.m_lflow_runs = mlflow.MLflowRunsAPI(self.api_client) - self.metastores = unitycatalog.MetastoresAPI(self.api_client) - self.model_version_comments = mlflow.ModelVersionCommentsAPI(self.api_client) - self.model_versions = mlflow.ModelVersionsAPI(self.api_client) - self.permissions = permissions.PermissionsAPI(self.api_client) - self.pipelines = pipelines.PipelinesAPI(self.api_client) - self.policy_families = clusterpolicies.PolicyFamiliesAPI(self.api_client) - self.providers = unitycatalog.ProvidersAPI(self.api_client) - self.queries = sql.QueriesAPI(self.api_client) - self.query_history = sql.QueryHistoryAPI(self.api_client) - self.recipient_activation = unitycatalog.RecipientActivationAPI(self.api_client) - self.recipients = unitycatalog.RecipientsAPI(self.api_client) - self.registered_models = mlflow.RegisteredModelsAPI(self.api_client) - self.registry_webhooks = mlflow.RegistryWebhooksAPI(self.api_client) - self.repos = repos.ReposAPI(self.api_client) - self.schemas = unitycatalog.SchemasAPI(self.api_client) - self.secrets = secrets.SecretsAPI(self.api_client) - self.service_principals = scim.ServicePrincipalsAPI(self.api_client) - self.serving_endpoints = endpoints.ServingEndpointsAPI(self.api_client) - self.shares = unitycatalog.SharesAPI(self.api_client) - self.statement_execution = sql.StatementExecutionAPI(self.api_client) - self.storage_credentials = unitycatalog.StorageCredentialsAPI(self.api_client) - self.table_constraints = unitycatalog.TableConstraintsAPI(self.api_client) - self.tables = unitycatalog.TablesAPI(self.api_client) - self.token_management = tokenmanagement.TokenManagementAPI(self.api_client) - self.tokens = tokens.TokensAPI(self.api_client) - self.transition_requests = mlflow.TransitionRequestsAPI(self.api_client) - self.users = scim.UsersAPI(self.api_client) - self.warehouses = 
sql.WarehousesAPI(self.api_client) - self.workspace = workspace.WorkspaceAPI(self.api_client) - self.workspace_conf = workspaceconf.WorkspaceConfAPI(self.api_client) + self.alerts = AlertsAPI(self.api_client) + self.catalogs = CatalogsAPI(self.api_client) + self.cluster_policies = ClusterPoliciesAPI(self.api_client) + self.clusters = ClustersExt(self.api_client) + self.command_execution = CommandExecutionAPI(self.api_client) + self.current_user = CurrentUserAPI(self.api_client) + self.dashboards = DashboardsAPI(self.api_client) + self.data_sources = DataSourcesAPI(self.api_client) + self.dbfs = DbfsExt(self.api_client) + self.dbsql_permissions = DbsqlPermissionsAPI(self.api_client) + self.experiments = ExperimentsAPI(self.api_client) + self.external_locations = ExternalLocationsAPI(self.api_client) + self.functions = FunctionsAPI(self.api_client) + self.git_credentials = GitCredentialsAPI(self.api_client) + self.global_init_scripts = GlobalInitScriptsAPI(self.api_client) + self.grants = GrantsAPI(self.api_client) + self.groups = GroupsAPI(self.api_client) + self.instance_pools = InstancePoolsAPI(self.api_client) + self.instance_profiles = InstanceProfilesAPI(self.api_client) + self.ip_access_lists = IpAccessListsAPI(self.api_client) + self.jobs = JobsAPI(self.api_client) + self.libraries = LibrariesAPI(self.api_client) + self.metastores = MetastoresAPI(self.api_client) + self.model_registry = ModelRegistryAPI(self.api_client) + self.permissions = PermissionsAPI(self.api_client) + self.pipelines = PipelinesAPI(self.api_client) + self.policy_families = PolicyFamiliesAPI(self.api_client) + self.providers = ProvidersAPI(self.api_client) + self.queries = QueriesAPI(self.api_client) + self.query_history = QueryHistoryAPI(self.api_client) + self.recipient_activation = RecipientActivationAPI(self.api_client) + self.recipients = RecipientsAPI(self.api_client) + self.repos = ReposAPI(self.api_client) + self.schemas = SchemasAPI(self.api_client) + self.secrets = 
SecretsAPI(self.api_client) + self.service_principals = ServicePrincipalsAPI(self.api_client) + self.serving_endpoints = ServingEndpointsAPI(self.api_client) + self.shares = SharesAPI(self.api_client) + self.statement_execution = StatementExecutionAPI(self.api_client) + self.storage_credentials = StorageCredentialsAPI(self.api_client) + self.table_constraints = TableConstraintsAPI(self.api_client) + self.tables = TablesAPI(self.api_client) + self.token_management = TokenManagementAPI(self.api_client) + self.tokens = TokensAPI(self.api_client) + self.users = UsersAPI(self.api_client) + self.volumes = VolumesAPI(self.api_client) + self.warehouses = WarehousesAPI(self.api_client) + self.workspace = WorkspaceAPI(self.api_client) + self.workspace_conf = WorkspaceConfAPI(self.api_client) class AccountClient: @@ -189,23 +205,24 @@ def __init__(self, product_version=product_version) self.config = config self.api_client = client.ApiClient(self.config) - self.billable_usage = billing.BillableUsageAPI(self.api_client) - self.budgets = billing.BudgetsAPI(self.api_client) - self.credentials = deployment.CredentialsAPI(self.api_client) - self.custom_app_integration = oauth2.CustomAppIntegrationAPI(self.api_client) - self.encryption_keys = deployment.EncryptionKeysAPI(self.api_client) - self.account_groups = scim.AccountGroupsAPI(self.api_client) - self.log_delivery = billing.LogDeliveryAPI(self.api_client) - self.account_metastore_assignments = unitycatalog.AccountMetastoreAssignmentsAPI(self.api_client) - self.account_metastores = unitycatalog.AccountMetastoresAPI(self.api_client) - self.networks = deployment.NetworksAPI(self.api_client) - self.o_auth_enrollment = oauth2.OAuthEnrollmentAPI(self.api_client) - self.private_access = deployment.PrivateAccessAPI(self.api_client) - self.published_app_integration = oauth2.PublishedAppIntegrationAPI(self.api_client) - self.account_service_principals = scim.AccountServicePrincipalsAPI(self.api_client) - self.storage = 
deployment.StorageAPI(self.api_client) - self.account_storage_credentials = unitycatalog.AccountStorageCredentialsAPI(self.api_client) - self.account_users = scim.AccountUsersAPI(self.api_client) - self.vpc_endpoints = deployment.VpcEndpointsAPI(self.api_client) - self.workspace_assignment = permissions.WorkspaceAssignmentAPI(self.api_client) - self.workspaces = deployment.WorkspacesAPI(self.api_client) + self.billable_usage = BillableUsageAPI(self.api_client) + self.budgets = BudgetsAPI(self.api_client) + self.credentials = CredentialsAPI(self.api_client) + self.custom_app_integration = CustomAppIntegrationAPI(self.api_client) + self.encryption_keys = EncryptionKeysAPI(self.api_client) + self.groups = AccountGroupsAPI(self.api_client) + self.ip_access_lists = AccountIpAccessListsAPI(self.api_client) + self.log_delivery = LogDeliveryAPI(self.api_client) + self.metastore_assignments = AccountMetastoreAssignmentsAPI(self.api_client) + self.metastores = AccountMetastoresAPI(self.api_client) + self.networks = NetworksAPI(self.api_client) + self.o_auth_enrollment = OAuthEnrollmentAPI(self.api_client) + self.private_access = PrivateAccessAPI(self.api_client) + self.published_app_integration = PublishedAppIntegrationAPI(self.api_client) + self.service_principals = AccountServicePrincipalsAPI(self.api_client) + self.storage = StorageAPI(self.api_client) + self.storage_credentials = AccountStorageCredentialsAPI(self.api_client) + self.users = AccountUsersAPI(self.api_client) + self.vpc_endpoints = VpcEndpointsAPI(self.api_client) + self.workspace_assignment = WorkspaceAssignmentAPI(self.api_client) + self.workspaces = WorkspacesAPI(self.api_client) diff --git a/databricks/sdk/dbutils.py b/databricks/sdk/dbutils.py index f84c73e6e..68f68a45b 100644 --- a/databricks/sdk/dbutils.py +++ b/databricks/sdk/dbutils.py @@ -6,8 +6,9 @@ from collections import namedtuple from .core import ApiClient, Config -from .mixins import compute, dbfs -from .service import commands, secrets 
+from .mixins import compute as compute_ext +from .mixins import dbfs as dbfs_ext +from .service import compute, workspace class FileInfo(namedtuple('FileInfo', ['path', 'name', 'size', "modificationTime"])): @@ -31,7 +32,7 @@ class SecretMetadata(namedtuple('SecretMetadata', ['key'])): class _FsUtil: """ Manipulates the Databricks filesystem (DBFS) """ - def __init__(self, dbfs_ext: dbfs.DbfsExt, proxy_factory: typing.Callable[[str], '_ProxyUtil']): + def __init__(self, dbfs_ext: dbfs_ext.DbfsExt, proxy_factory: typing.Callable[[str], '_ProxyUtil']): self._dbfs = dbfs_ext self._proxy_factory = proxy_factory @@ -177,7 +178,7 @@ def register(): class _SecretsUtil: """Remote equivalent of secrets util""" - def __init__(self, secrets_api: secrets.SecretsAPI): + def __init__(self, secrets_api: workspace.SecretsAPI): self._api = secrets_api # nolint def getBytes(self, scope: str, key: str) -> bytes: @@ -215,13 +216,13 @@ class RemoteDbUtils: def __init__(self, config: 'Config' = None): self._config = Config() if not config else config self._client = ApiClient(self._config) - self._clusters = compute.ClustersExt(self._client) - self._commands = commands.CommandExecutionAPI(self._client) + self._clusters = compute_ext.ClustersExt(self._client) + self._commands = compute.CommandExecutionAPI(self._client) self._lock = threading.Lock() self._ctx = None - self.fs = _FsUtil(dbfs.DbfsExt(self._client), self.__getattr__) - self.secrets = _SecretsUtil(secrets.SecretsAPI(self._client)) + self.fs = _FsUtil(dbfs_ext.DbfsExt(self._client), self.__getattr__) + self.secrets = _SecretsUtil(workspace.SecretsAPI(self._client)) @property def _cluster_id(self) -> str: @@ -231,7 +232,7 @@ def _cluster_id(self) -> str: raise ValueError(self._config.wrap_debug_info(message)) return cluster_id - def _running_command_context(self) -> commands.ContextStatusResponse: + def _running_command_context(self) -> compute.ContextStatusResponse: if self._ctx: return self._ctx with self._lock: @@ -239,7 
+240,7 @@ def _running_command_context(self) -> commands.ContextStatusResponse: return self._ctx self._clusters.ensure_cluster_is_running(self._cluster_id) self._ctx = self._commands.create(cluster_id=self._cluster_id, - language=commands.Language.python).result() + language=compute.Language.python).result() return self._ctx def __getattr__(self, util) -> '_ProxyUtil': @@ -252,9 +253,9 @@ def __getattr__(self, util) -> '_ProxyUtil': class _ProxyUtil: """Enables temporary workaround to call remote in-REPL dbutils without having to re-implement them""" - def __init__(self, *, command_execution: commands.CommandExecutionAPI, - context_factory: typing.Callable[[], commands.ContextStatusResponse], cluster_id: str, - name: str): + def __init__(self, *, command_execution: compute.CommandExecutionAPI, + context_factory: typing.Callable[[], + compute.ContextStatusResponse], cluster_id: str, name: str): self._commands = command_execution self._cluster_id = cluster_id self._context_factory = context_factory @@ -270,8 +271,8 @@ def __getattr__(self, method: str) -> '_ProxyCall': class _ProxyCall: - def __init__(self, *, command_execution: commands.CommandExecutionAPI, - context_factory: typing.Callable[[], commands.ContextStatusResponse], cluster_id: str, + def __init__(self, *, command_execution: compute.CommandExecutionAPI, + context_factory: typing.Callable[[], compute.ContextStatusResponse], cluster_id: str, util: str, method: str): self._commands = command_execution self._cluster_id = cluster_id @@ -289,10 +290,10 @@ def __call__(self, *args, **kwargs): ''' ctx = self._context_factory() result = self._commands.execute(cluster_id=self._cluster_id, - language=commands.Language.python, + language=compute.Language.python, context_id=ctx.id, command=code).result() - if result.status == commands.CommandStatus.Finished: + if result.status == compute.CommandStatus.Finished: raw = result.results.data return json.loads(raw) else: diff --git a/databricks/sdk/mixins/compute.py 
b/databricks/sdk/mixins/compute.py index 910ae3c45..d153235d1 100644 --- a/databricks/sdk/mixins/compute.py +++ b/databricks/sdk/mixins/compute.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from typing import Optional -from databricks.sdk.service import clusters +from databricks.sdk.service import compute @dataclass @@ -54,7 +54,7 @@ def __lt__(self, other: 'SemVer'): return self.build < other.build -class ClustersExt(clusters.ClustersAPI): +class ClustersExt(compute.ClustersAPI): def select_spark_version(self, long_term_support: bool = False, @@ -92,7 +92,7 @@ def select_spark_version(self, return versions[0] @staticmethod - def _node_sorting_tuple(item: clusters.NodeType) -> tuple: + def _node_sorting_tuple(item: compute.NodeType) -> tuple: local_disks = local_disk_size_gb = local_nvme_disk = local_nvme_disk_size_gb = 0 if item.node_instance_type is not None: local_disks = item.node_instance_type.local_disks @@ -103,12 +103,12 @@ def _node_sorting_tuple(item: clusters.NodeType) -> tuple: local_nvme_disk, local_nvme_disk_size_gb, item.num_gpus, item.instance_type_id) @staticmethod - def _should_node_be_skipped(nt: clusters.NodeType) -> bool: + def _should_node_be_skipped(nt: compute.NodeType) -> bool: if not nt.node_info: return False if not nt.node_info.status: return False - val = clusters.CloudProviderNodeStatus + val = compute.CloudProviderNodeStatus for st in nt.node_info.status: if st in (val.NotAvailableInRegion, val.NotEnabledOnSubscription): return True @@ -173,7 +173,7 @@ def select_node_type(self, raise ValueError("cannot determine smallest node type") def ensure_cluster_is_running(self, cluster_id: str): - state = clusters.State + state = compute.State info = self.get(cluster_id) if info.state == state.TERMINATED: self.start(cluster_id).result() diff --git a/databricks/sdk/mixins/dbfs.py b/databricks/sdk/mixins/dbfs.py index a10e990a1..467effcb2 100644 --- a/databricks/sdk/mixins/dbfs.py +++ b/databricks/sdk/mixins/dbfs.py @@ -9,7 +9,7 @@ from 
databricks.sdk.core import DatabricksError -from ..service import dbfs +from ..service import files if TYPE_CHECKING: from _typeshed import Self @@ -18,13 +18,13 @@ class _DbfsIO(BinaryIO): MAX_CHUNK_SIZE = 1024 * 1024 - _status: dbfs.FileInfo = None - _created: dbfs.CreateResponse = None + _status: files.FileInfo = None + _created: files.CreateResponse = None _offset = 0 _closed = False def __init__(self, - api: dbfs.DbfsAPI, + api: files.DbfsAPI, path: str, *, read: bool = False, @@ -308,12 +308,12 @@ def __repr__(self) -> str: return f'<_DbfsPath {self._path}>' -class DbfsExt(dbfs.DbfsAPI): +class DbfsExt(files.DbfsAPI): def open(self, path: str, *, read: bool = False, write: bool = False, overwrite: bool = False) -> _DbfsIO: return _DbfsIO(self, path, read=read, write=write, overwrite=overwrite) - def list(self, path: str, *, recursive=False) -> Iterator[dbfs.FileInfo]: + def list(self, path: str, *, recursive=False) -> Iterator[files.FileInfo]: """List directory contents or file details. List the contents of a directory, or details of the file. 
If the file or directory does not exist, diff --git a/databricks/sdk/service/unitycatalog.py b/databricks/sdk/service/catalog.py similarity index 78% rename from databricks/sdk/service/unitycatalog.py rename to databricks/sdk/service/catalog.py index 8f9034b96..13fbbee95 100755 --- a/databricks/sdk/service/unitycatalog.py +++ b/databricks/sdk/service/catalog.py @@ -12,13 +12,6 @@ # all definitions in this file are in alphabetical order -class AuthenticationType(Enum): - """The delta sharing authentication type.""" - - DATABRICKS = 'DATABRICKS' - TOKEN = 'TOKEN' - - @dataclass class AwsIamRole: role_arn: str @@ -421,65 +414,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateMetastoreAssignment': workspace_id=d.get('workspace_id', None)) -@dataclass -class CreateProvider: - name: str - authentication_type: 'AuthenticationType' - comment: str = None - recipient_profile_str: str = None - - def as_dict(self) -> dict: - body = {} - if self.authentication_type: body['authentication_type'] = self.authentication_type.value - if self.comment: body['comment'] = self.comment - if self.name: body['name'] = self.name - if self.recipient_profile_str: body['recipient_profile_str'] = self.recipient_profile_str - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateProvider': - return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), - comment=d.get('comment', None), - name=d.get('name', None), - recipient_profile_str=d.get('recipient_profile_str', None)) - - -@dataclass -class CreateRecipient: - name: str - authentication_type: 'AuthenticationType' - comment: str = None - data_recipient_global_metastore_id: Any = None - ip_access_list: 'IpAccessList' = None - owner: str = None - properties_kvpairs: Any = None - sharing_code: str = None - - def as_dict(self) -> dict: - body = {} - if self.authentication_type: body['authentication_type'] = self.authentication_type.value - if self.comment: body['comment'] = self.comment - if 
self.data_recipient_global_metastore_id: - body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - if self.name: body['name'] = self.name - if self.owner: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs - if self.sharing_code: body['sharing_code'] = self.sharing_code - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateRecipient': - return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), - comment=d.get('comment', None), - data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), - ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), - name=d.get('name', None), - owner=d.get('owner', None), - properties_kvpairs=d.get('properties_kvpairs', None), - sharing_code=d.get('sharing_code', None)) - - @dataclass class CreateSchema: name: str @@ -506,22 +440,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateSchema': storage_root=d.get('storage_root', None)) -@dataclass -class CreateShare: - name: str - comment: str = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.name: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateShare': - return cls(comment=d.get('comment', None), name=d.get('name', None)) - - @dataclass class CreateStorageCredential: name: str @@ -576,6 +494,35 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateTableConstraint': full_name_arg=d.get('full_name_arg', None)) +@dataclass +class CreateVolumeRequestContent: + catalog_name: str + name: str + schema_name: str + volume_type: 'VolumeType' + comment: str = None + storage_location: str = None + + def as_dict(self) -> dict: + body = {} + if self.catalog_name: body['catalog_name'] = self.catalog_name + if self.comment: body['comment'] 
= self.comment + if self.name: body['name'] = self.name + if self.schema_name: body['schema_name'] = self.schema_name + if self.storage_location: body['storage_location'] = self.storage_location + if self.volume_type: body['volume_type'] = self.volume_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateVolumeRequestContent': + return cls(catalog_name=d.get('catalog_name', None), + comment=d.get('comment', None), + name=d.get('name', None), + schema_name=d.get('schema_name', None), + storage_location=d.get('storage_location', None), + volume_type=_enum(d, 'volume_type', VolumeType)) + + class DataSourceFormat(Enum): """Data source format""" @@ -637,20 +584,6 @@ class DeleteMetastoreRequest: force: bool = None -@dataclass -class DeleteProviderRequest: - """Delete a provider""" - - name: str - - -@dataclass -class DeleteRecipientRequest: - """Delete a share recipient""" - - name: str - - @dataclass class DeleteSchemaRequest: """Delete a schema""" @@ -658,13 +591,6 @@ class DeleteSchemaRequest: full_name: str -@dataclass -class DeleteShareRequest: - """Delete a share""" - - name: str - - @dataclass class DeleteStorageCredentialRequest: """Delete a credential""" @@ -689,6 +615,13 @@ class DeleteTableRequest: full_name: str +@dataclass +class DeleteVolumeRequest: + """Delete a Volume""" + + full_name_arg: str + + @dataclass class Dependency: """A dependency of a SQL object. 
Either the __table__ field or the __function__ field must be @@ -1115,13 +1048,6 @@ class GetAccountStorageCredentialRequest: name: str -@dataclass -class GetActivationUrlInfoRequest: - """Get a share activation URL""" - - activation_url: str - - @dataclass class GetCatalogRequest: """Get a catalog""" @@ -1248,34 +1174,6 @@ class GetMetastoreSummaryResponseDeltaSharingScope(Enum): INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL' -@dataclass -class GetProviderRequest: - """Get a provider""" - - name: str - - -@dataclass -class GetRecipientRequest: - """Get a share recipient""" - - name: str - - -@dataclass -class GetRecipientSharePermissionsResponse: - permissions_out: 'List[ShareToPrivilegeAssignment]' = None - - def as_dict(self) -> dict: - body = {} - if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetRecipientSharePermissionsResponse': - return cls(permissions_out=_repeated(d, 'permissions_out', ShareToPrivilegeAssignment)) - - @dataclass class GetSchemaRequest: """Get a schema""" @@ -1283,14 +1181,6 @@ class GetSchemaRequest: full_name: str -@dataclass -class GetShareRequest: - """Get a share""" - - name: str - include_shared_data: bool = None - - @dataclass class GetStorageCredentialRequest: """Get a credential""" @@ -1306,20 +1196,6 @@ class GetTableRequest: include_delta_metadata: bool = None -@dataclass -class IpAccessList: - allowed_ip_addresses: 'List[str]' = None - - def as_dict(self) -> dict: - body = {} - if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'IpAccessList': - return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None)) - - @dataclass class ListAccountMetastoreAssignmentsRequest: """Get all workspaces assigned to a metastore""" @@ -1399,62 +1275,6 @@ def from_dict(cls, d: Dict[str, any]) -> 
'ListMetastoresResponse': return cls(metastores=_repeated(d, 'metastores', MetastoreInfo)) -@dataclass -class ListProviderSharesResponse: - shares: 'List[ProviderShare]' = None - - def as_dict(self) -> dict: - body = {} - if self.shares: body['shares'] = [v.as_dict() for v in self.shares] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListProviderSharesResponse': - return cls(shares=_repeated(d, 'shares', ProviderShare)) - - -@dataclass -class ListProvidersRequest: - """List providers""" - - data_provider_global_metastore_id: str = None - - -@dataclass -class ListProvidersResponse: - providers: 'List[ProviderInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.providers: body['providers'] = [v.as_dict() for v in self.providers] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListProvidersResponse': - return cls(providers=_repeated(d, 'providers', ProviderInfo)) - - -@dataclass -class ListRecipientsRequest: - """List share recipients""" - - data_recipient_global_metastore_id: str = None - - -@dataclass -class ListRecipientsResponse: - recipients: 'List[RecipientInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListRecipientsResponse': - return cls(recipients=_repeated(d, 'recipients', RecipientInfo)) - - @dataclass class ListSchemasRequest: """List schemas""" @@ -1476,27 +1296,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListSchemasResponse': return cls(schemas=_repeated(d, 'schemas', SchemaInfo)) -@dataclass -class ListSharesRequest: - """List shares by Provider""" - - name: str - - -@dataclass -class ListSharesResponse: - shares: 'List[ShareInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.shares: body['shares'] = [v.as_dict() for v in self.shares] - return body - - @classmethod - def from_dict(cls, d: Dict[str, 
any]) -> 'ListSharesResponse': - return cls(shares=_repeated(d, 'shares', ShareInfo)) - - @dataclass class ListSummariesRequest: """List table summaries""" @@ -1532,20 +1331,46 @@ class ListTablesRequest: catalog_name: str schema_name: str include_delta_metadata: bool = None + max_results: int = None + page_token: str = None @dataclass class ListTablesResponse: + next_page_token: str = None tables: 'List[TableInfo]' = None def as_dict(self) -> dict: body = {} + if self.next_page_token: body['next_page_token'] = self.next_page_token if self.tables: body['tables'] = [v.as_dict() for v in self.tables] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListTablesResponse': - return cls(tables=_repeated(d, 'tables', TableInfo)) + return cls(next_page_token=d.get('next_page_token', None), tables=_repeated(d, 'tables', TableInfo)) + + +@dataclass +class ListVolumesRequest: + """List Volumes""" + + catalog_name: str + schema_name: str + + +@dataclass +class ListVolumesResponseContent: + volumes: 'List[VolumeInfo]' = None + + def as_dict(self) -> dict: + body = {} + if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListVolumesResponseContent': + return cls(volumes=_repeated(d, 'volumes', VolumeInfo)) @dataclass @@ -1661,50 +1486,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'NamedTableConstraint': return cls(name=d.get('name', None)) -@dataclass -class Partition: - values: 'List[PartitionValue]' = None - - def as_dict(self) -> dict: - body = {} - if self.values: body['values'] = [v.as_dict() for v in self.values] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Partition': - return cls(values=_repeated(d, 'values', PartitionValue)) - - -@dataclass -class PartitionValue: - name: str = None - op: 'PartitionValueOp' = None - recipient_property_key: str = None - value: str = None - - def as_dict(self) -> dict: - body = {} - if self.name: 
body['name'] = self.name - if self.op: body['op'] = self.op.value - if self.recipient_property_key: body['recipient_property_key'] = self.recipient_property_key - if self.value: body['value'] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PartitionValue': - return cls(name=d.get('name', None), - op=_enum(d, 'op', PartitionValueOp), - recipient_property_key=d.get('recipient_property_key', None), - value=d.get('value', None)) - - -class PartitionValueOp(Enum): - """The operator to apply for the value.""" - - EQUAL = 'EQUAL' - LIKE = 'LIKE' - - @dataclass class PermissionsChange: add: 'List[Privilege]' = None @@ -1805,290 +1586,63 @@ def from_dict(cls, d: Dict[str, any]) -> 'PrivilegeAssignment': @dataclass -class ProviderInfo: - authentication_type: 'AuthenticationType' = None - cloud: str = None +class ReadVolumeRequest: + """Get a Volume""" + + full_name_arg: str + + +@dataclass +class SchemaInfo: + catalog_name: str = None + catalog_type: str = None comment: str = None created_at: int = None created_by: str = None - data_provider_global_metastore_id: str = None + effective_auto_maintenance_flag: 'EffectiveAutoMaintenanceFlag' = None + enable_auto_maintenance: 'EnableAutoMaintenance' = None + full_name: str = None metastore_id: str = None name: str = None owner: str = None - recipient_profile: 'RecipientProfile' = None - recipient_profile_str: str = None - region: str = None + properties: 'Dict[str,str]' = None + storage_location: str = None + storage_root: str = None updated_at: int = None updated_by: str = None def as_dict(self) -> dict: body = {} - if self.authentication_type: body['authentication_type'] = self.authentication_type.value - if self.cloud: body['cloud'] = self.cloud + if self.catalog_name: body['catalog_name'] = self.catalog_name + if self.catalog_type: body['catalog_type'] = self.catalog_type if self.comment: body['comment'] = self.comment if self.created_at: body['created_at'] = self.created_at if 
self.created_by: body['created_by'] = self.created_by - if self.data_provider_global_metastore_id: - body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id + if self.effective_auto_maintenance_flag: + body['effective_auto_maintenance_flag'] = self.effective_auto_maintenance_flag.as_dict() + if self.enable_auto_maintenance: body['enable_auto_maintenance'] = self.enable_auto_maintenance.value + if self.full_name: body['full_name'] = self.full_name if self.metastore_id: body['metastore_id'] = self.metastore_id if self.name: body['name'] = self.name if self.owner: body['owner'] = self.owner - if self.recipient_profile: body['recipient_profile'] = self.recipient_profile.as_dict() - if self.recipient_profile_str: body['recipient_profile_str'] = self.recipient_profile_str - if self.region: body['region'] = self.region + if self.properties: body['properties'] = self.properties + if self.storage_location: body['storage_location'] = self.storage_location + if self.storage_root: body['storage_root'] = self.storage_root if self.updated_at: body['updated_at'] = self.updated_at if self.updated_by: body['updated_by'] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ProviderInfo': - return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), - cloud=d.get('cloud', None), + def from_dict(cls, d: Dict[str, any]) -> 'SchemaInfo': + return cls(catalog_name=d.get('catalog_name', None), + catalog_type=d.get('catalog_type', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), - data_provider_global_metastore_id=d.get('data_provider_global_metastore_id', None), - metastore_id=d.get('metastore_id', None), - name=d.get('name', None), - owner=d.get('owner', None), - recipient_profile=_from_dict(d, 'recipient_profile', RecipientProfile), - recipient_profile_str=d.get('recipient_profile_str', None), - region=d.get('region', None), - 
updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class ProviderShare: - name: str = None - - def as_dict(self) -> dict: - body = {} - if self.name: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ProviderShare': - return cls(name=d.get('name', None)) - - -@dataclass -class RecipientInfo: - activated: bool = None - activation_url: str = None - authentication_type: 'AuthenticationType' = None - cloud: str = None - comment: str = None - created_at: int = None - created_by: str = None - data_recipient_global_metastore_id: Any = None - ip_access_list: 'IpAccessList' = None - metastore_id: str = None - name: str = None - owner: str = None - properties_kvpairs: Any = None - region: str = None - sharing_code: str = None - tokens: 'List[RecipientTokenInfo]' = None - updated_at: int = None - updated_by: str = None - - def as_dict(self) -> dict: - body = {} - if self.activated: body['activated'] = self.activated - if self.activation_url: body['activation_url'] = self.activation_url - if self.authentication_type: body['authentication_type'] = self.authentication_type.value - if self.cloud: body['cloud'] = self.cloud - if self.comment: body['comment'] = self.comment - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.data_recipient_global_metastore_id: - body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - if self.metastore_id: body['metastore_id'] = self.metastore_id - if self.name: body['name'] = self.name - if self.owner: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs - if self.region: body['region'] = self.region - if self.sharing_code: body['sharing_code'] = self.sharing_code - if self.tokens: body['tokens'] = [v.as_dict() for 
v in self.tokens] - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RecipientInfo': - return cls(activated=d.get('activated', None), - activation_url=d.get('activation_url', None), - authentication_type=_enum(d, 'authentication_type', AuthenticationType), - cloud=d.get('cloud', None), - comment=d.get('comment', None), - created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), - ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), - metastore_id=d.get('metastore_id', None), - name=d.get('name', None), - owner=d.get('owner', None), - properties_kvpairs=d.get('properties_kvpairs', None), - region=d.get('region', None), - sharing_code=d.get('sharing_code', None), - tokens=_repeated(d, 'tokens', RecipientTokenInfo), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class RecipientProfile: - bearer_token: str = None - endpoint: str = None - share_credentials_version: int = None - - def as_dict(self) -> dict: - body = {} - if self.bearer_token: body['bearer_token'] = self.bearer_token - if self.endpoint: body['endpoint'] = self.endpoint - if self.share_credentials_version: body['share_credentials_version'] = self.share_credentials_version - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RecipientProfile': - return cls(bearer_token=d.get('bearer_token', None), - endpoint=d.get('endpoint', None), - share_credentials_version=d.get('share_credentials_version', None)) - - -@dataclass -class RecipientTokenInfo: - activation_url: str = None - created_at: int = None - created_by: str = None - expiration_time: int = None - id: str = None - updated_at: int = None - updated_by: str = None - - def as_dict(self) -> dict: - body = {} - if 
self.activation_url: body['activation_url'] = self.activation_url - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.expiration_time: body['expiration_time'] = self.expiration_time - if self.id: body['id'] = self.id - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RecipientTokenInfo': - return cls(activation_url=d.get('activation_url', None), - created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - expiration_time=d.get('expiration_time', None), - id=d.get('id', None), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class RetrieveTokenRequest: - """Get an access token""" - - activation_url: str - - -@dataclass -class RetrieveTokenResponse: - bearer_token: str = None - endpoint: str = None - expiration_time: str = None - share_credentials_version: int = None - - def as_dict(self) -> dict: - body = {} - if self.bearer_token: body['bearerToken'] = self.bearer_token - if self.endpoint: body['endpoint'] = self.endpoint - if self.expiration_time: body['expirationTime'] = self.expiration_time - if self.share_credentials_version: body['shareCredentialsVersion'] = self.share_credentials_version - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RetrieveTokenResponse': - return cls(bearer_token=d.get('bearerToken', None), - endpoint=d.get('endpoint', None), - expiration_time=d.get('expirationTime', None), - share_credentials_version=d.get('shareCredentialsVersion', None)) - - -@dataclass -class RotateRecipientToken: - existing_token_expire_in_seconds: int - name: str - - def as_dict(self) -> dict: - body = {} - if self.existing_token_expire_in_seconds: - body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds - if self.name: body['name'] 
= self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RotateRecipientToken': - return cls(existing_token_expire_in_seconds=d.get('existing_token_expire_in_seconds', None), - name=d.get('name', None)) - - -@dataclass -class SchemaInfo: - catalog_name: str = None - catalog_type: str = None - comment: str = None - created_at: int = None - created_by: str = None - effective_auto_maintenance_flag: 'EffectiveAutoMaintenanceFlag' = None - enable_auto_maintenance: 'EnableAutoMaintenance' = None - full_name: str = None - metastore_id: str = None - name: str = None - owner: str = None - properties: 'Dict[str,str]' = None - storage_location: str = None - storage_root: str = None - updated_at: int = None - updated_by: str = None - - def as_dict(self) -> dict: - body = {} - if self.catalog_name: body['catalog_name'] = self.catalog_name - if self.catalog_type: body['catalog_type'] = self.catalog_type - if self.comment: body['comment'] = self.comment - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.effective_auto_maintenance_flag: - body['effective_auto_maintenance_flag'] = self.effective_auto_maintenance_flag.as_dict() - if self.enable_auto_maintenance: body['enable_auto_maintenance'] = self.enable_auto_maintenance.value - if self.full_name: body['full_name'] = self.full_name - if self.metastore_id: body['metastore_id'] = self.metastore_id - if self.name: body['name'] = self.name - if self.owner: body['owner'] = self.owner - if self.properties: body['properties'] = self.properties - if self.storage_location: body['storage_location'] = self.storage_location - if self.storage_root: body['storage_root'] = self.storage_root - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SchemaInfo': - return cls(catalog_name=d.get('catalog_name', 
None), - catalog_type=d.get('catalog_type', None), - comment=d.get('comment', None), - created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - effective_auto_maintenance_flag=_from_dict(d, 'effective_auto_maintenance_flag', - EffectiveAutoMaintenanceFlag), - enable_auto_maintenance=_enum(d, 'enable_auto_maintenance', EnableAutoMaintenance), - full_name=d.get('full_name', None), + effective_auto_maintenance_flag=_from_dict(d, 'effective_auto_maintenance_flag', + EffectiveAutoMaintenanceFlag), + enable_auto_maintenance=_enum(d, 'enable_auto_maintenance', EnableAutoMaintenance), + full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), @@ -2117,139 +1671,6 @@ class SecurableType(Enum): TABLE = 'TABLE' -@dataclass -class ShareInfo: - comment: str = None - created_at: int = None - created_by: str = None - name: str = None - objects: 'List[SharedDataObject]' = None - owner: str = None - updated_at: int = None - updated_by: str = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.name: body['name'] = self.name - if self.objects: body['objects'] = [v.as_dict() for v in self.objects] - if self.owner: body['owner'] = self.owner - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ShareInfo': - return cls(comment=d.get('comment', None), - created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - name=d.get('name', None), - objects=_repeated(d, 'objects', SharedDataObject), - owner=d.get('owner', None), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class SharePermissionsRequest: - """Get recipient 
share permissions""" - - name: str - - -@dataclass -class ShareToPrivilegeAssignment: - privilege_assignments: 'List[PrivilegeAssignment]' = None - share_name: str = None - - def as_dict(self) -> dict: - body = {} - if self.privilege_assignments: - body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] - if self.share_name: body['share_name'] = self.share_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ShareToPrivilegeAssignment': - return cls(privilege_assignments=_repeated(d, 'privilege_assignments', PrivilegeAssignment), - share_name=d.get('share_name', None)) - - -@dataclass -class SharedDataObject: - name: str - added_at: int = None - added_by: str = None - cdf_enabled: bool = None - comment: str = None - data_object_type: str = None - partitions: 'List[Partition]' = None - shared_as: str = None - start_version: int = None - status: 'SharedDataObjectStatus' = None - - def as_dict(self) -> dict: - body = {} - if self.added_at: body['added_at'] = self.added_at - if self.added_by: body['added_by'] = self.added_by - if self.cdf_enabled: body['cdf_enabled'] = self.cdf_enabled - if self.comment: body['comment'] = self.comment - if self.data_object_type: body['data_object_type'] = self.data_object_type - if self.name: body['name'] = self.name - if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions] - if self.shared_as: body['shared_as'] = self.shared_as - if self.start_version: body['start_version'] = self.start_version - if self.status: body['status'] = self.status.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SharedDataObject': - return cls(added_at=d.get('added_at', None), - added_by=d.get('added_by', None), - cdf_enabled=d.get('cdf_enabled', None), - comment=d.get('comment', None), - data_object_type=d.get('data_object_type', None), - name=d.get('name', None), - partitions=_repeated(d, 'partitions', Partition), - shared_as=d.get('shared_as', 
None), - start_version=d.get('start_version', None), - status=_enum(d, 'status', SharedDataObjectStatus)) - - -class SharedDataObjectStatus(Enum): - """One of: **ACTIVE**, **PERMISSION_DENIED**.""" - - ACTIVE = 'ACTIVE' - PERMISSION_DENIED = 'PERMISSION_DENIED' - - -@dataclass -class SharedDataObjectUpdate: - action: 'SharedDataObjectUpdateAction' = None - data_object: 'SharedDataObject' = None - - def as_dict(self) -> dict: - body = {} - if self.action: body['action'] = self.action.value - if self.data_object: body['data_object'] = self.data_object.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SharedDataObjectUpdate': - return cls(action=_enum(d, 'action', SharedDataObjectUpdateAction), - data_object=_from_dict(d, 'data_object', SharedDataObject)) - - -class SharedDataObjectUpdateAction(Enum): - """One of: **ADD**, **REMOVE**, **UPDATE**.""" - - ADD = 'ADD' - REMOVE = 'REMOVE' - UPDATE = 'UPDATE' - - @dataclass class StorageCredentialInfo: aws_iam_role: 'AwsIamRole' = None @@ -2506,6 +1927,42 @@ class UnassignRequest: metastore_id: str +@dataclass +class UpdateAutoMaintenance: + metastore_id: str + enable: bool + + def as_dict(self) -> dict: + body = {} + if self.enable: body['enable'] = self.enable + if self.metastore_id: body['metastore_id'] = self.metastore_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateAutoMaintenance': + return cls(enable=d.get('enable', None), metastore_id=d.get('metastore_id', None)) + + +@dataclass +class UpdateAutoMaintenanceResponse: + state: bool = None + user_id: int = None + username: str = None + + def as_dict(self) -> dict: + body = {} + if self.state: body['state'] = self.state + if self.user_id: body['user_id'] = self.user_id + if self.username: body['username'] = self.username + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateAutoMaintenanceResponse': + return cls(state=d.get('state', None), + user_id=d.get('user_id', None), 
+ username=d.get('username', None)) + + @dataclass class UpdateCatalog: name: str @@ -2667,55 +2124,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdatePermissions': securable_type=_enum(d, 'securable_type', SecurableType)) -@dataclass -class UpdateProvider: - name: str - comment: str = None - owner: str = None - recipient_profile_str: str = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.name: body['name'] = self.name - if self.owner: body['owner'] = self.owner - if self.recipient_profile_str: body['recipient_profile_str'] = self.recipient_profile_str - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateProvider': - return cls(comment=d.get('comment', None), - name=d.get('name', None), - owner=d.get('owner', None), - recipient_profile_str=d.get('recipient_profile_str', None)) - - -@dataclass -class UpdateRecipient: - name: str - comment: str = None - ip_access_list: 'IpAccessList' = None - owner: str = None - properties_kvpairs: Any = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - if self.name: body['name'] = self.name - if self.owner: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateRecipient': - return cls(comment=d.get('comment', None), - ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), - name=d.get('name', None), - owner=d.get('owner', None), - properties_kvpairs=d.get('properties_kvpairs', None)) - - @dataclass class UpdateSchema: full_name: str @@ -2742,45 +2150,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateSchema': properties=d.get('properties', None)) -@dataclass -class UpdateShare: - name: str - comment: str = None - owner: str = None - updates: 'List[SharedDataObjectUpdate]' = 
None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.name: body['name'] = self.name - if self.owner: body['owner'] = self.owner - if self.updates: body['updates'] = [v.as_dict() for v in self.updates] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateShare': - return cls(comment=d.get('comment', None), - name=d.get('name', None), - owner=d.get('owner', None), - updates=_repeated(d, 'updates', SharedDataObjectUpdate)) - - -@dataclass -class UpdateSharePermissions: - name: str - changes: 'List[PermissionsChange]' = None - - def as_dict(self) -> dict: - body = {} - if self.changes: body['changes'] = [v.as_dict() for v in self.changes] - if self.name: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateSharePermissions': - return cls(changes=_repeated(d, 'changes', PermissionsChange), name=d.get('name', None)) - - @dataclass class UpdateStorageCredential: name: str @@ -2821,6 +2190,29 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateStorageCredential': skip_validation=d.get('skip_validation', None)) +@dataclass +class UpdateVolumeRequestContent: + full_name_arg: str + comment: str = None + name: str = None + owner: str = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.full_name_arg: body['full_name_arg'] = self.full_name_arg + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateVolumeRequestContent': + return cls(comment=d.get('comment', None), + full_name_arg=d.get('full_name_arg', None), + name=d.get('name', None), + owner=d.get('owner', None)) + + @dataclass class ValidateStorageCredential: aws_iam_role: 'AwsIamRole' = None @@ -2900,12 +2292,71 @@ class ValidationResultOperation(Enum): WRITE = 'WRITE' -class ValidationResultResult(Enum): - """The results of 
the tested operation.""" +class ValidationResultResult(Enum): + """The results of the tested operation.""" + + FAIL = 'FAIL' + PASS = 'PASS' + SKIP = 'SKIP' + + +@dataclass +class VolumeInfo: + catalog_name: str = None + comment: str = None + created_at: int = None + created_by: str = None + full_name: str = None + metastore_id: str = None + name: str = None + owner: str = None + schema_name: str = None + storage_location: str = None + updated_at: int = None + updated_by: str = None + volume_id: str = None + volume_type: 'VolumeType' = None + + def as_dict(self) -> dict: + body = {} + if self.catalog_name: body['catalog_name'] = self.catalog_name + if self.comment: body['comment'] = self.comment + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.full_name: body['full_name'] = self.full_name + if self.metastore_id: body['metastore_id'] = self.metastore_id + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + if self.schema_name: body['schema_name'] = self.schema_name + if self.storage_location: body['storage_location'] = self.storage_location + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + if self.volume_id: body['volume_id'] = self.volume_id + if self.volume_type: body['volume_type'] = self.volume_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'VolumeInfo': + return cls(catalog_name=d.get('catalog_name', None), + comment=d.get('comment', None), + created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + full_name=d.get('full_name', None), + metastore_id=d.get('metastore_id', None), + name=d.get('name', None), + owner=d.get('owner', None), + schema_name=d.get('schema_name', None), + storage_location=d.get('storage_location', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None), + 
volume_id=d.get('volume_id', None), + volume_type=_enum(d, 'volume_type', VolumeType)) + + +class VolumeType(Enum): - FAIL = 'FAIL' - PASS = 'PASS' - SKIP = 'SKIP' + EXTERNAL = 'EXTERNAL' + MANAGED = 'MANAGED' class AccountMetastoreAssignmentsAPI: @@ -3659,6 +3110,18 @@ def list(self) -> Iterator[MetastoreInfo]: json = self._api.do('GET', '/api/2.1/unity-catalog/metastores') return [MetastoreInfo.from_dict(v) for v in json.get('metastores', [])] + def maintenance(self, metastore_id: str, enable: bool, **kwargs) -> UpdateAutoMaintenanceResponse: + """Enables or disables auto maintenance on the metastore. + + Enables or disables auto maintenance on the metastore.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateAutoMaintenance(enable=enable, metastore_id=metastore_id) + body = request.as_dict() + + json = self._api.do('PATCH', '/api/2.0/auto-maintenance/service', body=body) + return UpdateAutoMaintenanceResponse.from_dict(json) + def summary(self) -> GetMetastoreSummaryResponse: """Get a metastore summary. @@ -3738,272 +3201,6 @@ def update_assignment(self, body=body) -class ProvidersAPI: - """Databricks Delta Sharing: Providers REST API""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - name: str, - authentication_type: AuthenticationType, - *, - comment: str = None, - recipient_profile_str: str = None, - **kwargs) -> ProviderInfo: - """Create an auth provider. - - Creates a new authentication provider minimally based on a name and authentication type. 
The caller - must be an admin on the metastore.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateProvider(authentication_type=authentication_type, - comment=comment, - name=name, - recipient_profile_str=recipient_profile_str) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.1/unity-catalog/providers', body=body) - return ProviderInfo.from_dict(json) - - def delete(self, name: str, **kwargs): - """Delete a provider. - - Deletes an authentication provider, if the caller is a metastore admin or is the owner of the - provider.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteProviderRequest(name=name) - - self._api.do('DELETE', f'/api/2.1/unity-catalog/providers/{request.name}') - - def get(self, name: str, **kwargs) -> ProviderInfo: - """Get a provider. - - Gets a specific authentication provider. The caller must supply the name of the provider, and must - either be a metastore admin or the owner of the provider.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetProviderRequest(name=name) - - json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{request.name}') - return ProviderInfo.from_dict(json) - - def list(self, *, data_provider_global_metastore_id: str = None, **kwargs) -> Iterator[ProviderInfo]: - """List providers. - - Gets an array of available authentication providers. The caller must either be a metastore admin or - the owner of the providers. Providers not owned by the caller are not included in the response. 
There - is no guarantee of a specific ordering of the elements in the array.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListProvidersRequest( - data_provider_global_metastore_id=data_provider_global_metastore_id) - - query = {} - if data_provider_global_metastore_id: - query['data_provider_global_metastore_id'] = request.data_provider_global_metastore_id - - json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query) - return [ProviderInfo.from_dict(v) for v in json.get('providers', [])] - - def list_shares(self, name: str, **kwargs) -> ListProviderSharesResponse: - """List shares by Provider. - - Gets an array of a specified provider's shares within the metastore where: - - * the caller is a metastore admin, or * the caller is the owner.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListSharesRequest(name=name) - - json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{request.name}/shares') - return ListProviderSharesResponse.from_dict(json) - - def update(self, - name: str, - *, - comment: str = None, - owner: str = None, - recipient_profile_str: str = None, - **kwargs) -> ProviderInfo: - """Update a provider. - - Updates the information for an authentication provider, if the caller is a metastore admin or is the - owner of the provider. 
If the update changes the provider name, the caller must be both a metastore - admin and the owner of the provider.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateProvider(comment=comment, - name=name, - owner=owner, - recipient_profile_str=recipient_profile_str) - body = request.as_dict() - - json = self._api.do('PATCH', f'/api/2.1/unity-catalog/providers/{request.name}', body=body) - return ProviderInfo.from_dict(json) - - -class RecipientActivationAPI: - """Databricks Delta Sharing: Recipient Activation REST API""" - - def __init__(self, api_client): - self._api = api_client - - def get_activation_url_info(self, activation_url: str, **kwargs): - """Get a share activation URL. - - Gets an activation URL for a share.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetActivationUrlInfoRequest(activation_url=activation_url) - - self._api.do('GET', - f'/api/2.1/unity-catalog/public/data_sharing_activation_info/{request.activation_url}') - - def retrieve_token(self, activation_url: str, **kwargs) -> RetrieveTokenResponse: - """Get an access token. - - Retrieve access token with an activation url. 
This is a public API without any authentication.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = RetrieveTokenRequest(activation_url=activation_url) - - json = self._api.do( - 'GET', f'/api/2.1/unity-catalog/public/data_sharing_activation/{request.activation_url}') - return RetrieveTokenResponse.from_dict(json) - - -class RecipientsAPI: - """Databricks Delta Sharing: Recipients REST API""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - name: str, - authentication_type: AuthenticationType, - *, - comment: str = None, - data_recipient_global_metastore_id: Any = None, - ip_access_list: IpAccessList = None, - owner: str = None, - properties_kvpairs: Any = None, - sharing_code: str = None, - **kwargs) -> RecipientInfo: - """Create a share recipient. - - Creates a new recipient with the delta sharing authentication type in the metastore. The caller must - be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateRecipient(authentication_type=authentication_type, - comment=comment, - data_recipient_global_metastore_id=data_recipient_global_metastore_id, - ip_access_list=ip_access_list, - name=name, - owner=owner, - properties_kvpairs=properties_kvpairs, - sharing_code=sharing_code) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.1/unity-catalog/recipients', body=body) - return RecipientInfo.from_dict(json) - - def delete(self, name: str, **kwargs): - """Delete a share recipient. - - Deletes the specified recipient from the metastore. 
The caller must be the owner of the recipient.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteRecipientRequest(name=name) - - self._api.do('DELETE', f'/api/2.1/unity-catalog/recipients/{request.name}') - - def get(self, name: str, **kwargs) -> RecipientInfo: - """Get a share recipient. - - Gets a share recipient from the metastore if: - - * the caller is the owner of the share recipient, or: * is a metastore admin""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetRecipientRequest(name=name) - - json = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{request.name}') - return RecipientInfo.from_dict(json) - - def list(self, *, data_recipient_global_metastore_id: str = None, **kwargs) -> Iterator[RecipientInfo]: - """List share recipients. - - Gets an array of all share recipients within the current metastore where: - - * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific - ordering of the elements in the array.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListRecipientsRequest( - data_recipient_global_metastore_id=data_recipient_global_metastore_id) - - query = {} - if data_recipient_global_metastore_id: - query['data_recipient_global_metastore_id'] = request.data_recipient_global_metastore_id - - json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query) - return [RecipientInfo.from_dict(v) for v in json.get('recipients', [])] - - def rotate_token(self, existing_token_expire_in_seconds: int, name: str, **kwargs) -> RecipientInfo: - """Rotate a token. - - Refreshes the specified recipient's delta sharing authentication token with the provided token info. 
- The caller must be the owner of the recipient.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = RotateRecipientToken(existing_token_expire_in_seconds=existing_token_expire_in_seconds, - name=name) - body = request.as_dict() - - json = self._api.do('POST', - f'/api/2.1/unity-catalog/recipients/{request.name}/rotate-token', - body=body) - return RecipientInfo.from_dict(json) - - def share_permissions(self, name: str, **kwargs) -> GetRecipientSharePermissionsResponse: - """Get recipient share permissions. - - Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the - owner of the Recipient.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = SharePermissionsRequest(name=name) - - json = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{request.name}/share-permissions') - return GetRecipientSharePermissionsResponse.from_dict(json) - - def update(self, - name: str, - *, - comment: str = None, - ip_access_list: IpAccessList = None, - owner: str = None, - properties_kvpairs: Any = None, - **kwargs): - """Update a share recipient. - - Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of - the recipient. If the recipient name will be updated, the user must be both a metastore admin and the - owner of the recipient.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateRecipient(comment=comment, - ip_access_list=ip_access_list, - name=name, - owner=owner, - properties_kvpairs=properties_kvpairs) - body = request.as_dict() - self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{request.name}', body=body) - - class SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. 
To access (or list) a table or view in a schema, users must have @@ -4104,117 +3301,6 @@ def update(self, return SchemaInfo.from_dict(json) -class SharesAPI: - """Databricks Delta Sharing: Shares REST API""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, name: str, *, comment: str = None, **kwargs) -> ShareInfo: - """Create a share. - - Creates a new share for data objects. Data objects can be added at this time or after creation with - **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the - metastore.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateShare(comment=comment, name=name) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.1/unity-catalog/shares', body=body) - return ShareInfo.from_dict(json) - - def delete(self, name: str, **kwargs): - """Delete a share. - - Deletes a data object share from the metastore. The caller must be an owner of the share.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteShareRequest(name=name) - - self._api.do('DELETE', f'/api/2.1/unity-catalog/shares/{request.name}') - - def get(self, name: str, *, include_shared_data: bool = None, **kwargs) -> ShareInfo: - """Get a share. - - Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the - share.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetShareRequest(include_shared_data=include_shared_data, name=name) - - query = {} - if include_shared_data: query['include_shared_data'] = request.include_shared_data - - json = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{request.name}', query=query) - return ShareInfo.from_dict(json) - - def list(self) -> Iterator[ShareInfo]: - """List shares. 
- - Gets an array of data object shares from the metastore. The caller must be a metastore admin or the - owner of the share. There is no guarantee of a specific ordering of the elements in the array.""" - - json = self._api.do('GET', '/api/2.1/unity-catalog/shares') - return [ShareInfo.from_dict(v) for v in json.get('shares', [])] - - def share_permissions(self, name: str, **kwargs) -> PermissionsList: - """Get permissions. - - Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the - owner of the share.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = SharePermissionsRequest(name=name) - - json = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{request.name}/permissions') - return PermissionsList.from_dict(json) - - def update(self, - name: str, - *, - comment: str = None, - owner: str = None, - updates: List[SharedDataObjectUpdate] = None, - **kwargs) -> ShareInfo: - """Update a share. - - Updates the share with the changes and data objects in the request. The caller must be the owner of - the share or a metastore admin. - - When the caller is a metastore admin, only the __owner__ field can be updated. - - In the case that the share name is changed, **updateShare** requires that the caller is both the share - owner and a metastore admin. - - For each table that is added through this method, the share owner must also have **SELECT** privilege - on the table. This privilege must be maintained indefinitely for recipients to be able to access the - table. Typically, you should use a group as the share owner. 
- - Table removals through **update** do not require additional privileges.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateShare(comment=comment, name=name, owner=owner, updates=updates) - body = request.as_dict() - - json = self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{request.name}', body=body) - return ShareInfo.from_dict(json) - - def update_permissions(self, name: str, *, changes: List[PermissionsChange] = None, **kwargs): - """Update permissions. - - Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an - owner of the share. - - For new recipient grants, the user must also be the owner of the recipients. recipient revocations do - not require additional privileges.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateSharePermissions(changes=changes, name=name) - body = request.as_dict() - self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{request.name}/permissions', body=body) - - class StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that @@ -4477,6 +3563,8 @@ def list(self, schema_name: str, *, include_delta_metadata: bool = None, + max_results: int = None, + page_token: str = None, **kwargs) -> Iterator[TableInfo]: """List tables. 
@@ -4489,15 +3577,26 @@ def list(self, if not request: # request is not given through keyed args request = ListTablesRequest(catalog_name=catalog_name, include_delta_metadata=include_delta_metadata, + max_results=max_results, + page_token=page_token, schema_name=schema_name) query = {} if catalog_name: query['catalog_name'] = request.catalog_name if include_delta_metadata: query['include_delta_metadata'] = request.include_delta_metadata + if max_results: query['max_results'] = request.max_results + if page_token: query['page_token'] = request.page_token if schema_name: query['schema_name'] = request.schema_name - json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query) - return [TableInfo.from_dict(v) for v in json.get('tables', [])] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query) + if 'tables' not in json or not json['tables']: + return + for v in json['tables']: + yield TableInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def list_summaries(self, catalog_name: str, @@ -4536,3 +3635,133 @@ def list_summaries(self, json = self._api.do('GET', '/api/2.1/unity-catalog/table-summaries', query=query) return ListTableSummariesResponse.from_dict(json) + + +class VolumesAPI: + """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing + files. 
Use cases include running machine learning on unstructured data such as image, audio, video, or PDF + files, organizing data sets during the data exploration stages in data science, working with libraries + that require access to the local file system on cluster machines, storing library and config files of + arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or + transforming and querying non-tabular data files in ETL.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + catalog_name: str, + name: str, + schema_name: str, + volume_type: VolumeType, + *, + comment: str = None, + storage_location: str = None, + **kwargs) -> VolumeInfo: + """Create a Volume. + + Creates a new volume. + + The user could create either an external volume or a managed volume. An external volume will be + created in the specified external location, while a managed volume will be located in the default + location which is specified by the parent schema, or the parent catalog, or the Metastore. + + For the volume creation to succeed, the user must satisfy following conditions: - The caller must be a + metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller + must have **CREATE VOLUME** privilege on the parent schema. + + For an external volume, following conditions also need to satisfy - The caller must have **CREATE + EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes + existing in the specified storage location. 
- The specified storage location is not under the location + of other tables, nor volumes, or catalogs or schemas.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateVolumeRequestContent(catalog_name=catalog_name, + comment=comment, + name=name, + schema_name=schema_name, + storage_location=storage_location, + volume_type=volume_type) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.1/unity-catalog/volumes', body=body) + return VolumeInfo.from_dict(json) + + def delete(self, full_name_arg: str, **kwargs): + """Delete a Volume. + + Deletes a volume from the specified parent catalog and schema. + + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must + also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteVolumeRequest(full_name_arg=full_name_arg) + + self._api.do('DELETE', f'/api/2.1/unity-catalog/volumes/{request.full_name_arg}') + + def list(self, catalog_name: str, schema_name: str, **kwargs) -> Iterator[VolumeInfo]: + """List Volumes. + + Gets an array of all volumes for the current metastore under the parent catalog and schema. + + The returned volumes are filtered based on the privileges of the calling user. For example, the + metastore admin is able to list all the volumes. A regular user needs to be the owner or have the + **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, + the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the + **USE_SCHEMA** privilege on the parent schema. 
+ + There is no guarantee of a specific ordering of the elements in the array.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListVolumesRequest(catalog_name=catalog_name, schema_name=schema_name) + + query = {} + if catalog_name: query['catalog_name'] = request.catalog_name + if schema_name: query['schema_name'] = request.schema_name + + json = self._api.do('GET', '/api/2.1/unity-catalog/volumes', query=query) + return [VolumeInfo.from_dict(v) for v in json.get('volumes', [])] + + def read(self, full_name_arg: str, **kwargs) -> VolumeInfo: + """Get a Volume. + + Gets a volume from the metastore for a specific catalog and schema. + + The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the + volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege + on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ReadVolumeRequest(full_name_arg=full_name_arg) + + json = self._api.do('GET', f'/api/2.1/unity-catalog/volumes/{request.full_name_arg}') + return VolumeInfo.from_dict(json) + + def update(self, + full_name_arg: str, + *, + comment: str = None, + name: str = None, + owner: str = None, + **kwargs) -> VolumeInfo: + """Update a Volume. + + Updates the specified volume under the specified parent catalog and schema. + + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must + also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema. 
+ + Currently only the name, the owner or the comment of the volume could be updated.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateVolumeRequestContent(comment=comment, + full_name_arg=full_name_arg, + name=name, + owner=owner) + body = request.as_dict() + + json = self._api.do('PATCH', f'/api/2.1/unity-catalog/volumes/{request.full_name_arg}', body=body) + return VolumeInfo.from_dict(json) diff --git a/databricks/sdk/service/clusterpolicies.py b/databricks/sdk/service/clusterpolicies.py deleted file mode 100755 index 40f0824ee..000000000 --- a/databricks/sdk/service/clusterpolicies.py +++ /dev/null @@ -1,399 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Dict, Iterator, List - -from ._internal import _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreatePolicy: - name: str - definition: str = None - description: str = None - max_clusters_per_user: int = None - policy_family_definition_overrides: str = None - policy_family_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.definition: body['definition'] = self.definition - if self.description: body['description'] = self.description - if self.max_clusters_per_user: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name: body['name'] = self.name - if self.policy_family_definition_overrides: - body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id: body['policy_family_id'] = self.policy_family_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreatePolicy': - return cls(definition=d.get('definition', None), - description=d.get('description', None), - max_clusters_per_user=d.get('max_clusters_per_user', 
None), - name=d.get('name', None), - policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), - policy_family_id=d.get('policy_family_id', None)) - - -@dataclass -class CreatePolicyResponse: - policy_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.policy_id: body['policy_id'] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreatePolicyResponse': - return cls(policy_id=d.get('policy_id', None)) - - -@dataclass -class DeletePolicy: - policy_id: str - - def as_dict(self) -> dict: - body = {} - if self.policy_id: body['policy_id'] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DeletePolicy': - return cls(policy_id=d.get('policy_id', None)) - - -@dataclass -class EditPolicy: - policy_id: str - name: str - definition: str = None - description: str = None - max_clusters_per_user: int = None - policy_family_definition_overrides: str = None - policy_family_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.definition: body['definition'] = self.definition - if self.description: body['description'] = self.description - if self.max_clusters_per_user: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name: body['name'] = self.name - if self.policy_family_definition_overrides: - body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id: body['policy_family_id'] = self.policy_family_id - if self.policy_id: body['policy_id'] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'EditPolicy': - return cls(definition=d.get('definition', None), - description=d.get('description', None), - max_clusters_per_user=d.get('max_clusters_per_user', None), - name=d.get('name', None), - policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), - policy_family_id=d.get('policy_family_id', None), - 
policy_id=d.get('policy_id', None)) - - -@dataclass -class Get: - """Get entity""" - - policy_id: str - - -@dataclass -class GetPolicyFamilyRequest: - policy_family_id: str - - -@dataclass -class ListRequest: - """Get a cluster policy""" - - sort_column: 'ListSortColumn' = None - sort_order: 'ListSortOrder' = None - - -@dataclass -class ListPoliciesResponse: - policies: 'List[Policy]' = None - - def as_dict(self) -> dict: - body = {} - if self.policies: body['policies'] = [v.as_dict() for v in self.policies] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListPoliciesResponse': - return cls(policies=_repeated(d, 'policies', Policy)) - - -@dataclass -class ListPolicyFamiliesRequest: - max_results: int = None - page_token: str = None - - -@dataclass -class ListPolicyFamiliesResponse: - policy_families: 'List[PolicyFamily]' - next_page_token: str = None - - def as_dict(self) -> dict: - body = {} - if self.next_page_token: body['next_page_token'] = self.next_page_token - if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListPolicyFamiliesResponse': - return cls(next_page_token=d.get('next_page_token', None), - policy_families=_repeated(d, 'policy_families', PolicyFamily)) - - -class ListSortColumn(Enum): - - POLICY_CREATION_TIME = 'POLICY_CREATION_TIME' - POLICY_NAME = 'POLICY_NAME' - - -class ListSortOrder(Enum): - - ASC = 'ASC' - DESC = 'DESC' - - -@dataclass -class Policy: - created_at_timestamp: int = None - creator_user_name: str = None - definition: str = None - description: str = None - is_default: bool = None - max_clusters_per_user: int = None - name: str = None - policy_family_definition_overrides: str = None - policy_family_id: str = None - policy_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.created_at_timestamp: body['created_at_timestamp'] = self.created_at_timestamp - if 
self.creator_user_name: body['creator_user_name'] = self.creator_user_name - if self.definition: body['definition'] = self.definition - if self.description: body['description'] = self.description - if self.is_default: body['is_default'] = self.is_default - if self.max_clusters_per_user: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name: body['name'] = self.name - if self.policy_family_definition_overrides: - body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id: body['policy_family_id'] = self.policy_family_id - if self.policy_id: body['policy_id'] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Policy': - return cls(created_at_timestamp=d.get('created_at_timestamp', None), - creator_user_name=d.get('creator_user_name', None), - definition=d.get('definition', None), - description=d.get('description', None), - is_default=d.get('is_default', None), - max_clusters_per_user=d.get('max_clusters_per_user', None), - name=d.get('name', None), - policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), - policy_family_id=d.get('policy_family_id', None), - policy_id=d.get('policy_id', None)) - - -@dataclass -class PolicyFamily: - policy_family_id: str - name: str - description: str - definition: str - - def as_dict(self) -> dict: - body = {} - if self.definition: body['definition'] = self.definition - if self.description: body['description'] = self.description - if self.name: body['name'] = self.name - if self.policy_family_id: body['policy_family_id'] = self.policy_family_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PolicyFamily': - return cls(definition=d.get('definition', None), - description=d.get('description', None), - name=d.get('name', None), - policy_family_id=d.get('policy_family_id', None)) - - -class ClusterPoliciesAPI: - """Cluster policy limits the ability to configure clusters 
based on a set of rules. The policy rules limit - the attributes or attribute values available for cluster creation. Cluster policies have ACLs that limit - their use to specific users and groups. - - Cluster policies let you limit users to create clusters with prescribed settings, simplify the user - interface and enable more users to create their own clusters (by fixing and hiding some values), control - cost by limiting per cluster maximum cost (by setting limits on attributes whose values contribute to - hourly price). - - Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user - creates a cluster: - A user who has cluster create permission can select the Unrestricted policy and - create fully-configurable clusters. - A user who has both cluster create permission and access to cluster - policies can select the Unrestricted policy and policies they have access to. - A user that has access to - only cluster policies, can select the policies they have access to. - - If no policies have been created in the workspace, the Policy drop-down does not display. - - Only admin users can create, edit, and delete policies. Admin users also have access to all policies.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - name: str, - *, - definition: str = None, - description: str = None, - max_clusters_per_user: int = None, - policy_family_definition_overrides: str = None, - policy_family_id: str = None, - **kwargs) -> CreatePolicyResponse: - """Create a new policy. 
- - Creates a new policy with prescribed settings.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreatePolicy(definition=definition, - description=description, - max_clusters_per_user=max_clusters_per_user, - name=name, - policy_family_definition_overrides=policy_family_definition_overrides, - policy_family_id=policy_family_id) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/policies/clusters/create', body=body) - return CreatePolicyResponse.from_dict(json) - - def delete(self, policy_id: str, **kwargs): - """Delete a cluster policy. - - Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeletePolicy(policy_id=policy_id) - body = request.as_dict() - self._api.do('POST', '/api/2.0/policies/clusters/delete', body=body) - - def edit(self, - policy_id: str, - name: str, - *, - definition: str = None, - description: str = None, - max_clusters_per_user: int = None, - policy_family_definition_overrides: str = None, - policy_family_id: str = None, - **kwargs): - """Update a cluster policy. - - Update an existing policy for cluster. This operation may make some clusters governed by the previous - policy invalid.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = EditPolicy(definition=definition, - description=description, - max_clusters_per_user=max_clusters_per_user, - name=name, - policy_family_definition_overrides=policy_family_definition_overrides, - policy_family_id=policy_family_id, - policy_id=policy_id) - body = request.as_dict() - self._api.do('POST', '/api/2.0/policies/clusters/edit', body=body) - - def get(self, policy_id: str, **kwargs) -> Policy: - """Get entity. - - Get a cluster policy entity. 
Creation and editing is available to admins only.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(policy_id=policy_id) - - query = {} - if policy_id: query['policy_id'] = request.policy_id - - json = self._api.do('GET', '/api/2.0/policies/clusters/get', query=query) - return Policy.from_dict(json) - - def list(self, - *, - sort_column: ListSortColumn = None, - sort_order: ListSortOrder = None, - **kwargs) -> Iterator[Policy]: - """Get a cluster policy. - - Returns a list of policies accessible by the requesting user.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListRequest(sort_column=sort_column, sort_order=sort_order) - - query = {} - if sort_column: query['sort_column'] = request.sort_column.value - if sort_order: query['sort_order'] = request.sort_order.value - - json = self._api.do('GET', '/api/2.0/policies/clusters/list', query=query) - return [Policy.from_dict(v) for v in json.get('policies', [])] - - -class PolicyFamiliesAPI: - """View available policy families. A policy family contains a policy definition providing best practices for - configuring clusters for a particular use case. - - Databricks manages and provides policy families for several common cluster use cases. You cannot create, - edit, or delete policy families. - - Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a - policy family. 
Cluster policies created using a policy family inherit the policy family's policy - definition.""" - - def __init__(self, api_client): - self._api = api_client - - def get(self, policy_family_id: str, **kwargs) -> PolicyFamily: - - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetPolicyFamilyRequest(policy_family_id=policy_family_id) - - json = self._api.do('GET', f'/api/2.0/policy-families/{request.policy_family_id}') - return PolicyFamily.from_dict(json) - - def list(self, *, max_results: int = None, page_token: str = None, **kwargs) -> Iterator[PolicyFamily]: - - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListPolicyFamiliesRequest(max_results=max_results, page_token=page_token) - - query = {} - if max_results: query['max_results'] = request.max_results - if page_token: query['page_token'] = request.page_token - - while True: - json = self._api.do('GET', '/api/2.0/policy-families', query=query) - if 'policy_families' not in json or not json['policy_families']: - return - for v in json['policy_families']: - yield PolicyFamily.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] diff --git a/databricks/sdk/service/commands.py b/databricks/sdk/service/commands.py deleted file mode 100755 index b81b045e5..000000000 --- a/databricks/sdk/service/commands.py +++ /dev/null @@ -1,478 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -import logging -import random -import time -from dataclasses import dataclass -from datetime import timedelta -from enum import Enum -from typing import Any, Callable, Dict, List - -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CancelCommand: - cluster_id: str = None - command_id: str = None - context_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['clusterId'] = self.cluster_id - if self.command_id: body['commandId'] = self.command_id - if self.context_id: body['contextId'] = self.context_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CancelCommand': - return cls(cluster_id=d.get('clusterId', None), - command_id=d.get('commandId', None), - context_id=d.get('contextId', None)) - - -@dataclass -class Command: - cluster_id: str = None - command: str = None - context_id: str = None - language: 'Language' = None - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['clusterId'] = self.cluster_id - if self.command: body['command'] = self.command - if self.context_id: body['contextId'] = self.context_id - if self.language: body['language'] = self.language.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Command': - return cls(cluster_id=d.get('clusterId', None), - command=d.get('command', None), - context_id=d.get('contextId', None), - language=_enum(d, 'language', Language)) - - -class CommandStatus(Enum): - - Cancelled = 'Cancelled' - Cancelling = 'Cancelling' - Error = 'Error' - Finished = 'Finished' - Queued = 'Queued' - Running = 'Running' - - -@dataclass -class CommandStatusRequest: - """Get command info""" - - cluster_id: str - context_id: str - command_id: str - - -@dataclass -class CommandStatusResponse: - id: str = None - results: 'Results' = None - status: 'CommandStatus' = None 
- - def as_dict(self) -> dict: - body = {} - if self.id: body['id'] = self.id - if self.results: body['results'] = self.results.as_dict() - if self.status: body['status'] = self.status.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CommandStatusResponse': - return cls(id=d.get('id', None), - results=_from_dict(d, 'results', Results), - status=_enum(d, 'status', CommandStatus)) - - -class ContextStatus(Enum): - - Error = 'Error' - Pending = 'Pending' - Running = 'Running' - - -@dataclass -class ContextStatusRequest: - """Get status""" - - cluster_id: str - context_id: str - - -@dataclass -class ContextStatusResponse: - id: str = None - status: 'ContextStatus' = None - - def as_dict(self) -> dict: - body = {} - if self.id: body['id'] = self.id - if self.status: body['status'] = self.status.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ContextStatusResponse': - return cls(id=d.get('id', None), status=_enum(d, 'status', ContextStatus)) - - -@dataclass -class CreateContext: - cluster_id: str = None - language: 'Language' = None - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['clusterId'] = self.cluster_id - if self.language: body['language'] = self.language.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateContext': - return cls(cluster_id=d.get('clusterId', None), language=_enum(d, 'language', Language)) - - -@dataclass -class Created: - id: str = None - - def as_dict(self) -> dict: - body = {} - if self.id: body['id'] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Created': - return cls(id=d.get('id', None)) - - -@dataclass -class DestroyContext: - cluster_id: str - context_id: str - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['clusterId'] = self.cluster_id - if self.context_id: body['contextId'] = self.context_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, 
any]) -> 'DestroyContext': - return cls(cluster_id=d.get('clusterId', None), context_id=d.get('contextId', None)) - - -class Language(Enum): - - python = 'python' - scala = 'scala' - sql = 'sql' - - -class ResultType(Enum): - - error = 'error' - image = 'image' - images = 'images' - table = 'table' - text = 'text' - - -@dataclass -class Results: - cause: str = None - data: Any = None - file_name: str = None - file_names: 'List[str]' = None - is_json_schema: bool = None - pos: int = None - result_type: 'ResultType' = None - schema: 'List[Dict[str,Any]]' = None - summary: str = None - truncated: bool = None - - def as_dict(self) -> dict: - body = {} - if self.cause: body['cause'] = self.cause - if self.data: body['data'] = self.data - if self.file_name: body['fileName'] = self.file_name - if self.file_names: body['fileNames'] = [v for v in self.file_names] - if self.is_json_schema: body['isJsonSchema'] = self.is_json_schema - if self.pos: body['pos'] = self.pos - if self.result_type: body['resultType'] = self.result_type.value - if self.schema: body['schema'] = [v for v in self.schema] - if self.summary: body['summary'] = self.summary - if self.truncated: body['truncated'] = self.truncated - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Results': - return cls(cause=d.get('cause', None), - data=d.get('data', None), - file_name=d.get('fileName', None), - file_names=d.get('fileNames', None), - is_json_schema=d.get('isJsonSchema', None), - pos=d.get('pos', None), - result_type=_enum(d, 'resultType', ResultType), - schema=d.get('schema', None), - summary=d.get('summary', None), - truncated=d.get('truncated', None)) - - -class CommandExecutionAPI: - """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" - - def __init__(self, api_client): - self._api = api_client - - def wait_command_status_command_execution_cancelled( - self, - cluster_id: str, - command_id: str, - context_id: str, - 
timeout=timedelta(minutes=20), - callback: Callable[[CommandStatusResponse], None] = None) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.Cancelled, ) - failure_states = (CommandStatus.Error, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if poll.results: - status_message = poll.results.cause - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Cancelled, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_command_status_command_execution_finished_or_error( - self, - cluster_id: str, - command_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Callable[[CommandStatusResponse], None] = None) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.Finished, CommandStatus.Error, ) - failure_states = (CommandStatus.Cancelled, CommandStatus.Cancelling, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Finished or Error, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_context_status_command_execution_running( - self, - cluster_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Callable[[ContextStatusResponse], None] = None) -> ContextStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (ContextStatus.Running, ) - failure_states = (ContextStatus.Error, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.context_status(cluster_id=cluster_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Running, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def cancel(self, - *, - cluster_id: str = None, - command_id: str = None, - context_id: str = None, - **kwargs) -> Wait[CommandStatusResponse]: - """Cancel a command. - - Cancels a currently running command within an execution context. - - The command ID is obtained from a prior successful call to __execute__.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CancelCommand(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - body = request.as_dict() - self._api.do('POST', '/api/1.2/commands/cancel', body=body) - return Wait(self.wait_command_status_command_execution_cancelled, - cluster_id=request.cluster_id, - command_id=request.command_id, - context_id=request.context_id) - - def cancel_and_wait(self, - *, - cluster_id: str = None, - command_id: str = None, - context_id: str = None, - timeout=timedelta(minutes=20)) -> CommandStatusResponse: - return self.cancel(cluster_id=cluster_id, command_id=command_id, - context_id=context_id).result(timeout=timeout) - - def command_status(self, cluster_id: str, context_id: str, command_id: str, - **kwargs) -> CommandStatusResponse: - """Get command info. 
- - Gets the status of and, if available, the results from a currently executing command. - - The command ID is obtained from a prior successful call to __execute__.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CommandStatusRequest(cluster_id=cluster_id, - command_id=command_id, - context_id=context_id) - - query = {} - if cluster_id: query['clusterId'] = request.cluster_id - if command_id: query['commandId'] = request.command_id - if context_id: query['contextId'] = request.context_id - - json = self._api.do('GET', '/api/1.2/commands/status', query=query) - return CommandStatusResponse.from_dict(json) - - def context_status(self, cluster_id: str, context_id: str, **kwargs) -> ContextStatusResponse: - """Get status. - - Gets the status for an execution context.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ContextStatusRequest(cluster_id=cluster_id, context_id=context_id) - - query = {} - if cluster_id: query['clusterId'] = request.cluster_id - if context_id: query['contextId'] = request.context_id - - json = self._api.do('GET', '/api/1.2/contexts/status', query=query) - return ContextStatusResponse.from_dict(json) - - def create(self, - *, - cluster_id: str = None, - language: Language = None, - **kwargs) -> Wait[ContextStatusResponse]: - """Create an execution context. - - Creates an execution context for running cluster commands. 
- - If successful, this method returns the ID of the new execution context.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateContext(cluster_id=cluster_id, language=language) - body = request.as_dict() - op_response = self._api.do('POST', '/api/1.2/contexts/create', body=body) - return Wait(self.wait_context_status_command_execution_running, - response=Created.from_dict(op_response), - cluster_id=request.cluster_id, - context_id=op_response['id']) - - def create_and_wait(self, - *, - cluster_id: str = None, - language: Language = None, - timeout=timedelta(minutes=20)) -> ContextStatusResponse: - return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout) - - def destroy(self, cluster_id: str, context_id: str, **kwargs): - """Delete an execution context. - - Deletes an execution context.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DestroyContext(cluster_id=cluster_id, context_id=context_id) - body = request.as_dict() - self._api.do('POST', '/api/1.2/contexts/destroy', body=body) - - def execute(self, - *, - cluster_id: str = None, - command: str = None, - context_id: str = None, - language: Language = None, - **kwargs) -> Wait[CommandStatusResponse]: - """Run a command. - - Runs a cluster command in the given execution context, using the provided language. 
- - If successful, it returns an ID for tracking the status of the command's execution.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Command(cluster_id=cluster_id, - command=command, - context_id=context_id, - language=language) - body = request.as_dict() - op_response = self._api.do('POST', '/api/1.2/commands/execute', body=body) - return Wait(self.wait_command_status_command_execution_finished_or_error, - response=Created.from_dict(op_response), - cluster_id=request.cluster_id, - command_id=op_response['id'], - context_id=request.context_id) - - def execute_and_wait(self, - *, - cluster_id: str = None, - command: str = None, - context_id: str = None, - language: Language = None, - timeout=timedelta(minutes=20)) -> CommandStatusResponse: - return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, - language=language).result(timeout=timeout) diff --git a/databricks/sdk/service/clusters.py b/databricks/sdk/service/compute.py similarity index 54% rename from databricks/sdk/service/clusters.py rename to databricks/sdk/service/compute.py index 5710cc937..cbaf2f8d7 100755 --- a/databricks/sdk/service/clusters.py +++ b/databricks/sdk/service/compute.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Callable, Dict, Iterator, List +from typing import Any, Callable, Dict, Iterator, List from ..errors import OperationFailed from ._internal import Wait, _enum, _from_dict, _repeated @@ -220,6 +220,26 @@ def from_dict(cls, d: Dict[str, any]) -> 'BaseClusterInfo': workload_type=_from_dict(d, 'workload_type', WorkloadType)) +@dataclass +class CancelCommand: + cluster_id: str = None + command_id: str = None + context_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['clusterId'] = self.cluster_id + if self.command_id: body['commandId'] = self.command_id + if self.context_id: 
body['contextId'] = self.context_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CancelCommand': + return cls(cluster_id=d.get('clusterId', None), + command_id=d.get('commandId', None), + context_id=d.get('contextId', None)) + + @dataclass class ChangeClusterOwner: cluster_id: str @@ -512,6 +532,23 @@ def from_dict(cls, d: Dict[str, any]) -> 'ClusterInfo': workload_type=_from_dict(d, 'workload_type', WorkloadType)) +@dataclass +class ClusterLibraryStatuses: + cluster_id: str = None + library_statuses: 'List[LibraryFullStatus]' = None + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['cluster_id'] = self.cluster_id + if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ClusterLibraryStatuses': + return cls(cluster_id=d.get('cluster_id', None), + library_statuses=_repeated(d, 'library_statuses', LibraryFullStatus)) + + @dataclass class ClusterLogConf: dbfs: 'DbfsStorageInfo' = None @@ -557,6 +594,106 @@ class ClusterSource(Enum): UI = 'UI' +@dataclass +class ClusterStatusRequest: + """Get status""" + + cluster_id: str + + +@dataclass +class Command: + cluster_id: str = None + command: str = None + context_id: str = None + language: 'Language' = None + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['clusterId'] = self.cluster_id + if self.command: body['command'] = self.command + if self.context_id: body['contextId'] = self.context_id + if self.language: body['language'] = self.language.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'Command': + return cls(cluster_id=d.get('clusterId', None), + command=d.get('command', None), + context_id=d.get('contextId', None), + language=_enum(d, 'language', Language)) + + +class CommandStatus(Enum): + + Cancelled = 'Cancelled' + Cancelling = 'Cancelling' + Error = 'Error' + Finished = 'Finished' + Queued = 
'Queued' + Running = 'Running' + + +@dataclass +class CommandStatusRequest: + """Get command info""" + + cluster_id: str + context_id: str + command_id: str + + +@dataclass +class CommandStatusResponse: + id: str = None + results: 'Results' = None + status: 'CommandStatus' = None + + def as_dict(self) -> dict: + body = {} + if self.id: body['id'] = self.id + if self.results: body['results'] = self.results.as_dict() + if self.status: body['status'] = self.status.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CommandStatusResponse': + return cls(id=d.get('id', None), + results=_from_dict(d, 'results', Results), + status=_enum(d, 'status', CommandStatus)) + + +class ContextStatus(Enum): + + Error = 'Error' + Pending = 'Pending' + Running = 'Running' + + +@dataclass +class ContextStatusRequest: + """Get status""" + + cluster_id: str + context_id: str + + +@dataclass +class ContextStatusResponse: + id: str = None + status: 'ContextStatus' = None + + def as_dict(self) -> dict: + body = {} + if self.id: body['id'] = self.id + if self.status: body['status'] = self.status.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ContextStatusResponse': + return cls(id=d.get('id', None), status=_enum(d, 'status', ContextStatus)) + + @dataclass class CreateCluster: spark_version: str @@ -656,6 +793,163 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateClusterResponse': return cls(cluster_id=d.get('cluster_id', None)) +@dataclass +class CreateContext: + cluster_id: str = None + language: 'Language' = None + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['clusterId'] = self.cluster_id + if self.language: body['language'] = self.language.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateContext': + return cls(cluster_id=d.get('clusterId', None), language=_enum(d, 'language', Language)) + + +@dataclass +class CreateInstancePool: + instance_pool_name: str + 
node_type_id: str + aws_attributes: 'InstancePoolAwsAttributes' = None + azure_attributes: 'InstancePoolAzureAttributes' = None + custom_tags: 'Dict[str,str]' = None + disk_spec: 'DiskSpec' = None + enable_elastic_disk: bool = None + idle_instance_autotermination_minutes: int = None + instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None + max_capacity: int = None + min_idle_instances: int = None + preloaded_docker_images: 'List[DockerImage]' = None + preloaded_spark_versions: 'List[str]' = None + + def as_dict(self) -> dict: + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() + if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.idle_instance_autotermination_minutes: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_fleet_attributes: + body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() + if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity: body['max_capacity'] = self.max_capacity + if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: + body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePool': + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', 
InstancePoolAzureAttributes), + custom_tags=d.get('custom_tags', None), + disk_spec=_from_dict(d, 'disk_spec', DiskSpec), + enable_elastic_disk=d.get('enable_elastic_disk', None), + idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), + instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', + InstancePoolFleetAttributes), + instance_pool_name=d.get('instance_pool_name', None), + max_capacity=d.get('max_capacity', None), + min_idle_instances=d.get('min_idle_instances', None), + node_type_id=d.get('node_type_id', None), + preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), + preloaded_spark_versions=d.get('preloaded_spark_versions', None)) + + +@dataclass +class CreateInstancePoolResponse: + instance_pool_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePoolResponse': + return cls(instance_pool_id=d.get('instance_pool_id', None)) + + +@dataclass +class CreatePolicy: + name: str + definition: str = None + description: str = None + max_clusters_per_user: int = None + policy_family_definition_overrides: str = None + policy_family_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.definition: body['definition'] = self.definition + if self.description: body['description'] = self.description + if self.max_clusters_per_user: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name: body['name'] = self.name + if self.policy_family_definition_overrides: + body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id: body['policy_family_id'] = self.policy_family_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreatePolicy': + return cls(definition=d.get('definition', None), + 
description=d.get('description', None), + max_clusters_per_user=d.get('max_clusters_per_user', None), + name=d.get('name', None), + policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), + policy_family_id=d.get('policy_family_id', None)) + + +@dataclass +class CreatePolicyResponse: + policy_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.policy_id: body['policy_id'] = self.policy_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreatePolicyResponse': + return cls(policy_id=d.get('policy_id', None)) + + +@dataclass +class CreateResponse: + script_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.script_id: body['script_id'] = self.script_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateResponse': + return cls(script_id=d.get('script_id', None)) + + +@dataclass +class Created: + id: str = None + + def as_dict(self) -> dict: + body = {} + if self.id: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'Created': + return cls(id=d.get('id', None)) + + @dataclass class DataPlaneEventDetails: event_type: 'DataPlaneEventDetailsEventType' = None @@ -725,6 +1019,144 @@ def from_dict(cls, d: Dict[str, any]) -> 'DeleteCluster': return cls(cluster_id=d.get('cluster_id', None)) +@dataclass +class DeleteGlobalInitScriptRequest: + """Delete init script""" + + script_id: str + + +@dataclass +class DeleteInstancePool: + instance_pool_id: str + + def as_dict(self) -> dict: + body = {} + if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteInstancePool': + return cls(instance_pool_id=d.get('instance_pool_id', None)) + + +@dataclass +class DeletePolicy: + policy_id: str + + def as_dict(self) -> dict: + body = {} + if self.policy_id: body['policy_id'] = self.policy_id + return body + + @classmethod + def 
from_dict(cls, d: Dict[str, any]) -> 'DeletePolicy': + return cls(policy_id=d.get('policy_id', None)) + + +@dataclass +class DestroyContext: + cluster_id: str + context_id: str + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['clusterId'] = self.cluster_id + if self.context_id: body['contextId'] = self.context_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DestroyContext': + return cls(cluster_id=d.get('clusterId', None), context_id=d.get('contextId', None)) + + +@dataclass +class DiskSpec: + disk_count: int = None + disk_iops: int = None + disk_size: int = None + disk_throughput: int = None + disk_type: 'DiskType' = None + + def as_dict(self) -> dict: + body = {} + if self.disk_count: body['disk_count'] = self.disk_count + if self.disk_iops: body['disk_iops'] = self.disk_iops + if self.disk_size: body['disk_size'] = self.disk_size + if self.disk_throughput: body['disk_throughput'] = self.disk_throughput + if self.disk_type: body['disk_type'] = self.disk_type.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DiskSpec': + return cls(disk_count=d.get('disk_count', None), + disk_iops=d.get('disk_iops', None), + disk_size=d.get('disk_size', None), + disk_throughput=d.get('disk_throughput', None), + disk_type=_from_dict(d, 'disk_type', DiskType)) + + +@dataclass +class DiskType: + azure_disk_volume_type: 'DiskTypeAzureDiskVolumeType' = None + ebs_volume_type: 'DiskTypeEbsVolumeType' = None + + def as_dict(self) -> dict: + body = {} + if self.azure_disk_volume_type: body['azure_disk_volume_type'] = self.azure_disk_volume_type.value + if self.ebs_volume_type: body['ebs_volume_type'] = self.ebs_volume_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DiskType': + return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType), + ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType)) + + +class 
DiskTypeAzureDiskVolumeType(Enum): + + PREMIUM_LRS = 'PREMIUM_LRS' + STANDARD_LRS = 'STANDARD_LRS' + + +class DiskTypeEbsVolumeType(Enum): + + GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD' + THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD' + + +@dataclass +class DockerBasicAuth: + password: str = None + username: str = None + + def as_dict(self) -> dict: + body = {} + if self.password: body['password'] = self.password + if self.username: body['username'] = self.username + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DockerBasicAuth': + return cls(password=d.get('password', None), username=d.get('username', None)) + + +@dataclass +class DockerImage: + basic_auth: 'DockerBasicAuth' = None + url: str = None + + def as_dict(self) -> dict: + body = {} + if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict() + if self.url: body['url'] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DockerImage': + return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None)) + + class EbsVolumeType(Enum): """The type of EBS volumes that will be launched with this cluster.""" @@ -821,27 +1253,118 @@ def from_dict(cls, d: Dict[str, any]) -> 'EditCluster': @dataclass -class EventDetails: - attributes: 'ClusterAttributes' = None - cause: 'EventDetailsCause' = None - cluster_size: 'ClusterSize' = None - current_num_vcpus: int = None - current_num_workers: int = None - did_not_expand_reason: str = None - disk_size: int = None - driver_state_message: str = None - enable_termination_for_node_blocklisted: bool = None - free_space: int = None - instance_id: str = None - job_run_name: str = None - previous_attributes: 'ClusterAttributes' = None - previous_cluster_size: 'ClusterSize' = None - previous_disk_size: int = None - reason: 'TerminationReason' = None - target_num_vcpus: int = None - target_num_workers: int = None - user: str = None - +class EditInstancePool: + instance_pool_id: str 
+ instance_pool_name: str + node_type_id: str + aws_attributes: 'InstancePoolAwsAttributes' = None + azure_attributes: 'InstancePoolAzureAttributes' = None + custom_tags: 'Dict[str,str]' = None + disk_spec: 'DiskSpec' = None + enable_elastic_disk: bool = None + idle_instance_autotermination_minutes: int = None + instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None + max_capacity: int = None + min_idle_instances: int = None + preloaded_docker_images: 'List[DockerImage]' = None + preloaded_spark_versions: 'List[str]' = None + + def as_dict(self) -> dict: + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() + if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.idle_instance_autotermination_minutes: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_fleet_attributes: + body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() + if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity: body['max_capacity'] = self.max_capacity + if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: + body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'EditInstancePool': + return cls(aws_attributes=_from_dict(d, 
'aws_attributes', InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), + custom_tags=d.get('custom_tags', None), + disk_spec=_from_dict(d, 'disk_spec', DiskSpec), + enable_elastic_disk=d.get('enable_elastic_disk', None), + idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), + instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', + InstancePoolFleetAttributes), + instance_pool_id=d.get('instance_pool_id', None), + instance_pool_name=d.get('instance_pool_name', None), + max_capacity=d.get('max_capacity', None), + min_idle_instances=d.get('min_idle_instances', None), + node_type_id=d.get('node_type_id', None), + preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), + preloaded_spark_versions=d.get('preloaded_spark_versions', None)) + + +@dataclass +class EditPolicy: + policy_id: str + name: str + definition: str = None + description: str = None + max_clusters_per_user: int = None + policy_family_definition_overrides: str = None + policy_family_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.definition: body['definition'] = self.definition + if self.description: body['description'] = self.description + if self.max_clusters_per_user: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name: body['name'] = self.name + if self.policy_family_definition_overrides: + body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id: body['policy_family_id'] = self.policy_family_id + if self.policy_id: body['policy_id'] = self.policy_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'EditPolicy': + return cls(definition=d.get('definition', None), + description=d.get('description', None), + max_clusters_per_user=d.get('max_clusters_per_user', None), + name=d.get('name', None), + 
policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), + policy_family_id=d.get('policy_family_id', None), + policy_id=d.get('policy_id', None)) + + +@dataclass +class EventDetails: + attributes: 'ClusterAttributes' = None + cause: 'EventDetailsCause' = None + cluster_size: 'ClusterSize' = None + current_num_vcpus: int = None + current_num_workers: int = None + did_not_expand_reason: str = None + disk_size: int = None + driver_state_message: str = None + enable_termination_for_node_blocklisted: bool = None + free_space: int = None + instance_id: str = None + job_run_name: str = None + previous_attributes: 'ClusterAttributes' = None + previous_cluster_size: 'ClusterSize' = None + previous_disk_size: int = None + reason: 'TerminationReason' = None + target_num_vcpus: int = None + target_num_workers: int = None + user: str = None + def as_dict(self) -> dict: body = {} if self.attributes: body['attributes'] = self.attributes.as_dict() @@ -928,6 +1451,89 @@ class EventType(Enum): UPSIZE_COMPLETED = 'UPSIZE_COMPLETED' +@dataclass +class FleetLaunchTemplateOverride: + availability_zone: str + instance_type: str + max_price: float = None + priority: float = None + + def as_dict(self) -> dict: + body = {} + if self.availability_zone: body['availability_zone'] = self.availability_zone + if self.instance_type: body['instance_type'] = self.instance_type + if self.max_price: body['max_price'] = self.max_price + if self.priority: body['priority'] = self.priority + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'FleetLaunchTemplateOverride': + return cls(availability_zone=d.get('availability_zone', None), + instance_type=d.get('instance_type', None), + max_price=d.get('max_price', None), + priority=d.get('priority', None)) + + +@dataclass +class FleetOnDemandOption: + allocation_strategy: 'FleetOnDemandOptionAllocationStrategy' = None + max_total_price: float = None + use_capacity_reservations_first: bool = None + + def 
as_dict(self) -> dict: + body = {} + if self.allocation_strategy: body['allocation_strategy'] = self.allocation_strategy.value + if self.max_total_price: body['max_total_price'] = self.max_total_price + if self.use_capacity_reservations_first: + body['use_capacity_reservations_first'] = self.use_capacity_reservations_first + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'FleetOnDemandOption': + return cls(allocation_strategy=_enum(d, 'allocation_strategy', FleetOnDemandOptionAllocationStrategy), + max_total_price=d.get('max_total_price', None), + use_capacity_reservations_first=d.get('use_capacity_reservations_first', None)) + + +class FleetOnDemandOptionAllocationStrategy(Enum): + """Only lowest-price and prioritized are allowed""" + + CAPACITY_OPTIMIZED = 'CAPACITY_OPTIMIZED' + DIVERSIFIED = 'DIVERSIFIED' + LOWEST_PRICE = 'LOWEST_PRICE' + PRIORITIZED = 'PRIORITIZED' + + +@dataclass +class FleetSpotOption: + allocation_strategy: 'FleetSpotOptionAllocationStrategy' = None + instance_pools_to_use_count: int = None + max_total_price: float = None + + def as_dict(self) -> dict: + body = {} + if self.allocation_strategy: body['allocation_strategy'] = self.allocation_strategy.value + if self.instance_pools_to_use_count: + body['instance_pools_to_use_count'] = self.instance_pools_to_use_count + if self.max_total_price: body['max_total_price'] = self.max_total_price + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'FleetSpotOption': + return cls(allocation_strategy=_enum(d, 'allocation_strategy', FleetSpotOptionAllocationStrategy), + instance_pools_to_use_count=d.get('instance_pools_to_use_count', None), + max_total_price=d.get('max_total_price', None)) + + +class FleetSpotOptionAllocationStrategy(Enum): + """lowest-price | diversified | capacity-optimized""" + + CAPACITY_OPTIMIZED = 'CAPACITY_OPTIMIZED' + DIVERSIFIED = 'DIVERSIFIED' + LOWEST_PRICE = 'LOWEST_PRICE' + PRIORITIZED = 'PRIORITIZED' + + @dataclass class 
GcpAttributes: availability: 'GcpAvailability' = None @@ -958,7 +1564,14 @@ class GcpAvailability(Enum): @dataclass -class Get: +class GetClusterPolicyRequest: + """Get entity""" + + policy_id: str + + +@dataclass +class GetClusterRequest: """Get cluster info""" cluster_id: str @@ -996,45 +1609,467 @@ def from_dict(cls, d: Dict[str, any]) -> 'GetEvents': start_time=d.get('start_time', None)) -class GetEventsOrder(Enum): - """The order to list events in; either "ASC" or "DESC". Defaults to "DESC".""" +class GetEventsOrder(Enum): + """The order to list events in; either "ASC" or "DESC". Defaults to "DESC".""" + + ASC = 'ASC' + DESC = 'DESC' + + +@dataclass +class GetEventsResponse: + events: 'List[ClusterEvent]' = None + next_page: 'GetEvents' = None + total_count: int = None + + def as_dict(self) -> dict: + body = {} + if self.events: body['events'] = [v.as_dict() for v in self.events] + if self.next_page: body['next_page'] = self.next_page.as_dict() + if self.total_count: body['total_count'] = self.total_count + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetEventsResponse': + return cls(events=_repeated(d, 'events', ClusterEvent), + next_page=_from_dict(d, 'next_page', GetEvents), + total_count=d.get('total_count', None)) + + +@dataclass +class GetGlobalInitScriptRequest: + """Get an init script""" + + script_id: str + + +@dataclass +class GetInstancePool: + instance_pool_id: str + aws_attributes: 'InstancePoolAwsAttributes' = None + azure_attributes: 'InstancePoolAzureAttributes' = None + custom_tags: 'Dict[str,str]' = None + default_tags: 'Dict[str,str]' = None + disk_spec: 'DiskSpec' = None + enable_elastic_disk: bool = None + idle_instance_autotermination_minutes: int = None + instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None + instance_pool_name: str = None + max_capacity: int = None + min_idle_instances: int = None + node_type_id: str = None + preloaded_docker_images: 'List[DockerImage]' = None + 
preloaded_spark_versions: 'List[str]' = None + state: 'InstancePoolState' = None + stats: 'InstancePoolStats' = None + status: 'InstancePoolStatus' = None + + def as_dict(self) -> dict: + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() + if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.idle_instance_autotermination_minutes: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_fleet_attributes: + body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() + if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity: body['max_capacity'] = self.max_capacity + if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: + body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.state: body['state'] = self.state.value + if self.stats: body['stats'] = self.stats.as_dict() + if self.status: body['status'] = self.status.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetInstancePool': + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), + custom_tags=d.get('custom_tags', None), + 
default_tags=d.get('default_tags', None), + disk_spec=_from_dict(d, 'disk_spec', DiskSpec), + enable_elastic_disk=d.get('enable_elastic_disk', None), + idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), + instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', + InstancePoolFleetAttributes), + instance_pool_id=d.get('instance_pool_id', None), + instance_pool_name=d.get('instance_pool_name', None), + max_capacity=d.get('max_capacity', None), + min_idle_instances=d.get('min_idle_instances', None), + node_type_id=d.get('node_type_id', None), + preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), + preloaded_spark_versions=d.get('preloaded_spark_versions', None), + state=_enum(d, 'state', InstancePoolState), + stats=_from_dict(d, 'stats', InstancePoolStats), + status=_from_dict(d, 'status', InstancePoolStatus)) + + +@dataclass +class GetInstancePoolRequest: + """Get instance pool information""" + + instance_pool_id: str + + +@dataclass +class GetPolicyFamilyRequest: + policy_family_id: str + + +@dataclass +class GetSparkVersionsResponse: + versions: 'List[SparkVersion]' = None + + def as_dict(self) -> dict: + body = {} + if self.versions: body['versions'] = [v.as_dict() for v in self.versions] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetSparkVersionsResponse': + return cls(versions=_repeated(d, 'versions', SparkVersion)) + + +@dataclass +class GlobalInitScriptCreateRequest: + name: str + script: str + enabled: bool = None + position: int = None + + def as_dict(self) -> dict: + body = {} + if self.enabled: body['enabled'] = self.enabled + if self.name: body['name'] = self.name + if self.position: body['position'] = self.position + if self.script: body['script'] = self.script + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptCreateRequest': + return cls(enabled=d.get('enabled', None), + name=d.get('name', 
None), + position=d.get('position', None), + script=d.get('script', None)) + + +@dataclass +class GlobalInitScriptDetails: + created_at: int = None + created_by: str = None + enabled: bool = None + name: str = None + position: int = None + script_id: str = None + updated_at: int = None + updated_by: str = None + + def as_dict(self) -> dict: + body = {} + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.enabled: body['enabled'] = self.enabled + if self.name: body['name'] = self.name + if self.position: body['position'] = self.position + if self.script_id: body['script_id'] = self.script_id + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptDetails': + return cls(created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + enabled=d.get('enabled', None), + name=d.get('name', None), + position=d.get('position', None), + script_id=d.get('script_id', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None)) + + +@dataclass +class GlobalInitScriptDetailsWithContent: + created_at: int = None + created_by: str = None + enabled: bool = None + name: str = None + position: int = None + script: str = None + script_id: str = None + updated_at: int = None + updated_by: str = None + + def as_dict(self) -> dict: + body = {} + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.enabled: body['enabled'] = self.enabled + if self.name: body['name'] = self.name + if self.position: body['position'] = self.position + if self.script: body['script'] = self.script + if self.script_id: body['script_id'] = self.script_id + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + 
return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptDetailsWithContent': + return cls(created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + enabled=d.get('enabled', None), + name=d.get('name', None), + position=d.get('position', None), + script=d.get('script', None), + script_id=d.get('script_id', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None)) + + +@dataclass +class GlobalInitScriptUpdateRequest: + name: str + script: str + script_id: str + enabled: bool = None + position: int = None + + def as_dict(self) -> dict: + body = {} + if self.enabled: body['enabled'] = self.enabled + if self.name: body['name'] = self.name + if self.position: body['position'] = self.position + if self.script: body['script'] = self.script + if self.script_id: body['script_id'] = self.script_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptUpdateRequest': + return cls(enabled=d.get('enabled', None), + name=d.get('name', None), + position=d.get('position', None), + script=d.get('script', None), + script_id=d.get('script_id', None)) + + +@dataclass +class InstallLibraries: + cluster_id: str + libraries: 'List[Library]' + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'InstallLibraries': + return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated(d, 'libraries', Library)) + + +@dataclass +class InstancePoolAndStats: + aws_attributes: 'InstancePoolAwsAttributes' = None + azure_attributes: 'InstancePoolAzureAttributes' = None + custom_tags: 'Dict[str,str]' = None + default_tags: 'Dict[str,str]' = None + disk_spec: 'DiskSpec' = None + enable_elastic_disk: bool = None + idle_instance_autotermination_minutes: int = None + 
instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None + instance_pool_id: str = None + instance_pool_name: str = None + max_capacity: int = None + min_idle_instances: int = None + node_type_id: str = None + preloaded_docker_images: 'List[DockerImage]' = None + preloaded_spark_versions: 'List[str]' = None + state: 'InstancePoolState' = None + stats: 'InstancePoolStats' = None + status: 'InstancePoolStatus' = None + + def as_dict(self) -> dict: + body = {} + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() + if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.idle_instance_autotermination_minutes: + body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_fleet_attributes: + body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() + if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity: body['max_capacity'] = self.max_capacity + if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: + body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.state: body['state'] = self.state.value + if self.stats: body['stats'] = self.stats.as_dict() + if self.status: body['status'] = self.status.as_dict() + return body + + @classmethod + 
def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAndStats': + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), + custom_tags=d.get('custom_tags', None), + default_tags=d.get('default_tags', None), + disk_spec=_from_dict(d, 'disk_spec', DiskSpec), + enable_elastic_disk=d.get('enable_elastic_disk', None), + idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), + instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', + InstancePoolFleetAttributes), + instance_pool_id=d.get('instance_pool_id', None), + instance_pool_name=d.get('instance_pool_name', None), + max_capacity=d.get('max_capacity', None), + min_idle_instances=d.get('min_idle_instances', None), + node_type_id=d.get('node_type_id', None), + preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), + preloaded_spark_versions=d.get('preloaded_spark_versions', None), + state=_enum(d, 'state', InstancePoolState), + stats=_from_dict(d, 'stats', InstancePoolStats), + status=_from_dict(d, 'status', InstancePoolStatus)) + + +@dataclass +class InstancePoolAwsAttributes: + availability: 'InstancePoolAwsAttributesAvailability' = None + spot_bid_price_percent: int = None + zone_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.availability: body['availability'] = self.availability.value + if self.spot_bid_price_percent: body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id: body['zone_id'] = self.zone_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAwsAttributes': + return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability), + spot_bid_price_percent=d.get('spot_bid_price_percent', None), + zone_id=d.get('zone_id', None)) + + +class InstancePoolAwsAttributesAvailability(Enum): + """Availability type used for 
the spot nodes. + + The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability""" + + ON_DEMAND = 'ON_DEMAND' + SPOT = 'SPOT' + SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK' + + +@dataclass +class InstancePoolAzureAttributes: + availability: 'InstancePoolAzureAttributesAvailability' = None + spot_bid_max_price: float = None + + def as_dict(self) -> dict: + body = {} + if self.availability: body['availability'] = self.availability.value + if self.spot_bid_max_price: body['spot_bid_max_price'] = self.spot_bid_max_price + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAzureAttributes': + return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability), + spot_bid_max_price=d.get('spot_bid_max_price', None)) + + +class InstancePoolAzureAttributesAvailability(Enum): + """Shows the Availability type used for the spot nodes. + + The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability""" + + ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' + SPOT_AZURE = 'SPOT_AZURE' + SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE' + + +@dataclass +class InstancePoolFleetAttributes: + fleet_on_demand_option: 'FleetOnDemandOption' = None + fleet_spot_option: 'FleetSpotOption' = None + launch_template_overrides: 'List[FleetLaunchTemplateOverride]' = None + + def as_dict(self) -> dict: + body = {} + if self.fleet_on_demand_option: body['fleet_on_demand_option'] = self.fleet_on_demand_option.as_dict() + if self.fleet_spot_option: body['fleet_spot_option'] = self.fleet_spot_option.as_dict() + if self.launch_template_overrides: + body['launch_template_overrides'] = [v.as_dict() for v in self.launch_template_overrides] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolFleetAttributes': + return cls(fleet_on_demand_option=_from_dict(d, 'fleet_on_demand_option', FleetOnDemandOption), + fleet_spot_option=_from_dict(d, 'fleet_spot_option', FleetSpotOption), 
+ launch_template_overrides=_repeated(d, 'launch_template_overrides', + FleetLaunchTemplateOverride)) + - ASC = 'ASC' - DESC = 'DESC' +class InstancePoolState(Enum): + """Current state of the instance pool.""" + + ACTIVE = 'ACTIVE' + DELETED = 'DELETED' + STOPPED = 'STOPPED' @dataclass -class GetEventsResponse: - events: 'List[ClusterEvent]' = None - next_page: 'GetEvents' = None - total_count: int = None +class InstancePoolStats: + idle_count: int = None + pending_idle_count: int = None + pending_used_count: int = None + used_count: int = None def as_dict(self) -> dict: body = {} - if self.events: body['events'] = [v.as_dict() for v in self.events] - if self.next_page: body['next_page'] = self.next_page.as_dict() - if self.total_count: body['total_count'] = self.total_count + if self.idle_count: body['idle_count'] = self.idle_count + if self.pending_idle_count: body['pending_idle_count'] = self.pending_idle_count + if self.pending_used_count: body['pending_used_count'] = self.pending_used_count + if self.used_count: body['used_count'] = self.used_count return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetEventsResponse': - return cls(events=_repeated(d, 'events', ClusterEvent), - next_page=_from_dict(d, 'next_page', GetEvents), - total_count=d.get('total_count', None)) + def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStats': + return cls(idle_count=d.get('idle_count', None), + pending_idle_count=d.get('pending_idle_count', None), + pending_used_count=d.get('pending_used_count', None), + used_count=d.get('used_count', None)) @dataclass -class GetSparkVersionsResponse: - versions: 'List[SparkVersion]' = None +class InstancePoolStatus: + pending_instance_errors: 'List[PendingInstanceError]' = None def as_dict(self) -> dict: body = {} - if self.versions: body['versions'] = [v.as_dict() for v in self.versions] + if self.pending_instance_errors: + body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] return body 
@classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetSparkVersionsResponse': - return cls(versions=_repeated(d, 'versions', SparkVersion)) + def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStatus': + return cls(pending_instance_errors=_repeated(d, 'pending_instance_errors', PendingInstanceError)) @dataclass @@ -1057,11 +2092,90 @@ def from_dict(cls, d: Dict[str, any]) -> 'InstanceProfile': is_meta_instance_profile=d.get('is_meta_instance_profile', None)) +class Language(Enum): + + python = 'python' + scala = 'scala' + sql = 'sql' + + @dataclass -class ListRequest: - """List all clusters""" +class Library: + cran: 'RCranLibrary' = None + egg: str = None + jar: str = None + maven: 'MavenLibrary' = None + pypi: 'PythonPyPiLibrary' = None + whl: str = None - can_use_client: str = None + def as_dict(self) -> dict: + body = {} + if self.cran: body['cran'] = self.cran.as_dict() + if self.egg: body['egg'] = self.egg + if self.jar: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven.as_dict() + if self.pypi: body['pypi'] = self.pypi.as_dict() + if self.whl: body['whl'] = self.whl + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'Library': + return cls(cran=_from_dict(d, 'cran', RCranLibrary), + egg=d.get('egg', None), + jar=d.get('jar', None), + maven=_from_dict(d, 'maven', MavenLibrary), + pypi=_from_dict(d, 'pypi', PythonPyPiLibrary), + whl=d.get('whl', None)) + + +@dataclass +class LibraryFullStatus: + is_library_for_all_clusters: bool = None + library: 'Library' = None + messages: 'List[str]' = None + status: 'LibraryFullStatusStatus' = None + + def as_dict(self) -> dict: + body = {} + if self.is_library_for_all_clusters: + body['is_library_for_all_clusters'] = self.is_library_for_all_clusters + if self.library: body['library'] = self.library.as_dict() + if self.messages: body['messages'] = [v for v in self.messages] + if self.status: body['status'] = self.status.value + return body + + @classmethod + def 
from_dict(cls, d: Dict[str, any]) -> 'LibraryFullStatus': + return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None), + library=_from_dict(d, 'library', Library), + messages=d.get('messages', None), + status=_enum(d, 'status', LibraryFullStatusStatus)) + + +class LibraryFullStatusStatus(Enum): + """Status of installing the library on the cluster.""" + + FAILED = 'FAILED' + INSTALLED = 'INSTALLED' + INSTALLING = 'INSTALLING' + PENDING = 'PENDING' + RESOLVING = 'RESOLVING' + SKIPPED = 'SKIPPED' + UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART' + + +@dataclass +class ListAllClusterLibraryStatusesResponse: + statuses: 'List[ClusterLibraryStatuses]' = None + + def as_dict(self) -> dict: + body = {} + if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListAllClusterLibraryStatusesResponse': + return cls(statuses=_repeated(d, 'statuses', ClusterLibraryStatuses)) @dataclass @@ -1080,6 +2194,21 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListAvailableZonesResponse': return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) +@dataclass +class ListClusterPoliciesRequest: + """Get a cluster policy""" + + sort_column: 'ListSortColumn' = None + sort_order: 'ListSortOrder' = None + + +@dataclass +class ListClustersRequest: + """List all clusters""" + + can_use_client: str = None + + @dataclass class ListClustersResponse: clusters: 'List[ClusterInfo]' = None @@ -1094,6 +2223,34 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListClustersResponse': return cls(clusters=_repeated(d, 'clusters', ClusterInfo)) +@dataclass +class ListGlobalInitScriptsResponse: + scripts: 'List[GlobalInitScriptDetails]' = None + + def as_dict(self) -> dict: + body = {} + if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListGlobalInitScriptsResponse': + return 
cls(scripts=_repeated(d, 'scripts', GlobalInitScriptDetails)) + + +@dataclass +class ListInstancePools: + instance_pools: 'List[InstancePoolAndStats]' = None + + def as_dict(self) -> dict: + body = {} + if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListInstancePools': + return cls(instance_pools=_repeated(d, 'instance_pools', InstancePoolAndStats)) + + @dataclass class ListInstanceProfilesResponse: instance_profiles: 'List[InstanceProfile]' = None @@ -1122,6 +2279,55 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListNodeTypesResponse': return cls(node_types=_repeated(d, 'node_types', NodeType)) +@dataclass +class ListPoliciesResponse: + policies: 'List[Policy]' = None + + def as_dict(self) -> dict: + body = {} + if self.policies: body['policies'] = [v.as_dict() for v in self.policies] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListPoliciesResponse': + return cls(policies=_repeated(d, 'policies', Policy)) + + +@dataclass +class ListPolicyFamiliesRequest: + max_results: int = None + page_token: str = None + + +@dataclass +class ListPolicyFamiliesResponse: + policy_families: 'List[PolicyFamily]' + next_page_token: str = None + + def as_dict(self) -> dict: + body = {} + if self.next_page_token: body['next_page_token'] = self.next_page_token + if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListPolicyFamiliesResponse': + return cls(next_page_token=d.get('next_page_token', None), + policy_families=_repeated(d, 'policy_families', PolicyFamily)) + + +class ListSortColumn(Enum): + + POLICY_CREATION_TIME = 'POLICY_CREATION_TIME' + POLICY_NAME = 'POLICY_NAME' + + +class ListSortOrder(Enum): + + ASC = 'ASC' + DESC = 'DESC' + + @dataclass class LogAnalyticsInfo: log_analytics_primary_key: str = None @@ 
-1156,6 +2362,26 @@ def from_dict(cls, d: Dict[str, any]) -> 'LogSyncStatus': return cls(last_attempted=d.get('last_attempted', None), last_exception=d.get('last_exception', None)) +@dataclass +class MavenLibrary: + coordinates: str + exclusions: 'List[str]' = None + repo: str = None + + def as_dict(self) -> dict: + body = {} + if self.coordinates: body['coordinates'] = self.coordinates + if self.exclusions: body['exclusions'] = [v for v in self.exclusions] + if self.repo: body['repo'] = self.repo + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'MavenLibrary': + return cls(coordinates=d.get('coordinates', None), + exclusions=d.get('exclusions', None), + repo=d.get('repo', None)) + + @dataclass class NodeInstanceType: instance_type_id: str = None @@ -1253,6 +2479,22 @@ def from_dict(cls, d: Dict[str, any]) -> 'NodeType': support_port_forwarding=d.get('support_port_forwarding', None)) +@dataclass +class PendingInstanceError: + instance_id: str = None + message: str = None + + def as_dict(self) -> dict: + body = {} + if self.instance_id: body['instance_id'] = self.instance_id + if self.message: body['message'] = self.message + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PendingInstanceError': + return cls(instance_id=d.get('instance_id', None), message=d.get('message', None)) + + @dataclass class PermanentDeleteCluster: cluster_id: str @@ -1281,6 +2523,103 @@ def from_dict(cls, d: Dict[str, any]) -> 'PinCluster': return cls(cluster_id=d.get('cluster_id', None)) +@dataclass +class Policy: + created_at_timestamp: int = None + creator_user_name: str = None + definition: str = None + description: str = None + is_default: bool = None + max_clusters_per_user: int = None + name: str = None + policy_family_definition_overrides: str = None + policy_family_id: str = None + policy_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.created_at_timestamp: body['created_at_timestamp'] = self.created_at_timestamp 
+ if self.creator_user_name: body['creator_user_name'] = self.creator_user_name + if self.definition: body['definition'] = self.definition + if self.description: body['description'] = self.description + if self.is_default: body['is_default'] = self.is_default + if self.max_clusters_per_user: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name: body['name'] = self.name + if self.policy_family_definition_overrides: + body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id: body['policy_family_id'] = self.policy_family_id + if self.policy_id: body['policy_id'] = self.policy_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'Policy': + return cls(created_at_timestamp=d.get('created_at_timestamp', None), + creator_user_name=d.get('creator_user_name', None), + definition=d.get('definition', None), + description=d.get('description', None), + is_default=d.get('is_default', None), + max_clusters_per_user=d.get('max_clusters_per_user', None), + name=d.get('name', None), + policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), + policy_family_id=d.get('policy_family_id', None), + policy_id=d.get('policy_id', None)) + + +@dataclass +class PolicyFamily: + policy_family_id: str + name: str + description: str + definition: str + + def as_dict(self) -> dict: + body = {} + if self.definition: body['definition'] = self.definition + if self.description: body['description'] = self.description + if self.name: body['name'] = self.name + if self.policy_family_id: body['policy_family_id'] = self.policy_family_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PolicyFamily': + return cls(definition=d.get('definition', None), + description=d.get('description', None), + name=d.get('name', None), + policy_family_id=d.get('policy_family_id', None)) + + +@dataclass +class PythonPyPiLibrary: + package: str + repo: str = None + + def 
as_dict(self) -> dict: + body = {} + if self.package: body['package'] = self.package + if self.repo: body['repo'] = self.repo + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PythonPyPiLibrary': + return cls(package=d.get('package', None), repo=d.get('repo', None)) + + +@dataclass +class RCranLibrary: + package: str + repo: str = None + + def as_dict(self) -> dict: + body = {} + if self.package: body['package'] = self.package + if self.repo: body['repo'] = self.repo + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RCranLibrary': + return cls(package=d.get('package', None), repo=d.get('repo', None)) + + @dataclass class RemoveInstanceProfile: instance_profile_arn: str @@ -1331,6 +2670,56 @@ def from_dict(cls, d: Dict[str, any]) -> 'RestartCluster': return cls(cluster_id=d.get('cluster_id', None), restart_user=d.get('restart_user', None)) +class ResultType(Enum): + + error = 'error' + image = 'image' + images = 'images' + table = 'table' + text = 'text' + + +@dataclass +class Results: + cause: str = None + data: Any = None + file_name: str = None + file_names: 'List[str]' = None + is_json_schema: bool = None + pos: int = None + result_type: 'ResultType' = None + schema: 'List[Dict[str,Any]]' = None + summary: str = None + truncated: bool = None + + def as_dict(self) -> dict: + body = {} + if self.cause: body['cause'] = self.cause + if self.data: body['data'] = self.data + if self.file_name: body['fileName'] = self.file_name + if self.file_names: body['fileNames'] = [v for v in self.file_names] + if self.is_json_schema: body['isJsonSchema'] = self.is_json_schema + if self.pos: body['pos'] = self.pos + if self.result_type: body['resultType'] = self.result_type.value + if self.schema: body['schema'] = [v for v in self.schema] + if self.summary: body['summary'] = self.summary + if self.truncated: body['truncated'] = self.truncated + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'Results': + 
return cls(cause=d.get('cause', None), + data=d.get('data', None), + file_name=d.get('fileName', None), + file_names=d.get('fileNames', None), + is_json_schema=d.get('isJsonSchema', None), + pos=d.get('pos', None), + result_type=_enum(d, 'resultType', ResultType), + schema=d.get('schema', None), + summary=d.get('summary', None), + truncated=d.get('truncated', None)) + + class RuntimeEngine(Enum): """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine is inferred from spark_version.""" @@ -1574,6 +2963,22 @@ class TerminationReasonType(Enum): SUCCESS = 'SUCCESS' +@dataclass +class UninstallLibraries: + cluster_id: str + libraries: 'List[Library]' + + def as_dict(self) -> dict: + body = {} + if self.cluster_id: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UninstallLibraries': + return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated(d, 'libraries', Library)) + + @dataclass class UnpinCluster: cluster_id: str @@ -1583,23 +2988,141 @@ def as_dict(self) -> dict: if self.cluster_id: body['cluster_id'] = self.cluster_id return body - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UnpinCluster': - return cls(cluster_id=d.get('cluster_id', None)) + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UnpinCluster': + return cls(cluster_id=d.get('cluster_id', None)) + + +@dataclass +class WorkloadType: + clients: 'ClientsTypes' = None + + def as_dict(self) -> dict: + body = {} + if self.clients: body['clients'] = self.clients.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'WorkloadType': + return cls(clients=_from_dict(d, 'clients', ClientsTypes)) + + +class ClusterPoliciesAPI: + """Cluster policy limits the ability to configure clusters based on a set of rules. 
The policy rules limit + the attributes or attribute values available for cluster creation. Cluster policies have ACLs that limit + their use to specific users and groups. + + Cluster policies let you limit users to create clusters with prescribed settings, simplify the user + interface and enable more users to create their own clusters (by fixing and hiding some values), control + cost by limiting per cluster maximum cost (by setting limits on attributes whose values contribute to + hourly price). + + Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user + creates a cluster: - A user who has cluster create permission can select the Unrestricted policy and + create fully-configurable clusters. - A user who has both cluster create permission and access to cluster + policies can select the Unrestricted policy and policies they have access to. - A user that has access to + only cluster policies, can select the policies they have access to. + + If no policies have been created in the workspace, the Policy drop-down does not display. + + Only admin users can create, edit, and delete policies. Admin users also have access to all policies.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + name: str, + *, + definition: str = None, + description: str = None, + max_clusters_per_user: int = None, + policy_family_definition_overrides: str = None, + policy_family_id: str = None, + **kwargs) -> CreatePolicyResponse: + """Create a new policy. 
+ + Creates a new policy with prescribed settings.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreatePolicy(definition=definition, + description=description, + max_clusters_per_user=max_clusters_per_user, + name=name, + policy_family_definition_overrides=policy_family_definition_overrides, + policy_family_id=policy_family_id) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/policies/clusters/create', body=body) + return CreatePolicyResponse.from_dict(json) + + def delete(self, policy_id: str, **kwargs): + """Delete a cluster policy. + + Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeletePolicy(policy_id=policy_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/policies/clusters/delete', body=body) + + def edit(self, + policy_id: str, + name: str, + *, + definition: str = None, + description: str = None, + max_clusters_per_user: int = None, + policy_family_definition_overrides: str = None, + policy_family_id: str = None, + **kwargs): + """Update a cluster policy. + + Update an existing policy for cluster. This operation may make some clusters governed by the previous + policy invalid.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = EditPolicy(definition=definition, + description=description, + max_clusters_per_user=max_clusters_per_user, + name=name, + policy_family_definition_overrides=policy_family_definition_overrides, + policy_family_id=policy_family_id, + policy_id=policy_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/policies/clusters/edit', body=body) + + def get(self, policy_id: str, **kwargs) -> Policy: + """Get entity. + + Get a cluster policy entity. 
Creation and editing is available to admins only.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetClusterPolicyRequest(policy_id=policy_id) + + query = {} + if policy_id: query['policy_id'] = request.policy_id + json = self._api.do('GET', '/api/2.0/policies/clusters/get', query=query) + return Policy.from_dict(json) -@dataclass -class WorkloadType: - clients: 'ClientsTypes' = None + def list(self, + *, + sort_column: ListSortColumn = None, + sort_order: ListSortOrder = None, + **kwargs) -> Iterator[Policy]: + """Get a cluster policy. + + Returns a list of policies accessible by the requesting user.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListClusterPoliciesRequest(sort_column=sort_column, sort_order=sort_order) - def as_dict(self) -> dict: - body = {} - if self.clients: body['clients'] = self.clients.as_dict() - return body + query = {} + if sort_column: query['sort_column'] = request.sort_column.value + if sort_order: query['sort_order'] = request.sort_order.value - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'WorkloadType': - return cls(clients=_from_dict(d, 'clients', ClientsTypes)) + json = self._api.do('GET', '/api/2.0/policies/clusters/list', query=query) + return [Policy.from_dict(v) for v in json.get('policies', [])] class ClustersAPI: @@ -2006,7 +3529,7 @@ def get(self, cluster_id: str, **kwargs) -> ClusterInfo: are running, or up to 60 days after they are terminated.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Get(cluster_id=cluster_id) + request = GetClusterRequest(cluster_id=cluster_id) query = {} if cluster_id: query['cluster_id'] = request.cluster_id @@ -2027,7 +3550,7 @@ def list(self, *, can_use_client: str = None, **kwargs) -> Iterator[ClusterInfo] terminated job clusters.""" request = kwargs.get('request', None) if not request: 
# request is not given through keyed args - request = ListRequest(can_use_client=can_use_client) + request = ListClustersRequest(can_use_client=can_use_client) query = {} if can_use_client: query['can_use_client'] = request.can_use_client @@ -2162,6 +3685,471 @@ def unpin(self, cluster_id: str, **kwargs): self._api.do('POST', '/api/2.0/clusters/unpin', body=body) +class CommandExecutionAPI: + """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" + + def __init__(self, api_client): + self._api = api_client + + def wait_command_status_command_execution_cancelled( + self, + cluster_id: str, + command_id: str, + context_id: str, + timeout=timedelta(minutes=20), + callback: Callable[[CommandStatusResponse], None] = None) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.Cancelled, ) + failure_states = (CommandStatus.Error, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if poll.results: + status_message = poll.results.cause + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Cancelled, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_command_status_command_execution_finished_or_error( + self, + cluster_id: str, + command_id: str, + context_id: str, + timeout=timedelta(minutes=20), + callback: 
Callable[[CommandStatusResponse], None] = None) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.Finished, CommandStatus.Error, ) + failure_states = (CommandStatus.Cancelled, CommandStatus.Cancelling, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Finished or Error, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_context_status_command_execution_running( + self, + cluster_id: str, + context_id: str, + timeout=timedelta(minutes=20), + callback: Callable[[ContextStatusResponse], None] = None) -> ContextStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (ContextStatus.Running, ) + failure_states = (ContextStatus.Error, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.context_status(cluster_id=cluster_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Running, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def cancel(self, + *, + cluster_id: str = None, + command_id: str = None, + context_id: str = None, + **kwargs) -> Wait[CommandStatusResponse]: + """Cancel a command. + + Cancels a currently running command within an execution context. + + The command ID is obtained from a prior successful call to __execute__.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CancelCommand(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + body = request.as_dict() + self._api.do('POST', '/api/1.2/commands/cancel', body=body) + return Wait(self.wait_command_status_command_execution_cancelled, + cluster_id=request.cluster_id, + command_id=request.command_id, + context_id=request.context_id) + + def cancel_and_wait(self, + *, + cluster_id: str = None, + command_id: str = None, + context_id: str = None, + timeout=timedelta(minutes=20)) -> CommandStatusResponse: + return self.cancel(cluster_id=cluster_id, command_id=command_id, + context_id=context_id).result(timeout=timeout) + + def command_status(self, cluster_id: str, context_id: str, command_id: str, + **kwargs) -> CommandStatusResponse: + """Get command info. 
+ + Gets the status of and, if available, the results from a currently executing command. + + The command ID is obtained from a prior successful call to __execute__.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CommandStatusRequest(cluster_id=cluster_id, + command_id=command_id, + context_id=context_id) + + query = {} + if cluster_id: query['clusterId'] = request.cluster_id + if command_id: query['commandId'] = request.command_id + if context_id: query['contextId'] = request.context_id + + json = self._api.do('GET', '/api/1.2/commands/status', query=query) + return CommandStatusResponse.from_dict(json) + + def context_status(self, cluster_id: str, context_id: str, **kwargs) -> ContextStatusResponse: + """Get status. + + Gets the status for an execution context.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ContextStatusRequest(cluster_id=cluster_id, context_id=context_id) + + query = {} + if cluster_id: query['clusterId'] = request.cluster_id + if context_id: query['contextId'] = request.context_id + + json = self._api.do('GET', '/api/1.2/contexts/status', query=query) + return ContextStatusResponse.from_dict(json) + + def create(self, + *, + cluster_id: str = None, + language: Language = None, + **kwargs) -> Wait[ContextStatusResponse]: + """Create an execution context. + + Creates an execution context for running cluster commands. 
+ + If successful, this method returns the ID of the new execution context.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateContext(cluster_id=cluster_id, language=language) + body = request.as_dict() + op_response = self._api.do('POST', '/api/1.2/contexts/create', body=body) + return Wait(self.wait_context_status_command_execution_running, + response=Created.from_dict(op_response), + cluster_id=request.cluster_id, + context_id=op_response['id']) + + def create_and_wait(self, + *, + cluster_id: str = None, + language: Language = None, + timeout=timedelta(minutes=20)) -> ContextStatusResponse: + return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout) + + def destroy(self, cluster_id: str, context_id: str, **kwargs): + """Delete an execution context. + + Deletes an execution context.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DestroyContext(cluster_id=cluster_id, context_id=context_id) + body = request.as_dict() + self._api.do('POST', '/api/1.2/contexts/destroy', body=body) + + def execute(self, + *, + cluster_id: str = None, + command: str = None, + context_id: str = None, + language: Language = None, + **kwargs) -> Wait[CommandStatusResponse]: + """Run a command. + + Runs a cluster command in the given execution context, using the provided language. 
+ + If successful, it returns an ID for tracking the status of the command's execution.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = Command(cluster_id=cluster_id, + command=command, + context_id=context_id, + language=language) + body = request.as_dict() + op_response = self._api.do('POST', '/api/1.2/commands/execute', body=body) + return Wait(self.wait_command_status_command_execution_finished_or_error, + response=Created.from_dict(op_response), + cluster_id=request.cluster_id, + command_id=op_response['id'], + context_id=request.context_id) + + def execute_and_wait(self, + *, + cluster_id: str = None, + command: str = None, + context_id: str = None, + language: Language = None, + timeout=timedelta(minutes=20)) -> CommandStatusResponse: + return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, + language=language).result(timeout=timeout) + + +class GlobalInitScriptsAPI: + """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts + for their workspace. These scripts run on every node in every cluster in the workspace. + + **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts. + Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark + container fails to launch and init scripts with later position are skipped. If enough containers fail, the + entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + name: str, + script: str, + *, + enabled: bool = None, + position: int = None, + **kwargs) -> CreateResponse: + """Create init script. 
+ + Creates a new global init script in this workspace.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GlobalInitScriptCreateRequest(enabled=enabled, + name=name, + position=position, + script=script) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/global-init-scripts', body=body) + return CreateResponse.from_dict(json) + + def delete(self, script_id: str, **kwargs): + """Delete init script. + + Deletes a global init script.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteGlobalInitScriptRequest(script_id=script_id) + + self._api.do('DELETE', f'/api/2.0/global-init-scripts/{request.script_id}') + + def get(self, script_id: str, **kwargs) -> GlobalInitScriptDetailsWithContent: + """Get an init script. + + Gets all the details of a script, including its Base64-encoded contents.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetGlobalInitScriptRequest(script_id=script_id) + + json = self._api.do('GET', f'/api/2.0/global-init-scripts/{request.script_id}') + return GlobalInitScriptDetailsWithContent.from_dict(json) + + def list(self) -> Iterator[GlobalInitScriptDetails]: + """Get init scripts. + + "Get a list of all global init scripts for this workspace. This returns all properties for each script + but **not** the script contents. To retrieve the contents of a script, use the [get a global init + script](#operation/get-script) operation.""" + + json = self._api.do('GET', '/api/2.0/global-init-scripts') + return [GlobalInitScriptDetails.from_dict(v) for v in json.get('scripts', [])] + + def update(self, + name: str, + script: str, + script_id: str, + *, + enabled: bool = None, + position: int = None, + **kwargs): + """Update init script. + + Updates a global init script, specifying only the fields to change. All fields are optional. 
+ Unspecified fields retain their current value.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GlobalInitScriptUpdateRequest(enabled=enabled, + name=name, + position=position, + script=script, + script_id=script_id) + body = request.as_dict() + self._api.do('PATCH', f'/api/2.0/global-init-scripts/{request.script_id}', body=body) + + +class InstancePoolsAPI: + """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud + instances which reduces a cluster start and auto-scaling times. + + Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use + instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle + instances. If the pool has no idle instances, the pool expands by allocating a new instance from the + instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it + returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that + pool’s idle instances. + + You can specify a different pool for the driver node and worker nodes, or use the same pool for both. + + Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does + apply. 
See pricing.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + instance_pool_name: str, + node_type_id: str, + *, + aws_attributes: InstancePoolAwsAttributes = None, + azure_attributes: InstancePoolAzureAttributes = None, + custom_tags: Dict[str, str] = None, + disk_spec: DiskSpec = None, + enable_elastic_disk: bool = None, + idle_instance_autotermination_minutes: int = None, + instance_pool_fleet_attributes: InstancePoolFleetAttributes = None, + max_capacity: int = None, + min_idle_instances: int = None, + preloaded_docker_images: List[DockerImage] = None, + preloaded_spark_versions: List[str] = None, + **kwargs) -> CreateInstancePoolResponse: + """Create a new instance pool. + + Creates a new instance pool using idle and ready-to-use cloud instances.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateInstancePool( + aws_attributes=aws_attributes, + azure_attributes=azure_attributes, + custom_tags=custom_tags, + disk_spec=disk_spec, + enable_elastic_disk=enable_elastic_disk, + idle_instance_autotermination_minutes=idle_instance_autotermination_minutes, + instance_pool_fleet_attributes=instance_pool_fleet_attributes, + instance_pool_name=instance_pool_name, + max_capacity=max_capacity, + min_idle_instances=min_idle_instances, + node_type_id=node_type_id, + preloaded_docker_images=preloaded_docker_images, + preloaded_spark_versions=preloaded_spark_versions) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/instance-pools/create', body=body) + return CreateInstancePoolResponse.from_dict(json) + + def delete(self, instance_pool_id: str, **kwargs): + """Delete an instance pool. + + Deletes the instance pool permanently. 
The idle instances in the pool are terminated asynchronously.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteInstancePool(instance_pool_id=instance_pool_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/instance-pools/delete', body=body) + + def edit(self, + instance_pool_id: str, + instance_pool_name: str, + node_type_id: str, + *, + aws_attributes: InstancePoolAwsAttributes = None, + azure_attributes: InstancePoolAzureAttributes = None, + custom_tags: Dict[str, str] = None, + disk_spec: DiskSpec = None, + enable_elastic_disk: bool = None, + idle_instance_autotermination_minutes: int = None, + instance_pool_fleet_attributes: InstancePoolFleetAttributes = None, + max_capacity: int = None, + min_idle_instances: int = None, + preloaded_docker_images: List[DockerImage] = None, + preloaded_spark_versions: List[str] = None, + **kwargs): + """Edit an existing instance pool. + + Modifies the configuration of an existing instance pool.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = EditInstancePool( + aws_attributes=aws_attributes, + azure_attributes=azure_attributes, + custom_tags=custom_tags, + disk_spec=disk_spec, + enable_elastic_disk=enable_elastic_disk, + idle_instance_autotermination_minutes=idle_instance_autotermination_minutes, + instance_pool_fleet_attributes=instance_pool_fleet_attributes, + instance_pool_id=instance_pool_id, + instance_pool_name=instance_pool_name, + max_capacity=max_capacity, + min_idle_instances=min_idle_instances, + node_type_id=node_type_id, + preloaded_docker_images=preloaded_docker_images, + preloaded_spark_versions=preloaded_spark_versions) + body = request.as_dict() + self._api.do('POST', '/api/2.0/instance-pools/edit', body=body) + + def get(self, instance_pool_id: str, **kwargs) -> GetInstancePool: + """Get instance pool information. 
+ + Retrieve the information for an instance pool based on its identifier.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetInstancePoolRequest(instance_pool_id=instance_pool_id) + + query = {} + if instance_pool_id: query['instance_pool_id'] = request.instance_pool_id + + json = self._api.do('GET', '/api/2.0/instance-pools/get', query=query) + return GetInstancePool.from_dict(json) + + def list(self) -> Iterator[InstancePoolAndStats]: + """List instance pool info. + + Gets a list of instance pools with their statistics.""" + + json = self._api.do('GET', '/api/2.0/instance-pools/list') + return [InstancePoolAndStats.from_dict(v) for v in json.get('instance_pools', [])] + + class InstanceProfilesAPI: """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 @@ -2242,3 +4230,130 @@ def remove(self, instance_profile_arn: str, **kwargs): request = RemoveInstanceProfile(instance_profile_arn=instance_profile_arn) body = request.as_dict() self._api.do('POST', '/api/2.0/instance-profiles/remove', body=body) + + +class LibrariesAPI: + """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a + cluster. + + To make third-party or custom code available to notebooks and jobs running on your clusters, you can + install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Java, Scala, and + Python libraries and point to external packages in PyPI, Maven, and CRAN repositories. + + Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library + directly from a public repository such as PyPI or Maven, using a previously installed workspace library, + or using an init script. 
+ + When you install a library on a cluster, a notebook already attached to that cluster will not immediately + see the new library. You must first detach and then reattach the notebook to the cluster. + + When you uninstall a library from a cluster, the library is removed only when you restart the cluster. + Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.""" + + def __init__(self, api_client): + self._api = api_client + + def all_cluster_statuses(self) -> ListAllClusterLibraryStatusesResponse: + """Get all statuses. + + Get the status of all libraries on all clusters. A status will be available for all libraries + installed on this cluster via the API or the libraries UI as well as libraries set to be installed on + all clusters via the libraries UI.""" + + json = self._api.do('GET', '/api/2.0/libraries/all-cluster-statuses') + return ListAllClusterLibraryStatusesResponse.from_dict(json) + + def cluster_status(self, cluster_id: str, **kwargs) -> ClusterLibraryStatuses: + """Get status. + + Get the status of libraries on a cluster. A status will be available for all libraries installed on + this cluster via the API or the libraries UI as well as libraries set to be installed on all clusters + via the libraries UI. The order of returned libraries will be as follows. + + 1. Libraries set to be installed on this cluster will be returned first. Within this group, the final + order will be order in which the libraries were added to the cluster. + + 2. Libraries set to be installed on all clusters are returned next. Within this group there is no + order guarantee. + + 3. Libraries that were previously requested on this cluster or on all clusters, but now marked for + removal. 
Within this group there is no order guarantee.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ClusterStatusRequest(cluster_id=cluster_id) + + query = {} + if cluster_id: query['cluster_id'] = request.cluster_id + + json = self._api.do('GET', '/api/2.0/libraries/cluster-status', query=query) + return ClusterLibraryStatuses.from_dict(json) + + def install(self, cluster_id: str, libraries: List[Library], **kwargs): + """Add a library. + + Add libraries to be installed on a cluster. The installation is asynchronous; it happens in the + background after the completion of this request. + + **Note**: The actual set of libraries to be installed on a cluster is the union of the libraries + specified via this method and the libraries set to be installed on all clusters via the libraries UI.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = InstallLibraries(cluster_id=cluster_id, libraries=libraries) + body = request.as_dict() + self._api.do('POST', '/api/2.0/libraries/install', body=body) + + def uninstall(self, cluster_id: str, libraries: List[Library], **kwargs): + """Uninstall libraries. + + Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the cluster is + restarted. Uninstalling libraries that are not installed on the cluster will have no impact but is not + an error.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UninstallLibraries(cluster_id=cluster_id, libraries=libraries) + body = request.as_dict() + self._api.do('POST', '/api/2.0/libraries/uninstall', body=body) + + +class PolicyFamiliesAPI: + """View available policy families. A policy family contains a policy definition providing best practices for + configuring clusters for a particular use case. 
+ + Databricks manages and provides policy families for several common cluster use cases. You cannot create, + edit, or delete policy families. + + Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a + policy family. Cluster policies created using a policy family inherit the policy family's policy + definition.""" + + def __init__(self, api_client): + self._api = api_client + + def get(self, policy_family_id: str, **kwargs) -> PolicyFamily: + + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetPolicyFamilyRequest(policy_family_id=policy_family_id) + + json = self._api.do('GET', f'/api/2.0/policy-families/{request.policy_family_id}') + return PolicyFamily.from_dict(json) + + def list(self, *, max_results: int = None, page_token: str = None, **kwargs) -> Iterator[PolicyFamily]: + + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListPolicyFamiliesRequest(max_results=max_results, page_token=page_token) + + query = {} + if max_results: query['max_results'] = request.max_results + if page_token: query['page_token'] = request.page_token + + while True: + json = self._api.do('GET', '/api/2.0/policy-families', query=query) + if 'policy_families' not in json or not json['policy_families']: + return + for v in json['policy_families']: + yield PolicyFamily.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] diff --git a/databricks/sdk/service/dbfs.py b/databricks/sdk/service/files.py similarity index 98% rename from databricks/sdk/service/dbfs.py rename to databricks/sdk/service/files.py index af107b24f..10968d23e 100755 --- a/databricks/sdk/service/dbfs.py +++ b/databricks/sdk/service/files.py @@ -111,14 +111,14 @@ def from_dict(cls, d: Dict[str, any]) -> 'FileInfo': @dataclass -class GetStatus: +class 
GetStatusRequest: """Get the information of a file or directory""" path: str @dataclass -class ListRequest: +class ListDbfsRequest: """List directory contents or file details""" path: str @@ -189,7 +189,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'Put': @dataclass -class Read: +class ReadDbfsRequest: """Get the contents of a file""" path: str @@ -293,7 +293,7 @@ def get_status(self, path: str, **kwargs) -> FileInfo: throws an exception with `RESOURCE_DOES_NOT_EXIST`.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetStatus(path=path) + request = GetStatusRequest(path=path) query = {} if path: query['path'] = request.path @@ -315,7 +315,7 @@ def list(self, path: str, **kwargs) -> Iterator[FileInfo]: without timing out.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListRequest(path=path) + request = ListDbfsRequest(path=path) query = {} if path: query['path'] = request.path @@ -380,7 +380,7 @@ def read(self, path: str, *, length: int = None, offset: int = None, **kwargs) - file.",""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Read(length=length, offset=offset, path=path) + request = ReadDbfsRequest(length=length, offset=offset, path=path) query = {} if length: query['length'] = request.length diff --git a/databricks/sdk/service/gitcredentials.py b/databricks/sdk/service/gitcredentials.py deleted file mode 100755 index 5534e14f0..000000000 --- a/databricks/sdk/service/gitcredentials.py +++ /dev/null @@ -1,202 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -import logging -from dataclasses import dataclass -from typing import Dict, Iterator, List - -from ._internal import _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateCredentials: - git_provider: str - git_username: str = None - personal_access_token: str = None - - def as_dict(self) -> dict: - body = {} - if self.git_provider: body['git_provider'] = self.git_provider - if self.git_username: body['git_username'] = self.git_username - if self.personal_access_token: body['personal_access_token'] = self.personal_access_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateCredentials': - return cls(git_provider=d.get('git_provider', None), - git_username=d.get('git_username', None), - personal_access_token=d.get('personal_access_token', None)) - - -@dataclass -class CreateCredentialsResponse: - credential_id: int = None - git_provider: str = None - git_username: str = None - - def as_dict(self) -> dict: - body = {} - if self.credential_id: body['credential_id'] = self.credential_id - if self.git_provider: body['git_provider'] = self.git_provider - if self.git_username: body['git_username'] = self.git_username - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateCredentialsResponse': - return cls(credential_id=d.get('credential_id', None), - git_provider=d.get('git_provider', None), - git_username=d.get('git_username', None)) - - -@dataclass -class CredentialInfo: - credential_id: int = None - git_provider: str = None - git_username: str = None - - def as_dict(self) -> dict: - body = {} - if self.credential_id: body['credential_id'] = self.credential_id - if self.git_provider: body['git_provider'] = self.git_provider - if self.git_username: body['git_username'] = self.git_username - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CredentialInfo': - return 
cls(credential_id=d.get('credential_id', None), - git_provider=d.get('git_provider', None), - git_username=d.get('git_username', None)) - - -@dataclass -class Delete: - """Delete a credential""" - - credential_id: int - - -@dataclass -class Get: - """Get a credential entry""" - - credential_id: int - - -@dataclass -class GetCredentialsResponse: - credentials: 'List[CredentialInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetCredentialsResponse': - return cls(credentials=_repeated(d, 'credentials', CredentialInfo)) - - -@dataclass -class UpdateCredentials: - credential_id: int - git_provider: str = None - git_username: str = None - personal_access_token: str = None - - def as_dict(self) -> dict: - body = {} - if self.credential_id: body['credential_id'] = self.credential_id - if self.git_provider: body['git_provider'] = self.git_provider - if self.git_username: body['git_username'] = self.git_username - if self.personal_access_token: body['personal_access_token'] = self.personal_access_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateCredentials': - return cls(credential_id=d.get('credential_id', None), - git_provider=d.get('git_provider', None), - git_username=d.get('git_username', None), - personal_access_token=d.get('personal_access_token', None)) - - -class GitCredentialsAPI: - """Registers personal access token for Databricks to do operations on behalf of the user. - - See [more info]. - - [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - git_provider: str, - *, - git_username: str = None, - personal_access_token: str = None, - **kwargs) -> CreateCredentialsResponse: - """Create a credential entry. 
- - Creates a Git credential entry for the user. Only one Git credential per user is supported, so any - attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update - existing credentials, or the DELETE endpoint to delete existing credentials.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateCredentials(git_provider=git_provider, - git_username=git_username, - personal_access_token=personal_access_token) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/git-credentials', body=body) - return CreateCredentialsResponse.from_dict(json) - - def delete(self, credential_id: int, **kwargs): - """Delete a credential. - - Deletes the specified Git credential.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Delete(credential_id=credential_id) - - self._api.do('DELETE', f'/api/2.0/git-credentials/{request.credential_id}') - - def get(self, credential_id: int, **kwargs) -> CredentialInfo: - """Get a credential entry. - - Gets the Git credential with the specified credential ID.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(credential_id=credential_id) - - json = self._api.do('GET', f'/api/2.0/git-credentials/{request.credential_id}') - return CredentialInfo.from_dict(json) - - def list(self) -> Iterator[CredentialInfo]: - """Get Git credentials. - - Lists the calling user's Git credentials. One credential per user is supported.""" - - json = self._api.do('GET', '/api/2.0/git-credentials') - return [CredentialInfo.from_dict(v) for v in json.get('credentials', [])] - - def update(self, - credential_id: int, - *, - git_provider: str = None, - git_username: str = None, - personal_access_token: str = None, - **kwargs): - """Update a credential. 
- - Updates the specified Git credential.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateCredentials(credential_id=credential_id, - git_provider=git_provider, - git_username=git_username, - personal_access_token=personal_access_token) - body = request.as_dict() - self._api.do('PATCH', f'/api/2.0/git-credentials/{request.credential_id}', body=body) diff --git a/databricks/sdk/service/globalinitscripts.py b/databricks/sdk/service/globalinitscripts.py deleted file mode 100755 index 3b9be2438..000000000 --- a/databricks/sdk/service/globalinitscripts.py +++ /dev/null @@ -1,262 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from typing import Dict, Iterator, List - -from ._internal import _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateResponse: - script_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.script_id: body['script_id'] = self.script_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateResponse': - return cls(script_id=d.get('script_id', None)) - - -@dataclass -class Delete: - """Delete init script""" - - script_id: str - - -@dataclass -class Get: - """Get an init script""" - - script_id: str - - -@dataclass -class GlobalInitScriptCreateRequest: - name: str - script: str - enabled: bool = None - position: int = None - - def as_dict(self) -> dict: - body = {} - if self.enabled: body['enabled'] = self.enabled - if self.name: body['name'] = self.name - if self.position: body['position'] = self.position - if self.script: body['script'] = self.script - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptCreateRequest': - return cls(enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', 
None), - script=d.get('script', None)) - - -@dataclass -class GlobalInitScriptDetails: - created_at: int = None - created_by: str = None - enabled: bool = None - name: str = None - position: int = None - script_id: str = None - updated_at: int = None - updated_by: str = None - - def as_dict(self) -> dict: - body = {} - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.enabled: body['enabled'] = self.enabled - if self.name: body['name'] = self.name - if self.position: body['position'] = self.position - if self.script_id: body['script_id'] = self.script_id - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptDetails': - return cls(created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', None), - script_id=d.get('script_id', None), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class GlobalInitScriptDetailsWithContent: - created_at: int = None - created_by: str = None - enabled: bool = None - name: str = None - position: int = None - script: str = None - script_id: str = None - updated_at: int = None - updated_by: str = None - - def as_dict(self) -> dict: - body = {} - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.enabled: body['enabled'] = self.enabled - if self.name: body['name'] = self.name - if self.position: body['position'] = self.position - if self.script: body['script'] = self.script - if self.script_id: body['script_id'] = self.script_id - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def 
from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptDetailsWithContent': - return cls(created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', None), - script=d.get('script', None), - script_id=d.get('script_id', None), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -@dataclass -class GlobalInitScriptUpdateRequest: - name: str - script: str - script_id: str - enabled: bool = None - position: int = None - - def as_dict(self) -> dict: - body = {} - if self.enabled: body['enabled'] = self.enabled - if self.name: body['name'] = self.name - if self.position: body['position'] = self.position - if self.script: body['script'] = self.script - if self.script_id: body['script_id'] = self.script_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptUpdateRequest': - return cls(enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', None), - script=d.get('script', None), - script_id=d.get('script_id', None)) - - -@dataclass -class ListGlobalInitScriptsResponse: - scripts: 'List[GlobalInitScriptDetails]' = None - - def as_dict(self) -> dict: - body = {} - if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListGlobalInitScriptsResponse': - return cls(scripts=_repeated(d, 'scripts', GlobalInitScriptDetails)) - - -class GlobalInitScriptsAPI: - """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts - for their workspace. These scripts run on every node in every cluster in the workspace. - - **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts. - Global init scripts are run in order. 
If the init script returns with a bad exit code, the Apache Spark - container fails to launch and init scripts with later position are skipped. If enough containers fail, the - entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - name: str, - script: str, - *, - enabled: bool = None, - position: int = None, - **kwargs) -> CreateResponse: - """Create init script. - - Creates a new global init script in this workspace.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GlobalInitScriptCreateRequest(enabled=enabled, - name=name, - position=position, - script=script) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/global-init-scripts', body=body) - return CreateResponse.from_dict(json) - - def delete(self, script_id: str, **kwargs): - """Delete init script. - - Deletes a global init script.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Delete(script_id=script_id) - - self._api.do('DELETE', f'/api/2.0/global-init-scripts/{request.script_id}') - - def get(self, script_id: str, **kwargs) -> GlobalInitScriptDetailsWithContent: - """Get an init script. - - Gets all the details of a script, including its Base64-encoded contents.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(script_id=script_id) - - json = self._api.do('GET', f'/api/2.0/global-init-scripts/{request.script_id}') - return GlobalInitScriptDetailsWithContent.from_dict(json) - - def list(self) -> Iterator[GlobalInitScriptDetails]: - """Get init scripts. - - "Get a list of all global init scripts for this workspace. This returns all properties for each script - but **not** the script contents. 
To retrieve the contents of a script, use the [get a global init - script](#operation/get-script) operation.""" - - json = self._api.do('GET', '/api/2.0/global-init-scripts') - return [GlobalInitScriptDetails.from_dict(v) for v in json.get('scripts', [])] - - def update(self, - name: str, - script: str, - script_id: str, - *, - enabled: bool = None, - position: int = None, - **kwargs): - """Update init script. - - Updates a global init script, specifying only the fields to change. All fields are optional. - Unspecified fields retain their current value.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GlobalInitScriptUpdateRequest(enabled=enabled, - name=name, - position=position, - script=script, - script_id=script_id) - body = request.as_dict() - self._api.do('PATCH', f'/api/2.0/global-init-scripts/{request.script_id}', body=body) diff --git a/databricks/sdk/service/scim.py b/databricks/sdk/service/iam.py similarity index 70% rename from databricks/sdk/service/scim.py rename to databricks/sdk/service/iam.py index dbbb47079..ac0e5cd5a 100755 --- a/databricks/sdk/service/scim.py +++ b/databricks/sdk/service/iam.py @@ -12,6 +12,52 @@ # all definitions in this file are in alphabetical order +@dataclass +class AccessControlRequest: + group_name: str = None + permission_level: 'PermissionLevel' = None + service_principal_name: str = None + user_name: str = None + + def as_dict(self) -> dict: + body = {} + if self.group_name: body['group_name'] = self.group_name + if self.permission_level: body['permission_level'] = self.permission_level.value + if self.service_principal_name: body['service_principal_name'] = self.service_principal_name + if self.user_name: body['user_name'] = self.user_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AccessControlRequest': + return cls(group_name=d.get('group_name', None), + permission_level=_enum(d, 'permission_level', PermissionLevel), + 
service_principal_name=d.get('service_principal_name', None), + user_name=d.get('user_name', None)) + + +@dataclass +class AccessControlResponse: + all_permissions: 'List[Permission]' = None + group_name: str = None + service_principal_name: str = None + user_name: str = None + + def as_dict(self) -> dict: + body = {} + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.group_name: body['group_name'] = self.group_name + if self.service_principal_name: body['service_principal_name'] = self.service_principal_name + if self.user_name: body['user_name'] = self.user_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AccessControlResponse': + return cls(all_permissions=_repeated(d, 'all_permissions', Permission), + group_name=d.get('group_name', None), + service_principal_name=d.get('service_principal_name', None), + user_name=d.get('user_name', None)) + + @dataclass class ComplexValue: display: str = None @@ -35,6 +81,27 @@ def from_dict(cls, d: Dict[str, any]) -> 'ComplexValue': value=d.get('value', None)) +@dataclass +class DeleteAccountGroupRequest: + """Delete a group""" + + id: str + + +@dataclass +class DeleteAccountServicePrincipalRequest: + """Delete a service principal""" + + id: str + + +@dataclass +class DeleteAccountUserRequest: + """Delete a user""" + + id: str + + @dataclass class DeleteGroupRequest: """Delete a group""" @@ -56,6 +123,35 @@ class DeleteUserRequest: id: str +@dataclass +class DeleteWorkspaceAssignmentRequest: + """Delete permissions assignment""" + + workspace_id: int + principal_id: int + + +@dataclass +class GetAccountGroupRequest: + """Get group details""" + + id: str + + +@dataclass +class GetAccountServicePrincipalRequest: + """Get service principal details""" + + id: str + + +@dataclass +class GetAccountUserRequest: + """Get user details""" + + id: str + + @dataclass class GetGroupRequest: """Get group details""" @@ -63,6 +159,36 @@ class 
GetGroupRequest: id: str +@dataclass +class GetPermissionLevelsRequest: + """Get permission levels""" + + request_object_type: str + request_object_id: str + + +@dataclass +class GetPermissionLevelsResponse: + permission_levels: 'List[PermissionsDescription]' = None + + def as_dict(self) -> dict: + body = {} + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetPermissionLevelsResponse': + return cls(permission_levels=_repeated(d, 'permission_levels', PermissionsDescription)) + + +@dataclass +class GetPermissionRequest: + """Get object permissions""" + + request_object_type: str + request_object_id: str + + @dataclass class GetServicePrincipalRequest: """Get service principal details""" @@ -77,6 +203,13 @@ class GetUserRequest: id: str +@dataclass +class GetWorkspaceAssignmentRequest: + """List workspace permissions""" + + workspace_id: int + + @dataclass class Group: id: str @@ -109,6 +242,45 @@ def from_dict(cls, d: Dict[str, any]) -> 'Group': roles=_repeated(d, 'roles', ComplexValue)) +@dataclass +class ListAccountGroupsRequest: + """List group details""" + + attributes: str = None + count: int = None + excluded_attributes: str = None + filter: str = None + sort_by: str = None + sort_order: 'ListSortOrder' = None + start_index: int = None + + +@dataclass +class ListAccountServicePrincipalsRequest: + """List service principals""" + + attributes: str = None + count: int = None + excluded_attributes: str = None + filter: str = None + sort_by: str = None + sort_order: 'ListSortOrder' = None + start_index: int = None + + +@dataclass +class ListAccountUsersRequest: + """List users""" + + attributes: str = None + count: int = None + excluded_attributes: str = None + filter: str = None + sort_by: str = None + sort_order: 'ListSortOrder' = None + start_index: int = None + + @dataclass class ListGroupsRequest: """List group details""" @@ -223,6 
+395,13 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListUsersResponse': total_results=d.get('totalResults', None)) +@dataclass +class ListWorkspaceAssignmentRequest: + """Get permission assignments""" + + workspace_id: int + + @dataclass class Name: family_name: str = None @@ -239,6 +418,27 @@ def from_dict(cls, d: Dict[str, any]) -> 'Name': return cls(family_name=d.get('familyName', None), given_name=d.get('givenName', None)) +@dataclass +class ObjectPermissions: + access_control_list: 'List[AccessControlResponse]' = None + object_id: str = None + object_type: str = None + + def as_dict(self) -> dict: + body = {} + if self.access_control_list: + body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id: body['object_id'] = self.object_id + if self.object_type: body['object_type'] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ObjectPermissions': + return cls(access_control_list=_repeated(d, 'access_control_list', AccessControlResponse), + object_id=d.get('object_id', None), + object_type=d.get('object_type', None)) + + @dataclass class PartialUpdate: id: str @@ -281,6 +481,162 @@ class PatchOp(Enum): replace = 'replace' +@dataclass +class Permission: + inherited: bool = None + inherited_from_object: 'List[str]' = None + permission_level: 'PermissionLevel' = None + + def as_dict(self) -> dict: + body = {} + if self.inherited: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level: body['permission_level'] = self.permission_level.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'Permission': + return cls(inherited=d.get('inherited', None), + inherited_from_object=d.get('inherited_from_object', None), + permission_level=_enum(d, 'permission_level', PermissionLevel)) + + +@dataclass +class PermissionAssignment: + error: str = None + 
permissions: 'List[WorkspacePermission]' = None + principal: 'PrincipalOutput' = None + + def as_dict(self) -> dict: + body = {} + if self.error: body['error'] = self.error + if self.permissions: body['permissions'] = [v for v in self.permissions] + if self.principal: body['principal'] = self.principal.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PermissionAssignment': + return cls(error=d.get('error', None), + permissions=d.get('permissions', None), + principal=_from_dict(d, 'principal', PrincipalOutput)) + + +@dataclass +class PermissionAssignments: + permission_assignments: 'List[PermissionAssignment]' = None + + def as_dict(self) -> dict: + body = {} + if self.permission_assignments: + body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PermissionAssignments': + return cls(permission_assignments=_repeated(d, 'permission_assignments', PermissionAssignment)) + + +class PermissionLevel(Enum): + """Permission level""" + + CAN_ATTACH_TO = 'CAN_ATTACH_TO' + CAN_BIND = 'CAN_BIND' + CAN_EDIT = 'CAN_EDIT' + CAN_EDIT_METADATA = 'CAN_EDIT_METADATA' + CAN_MANAGE = 'CAN_MANAGE' + CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' + CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' + CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' + CAN_READ = 'CAN_READ' + CAN_RESTART = 'CAN_RESTART' + CAN_RUN = 'CAN_RUN' + CAN_USE = 'CAN_USE' + CAN_VIEW = 'CAN_VIEW' + CAN_VIEW_METADATA = 'CAN_VIEW_METADATA' + IS_OWNER = 'IS_OWNER' + + +@dataclass +class PermissionOutput: + description: str = None + permission_level: 'WorkspacePermission' = None + + def as_dict(self) -> dict: + body = {} + if self.description: body['description'] = self.description + if self.permission_level: body['permission_level'] = self.permission_level.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PermissionOutput': + return 
cls(description=d.get('description', None), + permission_level=_enum(d, 'permission_level', WorkspacePermission)) + + +@dataclass +class PermissionsDescription: + description: str = None + permission_level: 'PermissionLevel' = None + + def as_dict(self) -> dict: + body = {} + if self.description: body['description'] = self.description + if self.permission_level: body['permission_level'] = self.permission_level.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PermissionsDescription': + return cls(description=d.get('description', None), + permission_level=_enum(d, 'permission_level', PermissionLevel)) + + +@dataclass +class PermissionsRequest: + request_object_type: str + request_object_id: str + access_control_list: 'List[AccessControlRequest]' = None + + def as_dict(self) -> dict: + body = {} + if self.access_control_list: + body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.request_object_id: body['request_object_id'] = self.request_object_id + if self.request_object_type: body['request_object_type'] = self.request_object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PermissionsRequest': + return cls(access_control_list=_repeated(d, 'access_control_list', AccessControlRequest), + request_object_id=d.get('request_object_id', None), + request_object_type=d.get('request_object_type', None)) + + +@dataclass +class PrincipalOutput: + display_name: str = None + group_name: str = None + principal_id: int = None + service_principal_name: str = None + user_name: str = None + + def as_dict(self) -> dict: + body = {} + if self.display_name: body['display_name'] = self.display_name + if self.group_name: body['group_name'] = self.group_name + if self.principal_id: body['principal_id'] = self.principal_id + if self.service_principal_name: body['service_principal_name'] = self.service_principal_name + if self.user_name: body['user_name'] = self.user_name + return body + + 
@classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PrincipalOutput': + return cls(display_name=d.get('display_name', None), + group_name=d.get('group_name', None), + principal_id=d.get('principal_id', None), + service_principal_name=d.get('service_principal_name', None), + user_name=d.get('user_name', None)) + + @dataclass class ServicePrincipal: id: str @@ -316,6 +672,26 @@ def from_dict(cls, d: Dict[str, any]) -> 'ServicePrincipal': roles=_repeated(d, 'roles', ComplexValue)) +@dataclass +class UpdateWorkspaceAssignments: + permissions: 'List[WorkspacePermission]' + workspace_id: int + principal_id: int + + def as_dict(self) -> dict: + body = {} + if self.permissions: body['permissions'] = [v for v in self.permissions] + if self.principal_id: body['principal_id'] = self.principal_id + if self.workspace_id: body['workspace_id'] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateWorkspaceAssignments': + return cls(permissions=d.get('permissions', None), + principal_id=d.get('principal_id', None), + workspace_id=d.get('workspace_id', None)) + + @dataclass class User: id: str @@ -357,6 +733,27 @@ def from_dict(cls, d: Dict[str, any]) -> 'User': user_name=d.get('userName', None)) +class WorkspacePermission(Enum): + + ADMIN = 'ADMIN' + UNKNOWN = 'UNKNOWN' + USER = 'USER' + + +@dataclass +class WorkspacePermissions: + permissions: 'List[PermissionOutput]' = None + + def as_dict(self) -> dict: + body = {} + if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'WorkspacePermissions': + return cls(permissions=_repeated(d, 'permissions', PermissionOutput)) + + class AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks Account, data, and other securable objects. 
@@ -401,7 +798,7 @@ def delete(self, id: str, **kwargs): Deletes a group from the Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteGroupRequest(id=id) + request = DeleteAccountGroupRequest(id=id) self._api.do('DELETE', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{request.id}') @@ -411,7 +808,7 @@ def get(self, id: str, **kwargs) -> Group: Gets the information for a specific group in the Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetGroupRequest(id=id) + request = GetAccountGroupRequest(id=id) json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{request.id}') return Group.from_dict(json) @@ -431,13 +828,13 @@ def list(self, Gets all details of the groups associated with the Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListGroupsRequest(attributes=attributes, - count=count, - excluded_attributes=excluded_attributes, - filter=filter, - sort_by=sort_by, - sort_order=sort_order, - start_index=start_index) + request = ListAccountGroupsRequest(attributes=attributes, + count=count, + excluded_attributes=excluded_attributes, + filter=filter, + sort_by=sort_by, + sort_order=sort_order, + start_index=start_index) query = {} if attributes: query['attributes'] = request.attributes @@ -538,7 +935,7 @@ def delete(self, id: str, **kwargs): Delete a single service principal in the Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteServicePrincipalRequest(id=id) + request = DeleteAccountServicePrincipalRequest(id=id) self._api.do('DELETE', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{request.id}') @@ -549,7 +946,7 @@ def get(self, id: str, **kwargs) -> ServicePrincipal: 
Gets the details for a single service principal define in the Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetServicePrincipalRequest(id=id) + request = GetAccountServicePrincipalRequest(id=id) json = self._api.do( 'GET', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{request.id}') @@ -570,13 +967,13 @@ def list(self, Gets the set of service principals associated with a Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListServicePrincipalsRequest(attributes=attributes, - count=count, - excluded_attributes=excluded_attributes, - filter=filter, - sort_by=sort_by, - sort_order=sort_order, - start_index=start_index) + request = ListAccountServicePrincipalsRequest(attributes=attributes, + count=count, + excluded_attributes=excluded_attributes, + filter=filter, + sort_by=sort_by, + sort_order=sort_order, + start_index=start_index) query = {} if attributes: query['attributes'] = request.attributes @@ -691,7 +1088,7 @@ def delete(self, id: str, **kwargs): user.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteUserRequest(id=id) + request = DeleteAccountUserRequest(id=id) self._api.do('DELETE', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{request.id}') @@ -701,7 +1098,7 @@ def get(self, id: str, **kwargs) -> User: Gets information for a specific user in Databricks Account.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetUserRequest(id=id) + request = GetAccountUserRequest(id=id) json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{request.id}') return User.from_dict(json) @@ -721,13 +1118,13 @@ def list(self, Gets details for all the users associated with a Databricks Account.""" request = kwargs.get('request', 
None) if not request: # request is not given through keyed args - request = ListUsersRequest(attributes=attributes, - count=count, - excluded_attributes=excluded_attributes, - filter=filter, - sort_by=sort_by, - sort_order=sort_order, - start_index=start_index) + request = ListAccountUsersRequest(attributes=attributes, + count=count, + excluded_attributes=excluded_attributes, + filter=filter, + sort_by=sort_by, + sort_order=sort_order, + start_index=start_index) query = {} if attributes: query['attributes'] = request.attributes @@ -930,6 +1327,83 @@ def update(self, self._api.do('PUT', f'/api/2.0/preview/scim/v2/Groups/{request.id}', body=body) +class PermissionsAPI: + """Permissions API are used to create read, write, edit, update and manage access for various users on + different objects and endpoints.""" + + def __init__(self, api_client): + self._api = api_client + + def get(self, request_object_type: str, request_object_id: str, **kwargs) -> ObjectPermissions: + """Get object permissions. + + Gets the permission of an object. Objects can inherit permissions from their parent objects or root + objects.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetPermissionRequest(request_object_id=request_object_id, + request_object_type=request_object_type) + + json = self._api.do( + 'GET', f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}') + return ObjectPermissions.from_dict(json) + + def get_permission_levels(self, request_object_type: str, request_object_id: str, + **kwargs) -> GetPermissionLevelsResponse: + """Get permission levels. 
+ + Gets the permission levels that a user can have on an object.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetPermissionLevelsRequest(request_object_id=request_object_id, + request_object_type=request_object_type) + + json = self._api.do( + 'GET', + f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}/permissionLevels' + ) + return GetPermissionLevelsResponse.from_dict(json) + + def set(self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: List[AccessControlRequest] = None, + **kwargs): + """Set permissions. + + Sets permissions on object. Objects can inherit permissions from their parent objects and root + objects.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = PermissionsRequest(access_control_list=access_control_list, + request_object_id=request_object_id, + request_object_type=request_object_type) + body = request.as_dict() + self._api.do('PUT', + f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}', + body=body) + + def update(self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: List[AccessControlRequest] = None, + **kwargs): + """Update permission. + + Updates the permissions on an object.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = PermissionsRequest(access_control_list=access_control_list, + request_object_id=request_object_id, + request_object_type=request_object_type) + body = request.as_dict() + self._api.do('PATCH', + f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}', + body=body) + + class ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. 
Databricks recommends creating service principals to run production jobs or modify production data. If all @@ -1210,3 +1684,69 @@ def update(self, user_name=user_name) body = request.as_dict() self._api.do('PUT', f'/api/2.0/preview/scim/v2/Users/{request.id}', body=body) + + +class WorkspaceAssignmentAPI: + """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your + account.""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, workspace_id: int, principal_id: int, **kwargs): + """Delete permissions assignment. + + Deletes the workspace permissions assignment in a given account and workspace for the specified + principal.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteWorkspaceAssignmentRequest(principal_id=principal_id, workspace_id=workspace_id) + + self._api.do( + 'DELETE', + f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments/principals/{request.principal_id}' + ) + + def get(self, workspace_id: int, **kwargs) -> WorkspacePermissions: + """List workspace permissions. + + Get an array of workspace permissions for the specified account and workspace.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetWorkspaceAssignmentRequest(workspace_id=workspace_id) + + json = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments/permissions' + ) + return WorkspacePermissions.from_dict(json) + + def list(self, workspace_id: int, **kwargs) -> Iterator[PermissionAssignment]: + """Get permission assignments. 
+ + Get the permission assignments for the specified Databricks Account and Databricks Workspace.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListWorkspaceAssignmentRequest(workspace_id=workspace_id) + + json = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments' + ) + return [PermissionAssignment.from_dict(v) for v in json.get('permission_assignments', [])] + + def update(self, permissions: List[WorkspacePermission], workspace_id: int, principal_id: int, **kwargs): + """Create or update permissions assignment. + + Creates or updates the workspace permissions assignment in a given account and workspace for the + specified principal.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateWorkspaceAssignments(permissions=permissions, + principal_id=principal_id, + workspace_id=workspace_id) + body = request.as_dict() + self._api.do( + 'PUT', + f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments/principals/{request.principal_id}', + body=body) diff --git a/databricks/sdk/service/instancepools.py b/databricks/sdk/service/instancepools.py deleted file mode 100755 index 239487b47..000000000 --- a/databricks/sdk/service/instancepools.py +++ /dev/null @@ -1,757 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Dict, Iterator, List - -from ._internal import _enum, _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateInstancePool: - instance_pool_name: str - node_type_id: str - aws_attributes: 'InstancePoolAwsAttributes' = None - azure_attributes: 'InstancePoolAzureAttributes' = None - custom_tags: 'Dict[str,str]' = None - disk_spec: 'DiskSpec' = None - enable_elastic_disk: bool = None - idle_instance_autotermination_minutes: int = None - instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None - max_capacity: int = None - min_idle_instances: int = None - preloaded_docker_images: 'List[DockerImage]' = None - preloaded_spark_versions: 'List[str]' = None - - def as_dict(self) -> dict: - body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.idle_instance_autotermination_minutes: - body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_fleet_attributes: - body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() - if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity: body['max_capacity'] = self.max_capacity - if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: - body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if 
self.preloaded_spark_versions: - body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePool': - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), - custom_tags=d.get('custom_tags', None), - disk_spec=_from_dict(d, 'disk_spec', DiskSpec), - enable_elastic_disk=d.get('enable_elastic_disk', None), - idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), - instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', - InstancePoolFleetAttributes), - instance_pool_name=d.get('instance_pool_name', None), - max_capacity=d.get('max_capacity', None), - min_idle_instances=d.get('min_idle_instances', None), - node_type_id=d.get('node_type_id', None), - preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), - preloaded_spark_versions=d.get('preloaded_spark_versions', None)) - - -@dataclass -class CreateInstancePoolResponse: - instance_pool_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePoolResponse': - return cls(instance_pool_id=d.get('instance_pool_id', None)) - - -@dataclass -class DeleteInstancePool: - instance_pool_id: str - - def as_dict(self) -> dict: - body = {} - if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DeleteInstancePool': - return cls(instance_pool_id=d.get('instance_pool_id', None)) - - -@dataclass -class DiskSpec: - disk_count: int = None - disk_iops: int = None - disk_size: int = None - disk_throughput: int = None - disk_type: 'DiskType' = None - - def as_dict(self) -> dict: - 
body = {} - if self.disk_count: body['disk_count'] = self.disk_count - if self.disk_iops: body['disk_iops'] = self.disk_iops - if self.disk_size: body['disk_size'] = self.disk_size - if self.disk_throughput: body['disk_throughput'] = self.disk_throughput - if self.disk_type: body['disk_type'] = self.disk_type.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DiskSpec': - return cls(disk_count=d.get('disk_count', None), - disk_iops=d.get('disk_iops', None), - disk_size=d.get('disk_size', None), - disk_throughput=d.get('disk_throughput', None), - disk_type=_from_dict(d, 'disk_type', DiskType)) - - -@dataclass -class DiskType: - azure_disk_volume_type: 'DiskTypeAzureDiskVolumeType' = None - ebs_volume_type: 'DiskTypeEbsVolumeType' = None - - def as_dict(self) -> dict: - body = {} - if self.azure_disk_volume_type: body['azure_disk_volume_type'] = self.azure_disk_volume_type.value - if self.ebs_volume_type: body['ebs_volume_type'] = self.ebs_volume_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DiskType': - return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType), - ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType)) - - -class DiskTypeAzureDiskVolumeType(Enum): - - PREMIUM_LRS = 'PREMIUM_LRS' - STANDARD_LRS = 'STANDARD_LRS' - - -class DiskTypeEbsVolumeType(Enum): - - GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD' - THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD' - - -@dataclass -class DockerBasicAuth: - password: str = None - username: str = None - - def as_dict(self) -> dict: - body = {} - if self.password: body['password'] = self.password - if self.username: body['username'] = self.username - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DockerBasicAuth': - return cls(password=d.get('password', None), username=d.get('username', None)) - - -@dataclass -class DockerImage: - basic_auth: 'DockerBasicAuth' = None 
- url: str = None - - def as_dict(self) -> dict: - body = {} - if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict() - if self.url: body['url'] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DockerImage': - return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None)) - - -@dataclass -class EditInstancePool: - instance_pool_id: str - instance_pool_name: str - node_type_id: str - aws_attributes: 'InstancePoolAwsAttributes' = None - azure_attributes: 'InstancePoolAzureAttributes' = None - custom_tags: 'Dict[str,str]' = None - disk_spec: 'DiskSpec' = None - enable_elastic_disk: bool = None - idle_instance_autotermination_minutes: int = None - instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None - max_capacity: int = None - min_idle_instances: int = None - preloaded_docker_images: 'List[DockerImage]' = None - preloaded_spark_versions: 'List[str]' = None - - def as_dict(self) -> dict: - body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.idle_instance_autotermination_minutes: - body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_fleet_attributes: - body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() - if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity: body['max_capacity'] = self.max_capacity - if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id: 
body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: - body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'EditInstancePool': - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), - custom_tags=d.get('custom_tags', None), - disk_spec=_from_dict(d, 'disk_spec', DiskSpec), - enable_elastic_disk=d.get('enable_elastic_disk', None), - idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), - instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', - InstancePoolFleetAttributes), - instance_pool_id=d.get('instance_pool_id', None), - instance_pool_name=d.get('instance_pool_name', None), - max_capacity=d.get('max_capacity', None), - min_idle_instances=d.get('min_idle_instances', None), - node_type_id=d.get('node_type_id', None), - preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), - preloaded_spark_versions=d.get('preloaded_spark_versions', None)) - - -@dataclass -class FleetLaunchTemplateOverride: - availability_zone: str - instance_type: str - max_price: float = None - priority: float = None - - def as_dict(self) -> dict: - body = {} - if self.availability_zone: body['availability_zone'] = self.availability_zone - if self.instance_type: body['instance_type'] = self.instance_type - if self.max_price: body['max_price'] = self.max_price - if self.priority: body['priority'] = self.priority - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'FleetLaunchTemplateOverride': - return cls(availability_zone=d.get('availability_zone', None), - instance_type=d.get('instance_type', None), - max_price=d.get('max_price', 
None), - priority=d.get('priority', None)) - - -@dataclass -class FleetOnDemandOption: - allocation_strategy: 'FleetOnDemandOptionAllocationStrategy' = None - max_total_price: float = None - use_capacity_reservations_first: bool = None - - def as_dict(self) -> dict: - body = {} - if self.allocation_strategy: body['allocation_strategy'] = self.allocation_strategy.value - if self.max_total_price: body['max_total_price'] = self.max_total_price - if self.use_capacity_reservations_first: - body['use_capacity_reservations_first'] = self.use_capacity_reservations_first - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'FleetOnDemandOption': - return cls(allocation_strategy=_enum(d, 'allocation_strategy', FleetOnDemandOptionAllocationStrategy), - max_total_price=d.get('max_total_price', None), - use_capacity_reservations_first=d.get('use_capacity_reservations_first', None)) - - -class FleetOnDemandOptionAllocationStrategy(Enum): - """Only lowest-price and prioritized are allowed""" - - CAPACITY_OPTIMIZED = 'CAPACITY_OPTIMIZED' - DIVERSIFIED = 'DIVERSIFIED' - LOWEST_PRICE = 'LOWEST_PRICE' - PRIORITIZED = 'PRIORITIZED' - - -@dataclass -class FleetSpotOption: - allocation_strategy: 'FleetSpotOptionAllocationStrategy' = None - instance_pools_to_use_count: int = None - max_total_price: float = None - - def as_dict(self) -> dict: - body = {} - if self.allocation_strategy: body['allocation_strategy'] = self.allocation_strategy.value - if self.instance_pools_to_use_count: - body['instance_pools_to_use_count'] = self.instance_pools_to_use_count - if self.max_total_price: body['max_total_price'] = self.max_total_price - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'FleetSpotOption': - return cls(allocation_strategy=_enum(d, 'allocation_strategy', FleetSpotOptionAllocationStrategy), - instance_pools_to_use_count=d.get('instance_pools_to_use_count', None), - max_total_price=d.get('max_total_price', None)) - - -class 
FleetSpotOptionAllocationStrategy(Enum): - """lowest-price | diversified | capacity-optimized""" - - CAPACITY_OPTIMIZED = 'CAPACITY_OPTIMIZED' - DIVERSIFIED = 'DIVERSIFIED' - LOWEST_PRICE = 'LOWEST_PRICE' - PRIORITIZED = 'PRIORITIZED' - - -@dataclass -class Get: - """Get instance pool information""" - - instance_pool_id: str - - -@dataclass -class GetInstancePool: - instance_pool_id: str - aws_attributes: 'InstancePoolAwsAttributes' = None - azure_attributes: 'InstancePoolAzureAttributes' = None - custom_tags: 'Dict[str,str]' = None - default_tags: 'Dict[str,str]' = None - disk_spec: 'DiskSpec' = None - enable_elastic_disk: bool = None - idle_instance_autotermination_minutes: int = None - instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None - instance_pool_name: str = None - max_capacity: int = None - min_idle_instances: int = None - node_type_id: str = None - preloaded_docker_images: 'List[DockerImage]' = None - preloaded_spark_versions: 'List[str]' = None - state: 'InstancePoolState' = None - stats: 'InstancePoolStats' = None - status: 'InstancePoolStatus' = None - - def as_dict(self) -> dict: - body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.default_tags: body['default_tags'] = self.default_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.idle_instance_autotermination_minutes: - body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_fleet_attributes: - body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() - if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name: body['instance_pool_name'] = 
self.instance_pool_name - if self.max_capacity: body['max_capacity'] = self.max_capacity - if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: - body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] - if self.state: body['state'] = self.state.value - if self.stats: body['stats'] = self.stats.as_dict() - if self.status: body['status'] = self.status.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetInstancePool': - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), - custom_tags=d.get('custom_tags', None), - default_tags=d.get('default_tags', None), - disk_spec=_from_dict(d, 'disk_spec', DiskSpec), - enable_elastic_disk=d.get('enable_elastic_disk', None), - idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), - instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', - InstancePoolFleetAttributes), - instance_pool_id=d.get('instance_pool_id', None), - instance_pool_name=d.get('instance_pool_name', None), - max_capacity=d.get('max_capacity', None), - min_idle_instances=d.get('min_idle_instances', None), - node_type_id=d.get('node_type_id', None), - preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), - preloaded_spark_versions=d.get('preloaded_spark_versions', None), - state=_enum(d, 'state', InstancePoolState), - stats=_from_dict(d, 'stats', InstancePoolStats), - status=_from_dict(d, 'status', InstancePoolStatus)) - - -@dataclass -class InstancePoolAndStats: - aws_attributes: 'InstancePoolAwsAttributes' = None - azure_attributes: 
'InstancePoolAzureAttributes' = None - custom_tags: 'Dict[str,str]' = None - default_tags: 'Dict[str,str]' = None - disk_spec: 'DiskSpec' = None - enable_elastic_disk: bool = None - idle_instance_autotermination_minutes: int = None - instance_pool_fleet_attributes: 'InstancePoolFleetAttributes' = None - instance_pool_id: str = None - instance_pool_name: str = None - max_capacity: int = None - min_idle_instances: int = None - node_type_id: str = None - preloaded_docker_images: 'List[DockerImage]' = None - preloaded_spark_versions: 'List[str]' = None - state: 'InstancePoolState' = None - stats: 'InstancePoolStats' = None - status: 'InstancePoolStatus' = None - - def as_dict(self) -> dict: - body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.default_tags: body['default_tags'] = self.default_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.idle_instance_autotermination_minutes: - body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_fleet_attributes: - body['instance_pool_fleet_attributes'] = self.instance_pool_fleet_attributes.as_dict() - if self.instance_pool_id: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity: body['max_capacity'] = self.max_capacity - if self.min_idle_instances: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: - body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body['preloaded_spark_versions'] = [v 
for v in self.preloaded_spark_versions] - if self.state: body['state'] = self.state.value - if self.stats: body['stats'] = self.stats.as_dict() - if self.status: body['status'] = self.status.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAndStats': - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), - custom_tags=d.get('custom_tags', None), - default_tags=d.get('default_tags', None), - disk_spec=_from_dict(d, 'disk_spec', DiskSpec), - enable_elastic_disk=d.get('enable_elastic_disk', None), - idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), - instance_pool_fleet_attributes=_from_dict(d, 'instance_pool_fleet_attributes', - InstancePoolFleetAttributes), - instance_pool_id=d.get('instance_pool_id', None), - instance_pool_name=d.get('instance_pool_name', None), - max_capacity=d.get('max_capacity', None), - min_idle_instances=d.get('min_idle_instances', None), - node_type_id=d.get('node_type_id', None), - preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), - preloaded_spark_versions=d.get('preloaded_spark_versions', None), - state=_enum(d, 'state', InstancePoolState), - stats=_from_dict(d, 'stats', InstancePoolStats), - status=_from_dict(d, 'status', InstancePoolStatus)) - - -@dataclass -class InstancePoolAwsAttributes: - availability: 'InstancePoolAwsAttributesAvailability' = None - spot_bid_price_percent: int = None - zone_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.availability: body['availability'] = self.availability.value - if self.spot_bid_price_percent: body['spot_bid_price_percent'] = self.spot_bid_price_percent - if self.zone_id: body['zone_id'] = self.zone_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAwsAttributes': - return cls(availability=_enum(d, 
'availability', InstancePoolAwsAttributesAvailability), - spot_bid_price_percent=d.get('spot_bid_price_percent', None), - zone_id=d.get('zone_id', None)) - - -class InstancePoolAwsAttributesAvailability(Enum): - """Availability type used for the spot nodes. - - The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability""" - - ON_DEMAND = 'ON_DEMAND' - SPOT = 'SPOT' - SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK' - - -@dataclass -class InstancePoolAzureAttributes: - availability: 'InstancePoolAzureAttributesAvailability' = None - spot_bid_max_price: float = None - - def as_dict(self) -> dict: - body = {} - if self.availability: body['availability'] = self.availability.value - if self.spot_bid_max_price: body['spot_bid_max_price'] = self.spot_bid_max_price - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAzureAttributes': - return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability), - spot_bid_max_price=d.get('spot_bid_max_price', None)) - - -class InstancePoolAzureAttributesAvailability(Enum): - """Shows the Availability type used for the spot nodes. 
- - The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability""" - - ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' - SPOT_AZURE = 'SPOT_AZURE' - SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE' - - -@dataclass -class InstancePoolFleetAttributes: - fleet_on_demand_option: 'FleetOnDemandOption' = None - fleet_spot_option: 'FleetSpotOption' = None - launch_template_overrides: 'List[FleetLaunchTemplateOverride]' = None - - def as_dict(self) -> dict: - body = {} - if self.fleet_on_demand_option: body['fleet_on_demand_option'] = self.fleet_on_demand_option.as_dict() - if self.fleet_spot_option: body['fleet_spot_option'] = self.fleet_spot_option.as_dict() - if self.launch_template_overrides: - body['launch_template_overrides'] = [v.as_dict() for v in self.launch_template_overrides] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolFleetAttributes': - return cls(fleet_on_demand_option=_from_dict(d, 'fleet_on_demand_option', FleetOnDemandOption), - fleet_spot_option=_from_dict(d, 'fleet_spot_option', FleetSpotOption), - launch_template_overrides=_repeated(d, 'launch_template_overrides', - FleetLaunchTemplateOverride)) - - -class InstancePoolState(Enum): - """Current state of the instance pool.""" - - ACTIVE = 'ACTIVE' - DELETED = 'DELETED' - STOPPED = 'STOPPED' - - -@dataclass -class InstancePoolStats: - idle_count: int = None - pending_idle_count: int = None - pending_used_count: int = None - used_count: int = None - - def as_dict(self) -> dict: - body = {} - if self.idle_count: body['idle_count'] = self.idle_count - if self.pending_idle_count: body['pending_idle_count'] = self.pending_idle_count - if self.pending_used_count: body['pending_used_count'] = self.pending_used_count - if self.used_count: body['used_count'] = self.used_count - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStats': - return cls(idle_count=d.get('idle_count', None), - 
pending_idle_count=d.get('pending_idle_count', None), - pending_used_count=d.get('pending_used_count', None), - used_count=d.get('used_count', None)) - - -@dataclass -class InstancePoolStatus: - pending_instance_errors: 'List[PendingInstanceError]' = None - - def as_dict(self) -> dict: - body = {} - if self.pending_instance_errors: - body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStatus': - return cls(pending_instance_errors=_repeated(d, 'pending_instance_errors', PendingInstanceError)) - - -@dataclass -class ListInstancePools: - instance_pools: 'List[InstancePoolAndStats]' = None - - def as_dict(self) -> dict: - body = {} - if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListInstancePools': - return cls(instance_pools=_repeated(d, 'instance_pools', InstancePoolAndStats)) - - -@dataclass -class PendingInstanceError: - instance_id: str = None - message: str = None - - def as_dict(self) -> dict: - body = {} - if self.instance_id: body['instance_id'] = self.instance_id - if self.message: body['message'] = self.message - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PendingInstanceError': - return cls(instance_id=d.get('instance_id', None), message=d.get('message', None)) - - -class InstancePoolsAPI: - """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud - instances which reduces a cluster start and auto-scaling times. - - Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use - instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle - instances. 
If the pool has no idle instances, the pool expands by allocating a new instance from the - instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it - returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that - pool’s idle instances. - - You can specify a different pool for the driver node and worker nodes, or use the same pool for both. - - Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does - apply. See pricing.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - instance_pool_name: str, - node_type_id: str, - *, - aws_attributes: InstancePoolAwsAttributes = None, - azure_attributes: InstancePoolAzureAttributes = None, - custom_tags: Dict[str, str] = None, - disk_spec: DiskSpec = None, - enable_elastic_disk: bool = None, - idle_instance_autotermination_minutes: int = None, - instance_pool_fleet_attributes: InstancePoolFleetAttributes = None, - max_capacity: int = None, - min_idle_instances: int = None, - preloaded_docker_images: List[DockerImage] = None, - preloaded_spark_versions: List[str] = None, - **kwargs) -> CreateInstancePoolResponse: - """Create a new instance pool. 
- - Creates a new instance pool using idle and ready-to-use cloud instances.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateInstancePool( - aws_attributes=aws_attributes, - azure_attributes=azure_attributes, - custom_tags=custom_tags, - disk_spec=disk_spec, - enable_elastic_disk=enable_elastic_disk, - idle_instance_autotermination_minutes=idle_instance_autotermination_minutes, - instance_pool_fleet_attributes=instance_pool_fleet_attributes, - instance_pool_name=instance_pool_name, - max_capacity=max_capacity, - min_idle_instances=min_idle_instances, - node_type_id=node_type_id, - preloaded_docker_images=preloaded_docker_images, - preloaded_spark_versions=preloaded_spark_versions) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/instance-pools/create', body=body) - return CreateInstancePoolResponse.from_dict(json) - - def delete(self, instance_pool_id: str, **kwargs): - """Delete an instance pool. - - Deletes the instance pool permanently. 
The idle instances in the pool are terminated asynchronously.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteInstancePool(instance_pool_id=instance_pool_id) - body = request.as_dict() - self._api.do('POST', '/api/2.0/instance-pools/delete', body=body) - - def edit(self, - instance_pool_id: str, - instance_pool_name: str, - node_type_id: str, - *, - aws_attributes: InstancePoolAwsAttributes = None, - azure_attributes: InstancePoolAzureAttributes = None, - custom_tags: Dict[str, str] = None, - disk_spec: DiskSpec = None, - enable_elastic_disk: bool = None, - idle_instance_autotermination_minutes: int = None, - instance_pool_fleet_attributes: InstancePoolFleetAttributes = None, - max_capacity: int = None, - min_idle_instances: int = None, - preloaded_docker_images: List[DockerImage] = None, - preloaded_spark_versions: List[str] = None, - **kwargs): - """Edit an existing instance pool. - - Modifies the configuration of an existing instance pool.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = EditInstancePool( - aws_attributes=aws_attributes, - azure_attributes=azure_attributes, - custom_tags=custom_tags, - disk_spec=disk_spec, - enable_elastic_disk=enable_elastic_disk, - idle_instance_autotermination_minutes=idle_instance_autotermination_minutes, - instance_pool_fleet_attributes=instance_pool_fleet_attributes, - instance_pool_id=instance_pool_id, - instance_pool_name=instance_pool_name, - max_capacity=max_capacity, - min_idle_instances=min_idle_instances, - node_type_id=node_type_id, - preloaded_docker_images=preloaded_docker_images, - preloaded_spark_versions=preloaded_spark_versions) - body = request.as_dict() - self._api.do('POST', '/api/2.0/instance-pools/edit', body=body) - - def get(self, instance_pool_id: str, **kwargs) -> GetInstancePool: - """Get instance pool information. 
- - Retrieve the information for an instance pool based on its identifier.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(instance_pool_id=instance_pool_id) - - query = {} - if instance_pool_id: query['instance_pool_id'] = request.instance_pool_id - - json = self._api.do('GET', '/api/2.0/instance-pools/get', query=query) - return GetInstancePool.from_dict(json) - - def list(self) -> Iterator[InstancePoolAndStats]: - """List instance pool info. - - Gets a list of instance pools with their statistics.""" - - json = self._api.do('GET', '/api/2.0/instance-pools/list') - return [InstancePoolAndStats.from_dict(v) for v in json.get('instance_pools', [])] diff --git a/databricks/sdk/service/ipaccesslists.py b/databricks/sdk/service/ipaccesslists.py deleted file mode 100755 index 69f233baf..000000000 --- a/databricks/sdk/service/ipaccesslists.py +++ /dev/null @@ -1,340 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Dict, Iterator, List - -from ._internal import _enum, _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateIpAccessList: - label: str - list_type: 'ListType' - ip_addresses: 'List[str]' - - def as_dict(self) -> dict: - body = {} - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label: body['label'] = self.label - if self.list_type: body['list_type'] = self.list_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateIpAccessList': - return cls(ip_addresses=d.get('ip_addresses', None), - label=d.get('label', None), - list_type=_enum(d, 'list_type', ListType)) - - -@dataclass -class CreateIpAccessListResponse: - ip_access_list: 'IpAccessListInfo' = None - - def as_dict(self) -> dict: - body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateIpAccessListResponse': - return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) - - -@dataclass -class Delete: - """Delete access list""" - - ip_access_list_id: str - - -@dataclass -class FetchIpAccessListResponse: - ip_access_list: 'IpAccessListInfo' = None - - def as_dict(self) -> dict: - body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'FetchIpAccessListResponse': - return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) - - -@dataclass -class Get: - """Get access list""" - - ip_access_list_id: str - - -@dataclass -class GetIpAccessListResponse: - ip_access_lists: 'List[IpAccessListInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.ip_access_lists: body['ip_access_lists'] = 
[v.as_dict() for v in self.ip_access_lists] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetIpAccessListResponse': - return cls(ip_access_lists=_repeated(d, 'ip_access_lists', IpAccessListInfo)) - - -@dataclass -class IpAccessListInfo: - address_count: int = None - created_at: int = None - created_by: int = None - enabled: bool = None - ip_addresses: 'List[str]' = None - label: str = None - list_id: str = None - list_type: 'ListType' = None - updated_at: int = None - updated_by: int = None - - def as_dict(self) -> dict: - body = {} - if self.address_count: body['address_count'] = self.address_count - if self.created_at: body['created_at'] = self.created_at - if self.created_by: body['created_by'] = self.created_by - if self.enabled: body['enabled'] = self.enabled - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label: body['label'] = self.label - if self.list_id: body['list_id'] = self.list_id - if self.list_type: body['list_type'] = self.list_type.value - if self.updated_at: body['updated_at'] = self.updated_at - if self.updated_by: body['updated_by'] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'IpAccessListInfo': - return cls(address_count=d.get('address_count', None), - created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - enabled=d.get('enabled', None), - ip_addresses=d.get('ip_addresses', None), - label=d.get('label', None), - list_id=d.get('list_id', None), - list_type=_enum(d, 'list_type', ListType), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) - - -class ListType(Enum): - """This describes an enum""" - - ALLOW = 'ALLOW' - BLOCK = 'BLOCK' - - -@dataclass -class ReplaceIpAccessList: - label: str - list_type: 'ListType' - ip_addresses: 'List[str]' - enabled: bool - ip_access_list_id: str - list_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.enabled: body['enabled'] 
= self.enabled - if self.ip_access_list_id: body['ip_access_list_id'] = self.ip_access_list_id - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label: body['label'] = self.label - if self.list_id: body['list_id'] = self.list_id - if self.list_type: body['list_type'] = self.list_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ReplaceIpAccessList': - return cls(enabled=d.get('enabled', None), - ip_access_list_id=d.get('ip_access_list_id', None), - ip_addresses=d.get('ip_addresses', None), - label=d.get('label', None), - list_id=d.get('list_id', None), - list_type=_enum(d, 'list_type', ListType)) - - -@dataclass -class UpdateIpAccessList: - label: str - list_type: 'ListType' - ip_addresses: 'List[str]' - enabled: bool - ip_access_list_id: str - list_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.enabled: body['enabled'] = self.enabled - if self.ip_access_list_id: body['ip_access_list_id'] = self.ip_access_list_id - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label: body['label'] = self.label - if self.list_id: body['list_id'] = self.list_id - if self.list_type: body['list_type'] = self.list_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateIpAccessList': - return cls(enabled=d.get('enabled', None), - ip_access_list_id=d.get('ip_access_list_id', None), - ip_addresses=d.get('ip_addresses', None), - label=d.get('label', None), - list_id=d.get('list_id', None), - list_type=_enum(d, 'list_type', ListType)) - - -class IpAccessListsAPI: - """IP Access List enables admins to configure IP access lists. - - IP access lists affect web application access and REST API access to this workspace only. If the feature - is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists - (inclusion) and block lists (exclusion). - - When a connection is attempted: 1. 
**First, all block lists are checked.** If the connection IP address - matches any block list, the connection is rejected. 2. **If the connection was not rejected by block - lists**, the IP address is compared with the allow lists. - - If there is at least one allow list for the workspace, the connection is allowed only if the IP address - matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed. - - For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values, - where one CIDR counts as a single value. - - After changes to the IP access list feature, it can take a few minutes for changes to take effect.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, label: str, list_type: ListType, ip_addresses: List[str], - **kwargs) -> CreateIpAccessListResponse: - """Create access list. - - Creates an IP access list for this workspace. - - A list can be an allow list or a block list. See the top of this file for a description of how the - server treats allow lists and block lists at runtime. - - When creating or updating an IP access list: - - * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, - where one CIDR counts as a single value. Attempts to exceed that number return error 400 with - `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, - error 400 is returned with `error_code` value `INVALID_STATE`. - - It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no - effect until you enable the feature. 
See :method:workspaceconf/setStatus""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateIpAccessList(ip_addresses=ip_addresses, label=label, list_type=list_type) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/ip-access-lists', body=body) - return CreateIpAccessListResponse.from_dict(json) - - def delete(self, ip_access_list_id: str, **kwargs): - """Delete access list. - - Deletes an IP access list, specified by its list ID.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Delete(ip_access_list_id=ip_access_list_id) - - self._api.do('DELETE', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}') - - def get(self, ip_access_list_id: str, **kwargs) -> FetchIpAccessListResponse: - """Get access list. - - Gets an IP access list, specified by its list ID.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(ip_access_list_id=ip_access_list_id) - - json = self._api.do('GET', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}') - return FetchIpAccessListResponse.from_dict(json) - - def list(self) -> Iterator[IpAccessListInfo]: - """Get access lists. - - Gets all IP access lists for the specified workspace.""" - - json = self._api.do('GET', '/api/2.0/ip-access-lists') - return [IpAccessListInfo.from_dict(v) for v in json.get('ip_access_lists', [])] - - def replace(self, - label: str, - list_type: ListType, - ip_addresses: List[str], - enabled: bool, - ip_access_list_id: str, - *, - list_id: str = None, - **kwargs): - """Replace access list. - - Replaces an IP access list, specified by its ID. - - A list can include allow lists and block lists. See the top of this file for a description of how the - server treats allow lists and block lists at run time. 
When replacing an IP access list: * For all - allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one - CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value - `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is - returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take - effect. Note that your resulting IP access list has no effect until you enable the feature. See - :method:workspaceconf/setStatus.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ReplaceIpAccessList(enabled=enabled, - ip_access_list_id=ip_access_list_id, - ip_addresses=ip_addresses, - label=label, - list_id=list_id, - list_type=list_type) - body = request.as_dict() - self._api.do('PUT', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}', body=body) - - def update(self, - label: str, - list_type: ListType, - ip_addresses: List[str], - enabled: bool, - ip_access_list_id: str, - *, - list_id: str = None, - **kwargs): - """Update access list. - - Updates an existing IP access list, specified by its ID. - - A list can include allow lists and block lists. See the top of this file for a description of how the - server treats allow lists and block lists at run time. - - When updating an IP access list: - - * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, - where one CIDR counts as a single value. Attempts to exceed that number return error 400 with - `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, - error 400 is returned with `error_code` value `INVALID_STATE`. - - It can take a few minutes for the changes to take effect. Note that your resulting IP access list has - no effect until you enable the feature. 
See :method:workspaceconf/setStatus.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateIpAccessList(enabled=enabled, - ip_access_list_id=ip_access_list_id, - ip_addresses=ip_addresses, - label=label, - list_id=list_id, - list_type=list_type) - body = request.as_dict() - self._api.do('PATCH', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}', body=body) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index b85bbde9c..d66f3833d 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -13,9 +13,8 @@ _LOG = logging.getLogger('databricks.sdk') -from .clusters import BaseClusterInfo -from .libraries import Library -from .permissions import AccessControlRequest +from .compute import BaseClusterInfo, Library +from .iam import AccessControlRequest # all definitions in this file are in alphabetical order @@ -390,14 +389,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'DeleteRun': return cls(run_id=d.get('run_id', None)) -@dataclass -class ExportRun: - """Export and retrieve a job run""" - - run_id: int - views_to_export: 'ViewsToExport' = None - - @dataclass class ExportRunOutput: views: 'List[ViewItem]' = None @@ -412,6 +403,14 @@ def from_dict(cls, d: Dict[str, any]) -> 'ExportRunOutput': return cls(views=_repeated(d, 'views', ViewItem)) +@dataclass +class ExportRunRequest: + """Export and retrieve a job run""" + + run_id: int + views_to_export: 'ViewsToExport' = None + + @dataclass class FileArrivalTriggerSettings: min_time_between_trigger_seconds: int = None @@ -435,25 +434,25 @@ def from_dict(cls, d: Dict[str, any]) -> 'FileArrivalTriggerSettings': @dataclass -class Get: +class GetJobRequest: """Get a single job""" job_id: int @dataclass -class GetRun: - """Get a single job run""" +class GetRunOutputRequest: + """Get the output for a single run""" run_id: int - include_history: bool = None @dataclass -class GetRunOutput: - """Get the output 
for a single run""" +class GetRunRequest: + """Get a single job run""" run_id: int + include_history: bool = None @dataclass @@ -779,7 +778,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'JobWebhookNotificationsOnSuccessItem': @dataclass -class ListRequest: +class ListJobsRequest: """List all jobs""" expand_tasks: bool = None @@ -805,7 +804,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListJobsResponse': @dataclass -class ListRuns: +class ListRunsRequest: """List runs for a job""" active_only: bool = None @@ -1478,16 +1477,27 @@ def from_dict(cls, d: Dict[str, any]) -> 'SparkJarTask': class SparkPythonTask: python_file: str parameters: 'List[str]' = None + source: 'SparkPythonTaskSource' = None def as_dict(self) -> dict: body = {} if self.parameters: body['parameters'] = [v for v in self.parameters] if self.python_file: body['python_file'] = self.python_file + if self.source: body['source'] = self.source.value return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'SparkPythonTask': - return cls(parameters=d.get('parameters', None), python_file=d.get('python_file', None)) + return cls(parameters=d.get('parameters', None), + python_file=d.get('python_file', None), + source=_enum(d, 'source', SparkPythonTaskSource)) + + +class SparkPythonTaskSource(Enum): + """This describes an enum""" + + GIT = 'GIT' + WORKSPACE = 'WORKSPACE' @dataclass @@ -2100,7 +2110,7 @@ def export_run(self, run_id: int, *, views_to_export: ViewsToExport = None, **kw Export and retrieve the job run task.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ExportRun(run_id=run_id, views_to_export=views_to_export) + request = ExportRunRequest(run_id=run_id, views_to_export=views_to_export) query = {} if run_id: query['run_id'] = request.run_id @@ -2115,7 +2125,7 @@ def get(self, job_id: int, **kwargs) -> Job: Retrieves the details for a single job.""" request = kwargs.get('request', None) if not request: # request is not given 
through keyed args - request = Get(job_id=job_id) + request = GetJobRequest(job_id=job_id) query = {} if job_id: query['job_id'] = request.job_id @@ -2129,7 +2139,7 @@ def get_run(self, run_id: int, *, include_history: bool = None, **kwargs) -> Run Retrieve the metadata of a run.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetRun(include_history=include_history, run_id=run_id) + request = GetRunRequest(include_history=include_history, run_id=run_id) query = {} if include_history: query['include_history'] = request.include_history @@ -2151,7 +2161,7 @@ def get_run_output(self, run_id: int, **kwargs) -> RunOutput: reference them beyond 60 days, you must save old run results before they expire.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetRunOutput(run_id=run_id) + request = GetRunOutputRequest(run_id=run_id) query = {} if run_id: query['run_id'] = request.run_id @@ -2171,7 +2181,7 @@ def list(self, Retrieves a list of jobs.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListRequest(expand_tasks=expand_tasks, limit=limit, name=name, offset=offset) + request = ListJobsRequest(expand_tasks=expand_tasks, limit=limit, name=name, offset=offset) query = {} if expand_tasks: query['expand_tasks'] = request.expand_tasks @@ -2211,15 +2221,15 @@ def list_runs(self, List runs in descending order by start time.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListRuns(active_only=active_only, - completed_only=completed_only, - expand_tasks=expand_tasks, - job_id=job_id, - limit=limit, - offset=offset, - run_type=run_type, - start_time_from=start_time_from, - start_time_to=start_time_to) + request = ListRunsRequest(active_only=active_only, + completed_only=completed_only, + expand_tasks=expand_tasks, + job_id=job_id, + 
limit=limit, + offset=offset, + run_type=run_type, + start_time_from=start_time_from, + start_time_to=start_time_to) query = {} if active_only: query['active_only'] = request.active_only diff --git a/databricks/sdk/service/libraries.py b/databricks/sdk/service/libraries.py deleted file mode 100755 index dbaa5945d..000000000 --- a/databricks/sdk/service/libraries.py +++ /dev/null @@ -1,282 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Dict, List - -from ._internal import _enum, _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class ClusterLibraryStatuses: - cluster_id: str = None - library_statuses: 'List[LibraryFullStatus]' = None - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['cluster_id'] = self.cluster_id - if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ClusterLibraryStatuses': - return cls(cluster_id=d.get('cluster_id', None), - library_statuses=_repeated(d, 'library_statuses', LibraryFullStatus)) - - -@dataclass -class ClusterStatus: - """Get status""" - - cluster_id: str - - -@dataclass -class InstallLibraries: - cluster_id: str - libraries: 'List[Library]' - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['cluster_id'] = self.cluster_id - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'InstallLibraries': - return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated(d, 'libraries', Library)) - - -@dataclass -class Library: - cran: 'RCranLibrary' = None - egg: str = None - jar: str = None - maven: 'MavenLibrary' = None - pypi: 'PythonPyPiLibrary' = None - 
whl: str = None - - def as_dict(self) -> dict: - body = {} - if self.cran: body['cran'] = self.cran.as_dict() - if self.egg: body['egg'] = self.egg - if self.jar: body['jar'] = self.jar - if self.maven: body['maven'] = self.maven.as_dict() - if self.pypi: body['pypi'] = self.pypi.as_dict() - if self.whl: body['whl'] = self.whl - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Library': - return cls(cran=_from_dict(d, 'cran', RCranLibrary), - egg=d.get('egg', None), - jar=d.get('jar', None), - maven=_from_dict(d, 'maven', MavenLibrary), - pypi=_from_dict(d, 'pypi', PythonPyPiLibrary), - whl=d.get('whl', None)) - - -@dataclass -class LibraryFullStatus: - is_library_for_all_clusters: bool = None - library: 'Library' = None - messages: 'List[str]' = None - status: 'LibraryFullStatusStatus' = None - - def as_dict(self) -> dict: - body = {} - if self.is_library_for_all_clusters: - body['is_library_for_all_clusters'] = self.is_library_for_all_clusters - if self.library: body['library'] = self.library.as_dict() - if self.messages: body['messages'] = [v for v in self.messages] - if self.status: body['status'] = self.status.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'LibraryFullStatus': - return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None), - library=_from_dict(d, 'library', Library), - messages=d.get('messages', None), - status=_enum(d, 'status', LibraryFullStatusStatus)) - - -class LibraryFullStatusStatus(Enum): - """Status of installing the library on the cluster.""" - - FAILED = 'FAILED' - INSTALLED = 'INSTALLED' - INSTALLING = 'INSTALLING' - PENDING = 'PENDING' - RESOLVING = 'RESOLVING' - SKIPPED = 'SKIPPED' - UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART' - - -@dataclass -class ListAllClusterLibraryStatusesResponse: - statuses: 'List[ClusterLibraryStatuses]' = None - - def as_dict(self) -> dict: - body = {} - if self.statuses: body['statuses'] = [v.as_dict() for v in 
self.statuses] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListAllClusterLibraryStatusesResponse': - return cls(statuses=_repeated(d, 'statuses', ClusterLibraryStatuses)) - - -@dataclass -class MavenLibrary: - coordinates: str - exclusions: 'List[str]' = None - repo: str = None - - def as_dict(self) -> dict: - body = {} - if self.coordinates: body['coordinates'] = self.coordinates - if self.exclusions: body['exclusions'] = [v for v in self.exclusions] - if self.repo: body['repo'] = self.repo - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'MavenLibrary': - return cls(coordinates=d.get('coordinates', None), - exclusions=d.get('exclusions', None), - repo=d.get('repo', None)) - - -@dataclass -class PythonPyPiLibrary: - package: str - repo: str = None - - def as_dict(self) -> dict: - body = {} - if self.package: body['package'] = self.package - if self.repo: body['repo'] = self.repo - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PythonPyPiLibrary': - return cls(package=d.get('package', None), repo=d.get('repo', None)) - - -@dataclass -class RCranLibrary: - package: str - repo: str = None - - def as_dict(self) -> dict: - body = {} - if self.package: body['package'] = self.package - if self.repo: body['repo'] = self.repo - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RCranLibrary': - return cls(package=d.get('package', None), repo=d.get('repo', None)) - - -@dataclass -class UninstallLibraries: - cluster_id: str - libraries: 'List[Library]' - - def as_dict(self) -> dict: - body = {} - if self.cluster_id: body['cluster_id'] = self.cluster_id - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UninstallLibraries': - return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated(d, 'libraries', Library)) - - -class LibrariesAPI: - """The Libraries API allows 
you to install and uninstall libraries and get the status of libraries on a - cluster. - - To make third-party or custom code available to notebooks and jobs running on your clusters, you can - install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Java, Scala, and - Python libraries and point to external packages in PyPI, Maven, and CRAN repositories. - - Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library - directly from a public repository such as PyPI or Maven, using a previously installed workspace library, - or using an init script. - - When you install a library on a cluster, a notebook already attached to that cluster will not immediately - see the new library. You must first detach and then reattach the notebook to the cluster. - - When you uninstall a library from a cluster, the library is removed only when you restart the cluster. - Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.""" - - def __init__(self, api_client): - self._api = api_client - - def all_cluster_statuses(self) -> ListAllClusterLibraryStatusesResponse: - """Get all statuses. - - Get the status of all libraries on all clusters. A status will be available for all libraries - installed on this cluster via the API or the libraries UI as well as libraries set to be installed on - all clusters via the libraries UI.""" - - json = self._api.do('GET', '/api/2.0/libraries/all-cluster-statuses') - return ListAllClusterLibraryStatusesResponse.from_dict(json) - - def cluster_status(self, cluster_id: str, **kwargs) -> ClusterLibraryStatuses: - """Get status. - - Get the status of libraries on a cluster. A status will be available for all libraries installed on - this cluster via the API or the libraries UI as well as libraries set to be installed on all clusters - via the libraries UI. The order of returned libraries will be as follows. - - 1. 
Libraries set to be installed on this cluster will be returned first. Within this group, the final - order will be order in which the libraries were added to the cluster. - - 2. Libraries set to be installed on all clusters are returned next. Within this group there is no - order guarantee. - - 3. Libraries that were previously requested on this cluster or on all clusters, but now marked for - removal. Within this group there is no order guarantee.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ClusterStatus(cluster_id=cluster_id) - - query = {} - if cluster_id: query['cluster_id'] = request.cluster_id - - json = self._api.do('GET', '/api/2.0/libraries/cluster-status', query=query) - return ClusterLibraryStatuses.from_dict(json) - - def install(self, cluster_id: str, libraries: List[Library], **kwargs): - """Add a library. - - Add libraries to be installed on a cluster. The installation is asynchronous; it happens in the - background after the completion of this request. - - **Note**: The actual set of libraries to be installed on a cluster is the union of the libraries - specified via this method and the libraries set to be installed on all clusters via the libraries UI.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = InstallLibraries(cluster_id=cluster_id, libraries=libraries) - body = request.as_dict() - self._api.do('POST', '/api/2.0/libraries/install', body=body) - - def uninstall(self, cluster_id: str, libraries: List[Library], **kwargs): - """Uninstall libraries. - - Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the cluster is - restarted. 
Uninstalling libraries that are not installed on the cluster will have no impact but is not - an error.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UninstallLibraries(cluster_id=cluster_id, libraries=libraries) - body = request.as_dict() - self._api.do('POST', '/api/2.0/libraries/uninstall', body=body) diff --git a/databricks/sdk/service/mlflow.py b/databricks/sdk/service/ml.py similarity index 87% rename from databricks/sdk/service/mlflow.py rename to databricks/sdk/service/ml.py index c3a07740b..71b30deff 100755 --- a/databricks/sdk/service/mlflow.py +++ b/databricks/sdk/service/ml.py @@ -72,20 +72,6 @@ class ActivityType(Enum): SYSTEM_TRANSITION = 'SYSTEM_TRANSITION' -@dataclass -class ApproveResponse: - activity: 'Activity' = None - - def as_dict(self) -> dict: - body = {} - if self.activity: body['activity'] = self.activity.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ApproveResponse': - return cls(activity=_from_dict(d, 'activity', Activity)) - - @dataclass class ApproveTransitionRequest: name: str @@ -112,6 +98,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'ApproveTransitionRequest': version=d.get('version', None)) +@dataclass +class ApproveTransitionRequestResponse: + activity: 'Activity' = None + + def as_dict(self) -> dict: + body = {} + if self.activity: body['activity'] = self.activity.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ApproveTransitionRequestResponse': + return cls(activity=_from_dict(d, 'activity', Activity)) + + class CommentActivityAction(Enum): """This describes an enum""" @@ -126,6 +126,7 @@ class CommentObject: available_actions: 'List[CommentActivityAction]' = None comment: str = None creation_timestamp: int = None + id: str = None last_updated_timestamp: int = None user_id: str = None @@ -134,6 +135,7 @@ def as_dict(self) -> dict: if self.available_actions: body['available_actions'] = 
[v for v in self.available_actions] if self.comment: body['comment'] = self.comment if self.creation_timestamp: body['creation_timestamp'] = self.creation_timestamp + if self.id: body['id'] = self.id if self.last_updated_timestamp: body['last_updated_timestamp'] = self.last_updated_timestamp if self.user_id: body['user_id'] = self.user_id return body @@ -143,6 +145,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'CommentObject': return cls(available_actions=d.get('available_actions', None), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), + id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), user_id=d.get('user_id', None)) @@ -165,6 +168,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateComment': return cls(comment=d.get('comment', None), name=d.get('name', None), version=d.get('version', None)) +@dataclass +class CreateCommentResponse: + comment: 'CommentObject' = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateCommentResponse': + return cls(comment=_from_dict(d, 'comment', CommentObject)) + + @dataclass class CreateExperiment: name: str @@ -200,80 +217,80 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateExperimentResponse': @dataclass -class CreateModelVersionRequest: +class CreateModelRequest: name: str - source: str description: str = None - run_id: str = None - run_link: str = None - tags: 'List[ModelVersionTag]' = None + tags: 'List[ModelTag]' = None def as_dict(self) -> dict: body = {} if self.description: body['description'] = self.description if self.name: body['name'] = self.name - if self.run_id: body['run_id'] = self.run_id - if self.run_link: body['run_link'] = self.run_link - if self.source: body['source'] = self.source if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body @classmethod - def from_dict(cls, d: Dict[str, 
any]) -> 'CreateModelVersionRequest': + def from_dict(cls, d: Dict[str, any]) -> 'CreateModelRequest': return cls(description=d.get('description', None), name=d.get('name', None), - run_id=d.get('run_id', None), - run_link=d.get('run_link', None), - source=d.get('source', None), - tags=_repeated(d, 'tags', ModelVersionTag)) + tags=_repeated(d, 'tags', ModelTag)) @dataclass -class CreateModelVersionResponse: - model_version: 'ModelVersion' = None +class CreateModelResponse: + registered_model: 'Model' = None def as_dict(self) -> dict: body = {} - if self.model_version: body['model_version'] = self.model_version.as_dict() + if self.registered_model: body['registered_model'] = self.registered_model.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateModelVersionResponse': - return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) + def from_dict(cls, d: Dict[str, any]) -> 'CreateModelResponse': + return cls(registered_model=_from_dict(d, 'registered_model', Model)) @dataclass -class CreateRegisteredModelRequest: +class CreateModelVersionRequest: name: str + source: str description: str = None - tags: 'List[RegisteredModelTag]' = None + run_id: str = None + run_link: str = None + tags: 'List[ModelVersionTag]' = None def as_dict(self) -> dict: body = {} if self.description: body['description'] = self.description if self.name: body['name'] = self.name + if self.run_id: body['run_id'] = self.run_id + if self.run_link: body['run_link'] = self.run_link + if self.source: body['source'] = self.source if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateRegisteredModelRequest': + def from_dict(cls, d: Dict[str, any]) -> 'CreateModelVersionRequest': return cls(description=d.get('description', None), name=d.get('name', None), - tags=_repeated(d, 'tags', RegisteredModelTag)) + run_id=d.get('run_id', None), + run_link=d.get('run_link', None), + 
source=d.get('source', None), + tags=_repeated(d, 'tags', ModelVersionTag)) @dataclass -class CreateRegisteredModelResponse: - registered_model: 'RegisteredModel' = None +class CreateModelVersionResponse: + model_version: 'ModelVersion' = None def as_dict(self) -> dict: body = {} - if self.registered_model: body['registered_model'] = self.registered_model.as_dict() + if self.model_version: body['model_version'] = self.model_version.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateRegisteredModelResponse': - return cls(registered_model=_from_dict(d, 'registered_model', RegisteredModel)) + def from_dict(cls, d: Dict[str, any]) -> 'CreateModelVersionResponse': + return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) @dataclass @@ -305,20 +322,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateRegistryWebhook': status=_enum(d, 'status', RegistryWebhookStatus)) -@dataclass -class CreateResponse: - comment: 'CommentObject' = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateResponse': - return cls(comment=_from_dict(d, 'comment', CommentObject)) - - @dataclass class CreateRun: experiment_id: str = None @@ -380,52 +383,63 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateTransitionRequest': @dataclass -class DeleteExperiment: - experiment_id: str +class CreateTransitionRequestResponse: + request: 'TransitionRequest' = None def as_dict(self) -> dict: body = {} - if self.experiment_id: body['experiment_id'] = self.experiment_id + if self.request: body['request'] = self.request.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DeleteExperiment': - return cls(experiment_id=d.get('experiment_id', None)) + def from_dict(cls, d: Dict[str, any]) -> 'CreateTransitionRequestResponse': + return cls(request=_from_dict(d, 'request', TransitionRequest)) @dataclass 
-class DeleteModelVersionCommentRequest: - """Delete a comment""" +class CreateWebhookResponse: + webhook: 'RegistryWebhook' = None - id: str + def as_dict(self) -> dict: + body = {} + if self.webhook: body['webhook'] = self.webhook.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateWebhookResponse': + return cls(webhook=_from_dict(d, 'webhook', RegistryWebhook)) @dataclass -class DeleteModelVersionRequest: - """Delete a model version.""" +class DeleteCommentRequest: + """Delete a comment""" - name: str - version: str + id: str @dataclass -class DeleteModelVersionTagRequest: - """Delete a model version tag""" +class DeleteExperiment: + experiment_id: str - name: str - version: str - key: str + def as_dict(self) -> dict: + body = {} + if self.experiment_id: body['experiment_id'] = self.experiment_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteExperiment': + return cls(experiment_id=d.get('experiment_id', None)) @dataclass -class DeleteRegisteredModelRequest: +class DeleteModelRequest: """Delete a model""" name: str @dataclass -class DeleteRegisteredModelTagRequest: +class DeleteModelTagRequest: """Delete a model tag""" name: str @@ -433,10 +447,20 @@ class DeleteRegisteredModelTagRequest: @dataclass -class DeleteRegistryWebhookRequest: - """Delete a webhook""" +class DeleteModelVersionRequest: + """Delete a model version.""" - id: str = None + name: str + version: str + + +@dataclass +class DeleteModelVersionTagRequest: + """Delete a model version tag""" + + name: str + version: str + key: str @dataclass @@ -480,6 +504,13 @@ class DeleteTransitionRequestRequest: comment: str = None +@dataclass +class DeleteWebhookRequest: + """Delete a webhook""" + + id: str = None + + @dataclass class Experiment: artifact_location: str = None @@ -615,13 +646,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'GetLatestVersionsResponse': return cls(model_versions=_repeated(d, 'model_versions', ModelVersion)) 
-@dataclass -class GetMLflowDatabrickRequest: - """Get model""" - - name: str - - @dataclass class GetMetricHistoryResponse: metrics: 'List[Metric]' = None @@ -638,6 +662,27 @@ def from_dict(cls, d: Dict[str, any]) -> 'GetMetricHistoryResponse': return cls(metrics=_repeated(d, 'metrics', Metric), next_page_token=d.get('next_page_token', None)) +@dataclass +class GetModelRequest: + """Get model""" + + name: str + + +@dataclass +class GetModelResponse: + registered_model: 'ModelDatabricks' = None + + def as_dict(self) -> dict: + body = {} + if self.registered_model: body['registered_model'] = self.registered_model.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetModelResponse': + return cls(registered_model=_from_dict(d, 'registered_model', ModelDatabricks)) + + @dataclass class GetModelVersionDownloadUriRequest: """Get a model version URI""" @@ -682,41 +727,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'GetModelVersionResponse': return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) -@dataclass -class GetRegisteredModelRequest: - """Get a model""" - - name: str - - -@dataclass -class GetRegisteredModelResponse: - registered_model: 'RegisteredModel' = None - - def as_dict(self) -> dict: - body = {} - if self.registered_model: body['registered_model'] = self.registered_model.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetRegisteredModelResponse': - return cls(registered_model=_from_dict(d, 'registered_model', RegisteredModel)) - - -@dataclass -class GetResponse: - registered_model: 'RegisteredModelDatabricks' = None - - def as_dict(self) -> dict: - body = {} - if self.registered_model: body['registered_model'] = self.registered_model.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetResponse': - return cls(registered_model=_from_dict(d, 'registered_model', RegisteredModelDatabricks)) - - @dataclass class GetRunRequest: """Get a run""" 
@@ -871,7 +881,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListExperimentsResponse': @dataclass -class ListRegisteredModelsRequest: +class ListModelsRequest: """List models""" max_results: int = None @@ -879,9 +889,9 @@ class ListRegisteredModelsRequest: @dataclass -class ListRegisteredModelsResponse: +class ListModelsResponse: next_page_token: str = None - registered_models: 'List[RegisteredModel]' = None + registered_models: 'List[Model]' = None def as_dict(self) -> dict: body = {} @@ -890,9 +900,9 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListRegisteredModelsResponse': + def from_dict(cls, d: Dict[str, any]) -> 'ListModelsResponse': return cls(next_page_token=d.get('next_page_token', None), - registered_models=_repeated(d, 'registered_models', RegisteredModel)) + registered_models=_repeated(d, 'registered_models', Model)) @dataclass @@ -913,16 +923,15 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListRegistryWebhooks': @dataclass -class ListRegistryWebhooksRequest: - """List registry webhooks""" +class ListTransitionRequestsRequest: + """List transition requests""" - events: 'List[RegistryWebhookEvent]' = None - model_name: str = None - page_token: str = None + name: str + version: str @dataclass -class ListResponse: +class ListTransitionRequestsResponse: requests: 'List[Activity]' = None def as_dict(self) -> dict: @@ -931,16 +940,17 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListResponse': + def from_dict(cls, d: Dict[str, any]) -> 'ListTransitionRequestsResponse': return cls(requests=_repeated(d, 'requests', Activity)) @dataclass -class ListTransitionRequestsRequest: - """List transition requests""" +class ListWebhooksRequest: + """List registry webhooks""" - name: str - version: str + events: 'List[RegistryWebhookEvent]' = None + model_name: str = None + page_token: str = None @dataclass @@ -1058,134 +1068,77 @@ def from_dict(cls, d: Dict[str, 
any]) -> 'Metric': @dataclass -class ModelVersion: +class Model: creation_timestamp: int = None - current_stage: str = None description: str = None last_updated_timestamp: int = None + latest_versions: 'List[ModelVersion]' = None name: str = None - run_id: str = None - run_link: str = None - source: str = None - status: 'ModelVersionStatus' = None - status_message: str = None - tags: 'List[ModelVersionTag]' = None + tags: 'List[ModelTag]' = None user_id: str = None - version: str = None def as_dict(self) -> dict: body = {} if self.creation_timestamp: body['creation_timestamp'] = self.creation_timestamp - if self.current_stage: body['current_stage'] = self.current_stage if self.description: body['description'] = self.description if self.last_updated_timestamp: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] if self.name: body['name'] = self.name - if self.run_id: body['run_id'] = self.run_id - if self.run_link: body['run_link'] = self.run_link - if self.source: body['source'] = self.source - if self.status: body['status'] = self.status.value - if self.status_message: body['status_message'] = self.status_message if self.tags: body['tags'] = [v.as_dict() for v in self.tags] if self.user_id: body['user_id'] = self.user_id - if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ModelVersion': + def from_dict(cls, d: Dict[str, any]) -> 'Model': return cls(creation_timestamp=d.get('creation_timestamp', None), - current_stage=d.get('current_stage', None), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), + latest_versions=_repeated(d, 'latest_versions', ModelVersion), name=d.get('name', None), - run_id=d.get('run_id', None), - run_link=d.get('run_link', None), - source=d.get('source', None), - status=_enum(d, 'status', ModelVersionStatus), - 
status_message=d.get('status_message', None), - tags=_repeated(d, 'tags', ModelVersionTag), - user_id=d.get('user_id', None), - version=d.get('version', None)) + tags=_repeated(d, 'tags', ModelTag), + user_id=d.get('user_id', None)) @dataclass -class ModelVersionDatabricks: +class ModelDatabricks: creation_timestamp: int = None - current_stage: 'Stage' = None description: str = None + id: str = None last_updated_timestamp: int = None + latest_versions: 'List[ModelVersion]' = None name: str = None permission_level: 'PermissionLevel' = None - run_id: str = None - run_link: str = None - source: str = None - status: 'Status' = None - status_message: str = None - tags: 'List[ModelVersionTag]' = None + tags: 'List[ModelTag]' = None user_id: str = None - version: str = None def as_dict(self) -> dict: body = {} if self.creation_timestamp: body['creation_timestamp'] = self.creation_timestamp - if self.current_stage: body['current_stage'] = self.current_stage.value if self.description: body['description'] = self.description + if self.id: body['id'] = self.id if self.last_updated_timestamp: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] if self.name: body['name'] = self.name if self.permission_level: body['permission_level'] = self.permission_level.value - if self.run_id: body['run_id'] = self.run_id - if self.run_link: body['run_link'] = self.run_link - if self.source: body['source'] = self.source - if self.status: body['status'] = self.status.value - if self.status_message: body['status_message'] = self.status_message if self.tags: body['tags'] = [v.as_dict() for v in self.tags] if self.user_id: body['user_id'] = self.user_id - if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ModelVersionDatabricks': + def from_dict(cls, d: Dict[str, any]) -> 'ModelDatabricks': return 
cls(creation_timestamp=d.get('creation_timestamp', None), - current_stage=_enum(d, 'current_stage', Stage), description=d.get('description', None), + id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), + latest_versions=_repeated(d, 'latest_versions', ModelVersion), name=d.get('name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), - run_id=d.get('run_id', None), - run_link=d.get('run_link', None), - source=d.get('source', None), - status=_enum(d, 'status', Status), - status_message=d.get('status_message', None), - tags=_repeated(d, 'tags', ModelVersionTag), - user_id=d.get('user_id', None), - version=d.get('version', None)) - - -class ModelVersionStatus(Enum): - """Current status of `model_version`""" - - FAILED_REGISTRATION = 'FAILED_REGISTRATION' - PENDING_REGISTRATION = 'PENDING_REGISTRATION' - READY = 'READY' - - -@dataclass -class ModelVersionTag: - key: str = None - value: str = None - - def as_dict(self) -> dict: - body = {} - if self.key: body['key'] = self.key - if self.value: body['value'] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ModelVersionTag': - return cls(key=d.get('key', None), value=d.get('value', None)) + tags=_repeated(d, 'tags', ModelTag), + user_id=d.get('user_id', None)) @dataclass -class Param: +class ModelTag: key: str = None value: str = None @@ -1196,93 +1149,139 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Param': + def from_dict(cls, d: Dict[str, any]) -> 'ModelTag': return cls(key=d.get('key', None), value=d.get('value', None)) -class PermissionLevel(Enum): - """Permission level of the requesting user on the object. 
For what is allowed at each level, see - [MLflow Model permissions](..).""" - - CAN_EDIT = 'CAN_EDIT' - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' - CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' - CAN_READ = 'CAN_READ' - - @dataclass -class RegisteredModel: +class ModelVersion: creation_timestamp: int = None + current_stage: str = None description: str = None last_updated_timestamp: int = None - latest_versions: 'List[ModelVersion]' = None name: str = None - tags: 'List[RegisteredModelTag]' = None + run_id: str = None + run_link: str = None + source: str = None + status: 'ModelVersionStatus' = None + status_message: str = None + tags: 'List[ModelVersionTag]' = None user_id: str = None + version: str = None def as_dict(self) -> dict: body = {} if self.creation_timestamp: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage: body['current_stage'] = self.current_stage if self.description: body['description'] = self.description if self.last_updated_timestamp: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] if self.name: body['name'] = self.name + if self.run_id: body['run_id'] = self.run_id + if self.run_link: body['run_link'] = self.run_link + if self.source: body['source'] = self.source + if self.status: body['status'] = self.status.value + if self.status_message: body['status_message'] = self.status_message if self.tags: body['tags'] = [v.as_dict() for v in self.tags] if self.user_id: body['user_id'] = self.user_id + if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RegisteredModel': + def from_dict(cls, d: Dict[str, any]) -> 'ModelVersion': return cls(creation_timestamp=d.get('creation_timestamp', None), + current_stage=d.get('current_stage', None), description=d.get('description', None), 
last_updated_timestamp=d.get('last_updated_timestamp', None), - latest_versions=_repeated(d, 'latest_versions', ModelVersion), name=d.get('name', None), - tags=_repeated(d, 'tags', RegisteredModelTag), - user_id=d.get('user_id', None)) + run_id=d.get('run_id', None), + run_link=d.get('run_link', None), + source=d.get('source', None), + status=_enum(d, 'status', ModelVersionStatus), + status_message=d.get('status_message', None), + tags=_repeated(d, 'tags', ModelVersionTag), + user_id=d.get('user_id', None), + version=d.get('version', None)) @dataclass -class RegisteredModelDatabricks: +class ModelVersionDatabricks: creation_timestamp: int = None + current_stage: 'Stage' = None description: str = None - id: str = None last_updated_timestamp: int = None - latest_versions: 'List[ModelVersion]' = None name: str = None permission_level: 'PermissionLevel' = None - tags: 'List[RegisteredModelTag]' = None + run_id: str = None + run_link: str = None + source: str = None + status: 'Status' = None + status_message: str = None + tags: 'List[ModelVersionTag]' = None user_id: str = None + version: str = None def as_dict(self) -> dict: body = {} if self.creation_timestamp: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage: body['current_stage'] = self.current_stage.value if self.description: body['description'] = self.description - if self.id: body['id'] = self.id if self.last_updated_timestamp: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] if self.name: body['name'] = self.name if self.permission_level: body['permission_level'] = self.permission_level.value + if self.run_id: body['run_id'] = self.run_id + if self.run_link: body['run_link'] = self.run_link + if self.source: body['source'] = self.source + if self.status: body['status'] = self.status.value + if self.status_message: body['status_message'] = self.status_message if self.tags: 
body['tags'] = [v.as_dict() for v in self.tags] if self.user_id: body['user_id'] = self.user_id + if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RegisteredModelDatabricks': + def from_dict(cls, d: Dict[str, any]) -> 'ModelVersionDatabricks': return cls(creation_timestamp=d.get('creation_timestamp', None), + current_stage=_enum(d, 'current_stage', Stage), description=d.get('description', None), - id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), - latest_versions=_repeated(d, 'latest_versions', ModelVersion), name=d.get('name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), - tags=_repeated(d, 'tags', RegisteredModelTag), - user_id=d.get('user_id', None)) + run_id=d.get('run_id', None), + run_link=d.get('run_link', None), + source=d.get('source', None), + status=_enum(d, 'status', Status), + status_message=d.get('status_message', None), + tags=_repeated(d, 'tags', ModelVersionTag), + user_id=d.get('user_id', None), + version=d.get('version', None)) + + +class ModelVersionStatus(Enum): + """Current status of `model_version`""" + + FAILED_REGISTRATION = 'FAILED_REGISTRATION' + PENDING_REGISTRATION = 'PENDING_REGISTRATION' + READY = 'READY' + + +@dataclass +class ModelVersionTag: + key: str = None + value: str = None + + def as_dict(self) -> dict: + body = {} + if self.key: body['key'] = self.key + if self.value: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ModelVersionTag': + return cls(key=d.get('key', None), value=d.get('value', None)) @dataclass -class RegisteredModelTag: +class Param: key: str = None value: str = None @@ -1293,10 +1292,21 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RegisteredModelTag': + def from_dict(cls, d: Dict[str, any]) -> 'Param': return cls(key=d.get('key', None), value=d.get('value', None)) +class 
PermissionLevel(Enum): + """Permission level of the requesting user on the object. For what is allowed at each level, see + [MLflow Model permissions](..).""" + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' + CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' + CAN_READ = 'CAN_READ' + + @dataclass class RegistryWebhook: creation_timestamp: int = None @@ -1359,20 +1369,6 @@ class RegistryWebhookStatus(Enum): TEST_MODE = 'TEST_MODE' -@dataclass -class RejectResponse: - activity: 'Activity' = None - - def as_dict(self) -> dict: - body = {} - if self.activity: body['activity'] = self.activity.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RejectResponse': - return cls(activity=_from_dict(d, 'activity', Activity)) - - @dataclass class RejectTransitionRequest: name: str @@ -1397,7 +1393,21 @@ def from_dict(cls, d: Dict[str, any]) -> 'RejectTransitionRequest': @dataclass -class RenameRegisteredModelRequest: +class RejectTransitionRequestResponse: + activity: 'Activity' = None + + def as_dict(self) -> dict: + body = {} + if self.activity: body['activity'] = self.activity.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RejectTransitionRequestResponse': + return cls(activity=_from_dict(d, 'activity', Activity)) + + +@dataclass +class RenameModelRequest: name: str new_name: str = None @@ -1408,13 +1418,13 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RenameRegisteredModelRequest': + def from_dict(cls, d: Dict[str, any]) -> 'RenameModelRequest': return cls(name=d.get('name', None), new_name=d.get('new_name', None)) @dataclass -class RenameRegisteredModelResponse: - registered_model: 'RegisteredModel' = None +class RenameModelResponse: + registered_model: 'Model' = None def as_dict(self) -> dict: body = {} @@ -1422,8 +1432,8 @@ def as_dict(self) -> dict: return body 
@classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RenameRegisteredModelResponse': - return cls(registered_model=_from_dict(d, 'registered_model', RegisteredModel)) + def from_dict(cls, d: Dict[str, any]) -> 'RenameModelResponse': + return cls(registered_model=_from_dict(d, 'registered_model', Model)) @dataclass @@ -1634,7 +1644,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'SearchModelVersionsResponse': @dataclass -class SearchRegisteredModelsRequest: +class SearchModelsRequest: """Search models""" filter: str = None @@ -1644,9 +1654,9 @@ class SearchRegisteredModelsRequest: @dataclass -class SearchRegisteredModelsResponse: +class SearchModelsResponse: next_page_token: str = None - registered_models: 'List[RegisteredModel]' = None + registered_models: 'List[Model]' = None def as_dict(self) -> dict: body = {} @@ -1655,9 +1665,9 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SearchRegisteredModelsResponse': + def from_dict(cls, d: Dict[str, any]) -> 'SearchModelsResponse': return cls(next_page_token=d.get('next_page_token', None), - registered_models=_repeated(d, 'registered_models', RegisteredModel)) + registered_models=_repeated(d, 'registered_models', Model)) @dataclass @@ -1734,9 +1744,8 @@ def from_dict(cls, d: Dict[str, any]) -> 'SetExperimentTag': @dataclass -class SetModelVersionTagRequest: +class SetModelTagRequest: name: str - version: str key: str value: str @@ -1745,20 +1754,17 @@ def as_dict(self) -> dict: if self.key: body['key'] = self.key if self.name: body['name'] = self.name if self.value: body['value'] = self.value - if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SetModelVersionTagRequest': - return cls(key=d.get('key', None), - name=d.get('name', None), - value=d.get('value', None), - version=d.get('version', None)) + def from_dict(cls, d: Dict[str, any]) -> 'SetModelTagRequest': + return cls(key=d.get('key', None), 
name=d.get('name', None), value=d.get('value', None)) @dataclass -class SetRegisteredModelTagRequest: +class SetModelVersionTagRequest: name: str + version: str key: str value: str @@ -1767,11 +1773,15 @@ def as_dict(self) -> dict: if self.key: body['key'] = self.key if self.name: body['name'] = self.name if self.value: body['value'] = self.value + if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SetRegisteredModelTagRequest': - return cls(key=d.get('key', None), name=d.get('name', None), value=d.get('value', None)) + def from_dict(cls, d: Dict[str, any]) -> 'SetModelVersionTagRequest': + return cls(key=d.get('key', None), + name=d.get('name', None), + value=d.get('value', None), + version=d.get('version', None)) @dataclass @@ -1862,29 +1872,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'TestRegistryWebhookResponse': return cls(webhook=_from_dict(d, 'webhook', TestRegistryWebhook)) -@dataclass -class TransitionModelVersionStage: - name: str - version: str - stage: str - archive_existing_versions: bool - - def as_dict(self) -> dict: - body = {} - if self.archive_existing_versions: body['archive_existing_versions'] = self.archive_existing_versions - if self.name: body['name'] = self.name - if self.stage: body['stage'] = self.stage - if self.version: body['version'] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'TransitionModelVersionStage': - return cls(archive_existing_versions=d.get('archive_existing_versions', None), - name=d.get('name', None), - stage=d.get('stage', None), - version=d.get('version', None)) - - @dataclass class TransitionModelVersionStageDatabricks: name: str @@ -1911,20 +1898,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'TransitionModelVersionStageDatabricks' version=d.get('version', None)) -@dataclass -class TransitionModelVersionStageResponse: - model_version: 'ModelVersion' = None - - def as_dict(self) -> dict: - body = {} - if 
self.model_version: body['model_version'] = self.model_version.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'TransitionModelVersionStageResponse': - return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) - - @dataclass class TransitionRequest: """Transition request details.""" @@ -1983,6 +1956,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateComment': return cls(comment=d.get('comment', None), id=d.get('id', None)) +@dataclass +class UpdateCommentResponse: + comment: 'CommentObject' = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateCommentResponse': + return cls(comment=_from_dict(d, 'comment', CommentObject)) + + @dataclass class UpdateExperiment: experiment_id: str @@ -2000,39 +1987,39 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateExperiment': @dataclass -class UpdateModelVersionRequest: +class UpdateModelRequest: name: str - version: str description: str = None def as_dict(self) -> dict: body = {} if self.description: body['description'] = self.description if self.name: body['name'] = self.name - if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateModelVersionRequest': - return cls(description=d.get('description', None), - name=d.get('name', None), - version=d.get('version', None)) + def from_dict(cls, d: Dict[str, any]) -> 'UpdateModelRequest': + return cls(description=d.get('description', None), name=d.get('name', None)) @dataclass -class UpdateRegisteredModelRequest: +class UpdateModelVersionRequest: name: str + version: str description: str = None def as_dict(self) -> dict: body = {} if self.description: body['description'] = self.description if self.name: body['name'] = self.name + if self.version: body['version'] = self.version return body @classmethod - def from_dict(cls, d: 
Dict[str, any]) -> 'UpdateRegisteredModelRequest': - return cls(description=d.get('description', None), name=d.get('name', None)) + def from_dict(cls, d: Dict[str, any]) -> 'UpdateModelVersionRequest': + return cls(description=d.get('description', None), + name=d.get('name', None), + version=d.get('version', None)) @dataclass @@ -2064,20 +2051,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateRegistryWebhook': status=_enum(d, 'status', RegistryWebhookStatus)) -@dataclass -class UpdateResponse: - comment: 'CommentObject' = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateResponse': - return cls(comment=_from_dict(d, 'comment', CommentObject)) - - @dataclass class UpdateRun: end_time: int = None @@ -2130,12 +2103,12 @@ class ExperimentsAPI: def __init__(self, api_client): self._api = api_client - def create(self, - name: str, - *, - artifact_location: str = None, - tags: List[ExperimentTag] = None, - **kwargs) -> CreateExperimentResponse: + def create_experiment(self, + name: str, + *, + artifact_location: str = None, + tags: List[ExperimentTag] = None, + **kwargs) -> CreateExperimentResponse: """Create experiment. Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that @@ -2151,7 +2124,30 @@ def create(self, json = self._api.do('POST', '/api/2.0/mlflow/experiments/create', body=body) return CreateExperimentResponse.from_dict(json) - def delete(self, experiment_id: str, **kwargs): + def create_run(self, + *, + experiment_id: str = None, + start_time: int = None, + tags: List[RunTag] = None, + user_id: str = None, + **kwargs) -> CreateRunResponse: + """Create a run. + + Creates a new run within an experiment. A run is usually a single execution of a machine learning or + data ETL pipeline. 
MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag` + associated with a single execution.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateRun(experiment_id=experiment_id, + start_time=start_time, + tags=tags, + user_id=user_id) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/mlflow/runs/create', body=body) + return CreateRunResponse.from_dict(json) + + def delete_experiment(self, experiment_id: str, **kwargs): """Delete an experiment. Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the @@ -2162,19 +2158,26 @@ def delete(self, experiment_id: str, **kwargs): body = request.as_dict() self._api.do('POST', '/api/2.0/mlflow/experiments/delete', body=body) - def get(self, experiment_id: str, **kwargs) -> Experiment: - """Get an experiment. + def delete_run(self, run_id: str, **kwargs): + """Delete a run. - Gets metadata for an experiment. This method works on deleted experiments.""" + Marks a run for deletion.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetExperimentRequest(experiment_id=experiment_id) - - query = {} - if experiment_id: query['experiment_id'] = request.experiment_id + request = DeleteRun(run_id=run_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/delete', body=body) - json = self._api.do('GET', '/api/2.0/mlflow/experiments/get', query=query) - return Experiment.from_dict(json) + def delete_tag(self, run_id: str, key: str, **kwargs): + """Delete a tag. + + Deletes a tag on a run. 
Tags are run metadata that can be updated during a run and after a run + completes.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteTag(key=key, run_id=run_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/delete-tag', body=body) def get_by_name(self, experiment_name: str, **kwargs) -> GetExperimentByNameResponse: """Get metadata. @@ -2196,12 +2199,104 @@ def get_by_name(self, experiment_name: str, **kwargs) -> GetExperimentByNameResp json = self._api.do('GET', '/api/2.0/mlflow/experiments/get-by-name', query=query) return GetExperimentByNameResponse.from_dict(json) - def list(self, - *, - max_results: int = None, - page_token: str = None, - view_type: str = None, - **kwargs) -> Iterator[Experiment]: + def get_experiment(self, experiment_id: str, **kwargs) -> Experiment: + """Get an experiment. + + Gets metadata for an experiment. This method works on deleted experiments.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetExperimentRequest(experiment_id=experiment_id) + + query = {} + if experiment_id: query['experiment_id'] = request.experiment_id + + json = self._api.do('GET', '/api/2.0/mlflow/experiments/get', query=query) + return Experiment.from_dict(json) + + def get_history(self, + metric_key: str, + *, + max_results: int = None, + page_token: str = None, + run_id: str = None, + run_uuid: str = None, + **kwargs) -> GetMetricHistoryResponse: + """Get history of a given metric within a run. 
+ + Gets a list of all values for the specified metric for a given run.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetHistoryRequest(max_results=max_results, + metric_key=metric_key, + page_token=page_token, + run_id=run_id, + run_uuid=run_uuid) + + query = {} + if max_results: query['max_results'] = request.max_results + if metric_key: query['metric_key'] = request.metric_key + if page_token: query['page_token'] = request.page_token + if run_id: query['run_id'] = request.run_id + if run_uuid: query['run_uuid'] = request.run_uuid + + json = self._api.do('GET', '/api/2.0/mlflow/metrics/get-history', query=query) + return GetMetricHistoryResponse.from_dict(json) + + def get_run(self, run_id: str, *, run_uuid: str = None, **kwargs) -> GetRunResponse: + """Get a run. + + "Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the + same key are logged for a run, return only the value with the latest timestamp. + + If there are multiple values with the latest timestamp, return the maximum of these values.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetRunRequest(run_id=run_id, run_uuid=run_uuid) + + query = {} + if run_id: query['run_id'] = request.run_id + if run_uuid: query['run_uuid'] = request.run_uuid + + json = self._api.do('GET', '/api/2.0/mlflow/runs/get', query=query) + return GetRunResponse.from_dict(json) + + def list_artifacts(self, + *, + page_token: str = None, + path: str = None, + run_id: str = None, + run_uuid: str = None, + **kwargs) -> Iterator[FileInfo]: + """Get all artifacts. + + List artifacts for a run. Takes an optional `artifact_path` prefix. 
If it is specified, the response + contains only artifacts with the specified prefix.",""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListArtifactsRequest(page_token=page_token, path=path, run_id=run_id, run_uuid=run_uuid) + + query = {} + if page_token: query['page_token'] = request.page_token + if path: query['path'] = request.path + if run_id: query['run_id'] = request.run_id + if run_uuid: query['run_uuid'] = request.run_uuid + + while True: + json = self._api.do('GET', '/api/2.0/mlflow/artifacts/list', query=query) + if 'files' not in json or not json['files']: + return + for v in json['files']: + yield FileInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def list_experiments(self, + *, + max_results: int = None, + page_token: str = None, + view_type: str = None, + **kwargs) -> Iterator[Experiment]: """List experiments. Gets a list of all experiments.""" @@ -2226,7 +2321,103 @@ def list(self, return query['page_token'] = json['next_page_token'] - def restore(self, experiment_id: str, **kwargs): + def log_batch(self, + *, + metrics: List[Metric] = None, + params: List[Param] = None, + run_id: str = None, + tags: List[RunTag] = None, + **kwargs): + """Log a batch. + + Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server + will respond with an error (non-200 status code). + + In case of error (due to internal server error or an invalid request), partial data may be written. + + You can write metrics, params, and tags in interleaving fashion, but within a given entity type are + guaranteed to follow the order specified in the request body. + + The overwrite behavior for metrics, params, and tags is as follows: + + * Metrics: metric values are never overwritten. 
Logging a metric (key, value, timestamp) appends to + the set of values for the metric with the provided key. + + * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple + tag values with the same key are provided in the same API request, the last-provided tag value is + written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent. + + * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will + result in an error). However, logging the same param (key, value) is permitted. Specifically, logging + a param is idempotent. + + Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB + in size and contain: + + * No more than 1000 metrics, params, and tags in total * Up to 1000 metrics - Up to 100 params * Up to + 100 tags + + For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900 + metrics, 50 params, and 51 tags is invalid. + + The following limits also apply to metric, param, and tag keys and values: + + * Metric keyes, param keys, and tag keys can be up to 250 characters in length * Parameter and tag + values can be up to 250 characters in length""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = LogBatch(metrics=metrics, params=params, run_id=run_id, tags=tags) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/log-batch', body=body) + + def log_metric(self, + key: str, + value: float, + timestamp: int, + *, + run_id: str = None, + run_uuid: str = None, + step: int = None, + **kwargs): + """Log a metric. + + Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated + timestamp. Examples include the various metrics that represent ML model accuracy. 
A metric can be + logged multiple times.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = LogMetric(key=key, + run_id=run_id, + run_uuid=run_uuid, + step=step, + timestamp=timestamp, + value=value) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/log-metric', body=body) + + def log_model(self, *, model_json: str = None, run_id: str = None, **kwargs): + """Log a model. + + **NOTE:** Experimental: This API may change or be removed in a future release without warning.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = LogModel(model_json=model_json, run_id=run_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/log-model', body=body) + + def log_param(self, key: str, value: str, *, run_id: str = None, run_uuid: str = None, **kwargs): + """Log a param. + + Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include + hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A + param can be logged only once for a run.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = LogParam(key=key, run_id=run_id, run_uuid=run_uuid, value=value) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/log-parameter', body=body) + + def restore_experiment(self, experiment_id: str, **kwargs): """Restores an experiment. "Restore an experiment marked for deletion. 
This also restores associated metadata, runs, metrics, @@ -2240,14 +2431,24 @@ def restore(self, experiment_id: str, **kwargs): body = request.as_dict() self._api.do('POST', '/api/2.0/mlflow/experiments/restore', body=body) - def search(self, - *, - filter: str = None, - max_results: int = None, - order_by: List[str] = None, - page_token: str = None, - view_type: SearchExperimentsViewType = None, - **kwargs) -> Iterator[Experiment]: + def restore_run(self, run_id: str, **kwargs): + """Restore a run. + + Restores a deleted run.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = RestoreRun(run_id=run_id) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/restore', body=body) + + def search_experiments(self, + *, + filter: str = None, + max_results: int = None, + order_by: List[str] = None, + page_token: str = None, + view_type: SearchExperimentsViewType = None, + **kwargs) -> Iterator[Experiment]: """Search experiments. Searches for experiments that satisfy specified search criteria.""" @@ -2270,463 +2471,260 @@ def search(self, return body['page_token'] = json['next_page_token'] - def set_experiment_tag(self, experiment_id: str, key: str, value: str, **kwargs): - """Set a tag. + def search_runs(self, + *, + experiment_ids: List[str] = None, + filter: str = None, + max_results: int = None, + order_by: List[str] = None, + page_token: str = None, + run_view_type: SearchRunsRunViewType = None, + **kwargs) -> Iterator[Run]: + """Search for runs. - Sets a tag on an experiment. Experiment tags are metadata that can be updated.""" + Searches for runs that satisfy expressions. 
+ + Search expressions can use `mlflowMetric` and `mlflowParam` keys.",""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = SetExperimentTag(experiment_id=experiment_id, key=key, value=value) + request = SearchRuns(experiment_ids=experiment_ids, + filter=filter, + max_results=max_results, + order_by=order_by, + page_token=page_token, + run_view_type=run_view_type) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/experiments/set-experiment-tag', body=body) - - def update(self, experiment_id: str, *, new_name: str = None, **kwargs): - """Update an experiment. - - Updates experiment metadata.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateExperiment(experiment_id=experiment_id, new_name=new_name) - body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/experiments/update', body=body) - - -class MLflowArtifactsAPI: - - def __init__(self, api_client): - self._api = api_client - - def list(self, - *, - page_token: str = None, - path: str = None, - run_id: str = None, - run_uuid: str = None, - **kwargs) -> Iterator[FileInfo]: - """Get all artifacts. - - List artifacts for a run. Takes an optional `artifact_path` prefix. 
If it is specified, the response - contains only artifacts with the specified prefix.",""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListArtifactsRequest(page_token=page_token, path=path, run_id=run_id, run_uuid=run_uuid) - - query = {} - if page_token: query['page_token'] = request.page_token - if path: query['path'] = request.path - if run_id: query['run_id'] = request.run_id - if run_uuid: query['run_uuid'] = request.run_uuid while True: - json = self._api.do('GET', '/api/2.0/mlflow/artifacts/list', query=query) - if 'files' not in json or not json['files']: + json = self._api.do('POST', '/api/2.0/mlflow/runs/search', body=body) + if 'runs' not in json or not json['runs']: return - for v in json['files']: - yield FileInfo.from_dict(v) + for v in json['runs']: + yield Run.from_dict(v) if 'next_page_token' not in json or not json['next_page_token']: return - query['page_token'] = json['next_page_token'] - - -class MLflowDatabricksAPI: - """These endpoints are modified versions of the MLflow API that accept additional input parameters or return - additional information.""" - - def __init__(self, api_client): - self._api = api_client - - def get(self, name: str, **kwargs) -> GetResponse: - """Get model. - - Get the details of a model. This is a Databricks Workspace version of the [MLflow endpoint] that also - returns the model's Databricks Workspace ID and the permission level of the requesting user on the - model. 
- - [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetMLflowDatabrickRequest(name=name) - - query = {} - if name: query['name'] = request.name - - json = self._api.do('GET', '/api/2.0/mlflow/databricks/registered-models/get', query=query) - return GetResponse.from_dict(json) + body['page_token'] = json['next_page_token'] - def transition_stage(self, - name: str, - version: str, - stage: Stage, - archive_existing_versions: bool, - *, - comment: str = None, - **kwargs) -> TransitionStageResponse: - """Transition a stage. - - Transition a model version's stage. This is a Databricks Workspace version of the [MLflow endpoint] - that also accepts a comment associated with the transition to be recorded.", + def set_experiment_tag(self, experiment_id: str, key: str, value: str, **kwargs): + """Set a tag. - [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage""" + Sets a tag on an experiment. 
Experiment tags are metadata that can be updated.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = TransitionModelVersionStageDatabricks( - archive_existing_versions=archive_existing_versions, - comment=comment, - name=name, - stage=stage, - version=version) + request = SetExperimentTag(experiment_id=experiment_id, key=key, value=value) body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/experiments/set-experiment-tag', body=body) - json = self._api.do('POST', '/api/2.0/mlflow/databricks/model-versions/transition-stage', body=body) - return TransitionStageResponse.from_dict(json) - - -class MLflowMetricsAPI: - - def __init__(self, api_client): - self._api = api_client - - def get_history(self, - metric_key: str, - *, - max_results: int = None, - page_token: str = None, - run_id: str = None, - run_uuid: str = None, - **kwargs) -> GetMetricHistoryResponse: - """Get history of a given metric within a run. - - Gets a list of all values for the specified metric for a given run.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetHistoryRequest(max_results=max_results, - metric_key=metric_key, - page_token=page_token, - run_id=run_id, - run_uuid=run_uuid) - - query = {} - if max_results: query['max_results'] = request.max_results - if metric_key: query['metric_key'] = request.metric_key - if page_token: query['page_token'] = request.page_token - if run_id: query['run_id'] = request.run_id - if run_uuid: query['run_uuid'] = request.run_uuid - - json = self._api.do('GET', '/api/2.0/mlflow/metrics/get-history', query=query) - return GetMetricHistoryResponse.from_dict(json) - - -class MLflowRunsAPI: - - def __init__(self, api_client): - self._api = api_client - - def create(self, - *, - experiment_id: str = None, - start_time: int = None, - tags: List[RunTag] = None, - user_id: str = None, - **kwargs) -> CreateRunResponse: - """Create a 
run. + def set_tag(self, key: str, value: str, *, run_id: str = None, run_uuid: str = None, **kwargs): + """Set a tag. - Creates a new run within an experiment. A run is usually a single execution of a machine learning or - data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag` - associated with a single execution.""" + Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateRun(experiment_id=experiment_id, - start_time=start_time, - tags=tags, - user_id=user_id) + request = SetTag(key=key, run_id=run_id, run_uuid=run_uuid, value=value) body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/runs/set-tag', body=body) - json = self._api.do('POST', '/api/2.0/mlflow/runs/create', body=body) - return CreateRunResponse.from_dict(json) - - def delete(self, run_id: str, **kwargs): - """Delete a run. + def update_experiment(self, experiment_id: str, *, new_name: str = None, **kwargs): + """Update an experiment. - Marks a run for deletion.""" + Updates experiment metadata.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteRun(run_id=run_id) + request = UpdateExperiment(experiment_id=experiment_id, new_name=new_name) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/delete', body=body) + self._api.do('POST', '/api/2.0/mlflow/experiments/update', body=body) - def delete_tag(self, run_id: str, key: str, **kwargs): - """Delete a tag. + def update_run(self, + *, + end_time: int = None, + run_id: str = None, + run_uuid: str = None, + status: UpdateRunStatus = None, + **kwargs) -> UpdateRunResponse: + """Update a run. - Deletes a tag on a run. 
Tags are run metadata that can be updated during a run and after a run - completes.""" + Updates run metadata.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteTag(key=key, run_id=run_id) + request = UpdateRun(end_time=end_time, run_id=run_id, run_uuid=run_uuid, status=status) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/delete-tag', body=body) - def get(self, run_id: str, *, run_uuid: str = None, **kwargs) -> GetRunResponse: - """Get a run. - - "Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the - same key are logged for a run, return only the value with the latest timestamp. - - If there are multiple values with the latest timestamp, return the maximum of these values.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetRunRequest(run_id=run_id, run_uuid=run_uuid) + json = self._api.do('POST', '/api/2.0/mlflow/runs/update', body=body) + return UpdateRunResponse.from_dict(json) - query = {} - if run_id: query['run_id'] = request.run_id - if run_uuid: query['run_uuid'] = request.run_uuid - json = self._api.do('GET', '/api/2.0/mlflow/runs/get', query=query) - return GetRunResponse.from_dict(json) +class ModelRegistryAPI: - def log_batch(self, - *, - metrics: List[Metric] = None, - params: List[Param] = None, - run_id: str = None, - tags: List[RunTag] = None, - **kwargs): - """Log a batch. - - Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server - will respond with an error (non-200 status code). - - In case of error (due to internal server error or an invalid request), partial data may be written. - - You can write metrics, params, and tags in interleaving fashion, but within a given entity type are - guaranteed to follow the order specified in the request body. 
- - The overwrite behavior for metrics, params, and tags is as follows: - - * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to - the set of values for the metric with the provided key. - - * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple - tag values with the same key are provided in the same API request, the last-provided tag value is - written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent. - - * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will - result in an error). However, logging the same param (key, value) is permitted. Specifically, logging - a param is idempotent. - - Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB - in size and contain: - - * No more than 1000 metrics, params, and tags in total * Up to 1000 metrics - Up to 100 params * Up to - 100 tags - - For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900 - metrics, 50 params, and 51 tags is invalid. - - The following limits also apply to metric, param, and tag keys and values: - - * Metric keyes, param keys, and tag keys can be up to 250 characters in length * Parameter and tag - values can be up to 250 characters in length""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = LogBatch(metrics=metrics, params=params, run_id=run_id, tags=tags) - body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/log-batch', body=body) + def __init__(self, api_client): + self._api = api_client - def log_metric(self, - key: str, - value: float, - timestamp: int, - *, - run_id: str = None, - run_uuid: str = None, - step: int = None, - **kwargs): - """Log a metric. 
+ def approve_transition_request(self, + name: str, + version: str, + stage: Stage, + archive_existing_versions: bool, + *, + comment: str = None, + **kwargs) -> ApproveTransitionRequestResponse: + """Approve transition request. - Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated - timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be - logged multiple times.""" + Approves a model version stage transition request.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = LogMetric(key=key, - run_id=run_id, - run_uuid=run_uuid, - step=step, - timestamp=timestamp, - value=value) + request = ApproveTransitionRequest(archive_existing_versions=archive_existing_versions, + comment=comment, + name=name, + stage=stage, + version=version) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/log-metric', body=body) - def log_model(self, *, model_json: str = None, run_id: str = None, **kwargs): - """Log a model. - - **NOTE:** Experimental: This API may change or be removed in a future release without warning.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = LogModel(model_json=model_json, run_id=run_id) - body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/log-model', body=body) + json = self._api.do('POST', '/api/2.0/mlflow/transition-requests/approve', body=body) + return ApproveTransitionRequestResponse.from_dict(json) - def log_parameter(self, key: str, value: str, *, run_id: str = None, run_uuid: str = None, **kwargs): - """Log a param. + def create_comment(self, name: str, version: str, comment: str, **kwargs) -> CreateCommentResponse: + """Post a comment. - Logs a param used for a run. A param is a key-value pair (string key, string value). 
Examples include - hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A - param can be logged only once for a run.""" + Posts a comment on a model version. A comment can be submitted either by a user or programmatically to + display relevant information about the model. For example, test results or deployment errors.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = LogParam(key=key, run_id=run_id, run_uuid=run_uuid, value=value) + request = CreateComment(comment=comment, name=name, version=version) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/log-parameter', body=body) - def restore(self, run_id: str, **kwargs): - """Restore a run. + json = self._api.do('POST', '/api/2.0/mlflow/comments/create', body=body) + return CreateCommentResponse.from_dict(json) + + def create_model(self, + name: str, + *, + description: str = None, + tags: List[ModelTag] = None, + **kwargs) -> CreateModelResponse: + """Create a model. - Restores a deleted run.""" + Creates a new registered model with the name specified in the request body. + + Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = RestoreRun(run_id=run_id) + request = CreateModelRequest(description=description, name=name, tags=tags) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/restore', body=body) - def search(self, - *, - experiment_ids: List[str] = None, - filter: str = None, - max_results: int = None, - order_by: List[str] = None, - page_token: str = None, - run_view_type: SearchRunsRunViewType = None, - **kwargs) -> Iterator[Run]: - """Search for runs. - - Searches for runs that satisfy expressions. 
+ json = self._api.do('POST', '/api/2.0/mlflow/registered-models/create', body=body) + return CreateModelResponse.from_dict(json) + + def create_model_version(self, + name: str, + source: str, + *, + description: str = None, + run_id: str = None, + run_link: str = None, + tags: List[ModelVersionTag] = None, + **kwargs) -> CreateModelVersionResponse: + """Create a model version. - Search expressions can use `mlflowMetric` and `mlflowParam` keys.",""" + Creates a model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = SearchRuns(experiment_ids=experiment_ids, - filter=filter, - max_results=max_results, - order_by=order_by, - page_token=page_token, - run_view_type=run_view_type) + request = CreateModelVersionRequest(description=description, + name=name, + run_id=run_id, + run_link=run_link, + source=source, + tags=tags) body = request.as_dict() - while True: - json = self._api.do('POST', '/api/2.0/mlflow/runs/search', body=body) - if 'runs' not in json or not json['runs']: - return - for v in json['runs']: - yield Run.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - body['page_token'] = json['next_page_token'] + json = self._api.do('POST', '/api/2.0/mlflow/model-versions/create', body=body) + return CreateModelVersionResponse.from_dict(json) - def set_tag(self, key: str, value: str, *, run_id: str = None, run_uuid: str = None, **kwargs): - """Set a tag. + def create_transition_request(self, + name: str, + version: str, + stage: Stage, + *, + comment: str = None, + **kwargs) -> CreateTransitionRequestResponse: + """Make a transition request. - Sets a tag on a run. 
Tags are run metadata that can be updated during a run and after a run completes.""" + Creates a model version stage transition request.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = SetTag(key=key, run_id=run_id, run_uuid=run_uuid, value=value) + request = CreateTransitionRequest(comment=comment, name=name, stage=stage, version=version) body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/runs/set-tag', body=body) - def update(self, - *, - end_time: int = None, - run_id: str = None, - run_uuid: str = None, - status: UpdateRunStatus = None, - **kwargs) -> UpdateRunResponse: - """Update a run. + json = self._api.do('POST', '/api/2.0/mlflow/transition-requests/create', body=body) + return CreateTransitionRequestResponse.from_dict(json) + + def create_webhook(self, + events: List[RegistryWebhookEvent], + *, + description: str = None, + http_url_spec: HttpUrlSpec = None, + job_spec: JobSpec = None, + model_name: str = None, + status: RegistryWebhookStatus = None, + **kwargs) -> CreateWebhookResponse: + """Create a webhook. - Updates run metadata.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateRun(end_time=end_time, run_id=run_id, run_uuid=run_uuid, status=status) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/mlflow/runs/update', body=body) - return UpdateRunResponse.from_dict(json) - - -class ModelVersionCommentsAPI: - - def __init__(self, api_client): - self._api = api_client - - def create(self, name: str, version: str, comment: str, **kwargs) -> CreateResponse: - """Post a comment. + **NOTE**: This endpoint is in Public Preview. - Posts a comment on a model version. A comment can be submitted either by a user or programmatically to - display relevant information about the model. 
For example, test results or deployment errors.""" + Creates a registry webhook.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateComment(comment=comment, name=name, version=version) + request = CreateRegistryWebhook(description=description, + events=events, + http_url_spec=http_url_spec, + job_spec=job_spec, + model_name=model_name, + status=status) body = request.as_dict() - json = self._api.do('POST', '/api/2.0/mlflow/comments/create', body=body) - return CreateResponse.from_dict(json) + json = self._api.do('POST', '/api/2.0/mlflow/registry-webhooks/create', body=body) + return CreateWebhookResponse.from_dict(json) - def delete(self, id: str, **kwargs): + def delete_comment(self, id: str, **kwargs): """Delete a comment. Deletes a comment on a model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteModelVersionCommentRequest(id=id) + request = DeleteCommentRequest(id=id) query = {} if id: query['id'] = request.id self._api.do('DELETE', '/api/2.0/mlflow/comments/delete', query=query) - def update(self, id: str, comment: str, **kwargs) -> UpdateResponse: - """Update a comment. + def delete_model(self, name: str, **kwargs): + """Delete a model. 
- Post an edit to a comment on a model version.""" + Deletes a registered model.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = UpdateComment(comment=comment, id=id) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/mlflow/comments/update', body=body) - return UpdateResponse.from_dict(json) + request = DeleteModelRequest(name=name) + query = {} + if name: query['name'] = request.name -class ModelVersionsAPI: - - def __init__(self, api_client): - self._api = api_client + self._api.do('DELETE', '/api/2.0/mlflow/registered-models/delete', query=query) - def create(self, - name: str, - source: str, - *, - description: str = None, - run_id: str = None, - run_link: str = None, - tags: List[ModelVersionTag] = None, - **kwargs) -> CreateModelVersionResponse: - """Create a model version. + def delete_model_tag(self, name: str, key: str, **kwargs): + """Delete a model tag. - Creates a model version.""" + Deletes the tag for a registered model.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateModelVersionRequest(description=description, - name=name, - run_id=run_id, - run_link=run_link, - source=source, - tags=tags) - body = request.as_dict() + request = DeleteModelTagRequest(key=key, name=name) - json = self._api.do('POST', '/api/2.0/mlflow/model-versions/create', body=body) - return CreateModelVersionResponse.from_dict(json) + query = {} + if key: query['key'] = request.key + if name: query['name'] = request.name + + self._api.do('DELETE', '/api/2.0/mlflow/registered-models/delete-tag', query=query) - def delete(self, name: str, version: str, **kwargs): + def delete_model_version(self, name: str, version: str, **kwargs): """Delete a model version. 
Deletes a model version.""" @@ -2740,7 +2738,7 @@ def delete(self, name: str, version: str, **kwargs): self._api.do('DELETE', '/api/2.0/mlflow/model-versions/delete', query=query) - def delete_tag(self, name: str, version: str, key: str, **kwargs): + def delete_model_version_tag(self, name: str, version: str, key: str, **kwargs): """Delete a model version tag. Deletes a model version tag.""" @@ -2755,190 +2753,117 @@ def delete_tag(self, name: str, version: str, key: str, **kwargs): self._api.do('DELETE', '/api/2.0/mlflow/model-versions/delete-tag', query=query) - def get(self, name: str, version: str, **kwargs) -> GetModelVersionResponse: - """Get a model version. + def delete_transition_request(self, + name: str, + version: str, + stage: str, + creator: str, + *, + comment: str = None, + **kwargs): + """Delete a ransition request. - Get a model version.""" + Cancels a model version stage transition request.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetModelVersionRequest(name=name, version=version) + request = DeleteTransitionRequestRequest(comment=comment, + creator=creator, + name=name, + stage=stage, + version=version) query = {} + if comment: query['comment'] = request.comment + if creator: query['creator'] = request.creator if name: query['name'] = request.name + if stage: query['stage'] = request.stage if version: query['version'] = request.version - json = self._api.do('GET', '/api/2.0/mlflow/model-versions/get', query=query) - return GetModelVersionResponse.from_dict(json) + self._api.do('DELETE', '/api/2.0/mlflow/transition-requests/delete', query=query) - def get_download_uri(self, name: str, version: str, **kwargs) -> GetModelVersionDownloadUriResponse: - """Get a model version URI. + def delete_webhook(self, *, id: str = None, **kwargs): + """Delete a webhook. 
- Gets a URI to download the model version.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetModelVersionDownloadUriRequest(name=name, version=version) - - query = {} - if name: query['name'] = request.name - if version: query['version'] = request.version - - json = self._api.do('GET', '/api/2.0/mlflow/model-versions/get-download-uri', query=query) - return GetModelVersionDownloadUriResponse.from_dict(json) - - def search(self, - *, - filter: str = None, - max_results: int = None, - order_by: List[str] = None, - page_token: str = None, - **kwargs) -> Iterator[ModelVersion]: - """Searches model versions. + **NOTE:** This endpoint is in Public Preview. - Searches for specific model versions based on the supplied __filter__.""" + Deletes a registry webhook.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = SearchModelVersionsRequest(filter=filter, - max_results=max_results, - order_by=order_by, - page_token=page_token) + request = DeleteWebhookRequest(id=id) query = {} - if filter: query['filter'] = request.filter - if max_results: query['max_results'] = request.max_results - if order_by: query['order_by'] = [v for v in request.order_by] - if page_token: query['page_token'] = request.page_token - - while True: - json = self._api.do('GET', '/api/2.0/mlflow/model-versions/search', query=query) - if 'model_versions' not in json or not json['model_versions']: - return - for v in json['model_versions']: - yield ModelVersion.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - def set_tag(self, name: str, version: str, key: str, value: str, **kwargs): - """Set a version tag. 
- - Sets a model version tag.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = SetModelVersionTagRequest(key=key, name=name, value=value, version=version) - body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/model-versions/set-tag', body=body) - - def transition_stage(self, name: str, version: str, stage: str, archive_existing_versions: bool, - **kwargs) -> TransitionModelVersionStageResponse: - """Transition a stage. - - Transition to the next model stage.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = TransitionModelVersionStage(archive_existing_versions=archive_existing_versions, - name=name, - stage=stage, - version=version) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/mlflow/model-versions/transition-stage', body=body) - return TransitionModelVersionStageResponse.from_dict(json) - - def update(self, name: str, version: str, *, description: str = None, **kwargs): - """Update model version. - - Updates the model version.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateModelVersionRequest(description=description, name=name, version=version) - body = request.as_dict() - self._api.do('PATCH', '/api/2.0/mlflow/model-versions/update', body=body) - - -class RegisteredModelsAPI: + if id: query['id'] = request.id - def __init__(self, api_client): - self._api = api_client + self._api.do('DELETE', '/api/2.0/mlflow/registry-webhooks/delete', query=query) - def create(self, - name: str, - *, - description: str = None, - tags: List[RegisteredModelTag] = None, - **kwargs) -> CreateRegisteredModelResponse: - """Create a model. - - Creates a new registered model with the name specified in the request body. 
+ def get_latest_versions(self, name: str, *, stages: List[str] = None, **kwargs) -> Iterator[ModelVersion]: + """Get the latest version. - Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.""" + Gets the latest version of a registered model.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateRegisteredModelRequest(description=description, name=name, tags=tags) + request = GetLatestVersionsRequest(name=name, stages=stages) body = request.as_dict() - json = self._api.do('POST', '/api/2.0/mlflow/registered-models/create', body=body) - return CreateRegisteredModelResponse.from_dict(json) + json = self._api.do('POST', '/api/2.0/mlflow/registered-models/get-latest-versions', body=body) + return [ModelVersion.from_dict(v) for v in json.get('model_versions', [])] - def delete(self, name: str, **kwargs): - """Delete a model. + def get_model(self, name: str, **kwargs) -> GetModelResponse: + """Get model. - Deletes a registered model.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteRegisteredModelRequest(name=name) - - query = {} - if name: query['name'] = request.name - - self._api.do('DELETE', '/api/2.0/mlflow/registered-models/delete', query=query) - - def delete_tag(self, name: str, key: str, **kwargs): - """Delete a model tag. + Get the details of a model. This is a Databricks Workspace version of the [MLflow endpoint] that also + returns the model's Databricks Workspace ID and the permission level of the requesting user on the + model. 
- Deletes the tag for a registered model.""" + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteRegisteredModelTagRequest(key=key, name=name) + request = GetModelRequest(name=name) query = {} - if key: query['key'] = request.key if name: query['name'] = request.name - self._api.do('DELETE', '/api/2.0/mlflow/registered-models/delete-tag', query=query) + json = self._api.do('GET', '/api/2.0/mlflow/databricks/registered-models/get', query=query) + return GetModelResponse.from_dict(json) - def get(self, name: str, **kwargs) -> GetRegisteredModelResponse: - """Get a model. + def get_model_version(self, name: str, version: str, **kwargs) -> GetModelVersionResponse: + """Get a model version. - Gets the registered model that matches the specified ID.""" + Get a model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetRegisteredModelRequest(name=name) + request = GetModelVersionRequest(name=name, version=version) query = {} if name: query['name'] = request.name + if version: query['version'] = request.version - json = self._api.do('GET', '/api/2.0/mlflow/registered-models/get', query=query) - return GetRegisteredModelResponse.from_dict(json) + json = self._api.do('GET', '/api/2.0/mlflow/model-versions/get', query=query) + return GetModelVersionResponse.from_dict(json) - def get_latest_versions(self, name: str, *, stages: List[str] = None, **kwargs) -> Iterator[ModelVersion]: - """Get the latest version. + def get_model_version_download_uri(self, name: str, version: str, + **kwargs) -> GetModelVersionDownloadUriResponse: + """Get a model version URI. 
- Gets the latest version of a registered model.""" + Gets a URI to download the model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetLatestVersionsRequest(name=name, stages=stages) - body = request.as_dict() + request = GetModelVersionDownloadUriRequest(name=name, version=version) - json = self._api.do('POST', '/api/2.0/mlflow/registered-models/get-latest-versions', body=body) - return [ModelVersion.from_dict(v) for v in json.get('model_versions', [])] + query = {} + if name: query['name'] = request.name + if version: query['version'] = request.version + + json = self._api.do('GET', '/api/2.0/mlflow/model-versions/get-download-uri', query=query) + return GetModelVersionDownloadUriResponse.from_dict(json) - def list(self, *, max_results: int = None, page_token: str = None, **kwargs) -> Iterator[RegisteredModel]: + def list_models(self, *, max_results: int = None, page_token: str = None, **kwargs) -> Iterator[Model]: """List models. Lists all available registered models, up to the limit specified in __max_results__.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListRegisteredModelsRequest(max_results=max_results, page_token=page_token) + request = ListModelsRequest(max_results=max_results, page_token=page_token) query = {} if max_results: query['max_results'] = request.max_results @@ -2949,155 +2874,177 @@ def list(self, *, max_results: int = None, page_token: str = None, **kwargs) -> if 'registered_models' not in json or not json['registered_models']: return for v in json['registered_models']: - yield RegisteredModel.from_dict(v) + yield Model.from_dict(v) if 'next_page_token' not in json or not json['next_page_token']: return query['page_token'] = json['next_page_token'] - def rename(self, name: str, *, new_name: str = None, **kwargs) -> RenameRegisteredModelResponse: - """Rename a model. 
+ def list_transition_requests(self, name: str, version: str, **kwargs) -> Iterator[Activity]: + """List transition requests. - Renames a registered model.""" + Gets a list of all open stage transition requests for the model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = RenameRegisteredModelRequest(name=name, new_name=new_name) - body = request.as_dict() + request = ListTransitionRequestsRequest(name=name, version=version) - json = self._api.do('POST', '/api/2.0/mlflow/registered-models/rename', body=body) - return RenameRegisteredModelResponse.from_dict(json) - - def search(self, - *, - filter: str = None, - max_results: int = None, - order_by: List[str] = None, - page_token: str = None, - **kwargs) -> Iterator[RegisteredModel]: - """Search models. + query = {} + if name: query['name'] = request.name + if version: query['version'] = request.version + + json = self._api.do('GET', '/api/2.0/mlflow/transition-requests/list', query=query) + return [Activity.from_dict(v) for v in json.get('requests', [])] + + def list_webhooks(self, + *, + events: List[RegistryWebhookEvent] = None, + model_name: str = None, + page_token: str = None, + **kwargs) -> Iterator[RegistryWebhook]: + """List registry webhooks. - Search for registered models based on the specified __filter__.""" + **NOTE:** This endpoint is in Public Preview. 
+ + Lists all registry webhooks.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = SearchRegisteredModelsRequest(filter=filter, - max_results=max_results, - order_by=order_by, - page_token=page_token) + request = ListWebhooksRequest(events=events, model_name=model_name, page_token=page_token) query = {} - if filter: query['filter'] = request.filter - if max_results: query['max_results'] = request.max_results - if order_by: query['order_by'] = [v for v in request.order_by] + if events: query['events'] = [v for v in request.events] + if model_name: query['model_name'] = request.model_name if page_token: query['page_token'] = request.page_token while True: - json = self._api.do('GET', '/api/2.0/mlflow/registered-models/search', query=query) - if 'registered_models' not in json or not json['registered_models']: + json = self._api.do('GET', '/api/2.0/mlflow/registry-webhooks/list', query=query) + if 'webhooks' not in json or not json['webhooks']: return - for v in json['registered_models']: - yield RegisteredModel.from_dict(v) + for v in json['webhooks']: + yield RegistryWebhook.from_dict(v) if 'next_page_token' not in json or not json['next_page_token']: return query['page_token'] = json['next_page_token'] - def set_tag(self, name: str, key: str, value: str, **kwargs): - """Set a tag. - - Sets a tag on a registered model.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = SetRegisteredModelTagRequest(key=key, name=name, value=value) - body = request.as_dict() - self._api.do('POST', '/api/2.0/mlflow/registered-models/set-tag', body=body) - - def update(self, name: str, *, description: str = None, **kwargs): - """Update model. + def reject_transition_request(self, + name: str, + version: str, + stage: Stage, + *, + comment: str = None, + **kwargs) -> RejectTransitionRequestResponse: + """Reject a transition request. 
- Updates a registered model.""" + Rejects a model version stage transition request.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = UpdateRegisteredModelRequest(description=description, name=name) + request = RejectTransitionRequest(comment=comment, name=name, stage=stage, version=version) body = request.as_dict() - self._api.do('PATCH', '/api/2.0/mlflow/registered-models/update', body=body) - - -class RegistryWebhooksAPI: - def __init__(self, api_client): - self._api = api_client + json = self._api.do('POST', '/api/2.0/mlflow/transition-requests/reject', body=body) + return RejectTransitionRequestResponse.from_dict(json) - def create(self, - events: List[RegistryWebhookEvent], - *, - description: str = None, - http_url_spec: HttpUrlSpec = None, - job_spec: JobSpec = None, - model_name: str = None, - status: RegistryWebhookStatus = None, - **kwargs) -> CreateResponse: - """Create a webhook. - - **NOTE**: This endpoint is in Public Preview. + def rename_model(self, name: str, *, new_name: str = None, **kwargs) -> RenameModelResponse: + """Rename a model. - Creates a registry webhook.""" + Renames a registered model.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateRegistryWebhook(description=description, - events=events, - http_url_spec=http_url_spec, - job_spec=job_spec, - model_name=model_name, - status=status) + request = RenameModelRequest(name=name, new_name=new_name) body = request.as_dict() - json = self._api.do('POST', '/api/2.0/mlflow/registry-webhooks/create', body=body) - return CreateResponse.from_dict(json) - - def delete(self, *, id: str = None, **kwargs): - """Delete a webhook. - - **NOTE:** This endpoint is in Public Preview. 
+ json = self._api.do('POST', '/api/2.0/mlflow/registered-models/rename', body=body) + return RenameModelResponse.from_dict(json) + + def search_model_versions(self, + *, + filter: str = None, + max_results: int = None, + order_by: List[str] = None, + page_token: str = None, + **kwargs) -> Iterator[ModelVersion]: + """Searches model versions. - Deletes a registry webhook.""" + Searches for specific model versions based on the supplied __filter__.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteRegistryWebhookRequest(id=id) + request = SearchModelVersionsRequest(filter=filter, + max_results=max_results, + order_by=order_by, + page_token=page_token) query = {} - if id: query['id'] = request.id + if filter: query['filter'] = request.filter + if max_results: query['max_results'] = request.max_results + if order_by: query['order_by'] = [v for v in request.order_by] + if page_token: query['page_token'] = request.page_token - self._api.do('DELETE', '/api/2.0/mlflow/registry-webhooks/delete', query=query) + while True: + json = self._api.do('GET', '/api/2.0/mlflow/model-versions/search', query=query) + if 'model_versions' not in json or not json['model_versions']: + return + for v in json['model_versions']: + yield ModelVersion.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] - def list(self, - *, - events: List[RegistryWebhookEvent] = None, - model_name: str = None, - page_token: str = None, - **kwargs) -> Iterator[RegistryWebhook]: - """List registry webhooks. - - **NOTE:** This endpoint is in Public Preview. + def search_models(self, + *, + filter: str = None, + max_results: int = None, + order_by: List[str] = None, + page_token: str = None, + **kwargs) -> Iterator[Model]: + """Search models. 
- Lists all registry webhooks.""" + Search for registered models based on the specified __filter__.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListRegistryWebhooksRequest(events=events, model_name=model_name, page_token=page_token) + request = SearchModelsRequest(filter=filter, + max_results=max_results, + order_by=order_by, + page_token=page_token) query = {} - if events: query['events'] = [v for v in request.events] - if model_name: query['model_name'] = request.model_name + if filter: query['filter'] = request.filter + if max_results: query['max_results'] = request.max_results + if order_by: query['order_by'] = [v for v in request.order_by] if page_token: query['page_token'] = request.page_token while True: - json = self._api.do('GET', '/api/2.0/mlflow/registry-webhooks/list', query=query) - if 'webhooks' not in json or not json['webhooks']: + json = self._api.do('GET', '/api/2.0/mlflow/registered-models/search', query=query) + if 'registered_models' not in json or not json['registered_models']: return - for v in json['webhooks']: - yield RegistryWebhook.from_dict(v) + for v in json['registered_models']: + yield Model.from_dict(v) if 'next_page_token' not in json or not json['next_page_token']: return query['page_token'] = json['next_page_token'] - def test(self, id: str, *, event: RegistryWebhookEvent = None, **kwargs) -> TestRegistryWebhookResponse: + def set_model_tag(self, name: str, key: str, value: str, **kwargs): + """Set a tag. + + Sets a tag on a registered model.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = SetModelTagRequest(key=key, name=name, value=value) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/registered-models/set-tag', body=body) + + def set_model_version_tag(self, name: str, version: str, key: str, value: str, **kwargs): + """Set a version tag. 
+ + Sets a model version tag.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = SetModelVersionTagRequest(key=key, name=name, value=value, version=version) + body = request.as_dict() + self._api.do('POST', '/api/2.0/mlflow/model-versions/set-tag', body=body) + + def test_registry_webhook(self, + id: str, + *, + event: RegistryWebhookEvent = None, + **kwargs) -> TestRegistryWebhookResponse: """Test a webhook. **NOTE:** This endpoint is in Public Preview. @@ -3111,128 +3058,86 @@ def test(self, id: str, *, event: RegistryWebhookEvent = None, **kwargs) -> Test json = self._api.do('POST', '/api/2.0/mlflow/registry-webhooks/test', body=body) return TestRegistryWebhookResponse.from_dict(json) - def update(self, - id: str, - *, - description: str = None, - events: List[RegistryWebhookEvent] = None, - http_url_spec: HttpUrlSpec = None, - job_spec: JobSpec = None, - status: RegistryWebhookStatus = None, - **kwargs): - """Update a webhook. + def transition_stage(self, + name: str, + version: str, + stage: Stage, + archive_existing_versions: bool, + *, + comment: str = None, + **kwargs) -> TransitionStageResponse: + """Transition a stage. - **NOTE:** This endpoint is in Public Preview. + Transition a model version's stage. 
This is a Databricks Workspace version of the [MLflow endpoint] + that also accepts a comment associated with the transition to be recorded.", - Updates a registry webhook.""" + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = UpdateRegistryWebhook(description=description, - events=events, - http_url_spec=http_url_spec, - id=id, - job_spec=job_spec, - status=status) + request = TransitionModelVersionStageDatabricks( + archive_existing_versions=archive_existing_versions, + comment=comment, + name=name, + stage=stage, + version=version) body = request.as_dict() - self._api.do('PATCH', '/api/2.0/mlflow/registry-webhooks/update', body=body) - - -class TransitionRequestsAPI: - def __init__(self, api_client): - self._api = api_client + json = self._api.do('POST', '/api/2.0/mlflow/databricks/model-versions/transition-stage', body=body) + return TransitionStageResponse.from_dict(json) - def approve(self, - name: str, - version: str, - stage: Stage, - archive_existing_versions: bool, - *, - comment: str = None, - **kwargs) -> ApproveResponse: - """Approve transition requests. + def update_comment(self, id: str, comment: str, **kwargs) -> UpdateCommentResponse: + """Update a comment. 
- Approves a model version stage transition request.""" + Post an edit to a comment on a model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ApproveTransitionRequest(archive_existing_versions=archive_existing_versions, - comment=comment, - name=name, - stage=stage, - version=version) + request = UpdateComment(comment=comment, id=id) body = request.as_dict() - json = self._api.do('POST', '/api/2.0/mlflow/transition-requests/approve', body=body) - return ApproveResponse.from_dict(json) - - def create(self, - name: str, - version: str, - stage: Stage, - *, - comment: str = None, - **kwargs) -> CreateResponse: - """Make a transition request. + json = self._api.do('PATCH', '/api/2.0/mlflow/comments/update', body=body) + return UpdateCommentResponse.from_dict(json) + + def update_model(self, name: str, *, description: str = None, **kwargs): + """Update model. - Creates a model version stage transition request.""" + Updates a registered model.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = CreateTransitionRequest(comment=comment, name=name, stage=stage, version=version) + request = UpdateModelRequest(description=description, name=name) body = request.as_dict() + self._api.do('PATCH', '/api/2.0/mlflow/registered-models/update', body=body) - json = self._api.do('POST', '/api/2.0/mlflow/transition-requests/create', body=body) - return CreateResponse.from_dict(json) - - def delete(self, name: str, version: str, stage: str, creator: str, *, comment: str = None, **kwargs): - """Delete a ransition request. + def update_model_version(self, name: str, version: str, *, description: str = None, **kwargs): + """Update model version. 
- Cancels a model version stage transition request.""" + Updates the model version.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = DeleteTransitionRequestRequest(comment=comment, - creator=creator, - name=name, - stage=stage, - version=version) - - query = {} - if comment: query['comment'] = request.comment - if creator: query['creator'] = request.creator - if name: query['name'] = request.name - if stage: query['stage'] = request.stage - if version: query['version'] = request.version - - self._api.do('DELETE', '/api/2.0/mlflow/transition-requests/delete', query=query) + request = UpdateModelVersionRequest(description=description, name=name, version=version) + body = request.as_dict() + self._api.do('PATCH', '/api/2.0/mlflow/model-versions/update', body=body) - def list(self, name: str, version: str, **kwargs) -> Iterator[Activity]: - """List transition requests. + def update_webhook(self, + id: str, + *, + description: str = None, + events: List[RegistryWebhookEvent] = None, + http_url_spec: HttpUrlSpec = None, + job_spec: JobSpec = None, + status: RegistryWebhookStatus = None, + **kwargs): + """Update a webhook. - Gets a list of all open stage transition requests for the model version.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListTransitionRequestsRequest(name=name, version=version) - - query = {} - if name: query['name'] = request.name - if version: query['version'] = request.version - - json = self._api.do('GET', '/api/2.0/mlflow/transition-requests/list', query=query) - return [Activity.from_dict(v) for v in json.get('requests', [])] - - def reject(self, - name: str, - version: str, - stage: Stage, - *, - comment: str = None, - **kwargs) -> RejectResponse: - """Reject a transition request. + **NOTE:** This endpoint is in Public Preview. 
- Rejects a model version stage transition request.""" + Updates a registry webhook.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = RejectTransitionRequest(comment=comment, name=name, stage=stage, version=version) + request = UpdateRegistryWebhook(description=description, + events=events, + http_url_spec=http_url_spec, + id=id, + job_spec=job_spec, + status=status) body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/mlflow/transition-requests/reject', body=body) - return RejectResponse.from_dict(json) + self._api.do('PATCH', '/api/2.0/mlflow/registry-webhooks/update', body=body) diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index d52488d08..190d18f88 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -2,7 +2,7 @@ import logging from dataclasses import dataclass -from typing import Any, Dict, Iterator, List +from typing import Dict, Iterator, List from ._internal import _from_dict, _repeated @@ -223,8 +223,8 @@ def from_dict(cls, d: Dict[str, any]) -> 'OAuthEnrollmentStatus': @dataclass class TokenAccessPolicy: - access_token_ttl_in_minutes: Any = None - refresh_token_ttl_in_minutes: Any = None + access_token_ttl_in_minutes: int = None + refresh_token_ttl_in_minutes: int = None def as_dict(self) -> dict: body = {} diff --git a/databricks/sdk/service/permissions.py b/databricks/sdk/service/permissions.py deleted file mode 100755 index 1581ac3e0..000000000 --- a/databricks/sdk/service/permissions.py +++ /dev/null @@ -1,470 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Dict, Iterator, List - -from ._internal import _enum, _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class AccessControlRequest: - group_name: str = None - permission_level: 'PermissionLevel' = None - service_principal_name: str = None - user_name: str = None - - def as_dict(self) -> dict: - body = {} - if self.group_name: body['group_name'] = self.group_name - if self.permission_level: body['permission_level'] = self.permission_level.value - if self.service_principal_name: body['service_principal_name'] = self.service_principal_name - if self.user_name: body['user_name'] = self.user_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'AccessControlRequest': - return cls(group_name=d.get('group_name', None), - permission_level=_enum(d, 'permission_level', PermissionLevel), - service_principal_name=d.get('service_principal_name', None), - user_name=d.get('user_name', None)) - - -@dataclass -class AccessControlResponse: - all_permissions: 'List[Permission]' = None - group_name: str = None - service_principal_name: str = None - user_name: str = None - - def as_dict(self) -> dict: - body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.group_name: body['group_name'] = self.group_name - if self.service_principal_name: body['service_principal_name'] = self.service_principal_name - if self.user_name: body['user_name'] = self.user_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'AccessControlResponse': - return cls(all_permissions=_repeated(d, 'all_permissions', Permission), - group_name=d.get('group_name', None), - service_principal_name=d.get('service_principal_name', None), - user_name=d.get('user_name', None)) - - -@dataclass -class 
DeleteWorkspaceAssignmentRequest: - """Delete permissions assignment""" - - workspace_id: int - principal_id: int - - -@dataclass -class Get: - """Get object permissions""" - - request_object_type: str - request_object_id: str - - -@dataclass -class GetPermissionLevels: - """Get permission levels""" - - request_object_type: str - request_object_id: str - - -@dataclass -class GetPermissionLevelsResponse: - permission_levels: 'List[PermissionsDescription]' = None - - def as_dict(self) -> dict: - body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'GetPermissionLevelsResponse': - return cls(permission_levels=_repeated(d, 'permission_levels', PermissionsDescription)) - - -@dataclass -class GetWorkspaceAssignmentRequest: - """List workspace permissions""" - - workspace_id: int - - -@dataclass -class ListWorkspaceAssignmentRequest: - """Get permission assignments""" - - workspace_id: int - - -@dataclass -class ObjectPermissions: - access_control_list: 'List[AccessControlResponse]' = None - object_id: str = None - object_type: str = None - - def as_dict(self) -> dict: - body = {} - if self.access_control_list: - body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id: body['object_id'] = self.object_id - if self.object_type: body['object_type'] = self.object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ObjectPermissions': - return cls(access_control_list=_repeated(d, 'access_control_list', AccessControlResponse), - object_id=d.get('object_id', None), - object_type=d.get('object_type', None)) - - -@dataclass -class Permission: - inherited: bool = None - inherited_from_object: 'List[str]' = None - permission_level: 'PermissionLevel' = None - - def as_dict(self) -> dict: - body = {} - if self.inherited: body['inherited'] = self.inherited - if 
self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level: body['permission_level'] = self.permission_level.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'Permission': - return cls(inherited=d.get('inherited', None), - inherited_from_object=d.get('inherited_from_object', None), - permission_level=_enum(d, 'permission_level', PermissionLevel)) - - -@dataclass -class PermissionAssignment: - error: str = None - permissions: 'List[WorkspacePermission]' = None - principal: 'PrincipalOutput' = None - - def as_dict(self) -> dict: - body = {} - if self.error: body['error'] = self.error - if self.permissions: body['permissions'] = [v for v in self.permissions] - if self.principal: body['principal'] = self.principal.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PermissionAssignment': - return cls(error=d.get('error', None), - permissions=d.get('permissions', None), - principal=_from_dict(d, 'principal', PrincipalOutput)) - - -@dataclass -class PermissionAssignments: - permission_assignments: 'List[PermissionAssignment]' = None - - def as_dict(self) -> dict: - body = {} - if self.permission_assignments: - body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PermissionAssignments': - return cls(permission_assignments=_repeated(d, 'permission_assignments', PermissionAssignment)) - - -class PermissionLevel(Enum): - """Permission level""" - - CAN_ATTACH_TO = 'CAN_ATTACH_TO' - CAN_BIND = 'CAN_BIND' - CAN_EDIT = 'CAN_EDIT' - CAN_EDIT_METADATA = 'CAN_EDIT_METADATA' - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' - CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' - CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' - CAN_READ = 'CAN_READ' - CAN_RESTART = 'CAN_RESTART' - CAN_RUN = 'CAN_RUN' - CAN_USE 
= 'CAN_USE' - CAN_VIEW = 'CAN_VIEW' - CAN_VIEW_METADATA = 'CAN_VIEW_METADATA' - IS_OWNER = 'IS_OWNER' - - -@dataclass -class PermissionOutput: - description: str = None - permission_level: 'WorkspacePermission' = None - - def as_dict(self) -> dict: - body = {} - if self.description: body['description'] = self.description - if self.permission_level: body['permission_level'] = self.permission_level.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PermissionOutput': - return cls(description=d.get('description', None), - permission_level=_enum(d, 'permission_level', WorkspacePermission)) - - -@dataclass -class PermissionsDescription: - description: str = None - permission_level: 'PermissionLevel' = None - - def as_dict(self) -> dict: - body = {} - if self.description: body['description'] = self.description - if self.permission_level: body['permission_level'] = self.permission_level.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PermissionsDescription': - return cls(description=d.get('description', None), - permission_level=_enum(d, 'permission_level', PermissionLevel)) - - -@dataclass -class PermissionsRequest: - request_object_type: str - request_object_id: str - access_control_list: 'List[AccessControlRequest]' = None - - def as_dict(self) -> dict: - body = {} - if self.access_control_list: - body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id: body['request_object_id'] = self.request_object_id - if self.request_object_type: body['request_object_type'] = self.request_object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PermissionsRequest': - return cls(access_control_list=_repeated(d, 'access_control_list', AccessControlRequest), - request_object_id=d.get('request_object_id', None), - request_object_type=d.get('request_object_type', None)) - - -@dataclass -class PrincipalOutput: - display_name: str = None - 
group_name: str = None - principal_id: int = None - service_principal_name: str = None - user_name: str = None - - def as_dict(self) -> dict: - body = {} - if self.display_name: body['display_name'] = self.display_name - if self.group_name: body['group_name'] = self.group_name - if self.principal_id: body['principal_id'] = self.principal_id - if self.service_principal_name: body['service_principal_name'] = self.service_principal_name - if self.user_name: body['user_name'] = self.user_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PrincipalOutput': - return cls(display_name=d.get('display_name', None), - group_name=d.get('group_name', None), - principal_id=d.get('principal_id', None), - service_principal_name=d.get('service_principal_name', None), - user_name=d.get('user_name', None)) - - -@dataclass -class UpdateWorkspaceAssignments: - permissions: 'List[WorkspacePermission]' - workspace_id: int - principal_id: int - - def as_dict(self) -> dict: - body = {} - if self.permissions: body['permissions'] = [v for v in self.permissions] - if self.principal_id: body['principal_id'] = self.principal_id - if self.workspace_id: body['workspace_id'] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateWorkspaceAssignments': - return cls(permissions=d.get('permissions', None), - principal_id=d.get('principal_id', None), - workspace_id=d.get('workspace_id', None)) - - -class WorkspacePermission(Enum): - - ADMIN = 'ADMIN' - UNKNOWN = 'UNKNOWN' - USER = 'USER' - - -@dataclass -class WorkspacePermissions: - permissions: 'List[PermissionOutput]' = None - - def as_dict(self) -> dict: - body = {} - if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'WorkspacePermissions': - return cls(permissions=_repeated(d, 'permissions', PermissionOutput)) - - -class PermissionsAPI: - """Permissions API are used 
to create read, write, edit, update and manage access for various users on - different objects and endpoints.""" - - def __init__(self, api_client): - self._api = api_client - - def get(self, request_object_type: str, request_object_id: str, **kwargs) -> ObjectPermissions: - """Get object permissions. - - Gets the permission of an object. Objects can inherit permissions from their parent objects or root - objects.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(request_object_id=request_object_id, request_object_type=request_object_type) - - json = self._api.do( - 'GET', f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}') - return ObjectPermissions.from_dict(json) - - def get_permission_levels(self, request_object_type: str, request_object_id: str, - **kwargs) -> GetPermissionLevelsResponse: - """Get permission levels. - - Gets the permission levels that a user can have on an object.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetPermissionLevels(request_object_id=request_object_id, - request_object_type=request_object_type) - - json = self._api.do( - 'GET', - f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}/permissionLevels' - ) - return GetPermissionLevelsResponse.from_dict(json) - - def set(self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: List[AccessControlRequest] = None, - **kwargs): - """Set permissions. - - Sets permissions on object. 
Objects can inherit permissions from their parent objects and root - objects.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = PermissionsRequest(access_control_list=access_control_list, - request_object_id=request_object_id, - request_object_type=request_object_type) - body = request.as_dict() - self._api.do('PUT', - f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}', - body=body) - - def update(self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: List[AccessControlRequest] = None, - **kwargs): - """Update permission. - - Updates the permissions on an object.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = PermissionsRequest(access_control_list=access_control_list, - request_object_id=request_object_id, - request_object_type=request_object_type) - body = request.as_dict() - self._api.do('PATCH', - f'/api/2.0/permissions/{request.request_object_type}/{request.request_object_id}', - body=body) - - -class WorkspaceAssignmentAPI: - """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your - account.""" - - def __init__(self, api_client): - self._api = api_client - - def delete(self, workspace_id: int, principal_id: int, **kwargs): - """Delete permissions assignment. 
- - Deletes the workspace permissions assignment in a given account and workspace for the specified - principal.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteWorkspaceAssignmentRequest(principal_id=principal_id, workspace_id=workspace_id) - - self._api.do( - 'DELETE', - f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments/principals/{request.principal_id}' - ) - - def get(self, workspace_id: int, **kwargs) -> WorkspacePermissions: - """List workspace permissions. - - Get an array of workspace permissions for the specified account and workspace.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetWorkspaceAssignmentRequest(workspace_id=workspace_id) - - json = self._api.do( - 'GET', - f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments/permissions' - ) - return WorkspacePermissions.from_dict(json) - - def list(self, workspace_id: int, **kwargs) -> Iterator[PermissionAssignment]: - """Get permission assignments. - - Get the permission assignments for the specified Databricks Account and Databricks Workspace.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListWorkspaceAssignmentRequest(workspace_id=workspace_id) - - json = self._api.do( - 'GET', - f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments' - ) - return [PermissionAssignment.from_dict(v) for v in json.get('permission_assignments', [])] - - def update(self, permissions: List[WorkspacePermission], workspace_id: int, principal_id: int, **kwargs): - """Create or update permissions assignment. 
- - Creates or updates the workspace permissions assignment in a given account and workspace for the - specified principal.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateWorkspaceAssignments(permissions=permissions, - principal_id=principal_id, - workspace_id=workspace_id) - body = request.as_dict() - self._api.do( - 'PUT', - f'/api/2.0/accounts/{self._api.account_id}/workspaces/{request.workspace_id}/permissionassignments/principals/{request.principal_id}', - body=body) diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index c3b076b5f..b68078656 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -13,9 +13,8 @@ _LOG = logging.getLogger('databricks.sdk') -from .clusters import (AutoScale, AwsAttributes, AzureAttributes, - ClusterLogConf, GcpAttributes) -from .libraries import MavenLibrary +from .compute import (AutoScale, AwsAttributes, AzureAttributes, + ClusterLogConf, GcpAttributes, MavenLibrary) # all definitions in this file are in alphabetical order @@ -133,7 +132,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'DataPlaneId': @dataclass -class Delete: +class DeletePipelineRequest: """Delete a pipeline""" pipeline_id: str @@ -229,6 +228,20 @@ class EventLevel(Enum): WARN = 'WARN' +@dataclass +class FileLibrary: + path: str = None + + def as_dict(self) -> dict: + body = {} + if self.path: body['path'] = self.path + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'FileLibrary': + return cls(path=d.get('path', None)) + + @dataclass class Filters: exclude: 'List[str]' = None @@ -246,7 +259,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'Filters': @dataclass -class Get: +class GetPipelineRequest: """Get a pipeline""" pipeline_id: str @@ -304,7 +317,7 @@ class GetPipelineResponseHealth(Enum): @dataclass -class GetUpdate: +class GetUpdateRequest: """Get a pipeline update""" pipeline_id: str @@ 
-326,7 +339,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'GetUpdateResponse': @dataclass -class ListPipelineEvents: +class ListPipelineEventsRequest: """List pipeline events""" pipeline_id: str @@ -357,7 +370,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListPipelineEventsResponse': @dataclass -class ListPipelines: +class ListPipelinesRequest: """List pipelines""" filter: str = None @@ -384,7 +397,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListPipelinesResponse': @dataclass -class ListUpdates: +class ListUpdatesRequest: """List pipeline updates""" pipeline_id: str @@ -600,6 +613,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'PipelineEvent': @dataclass class PipelineLibrary: + file: 'FileLibrary' = None jar: str = None maven: 'MavenLibrary' = None notebook: 'NotebookLibrary' = None @@ -607,6 +621,7 @@ class PipelineLibrary: def as_dict(self) -> dict: body = {} + if self.file: body['file'] = self.file.as_dict() if self.jar: body['jar'] = self.jar if self.maven: body['maven'] = self.maven.as_dict() if self.notebook: body['notebook'] = self.notebook.as_dict() @@ -615,7 +630,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PipelineLibrary': - return cls(jar=d.get('jar', None), + return cls(file=_from_dict(d, 'file', FileLibrary), + jar=d.get('jar', None), maven=_from_dict(d, 'maven', MavenLibrary), notebook=_from_dict(d, 'notebook', NotebookLibrary), whl=d.get('whl', None)) @@ -740,7 +756,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'PipelineTrigger': @dataclass -class Reset: +class ResetRequest: """Reset a pipeline""" pipeline_id: str @@ -858,7 +874,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'StartUpdateResponse': @dataclass -class Stop: +class StopRequest: """Stop a pipeline""" pipeline_id: str @@ -1098,7 +1114,7 @@ def delete(self, pipeline_id: str, **kwargs): Deletes a pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Delete(pipeline_id=pipeline_id) 
+ request = DeletePipelineRequest(pipeline_id=pipeline_id) self._api.do('DELETE', f'/api/2.0/pipelines/{request.pipeline_id}') @@ -1106,7 +1122,7 @@ def get(self, pipeline_id: str, **kwargs) -> GetPipelineResponse: """Get a pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Get(pipeline_id=pipeline_id) + request = GetPipelineRequest(pipeline_id=pipeline_id) json = self._api.do('GET', f'/api/2.0/pipelines/{request.pipeline_id}') return GetPipelineResponse.from_dict(json) @@ -1117,7 +1133,7 @@ def get_update(self, pipeline_id: str, update_id: str, **kwargs) -> GetUpdateRes Gets an update from an active pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetUpdate(pipeline_id=pipeline_id, update_id=update_id) + request = GetUpdateRequest(pipeline_id=pipeline_id, update_id=update_id) json = self._api.do('GET', f'/api/2.0/pipelines/{request.pipeline_id}/updates/{request.update_id}') return GetUpdateResponse.from_dict(json) @@ -1135,11 +1151,11 @@ def list_pipeline_events(self, Retrieves events for a pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListPipelineEvents(filter=filter, - max_results=max_results, - order_by=order_by, - page_token=page_token, - pipeline_id=pipeline_id) + request = ListPipelineEventsRequest(filter=filter, + max_results=max_results, + order_by=order_by, + page_token=page_token, + pipeline_id=pipeline_id) query = {} if filter: query['filter'] = request.filter @@ -1169,10 +1185,10 @@ def list_pipelines(self, Lists pipelines defined in the Delta Live Tables system.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListPipelines(filter=filter, - max_results=max_results, - order_by=order_by, - page_token=page_token) + request = ListPipelinesRequest(filter=filter, + 
max_results=max_results, + order_by=order_by, + page_token=page_token) query = {} if filter: query['filter'] = request.filter @@ -1202,10 +1218,10 @@ def list_updates(self, List updates for an active pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = ListUpdates(max_results=max_results, - page_token=page_token, - pipeline_id=pipeline_id, - until_update_id=until_update_id) + request = ListUpdatesRequest(max_results=max_results, + page_token=page_token, + pipeline_id=pipeline_id, + until_update_id=until_update_id) query = {} if max_results: query['max_results'] = request.max_results @@ -1221,7 +1237,7 @@ def reset(self, pipeline_id: str, **kwargs) -> Wait[GetPipelineResponse]: Resets a pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Reset(pipeline_id=pipeline_id) + request = ResetRequest(pipeline_id=pipeline_id) self._api.do('POST', f'/api/2.0/pipelines/{request.pipeline_id}/reset') return Wait(self.wait_get_pipeline_running, pipeline_id=request.pipeline_id) @@ -1258,7 +1274,7 @@ def stop(self, pipeline_id: str, **kwargs) -> Wait[GetPipelineResponse]: Stops a pipeline.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Stop(pipeline_id=pipeline_id) + request = StopRequest(pipeline_id=pipeline_id) self._api.do('POST', f'/api/2.0/pipelines/{request.pipeline_id}/stop') return Wait(self.wait_get_pipeline_idle, pipeline_id=request.pipeline_id) diff --git a/databricks/sdk/service/deployment.py b/databricks/sdk/service/provisioning.py similarity index 97% rename from databricks/sdk/service/deployment.py rename to databricks/sdk/service/provisioning.py index 1ea183eb5..1f38bec5b 100755 --- a/databricks/sdk/service/deployment.py +++ b/databricks/sdk/service/provisioning.py @@ -203,12 +203,14 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateStorageConfigurationRequest': 
@dataclass class CreateVpcEndpointRequest: vpc_endpoint_name: str - aws_vpc_endpoint_id: str - region: str + aws_vpc_endpoint_id: str = None + gcp_vpc_endpoint_info: 'GcpVpcEndpointInfo' = None + region: str = None def as_dict(self) -> dict: body = {} if self.aws_vpc_endpoint_id: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict() if self.region: body['region'] = self.region if self.vpc_endpoint_name: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body @@ -216,6 +218,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'CreateVpcEndpointRequest': return cls(aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None), + gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo), region=d.get('region', None), vpc_endpoint_name=d.get('vpc_endpoint_name', None)) @@ -396,14 +399,7 @@ class EndpointUseCase(Enum): """This enumeration represents the type of Databricks VPC [endpoint service] that was used when creating this VPC endpoint. - If the VPC endpoint connects to the Databricks control plane for either the front-end connection - or the back-end REST API connection, the value is `WORKSPACE_ACCESS`. - - If the VPC endpoint connects to the Databricks workspace for the back-end [secure cluster - connectivity] relay, the value is `DATAPLANE_RELAY_ACCESS`. 
- - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html - [secure cluster connectivity]: https://docs.databricks.com/security/secure-cluster-connectivity.html""" + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" DATAPLANE_RELAY_ACCESS = 'DATAPLANE_RELAY_ACCESS' WORKSPACE_ACCESS = 'WORKSPACE_ACCESS' @@ -492,6 +488,34 @@ def from_dict(cls, d: Dict[str, any]) -> 'GcpNetworkInfo': vpc_id=d.get('vpc_id', None)) +@dataclass +class GcpVpcEndpointInfo: + """The Google Cloud specific information for this Private Service Connect endpoint.""" + + project_id: str + psc_endpoint_name: str + endpoint_region: str + psc_connection_id: str = None + service_attachment_id: str = None + + def as_dict(self) -> dict: + body = {} + if self.endpoint_region: body['endpoint_region'] = self.endpoint_region + if self.project_id: body['project_id'] = self.project_id + if self.psc_connection_id: body['psc_connection_id'] = self.psc_connection_id + if self.psc_endpoint_name: body['psc_endpoint_name'] = self.psc_endpoint_name + if self.service_attachment_id: body['service_attachment_id'] = self.service_attachment_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GcpVpcEndpointInfo': + return cls(endpoint_region=d.get('endpoint_region', None), + project_id=d.get('project_id', None), + psc_connection_id=d.get('psc_connection_id', None), + psc_endpoint_name=d.get('psc_endpoint_name', None), + service_attachment_id=d.get('service_attachment_id', None)) + + @dataclass class GetCredentialRequest: """Get credential configuration""" @@ -875,6 +899,7 @@ class VpcEndpoint: aws_account_id: str = None aws_endpoint_service_id: str = None aws_vpc_endpoint_id: str = None + gcp_vpc_endpoint_info: 'GcpVpcEndpointInfo' = None region: str = None state: str = None use_case: 'EndpointUseCase' = None @@ -887,6 +912,7 @@ def as_dict(self) -> dict: if self.aws_account_id: body['aws_account_id'] = 
self.aws_account_id if self.aws_endpoint_service_id: body['aws_endpoint_service_id'] = self.aws_endpoint_service_id if self.aws_vpc_endpoint_id: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict() if self.region: body['region'] = self.region if self.state: body['state'] = self.state if self.use_case: body['use_case'] = self.use_case.value @@ -900,6 +926,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'VpcEndpoint': aws_account_id=d.get('aws_account_id', None), aws_endpoint_service_id=d.get('aws_endpoint_service_id', None), aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None), + gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo), region=d.get('region', None), state=d.get('state', None), use_case=_enum(d, 'use_case', EndpointUseCase), @@ -1251,13 +1278,7 @@ def list(self) -> Iterator[Network]: class PrivateAccessAPI: - """These APIs manage private access settings for this account. A private access settings object specifies how - your workspace is accessed using AWS PrivateLink. Each workspace that has any PrivateLink connections must - include the ID for a private access settings object is in its workspace configuration object. Your account - must be enabled for PrivateLink to use these APIs. Before configuring PrivateLink, it is important to read - the [Databricks article about PrivateLink]. - - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" + """These APIs manage private access settings for this account.""" def __init__(self, api_client): self._api = api_client @@ -1463,19 +1484,18 @@ def list(self) -> Iterator[StorageConfiguration]: class VpcEndpointsAPI: - """These APIs manage VPC endpoint configurations for this account. This object registers an AWS VPC endpoint - in your Databricks account so your workspace can use it with AWS PrivateLink. 
Your VPC endpoint connects - to one of two VPC endpoint services -- one for workspace (both for front-end connection and for back-end - connection to REST APIs) and one for the back-end secure cluster connectivity relay from the data plane. - Your account must be enabled for PrivateLink to use these APIs. Before configuring PrivateLink, it is - important to read the [Databricks article about PrivateLink]. - - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" + """These APIs manage VPC endpoint configurations for this account.""" def __init__(self, api_client): self._api = api_client - def create(self, vpc_endpoint_name: str, aws_vpc_endpoint_id: str, region: str, **kwargs) -> VpcEndpoint: + def create(self, + vpc_endpoint_name: str, + *, + aws_vpc_endpoint_id: str = None, + gcp_vpc_endpoint_info: GcpVpcEndpointInfo = None, + region: str = None, + **kwargs) -> VpcEndpoint: """Create VPC endpoint configuration. Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to @@ -1493,6 +1513,7 @@ def create(self, vpc_endpoint_name: str, aws_vpc_endpoint_id: str, region: str, request = kwargs.get('request', None) if not request: # request is not given through keyed args request = CreateVpcEndpointRequest(aws_vpc_endpoint_id=aws_vpc_endpoint_id, + gcp_vpc_endpoint_info=gcp_vpc_endpoint_info, region=region, vpc_endpoint_name=vpc_endpoint_name) body = request.as_dict() @@ -1506,9 +1527,6 @@ def delete(self, vpc_endpoint_id: str, **kwargs): Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. - Upon deleting a VPC endpoint configuration, the VPC endpoint in AWS changes its state from `accepted` - to `rejected`, which means that it is no longer usable from your VPC. - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink diff --git a/databricks/sdk/service/repos.py b/databricks/sdk/service/repos.py deleted file mode 100755 index 9893fb179..000000000 --- a/databricks/sdk/service/repos.py +++ /dev/null @@ -1,250 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from typing import Dict, Iterator, List - -from ._internal import _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateRepo: - url: str - provider: str - path: str = None - sparse_checkout: 'SparseCheckout' = None - - def as_dict(self) -> dict: - body = {} - if self.path: body['path'] = self.path - if self.provider: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() - if self.url: body['url'] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateRepo': - return cls(path=d.get('path', None), - provider=d.get('provider', None), - sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), - url=d.get('url', None)) - - -@dataclass -class Delete: - """Delete a repo""" - - repo_id: int - - -@dataclass -class Get: - """Get a repo""" - - repo_id: int - - -@dataclass -class ListRequest: - """Get repos""" - - next_page_token: str = None - path_prefix: str = None - - -@dataclass -class ListReposResponse: - next_page_token: str = None - repos: 'List[RepoInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.next_page_token: body['next_page_token'] = self.next_page_token - if self.repos: body['repos'] = [v.as_dict() for v in self.repos] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListReposResponse': - return cls(next_page_token=d.get('next_page_token', None), repos=_repeated(d, 'repos', RepoInfo)) - - -@dataclass -class RepoInfo: - branch: str 
= None - head_commit_id: str = None - id: int = None - path: str = None - provider: str = None - sparse_checkout: 'SparseCheckout' = None - url: str = None - - def as_dict(self) -> dict: - body = {} - if self.branch: body['branch'] = self.branch - if self.head_commit_id: body['head_commit_id'] = self.head_commit_id - if self.id: body['id'] = self.id - if self.path: body['path'] = self.path - if self.provider: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() - if self.url: body['url'] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RepoInfo': - return cls(branch=d.get('branch', None), - head_commit_id=d.get('head_commit_id', None), - id=d.get('id', None), - path=d.get('path', None), - provider=d.get('provider', None), - sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), - url=d.get('url', None)) - - -@dataclass -class SparseCheckout: - patterns: 'List[str]' = None - - def as_dict(self) -> dict: - body = {} - if self.patterns: body['patterns'] = [v for v in self.patterns] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SparseCheckout': - return cls(patterns=d.get('patterns', None)) - - -@dataclass -class SparseCheckoutUpdate: - patterns: 'List[str]' = None - - def as_dict(self) -> dict: - body = {} - if self.patterns: body['patterns'] = [v for v in self.patterns] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SparseCheckoutUpdate': - return cls(patterns=d.get('patterns', None)) - - -@dataclass -class UpdateRepo: - repo_id: int - branch: str = None - sparse_checkout: 'SparseCheckoutUpdate' = None - tag: str = None - - def as_dict(self) -> dict: - body = {} - if self.branch: body['branch'] = self.branch - if self.repo_id: body['repo_id'] = self.repo_id - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() - if self.tag: body['tag'] = self.tag - return body - - 
@classmethod - def from_dict(cls, d: Dict[str, any]) -> 'UpdateRepo': - return cls(branch=d.get('branch', None), - repo_id=d.get('repo_id', None), - sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate), - tag=d.get('tag', None)) - - -class ReposAPI: - """The Repos API allows users to manage their git repos. Users can use the API to access all repos that they - have manage permissions on. - - Databricks Repos is a visual Git client in Databricks. It supports common Git operations such a cloning a - repository, committing and pushing, pulling, branch management, and visual comparison of diffs when - committing. - - Within Repos you can develop code in notebooks or other files and follow data science and engineering code - development best practices using Git for version control, collaboration, and CI/CD.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, - url: str, - provider: str, - *, - path: str = None, - sparse_checkout: SparseCheckout = None, - **kwargs) -> RepoInfo: - """Create a repo. - - Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created - programmatically must be linked to a remote Git repo, unlike repos created in the browser.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateRepo(path=path, provider=provider, sparse_checkout=sparse_checkout, url=url) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/repos', body=body) - return RepoInfo.from_dict(json) - - def delete(self, repo_id: int, **kwargs): - """Delete a repo. - - Deletes the specified repo.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Delete(repo_id=repo_id) - - self._api.do('DELETE', f'/api/2.0/repos/{request.repo_id}') - - def get(self, repo_id: int, **kwargs) -> RepoInfo: - """Get a repo. 
- - Returns the repo with the given repo ID.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(repo_id=repo_id) - - json = self._api.do('GET', f'/api/2.0/repos/{request.repo_id}') - return RepoInfo.from_dict(json) - - def list(self, *, next_page_token: str = None, path_prefix: str = None, **kwargs) -> Iterator[RepoInfo]: - """Get repos. - - Returns repos that the calling user has Manage permissions on. Results are paginated with each page - containing twenty repos.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListRequest(next_page_token=next_page_token, path_prefix=path_prefix) - - query = {} - if next_page_token: query['next_page_token'] = request.next_page_token - if path_prefix: query['path_prefix'] = request.path_prefix - - while True: - json = self._api.do('GET', '/api/2.0/repos', query=query) - if 'repos' not in json or not json['repos']: - return - for v in json['repos']: - yield RepoInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['next_page_token'] = json['next_page_token'] - - def update(self, - repo_id: int, - *, - branch: str = None, - sparse_checkout: SparseCheckoutUpdate = None, - tag: str = None, - **kwargs): - """Update a repo. 
- - Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same - branch.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = UpdateRepo(branch=branch, repo_id=repo_id, sparse_checkout=sparse_checkout, tag=tag) - body = request.as_dict() - self._api.do('PATCH', f'/api/2.0/repos/{request.repo_id}', body=body) diff --git a/databricks/sdk/service/secrets.py b/databricks/sdk/service/secrets.py deleted file mode 100755 index 48c5c96da..000000000 --- a/databricks/sdk/service/secrets.py +++ /dev/null @@ -1,472 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from enum import Enum -from typing import Dict, Iterator, List - -from ._internal import _enum, _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class AclItem: - principal: str - permission: 'AclPermission' - - def as_dict(self) -> dict: - body = {} - if self.permission: body['permission'] = self.permission.value - if self.principal: body['principal'] = self.principal - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'AclItem': - return cls(permission=_enum(d, 'permission', AclPermission), principal=d.get('principal', None)) - - -class AclPermission(Enum): - - MANAGE = 'MANAGE' - READ = 'READ' - WRITE = 'WRITE' - - -@dataclass -class AzureKeyVaultSecretScopeMetadata: - resource_id: str - dns_name: str - - def as_dict(self) -> dict: - body = {} - if self.dns_name: body['dns_name'] = self.dns_name - if self.resource_id: body['resource_id'] = self.resource_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'AzureKeyVaultSecretScopeMetadata': - return cls(dns_name=d.get('dns_name', None), resource_id=d.get('resource_id', None)) - - -@dataclass -class CreateScope: - scope: str - 
initial_manage_principal: str = None - keyvault_metadata: 'AzureKeyVaultSecretScopeMetadata' = None - scope_backend_type: 'ScopeBackendType' = None - - def as_dict(self) -> dict: - body = {} - if self.initial_manage_principal: body['initial_manage_principal'] = self.initial_manage_principal - if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict() - if self.scope: body['scope'] = self.scope - if self.scope_backend_type: body['scope_backend_type'] = self.scope_backend_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateScope': - return cls(initial_manage_principal=d.get('initial_manage_principal', None), - keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata), - scope=d.get('scope', None), - scope_backend_type=_enum(d, 'scope_backend_type', ScopeBackendType)) - - -@dataclass -class DeleteAcl: - scope: str - principal: str - - def as_dict(self) -> dict: - body = {} - if self.principal: body['principal'] = self.principal - if self.scope: body['scope'] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DeleteAcl': - return cls(principal=d.get('principal', None), scope=d.get('scope', None)) - - -@dataclass -class DeleteScope: - scope: str - - def as_dict(self) -> dict: - body = {} - if self.scope: body['scope'] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DeleteScope': - return cls(scope=d.get('scope', None)) - - -@dataclass -class DeleteSecret: - scope: str - key: str - - def as_dict(self) -> dict: - body = {} - if self.key: body['key'] = self.key - if self.scope: body['scope'] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'DeleteSecret': - return cls(key=d.get('key', None), scope=d.get('scope', None)) - - -@dataclass -class GetAcl: - """Get secret ACL details""" - - scope: str - principal: str - - -@dataclass -class ListAcls: - """Lists ACLs""" - 
- scope: str - - -@dataclass -class ListAclsResponse: - items: 'List[AclItem]' = None - - def as_dict(self) -> dict: - body = {} - if self.items: body['items'] = [v.as_dict() for v in self.items] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListAclsResponse': - return cls(items=_repeated(d, 'items', AclItem)) - - -@dataclass -class ListScopesResponse: - scopes: 'List[SecretScope]' = None - - def as_dict(self) -> dict: - body = {} - if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListScopesResponse': - return cls(scopes=_repeated(d, 'scopes', SecretScope)) - - -@dataclass -class ListSecrets: - """List secret keys""" - - scope: str - - -@dataclass -class ListSecretsResponse: - secrets: 'List[SecretMetadata]' = None - - def as_dict(self) -> dict: - body = {} - if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListSecretsResponse': - return cls(secrets=_repeated(d, 'secrets', SecretMetadata)) - - -@dataclass -class PutAcl: - scope: str - principal: str - permission: 'AclPermission' - - def as_dict(self) -> dict: - body = {} - if self.permission: body['permission'] = self.permission.value - if self.principal: body['principal'] = self.principal - if self.scope: body['scope'] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PutAcl': - return cls(permission=_enum(d, 'permission', AclPermission), - principal=d.get('principal', None), - scope=d.get('scope', None)) - - -@dataclass -class PutSecret: - scope: str - key: str - bytes_value: str = None - string_value: str = None - - def as_dict(self) -> dict: - body = {} - if self.bytes_value: body['bytes_value'] = self.bytes_value - if self.key: body['key'] = self.key - if self.scope: body['scope'] = self.scope - if self.string_value: body['string_value'] = self.string_value 
- return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PutSecret': - return cls(bytes_value=d.get('bytes_value', None), - key=d.get('key', None), - scope=d.get('scope', None), - string_value=d.get('string_value', None)) - - -class ScopeBackendType(Enum): - - AZURE_KEYVAULT = 'AZURE_KEYVAULT' - DATABRICKS = 'DATABRICKS' - - -@dataclass -class SecretMetadata: - key: str = None - last_updated_timestamp: int = None - - def as_dict(self) -> dict: - body = {} - if self.key: body['key'] = self.key - if self.last_updated_timestamp: body['last_updated_timestamp'] = self.last_updated_timestamp - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SecretMetadata': - return cls(key=d.get('key', None), last_updated_timestamp=d.get('last_updated_timestamp', None)) - - -@dataclass -class SecretScope: - backend_type: 'ScopeBackendType' = None - keyvault_metadata: 'AzureKeyVaultSecretScopeMetadata' = None - name: str = None - - def as_dict(self) -> dict: - body = {} - if self.backend_type: body['backend_type'] = self.backend_type.value - if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict() - if self.name: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'SecretScope': - return cls(backend_type=_enum(d, 'backend_type', ScopeBackendType), - keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata), - name=d.get('name', None)) - - -class SecretsAPI: - """The Secrets API allows you to manage secrets, secret scopes, and access permissions. - - Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of - directly entering your credentials into a notebook, use Databricks secrets to store your credentials and - reference them in notebooks and jobs. - - Administrators, secret creators, and users granted permission can read Databricks secrets. 
While - Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not - possible to prevent such users from reading secrets.""" - - def __init__(self, api_client): - self._api = api_client - - def create_scope(self, - scope: str, - *, - initial_manage_principal: str = None, - keyvault_metadata: AzureKeyVaultSecretScopeMetadata = None, - scope_backend_type: ScopeBackendType = None, - **kwargs): - """Create a new secret scope. - - The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not - exceed 128 characters. The maximum number of scopes in a workspace is 100.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateScope(initial_manage_principal=initial_manage_principal, - keyvault_metadata=keyvault_metadata, - scope=scope, - scope_backend_type=scope_backend_type) - body = request.as_dict() - self._api.do('POST', '/api/2.0/secrets/scopes/create', body=body) - - def delete_acl(self, scope: str, principal: str, **kwargs): - """Delete an ACL. - - Deletes the given ACL on the given scope. - - Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no - such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have - permission to make this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteAcl(principal=principal, scope=scope) - body = request.as_dict() - self._api.do('POST', '/api/2.0/secrets/acls/delete', body=body) - - def delete_scope(self, scope: str, **kwargs): - """Delete a secret scope. - - Deletes a secret scope. - - Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. 
Throws `PERMISSION_DENIED` if the user - does not have permission to make this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteScope(scope=scope) - body = request.as_dict() - self._api.do('POST', '/api/2.0/secrets/scopes/delete', body=body) - - def delete_secret(self, scope: str, key: str, **kwargs): - """Delete a secret. - - Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the - secret scope. - - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` - if the user does not have permission to make this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = DeleteSecret(key=key, scope=scope) - body = request.as_dict() - self._api.do('POST', '/api/2.0/secrets/delete', body=body) - - def get_acl(self, scope: str, principal: str, **kwargs) -> AclItem: - """Get secret ACL details. - - Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` - permission to invoke this API. - - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the - user does not have permission to make this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = GetAcl(principal=principal, scope=scope) - - query = {} - if principal: query['principal'] = request.principal - if scope: query['scope'] = request.scope - - json = self._api.do('GET', '/api/2.0/secrets/acls/get', query=query) - return AclItem.from_dict(json) - - def list_acls(self, scope: str, **kwargs) -> Iterator[AclItem]: - """Lists ACLs. - - List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. - - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. 
Throws `PERMISSION_DENIED` if the - user does not have permission to make this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListAcls(scope=scope) - - query = {} - if scope: query['scope'] = request.scope - - json = self._api.do('GET', '/api/2.0/secrets/acls/list', query=query) - return [AclItem.from_dict(v) for v in json.get('items', [])] - - def list_scopes(self) -> Iterator[SecretScope]: - """List all scopes. - - Lists all secret scopes available in the workspace. - - Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.""" - - json = self._api.do('GET', '/api/2.0/secrets/scopes/list') - return [SecretScope.from_dict(v) for v in json.get('scopes', [])] - - def list_secrets(self, scope: str, **kwargs) -> Iterator[SecretMetadata]: - """List secret keys. - - Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data - cannot be retrieved using this API. Users need the READ permission to make this call. - - The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if - no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make - this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListSecrets(scope=scope) - - query = {} - if scope: query['scope'] = request.scope - - json = self._api.do('GET', '/api/2.0/secrets/list', query=query) - return [SecretMetadata.from_dict(v) for v in json.get('secrets', [])] - - def put_acl(self, scope: str, principal: str, permission: AclPermission, **kwargs): - """Create/update an ACL. - - Creates or overwrites the Access Control List (ACL) associated with the given principal (user or - group) on the specified scope point. 
- - In general, a user or group will use the most powerful permission available to them, and permissions - are ordered as follows: - - * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to - read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what - secrets are available. - - Note that in general, secret values can only be read from within a command on a cluster (for example, - through a notebook). There is no API to read the actual secret value material outside of a cluster. - However, the user's permission will be applied based on who is executing the command, and they must - have at least READ permission. - - Users must have the `MANAGE` permission to invoke this API. - - The principal is a user or group name corresponding to an existing Databricks principal to be granted - or revoked access. - - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a - permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission is - invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = PutAcl(permission=permission, principal=principal, scope=scope) - body = request.as_dict() - self._api.do('POST', '/api/2.0/secrets/acls/put', body=body) - - def put_secret(self, - scope: str, - key: str, - *, - bytes_value: str = None, - string_value: str = None, - **kwargs): - """Add a secret. - - Inserts a secret under the provided scope with the given name. If a secret already exists with the - same name, this command overwrites the existing secret's value. The server encrypts the secret using - the secret scope's encryption settings before storing it. - - You must have `WRITE` or `MANAGE` permission on the secret scope. 
The secret key must consist of - alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The - maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. - - The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine - the value returned when the secret value is requested. Exactly one must be specified. - - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if - maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or - value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this - API call.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = PutSecret(bytes_value=bytes_value, key=key, scope=scope, string_value=string_value) - body = request.as_dict() - self._api.do('POST', '/api/2.0/secrets/put', body=body) diff --git a/databricks/sdk/service/endpoints.py b/databricks/sdk/service/serving.py similarity index 100% rename from databricks/sdk/service/endpoints.py rename to databricks/sdk/service/serving.py diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py new file mode 100755 index 000000000..ee31dbacf --- /dev/null +++ b/databricks/sdk/service/settings.py @@ -0,0 +1,849 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Dict, Iterator, List + +from ._internal import _enum, _from_dict, _repeated + +_LOG = logging.getLogger('databricks.sdk') + +# all definitions in this file are in alphabetical order + + +@dataclass +class CreateIpAccessList: + label: str + list_type: 'ListType' + ip_addresses: 'List[str]' + + def as_dict(self) -> dict: + body = {} + if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] + if self.label: body['label'] = self.label + if self.list_type: body['list_type'] = self.list_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateIpAccessList': + return cls(ip_addresses=d.get('ip_addresses', None), + label=d.get('label', None), + list_type=_enum(d, 'list_type', ListType)) + + +@dataclass +class CreateIpAccessListResponse: + ip_access_list: 'IpAccessListInfo' = None + + def as_dict(self) -> dict: + body = {} + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateIpAccessListResponse': + return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) + + +@dataclass +class CreateOboTokenRequest: + application_id: str + lifetime_seconds: int + comment: str = None + + def as_dict(self) -> dict: + body = {} + if self.application_id: body['application_id'] = self.application_id + if self.comment: body['comment'] = self.comment + if self.lifetime_seconds: body['lifetime_seconds'] = self.lifetime_seconds + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateOboTokenRequest': + return cls(application_id=d.get('application_id', None), + comment=d.get('comment', None), + lifetime_seconds=d.get('lifetime_seconds', None)) + + +@dataclass +class CreateOboTokenResponse: + token_info: 'TokenInfo' = None + token_value: str = None + + def as_dict(self) -> dict: + body = {} + if 
self.token_info: body['token_info'] = self.token_info.as_dict() + if self.token_value: body['token_value'] = self.token_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateOboTokenResponse': + return cls(token_info=_from_dict(d, 'token_info', TokenInfo), token_value=d.get('token_value', None)) + + +@dataclass +class CreateTokenRequest: + comment: str = None + lifetime_seconds: int = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.lifetime_seconds: body['lifetime_seconds'] = self.lifetime_seconds + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateTokenRequest': + return cls(comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None)) + + +@dataclass +class CreateTokenResponse: + token_info: 'PublicTokenInfo' = None + token_value: str = None + + def as_dict(self) -> dict: + body = {} + if self.token_info: body['token_info'] = self.token_info.as_dict() + if self.token_value: body['token_value'] = self.token_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateTokenResponse': + return cls(token_info=_from_dict(d, 'token_info', PublicTokenInfo), + token_value=d.get('token_value', None)) + + +@dataclass +class DeleteAccountIpAccessListRequest: + """Delete access list""" + + ip_access_list_id: str + + +@dataclass +class DeleteIpAccessListRequest: + """Delete access list""" + + ip_access_list_id: str + + +@dataclass +class DeleteTokenManagementRequest: + """Delete a token""" + + token_id: str + + +@dataclass +class FetchIpAccessListResponse: + ip_access_list: 'IpAccessListInfo' = None + + def as_dict(self) -> dict: + body = {} + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'FetchIpAccessListResponse': + return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) + + 
+@dataclass +class GetAccountIpAccessListRequest: + """Get IP access list""" + + ip_access_list_id: str + + +@dataclass +class GetIpAccessListRequest: + """Get access list""" + + ip_access_list_id: str + + +@dataclass +class GetIpAccessListResponse: + ip_access_lists: 'List[IpAccessListInfo]' = None + + def as_dict(self) -> dict: + body = {} + if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetIpAccessListResponse': + return cls(ip_access_lists=_repeated(d, 'ip_access_lists', IpAccessListInfo)) + + +@dataclass +class GetIpAccessListsResponse: + ip_access_lists: 'List[IpAccessListInfo]' = None + + def as_dict(self) -> dict: + body = {} + if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetIpAccessListsResponse': + return cls(ip_access_lists=_repeated(d, 'ip_access_lists', IpAccessListInfo)) + + +@dataclass +class GetStatusRequest: + """Check configuration status""" + + keys: str + + +@dataclass +class GetTokenManagementRequest: + """Get token info""" + + token_id: str + + +@dataclass +class IpAccessListInfo: + address_count: int = None + created_at: int = None + created_by: int = None + enabled: bool = None + ip_addresses: 'List[str]' = None + label: str = None + list_id: str = None + list_type: 'ListType' = None + updated_at: int = None + updated_by: int = None + + def as_dict(self) -> dict: + body = {} + if self.address_count: body['address_count'] = self.address_count + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.enabled: body['enabled'] = self.enabled + if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] + if self.label: body['label'] = self.label + if self.list_id: body['list_id'] = self.list_id + if self.list_type: 
@dataclass
class ListTokenManagementRequest:
    """Query parameters for the token-management "list all tokens" call."""

    # Both filters are optional; None means "do not filter on this field".
    created_by_id: str = None
    created_by_username: str = None


@dataclass
class ListTokensResponse:
    token_infos: 'List[PublicTokenInfo]' = None

    def as_dict(self) -> dict:
        """Serialize into a response body dict, omitting an empty token list."""
        if not self.token_infos:
            return {}
        return {'token_infos': [info.as_dict() for info in self.token_infos]}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'ListTokensResponse':
        """Deserialize from a response body dict."""
        return cls(token_infos=_repeated(d, 'token_infos', PublicTokenInfo))


class ListType(Enum):
    """Kind of an IP access list: ALLOW (inclusion) or BLOCK (exclusion)."""

    ALLOW = 'ALLOW'
    BLOCK = 'BLOCK'


@dataclass
class PublicTokenInfo:
    comment: str = None
    creation_time: int = None
    expiry_time: int = None
    token_id: str = None

    def as_dict(self) -> dict:
        """Serialize into a body dict, omitting falsy fields (generator convention)."""
        body = {}
        for key in ('comment', 'creation_time', 'expiry_time', 'token_id'):
            value = getattr(self, key)
            if value:
                body[key] = value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'PublicTokenInfo':
        """Deserialize from a response body dict."""
        keys = ('comment', 'creation_time', 'expiry_time', 'token_id')
        return cls(**{k: d.get(k, None) for k in keys})


@dataclass
class ReplaceIpAccessList:
    label: str
    list_type: 'ListType'
    ip_addresses: 'List[str]'
    enabled: bool
    ip_access_list_id: str
    list_id: str = None

    def as_dict(self) -> dict:
        """Serialize into a request body dict, omitting falsy fields."""
        body = {}
        for key in ('enabled', 'ip_access_list_id', 'ip_addresses', 'label', 'list_id'):
            value = getattr(self, key)
            if value:
                # ip_addresses is copied so the body does not alias the dataclass field
                body[key] = list(value) if key == 'ip_addresses' else value
        if self.list_type:
            body['list_type'] = self.list_type.value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'ReplaceIpAccessList':
        """Deserialize from a request body dict."""
        plain = ('enabled', 'ip_access_list_id', 'ip_addresses', 'label', 'list_id')
        kwargs = {k: d.get(k, None) for k in plain}
        kwargs['list_type'] = _enum(d, 'list_type', ListType)
        return cls(**kwargs)


@dataclass
class RevokeTokenRequest:
    token_id: str

    def as_dict(self) -> dict:
        """Serialize into a request body dict, omitting a falsy token_id."""
        return {'token_id': self.token_id} if self.token_id else {}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'RevokeTokenRequest':
        """Deserialize from a request body dict."""
        return cls(token_id=d.get('token_id', None))


@dataclass
class TokenInfo:
    comment: str = None
    created_by_id: int = None
    created_by_username: str = None
    creation_time: int = None
    expiry_time: int = None
    owner_id: int = None
    token_id: str = None

    def as_dict(self) -> dict:
        """Serialize into a body dict, omitting falsy fields (generator convention)."""
        body = {}
        for key in ('comment', 'created_by_id', 'created_by_username', 'creation_time',
                    'expiry_time', 'owner_id', 'token_id'):
            value = getattr(self, key)
            if value:
                body[key] = value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'TokenInfo':
        """Deserialize from a response body dict."""
        keys = ('comment', 'created_by_id', 'created_by_username', 'creation_time',
                'expiry_time', 'owner_id', 'token_id')
        return cls(**{k: d.get(k, None) for k in keys})


@dataclass
class UpdateIpAccessList:
    label: str
    list_type: 'ListType'
    ip_addresses: 'List[str]'
    enabled: bool
    ip_access_list_id: str
    list_id: str = None

    def as_dict(self) -> dict:
        """Serialize into a request body dict, omitting falsy fields."""
        body = {}
        for key in ('enabled', 'ip_access_list_id', 'ip_addresses', 'label', 'list_id'):
            value = getattr(self, key)
            if value:
                # ip_addresses is copied so the body does not alias the dataclass field
                body[key] = list(value) if key == 'ip_addresses' else value
        if self.list_type:
            body['list_type'] = self.list_type.value
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'UpdateIpAccessList':
        """Deserialize from a request body dict."""
        plain = ('enabled', 'ip_access_list_id', 'ip_addresses', 'label', 'list_id')
        kwargs = {k: d.get(k, None) for k in plain}
        kwargs['list_type'] = _enum(d, 'list_type', ListType)
        return cls(**kwargs)


# Workspace configuration is a plain string-to-string mapping, not a dataclass.
WorkspaceConf = Dict[str, str]
+ + If there is at least one allow list for the account, the connection is allowed only if the IP address + matches an allow list. If there are no allow lists for the account, all IP addresses are allowed. + + For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where + one CIDR counts as a single value. + + After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, label: str, list_type: ListType, ip_addresses: List[str], + **kwargs) -> CreateIpAccessListResponse: + """Create access list. + + Creates an IP access list for the account. + + A list can be an allow list or a block list. See the top of this file for a description of how the + server treats allow lists and block lists at runtime. + + When creating or updating an IP access list: + + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, + where one CIDR counts as a single value. Attempts to exceed that number return error 400 with + `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, + error 400 is returned with `error_code` value `INVALID_STATE`. + + It can take a few minutes for the changes to take effect.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateIpAccessList(ip_addresses=ip_addresses, label=label, list_type=list_type) + body = request.as_dict() + + json = self._api.do('POST', + f'/api/2.0/preview/accounts/{self._api.account_id}/ip-access-lists', + body=body) + return CreateIpAccessListResponse.from_dict(json) + + def delete(self, ip_access_list_id: str, **kwargs): + """Delete access list. 
+ + Deletes an IP access list, specified by its list ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteAccountIpAccessListRequest(ip_access_list_id=ip_access_list_id) + + self._api.do( + 'DELETE', + f'/api/2.0/preview/accounts/{self._api.account_id}/ip-access-lists/{request.ip_access_list_id}') + + def get(self, ip_access_list_id: str, **kwargs) -> GetIpAccessListResponse: + """Get IP access list. + + Gets an IP access list, specified by its list ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetAccountIpAccessListRequest(ip_access_list_id=ip_access_list_id) + + json = self._api.do( + 'GET', + f'/api/2.0/preview/accounts/{self._api.account_id}/ip-access-lists/{request.ip_access_list_id}') + return GetIpAccessListResponse.from_dict(json) + + def list(self) -> Iterator[IpAccessListInfo]: + """Get access lists. + + Gets all IP access lists for the specified account.""" + + json = self._api.do('GET', f'/api/2.0/preview/accounts/{self._api.account_id}/ip-access-lists') + return [IpAccessListInfo.from_dict(v) for v in json.get('ip_access_lists', [])] + + def replace(self, + label: str, + list_type: ListType, + ip_addresses: List[str], + enabled: bool, + ip_access_list_id: str, + *, + list_id: str = None, + **kwargs): + """Replace access list. + + Replaces an IP access list, specified by its ID. + + A list can include allow lists and block lists. See the top of this file for a description of how the + server treats allow lists and block lists at run time. When replacing an IP access list: * For all + allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one + CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value + `QUOTA_EXCEEDED`. 
* If the resulting list would block the calling user's current IP, error 400 is + returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take + effect.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ReplaceIpAccessList(enabled=enabled, + ip_access_list_id=ip_access_list_id, + ip_addresses=ip_addresses, + label=label, + list_id=list_id, + list_type=list_type) + body = request.as_dict() + self._api.do( + 'PUT', + f'/api/2.0/preview/accounts/{self._api.account_id}/ip-access-lists/{request.ip_access_list_id}', + body=body) + + def update(self, + label: str, + list_type: ListType, + ip_addresses: List[str], + enabled: bool, + ip_access_list_id: str, + *, + list_id: str = None, + **kwargs): + """Update access list. + + Updates an existing IP access list, specified by its ID. + + A list can include allow lists and block lists. See the top of this file for a description of how the + server treats allow lists and block lists at run time. + + When updating an IP access list: + + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, + where one CIDR counts as a single value. Attempts to exceed that number return error 400 with + `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, + error 400 is returned with `error_code` value `INVALID_STATE`. 
+ + It can take a few minutes for the changes to take effect.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateIpAccessList(enabled=enabled, + ip_access_list_id=ip_access_list_id, + ip_addresses=ip_addresses, + label=label, + list_id=list_id, + list_type=list_type) + body = request.as_dict() + self._api.do( + 'PATCH', + f'/api/2.0/preview/accounts/{self._api.account_id}/ip-access-lists/{request.ip_access_list_id}', + body=body) + + +class IpAccessListsAPI: + """IP Access List enables admins to configure IP access lists. + + IP access lists affect web application access and REST API access to this workspace only. If the feature + is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists + (inclusion) and block lists (exclusion). + + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address + matches any block list, the connection is rejected. 2. **If the connection was not rejected by block + lists**, the IP address is compared with the allow lists. + + If there is at least one allow list for the workspace, the connection is allowed only if the IP address + matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed. + + For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values, + where one CIDR counts as a single value. + + After changes to the IP access list feature, it can take a few minutes for changes to take effect.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, label: str, list_type: ListType, ip_addresses: List[str], + **kwargs) -> CreateIpAccessListResponse: + """Create access list. + + Creates an IP access list for this workspace. + + A list can be an allow list or a block list. 
See the top of this file for a description of how the + server treats allow lists and block lists at runtime. + + When creating or updating an IP access list: + + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, + where one CIDR counts as a single value. Attempts to exceed that number return error 400 with + `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, + error 400 is returned with `error_code` value `INVALID_STATE`. + + It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no + effect until you enable the feature. See :method:workspaceconf/setStatus""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateIpAccessList(ip_addresses=ip_addresses, label=label, list_type=list_type) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/ip-access-lists', body=body) + return CreateIpAccessListResponse.from_dict(json) + + def delete(self, ip_access_list_id: str, **kwargs): + """Delete access list. + + Deletes an IP access list, specified by its list ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteIpAccessListRequest(ip_access_list_id=ip_access_list_id) + + self._api.do('DELETE', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}') + + def get(self, ip_access_list_id: str, **kwargs) -> FetchIpAccessListResponse: + """Get access list. + + Gets an IP access list, specified by its list ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetIpAccessListRequest(ip_access_list_id=ip_access_list_id) + + json = self._api.do('GET', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}') + return FetchIpAccessListResponse.from_dict(json) + + def list(self) -> Iterator[IpAccessListInfo]: + """Get access lists. 
+ + Gets all IP access lists for the specified workspace.""" + + json = self._api.do('GET', '/api/2.0/ip-access-lists') + return [IpAccessListInfo.from_dict(v) for v in json.get('ip_access_lists', [])] + + def replace(self, + label: str, + list_type: ListType, + ip_addresses: List[str], + enabled: bool, + ip_access_list_id: str, + *, + list_id: str = None, + **kwargs): + """Replace access list. + + Replaces an IP access list, specified by its ID. + + A list can include allow lists and block lists. See the top of this file for a description of how the + server treats allow lists and block lists at run time. When replacing an IP access list: * For all + allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one + CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value + `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is + returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take + effect. Note that your resulting IP access list has no effect until you enable the feature. See + :method:workspaceconf/setStatus.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ReplaceIpAccessList(enabled=enabled, + ip_access_list_id=ip_access_list_id, + ip_addresses=ip_addresses, + label=label, + list_id=list_id, + list_type=list_type) + body = request.as_dict() + self._api.do('PUT', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}', body=body) + + def update(self, + label: str, + list_type: ListType, + ip_addresses: List[str], + enabled: bool, + ip_access_list_id: str, + *, + list_id: str = None, + **kwargs): + """Update access list. + + Updates an existing IP access list, specified by its ID. + + A list can include allow lists and block lists. 
See the top of this file for a description of how the + server treats allow lists and block lists at run time. + + When updating an IP access list: + + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, + where one CIDR counts as a single value. Attempts to exceed that number return error 400 with + `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, + error 400 is returned with `error_code` value `INVALID_STATE`. + + It can take a few minutes for the changes to take effect. Note that your resulting IP access list has + no effect until you enable the feature. See :method:workspaceconf/setStatus.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateIpAccessList(enabled=enabled, + ip_access_list_id=ip_access_list_id, + ip_addresses=ip_addresses, + label=label, + list_id=list_id, + list_type=list_type) + body = request.as_dict() + self._api.do('PATCH', f'/api/2.0/ip-access-lists/{request.ip_access_list_id}', body=body) + + +class TokenManagementAPI: + """Enables administrators to get all tokens and delete tokens for other users. Admins can either get every + token, get a specific token by ID, or get all tokens for a particular user.""" + + def __init__(self, api_client): + self._api = api_client + + def create_obo_token(self, + application_id: str, + lifetime_seconds: int, + *, + comment: str = None, + **kwargs) -> CreateOboTokenResponse: + """Create on-behalf token. 
+ + Creates a token on behalf of a service principal.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateOboTokenRequest(application_id=application_id, + comment=comment, + lifetime_seconds=lifetime_seconds) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/token-management/on-behalf-of/tokens', body=body) + return CreateOboTokenResponse.from_dict(json) + + def delete(self, token_id: str, **kwargs): + """Delete a token. + + Deletes a token, specified by its ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteTokenManagementRequest(token_id=token_id) + + self._api.do('DELETE', f'/api/2.0/token-management/tokens/{request.token_id}') + + def get(self, token_id: str, **kwargs) -> TokenInfo: + """Get token info. + + Gets information about a token, specified by its ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetTokenManagementRequest(token_id=token_id) + + json = self._api.do('GET', f'/api/2.0/token-management/tokens/{request.token_id}') + return TokenInfo.from_dict(json) + + def list(self, + *, + created_by_id: str = None, + created_by_username: str = None, + **kwargs) -> Iterator[PublicTokenInfo]: + """List all tokens. 
class TokensAPI:
    """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access
    Databricks REST APIs."""

    def __init__(self, api_client):
        self._api = api_client

    def create(self, *, comment: str = None, lifetime_seconds: int = None,
               **kwargs) -> 'CreateTokenResponse':
        """Create a user token.

        Creates and returns a token for a user. If this call is made through token authentication, it creates
        a token with the same client ID as the authenticated token. If the user's token quota is exceeded,
        this call returns an error **QUOTA_EXCEEDED**."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = CreateTokenRequest(comment=comment, lifetime_seconds=lifetime_seconds)
        body = request.as_dict()

        json = self._api.do('POST', '/api/2.0/token/create', body=body)
        return CreateTokenResponse.from_dict(json)

    def delete(self, token_id: str, **kwargs):
        """Revoke token.

        Revokes an access token.

        If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = RevokeTokenRequest(token_id=token_id)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/token/delete', body=body)

    def list(self) -> 'Iterator[PublicTokenInfo]':
        """List tokens.

        Lists all the valid tokens for a user-workspace pair."""

        json = self._api.do('GET', '/api/2.0/token/list')
        return [PublicTokenInfo.from_dict(v) for v in json.get('token_infos', [])]


class WorkspaceConfAPI:
    """This API allows updating known workspace settings for advanced users."""

    def __init__(self, api_client):
        self._api = api_client

    def get_status(self, keys: str, **kwargs) -> 'WorkspaceConf':
        """Check configuration status.

        Gets the configuration status for a workspace and returns it as a plain
        setting-name -> value mapping."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = GetStatusRequest(keys=keys)

        query = {}
        if keys: query['keys'] = request.keys

        json = self._api.do('GET', '/api/2.0/workspace-conf', query=query)
        # Bug fix: `WorkspaceConf` is a plain `Dict[str, str]` type alias, not a
        # dataclass — it has no `from_dict`, so the previous
        # `WorkspaceConf.from_dict(json)` raised AttributeError on every call.
        # The response body already is the requested key -> value mapping.
        return json

    def set_status(self, contents: Dict[str, str] = None, **kwargs):
        """Enable/disable features.

        Sets the configuration status for a workspace, including enabling or disabling it.
        `contents` is the setting-name -> value mapping to apply; it may also be
        supplied through the `request` keyword argument."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            # Bug fix: the previous code did `request = Dict[str, str]()`, which
            # raises TypeError — typing aliases are not constructible.
            request = contents or {}
        # Bug fix: the settings map was previously dropped (the PATCH was sent
        # without a body), so this call could never change any setting.
        self._api.do('PATCH', '/api/2.0/workspace-conf', body=request)
+ +import logging +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, Iterator, List + +from ._internal import _enum, _from_dict, _repeated + +_LOG = logging.getLogger('databricks.sdk') + +from .catalog import PermissionsChange, PermissionsList + +# all definitions in this file are in alphabetical order + + +class AuthenticationType(Enum): + """The delta sharing authentication type.""" + + DATABRICKS = 'DATABRICKS' + TOKEN = 'TOKEN' + + +@dataclass +class CreateProvider: + name: str + authentication_type: 'AuthenticationType' + comment: str = None + recipient_profile_str: str = None + + def as_dict(self) -> dict: + body = {} + if self.authentication_type: body['authentication_type'] = self.authentication_type.value + if self.comment: body['comment'] = self.comment + if self.name: body['name'] = self.name + if self.recipient_profile_str: body['recipient_profile_str'] = self.recipient_profile_str + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateProvider': + return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), + comment=d.get('comment', None), + name=d.get('name', None), + recipient_profile_str=d.get('recipient_profile_str', None)) + + +@dataclass +class CreateRecipient: + name: str + authentication_type: 'AuthenticationType' + comment: str = None + data_recipient_global_metastore_id: Any = None + ip_access_list: 'IpAccessList' = None + owner: str = None + properties_kvpairs: Any = None + sharing_code: str = None + + def as_dict(self) -> dict: + body = {} + if self.authentication_type: body['authentication_type'] = self.authentication_type.value + if self.comment: body['comment'] = self.comment + if self.data_recipient_global_metastore_id: + body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.name: body['name'] = self.name + if self.owner: 
body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.sharing_code: body['sharing_code'] = self.sharing_code + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateRecipient': + return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), + comment=d.get('comment', None), + data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), + ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), + name=d.get('name', None), + owner=d.get('owner', None), + properties_kvpairs=d.get('properties_kvpairs', None), + sharing_code=d.get('sharing_code', None)) + + +@dataclass +class CreateShare: + name: str + comment: str = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.name: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateShare': + return cls(comment=d.get('comment', None), name=d.get('name', None)) + + +@dataclass +class DeleteProviderRequest: + """Delete a provider""" + + name: str + + +@dataclass +class DeleteRecipientRequest: + """Delete a share recipient""" + + name: str + + +@dataclass +class DeleteShareRequest: + """Delete a share""" + + name: str + + +@dataclass +class GetActivationUrlInfoRequest: + """Get a share activation URL""" + + activation_url: str + + +@dataclass +class GetProviderRequest: + """Get a provider""" + + name: str + + +@dataclass +class GetRecipientRequest: + """Get a share recipient""" + + name: str + + +@dataclass +class GetRecipientSharePermissionsResponse: + permissions_out: 'List[ShareToPrivilegeAssignment]' = None + + def as_dict(self) -> dict: + body = {} + if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetRecipientSharePermissionsResponse': + return 
@dataclass
class GetShareRequest:
    """Get a share"""

    name: str
    include_shared_data: bool = None


@dataclass
class IpAccessList:
    allowed_ip_addresses: 'List[str]' = None

    def as_dict(self) -> dict:
        """Serialize, omitting the address list when it is empty or None."""
        if not self.allowed_ip_addresses:
            return {}
        # Copy so the body does not alias the dataclass field.
        return {'allowed_ip_addresses': list(self.allowed_ip_addresses)}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'IpAccessList':
        """Deserialize from a body dict."""
        return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None))


@dataclass
class ListProviderSharesResponse:
    shares: 'List[ProviderShare]' = None

    def as_dict(self) -> dict:
        """Serialize, omitting an empty share list."""
        if not self.shares:
            return {}
        return {'shares': [share.as_dict() for share in self.shares]}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'ListProviderSharesResponse':
        """Deserialize from a response body dict."""
        return cls(shares=_repeated(d, 'shares', ProviderShare))


@dataclass
class ListProvidersRequest:
    """List providers"""

    data_provider_global_metastore_id: str = None


@dataclass
class ListProvidersResponse:
    providers: 'List[ProviderInfo]' = None

    def as_dict(self) -> dict:
        """Serialize, omitting an empty provider list."""
        if not self.providers:
            return {}
        return {'providers': [provider.as_dict() for provider in self.providers]}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'ListProvidersResponse':
        """Deserialize from a response body dict."""
        return cls(providers=_repeated(d, 'providers', ProviderInfo))


@dataclass
class ListRecipientsRequest:
    """List share recipients"""

    data_recipient_global_metastore_id: str = None


@dataclass
class ListRecipientsResponse:
    recipients: 'List[RecipientInfo]' = None

    def as_dict(self) -> dict:
        """Serialize, omitting an empty recipient list."""
        if not self.recipients:
            return {}
        return {'recipients': [recipient.as_dict() for recipient in self.recipients]}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'ListRecipientsResponse':
        """Deserialize from a response body dict."""
        return cls(recipients=_repeated(d, 'recipients', RecipientInfo))


@dataclass
class ListSharesRequest:
    """List shares by Provider"""

    name: str


@dataclass
class ListSharesResponse:
    shares: 'List[ShareInfo]' = None

    def as_dict(self) -> dict:
        """Serialize, omitting an empty share list."""
        if not self.shares:
            return {}
        return {'shares': [share.as_dict() for share in self.shares]}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'ListSharesResponse':
        """Deserialize from a response body dict."""
        return cls(shares=_repeated(d, 'shares', ShareInfo))


@dataclass
class Partition:
    values: 'List[PartitionValue]' = None

    def as_dict(self) -> dict:
        """Serialize, omitting an empty value list."""
        if not self.values:
            return {}
        return {'values': [value.as_dict() for value in self.values]}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'Partition':
        """Deserialize from a body dict."""
        return cls(values=_repeated(d, 'values', PartitionValue))


@dataclass
class PartitionValue:
    name: str = None
    op: 'PartitionValueOp' = None
    recipient_property_key: str = None
    value: str = None

    def as_dict(self) -> dict:
        """Serialize into a body dict, omitting falsy fields; `op` is stored by enum value."""
        body = {}
        for key in ('name', 'op', 'recipient_property_key', 'value'):
            attr = getattr(self, key)
            if attr:
                body[key] = attr.value if key == 'op' else attr
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'PartitionValue':
        """Deserialize from a body dict."""
        kwargs = {k: d.get(k, None) for k in ('name', 'recipient_property_key', 'value')}
        kwargs['op'] = _enum(d, 'op', PartitionValueOp)
        return cls(**kwargs)


class PartitionValueOp(Enum):
    """The operator to apply for the value."""

    EQUAL = 'EQUAL'
    LIKE = 'LIKE'
'CREATE_SHARE' + CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL' + CREATE_TABLE = 'CREATE_TABLE' + CREATE_VIEW = 'CREATE_VIEW' + EXECUTE = 'EXECUTE' + MODIFY = 'MODIFY' + READ_FILES = 'READ_FILES' + READ_PRIVATE_FILES = 'READ_PRIVATE_FILES' + REFRESH = 'REFRESH' + SELECT = 'SELECT' + SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION' + USAGE = 'USAGE' + USE_CATALOG = 'USE_CATALOG' + USE_PROVIDER = 'USE_PROVIDER' + USE_RECIPIENT = 'USE_RECIPIENT' + USE_SCHEMA = 'USE_SCHEMA' + USE_SHARE = 'USE_SHARE' + WRITE_FILES = 'WRITE_FILES' + WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES' + + +@dataclass +class PrivilegeAssignment: + principal: str = None + privileges: 'List[Privilege]' = None + + def as_dict(self) -> dict: + body = {} + if self.principal: body['principal'] = self.principal + if self.privileges: body['privileges'] = [v for v in self.privileges] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PrivilegeAssignment': + return cls(principal=d.get('principal', None), privileges=d.get('privileges', None)) + + +@dataclass +class ProviderInfo: + authentication_type: 'AuthenticationType' = None + cloud: str = None + comment: str = None + created_at: int = None + created_by: str = None + data_provider_global_metastore_id: str = None + metastore_id: str = None + name: str = None + owner: str = None + recipient_profile: 'RecipientProfile' = None + recipient_profile_str: str = None + region: str = None + updated_at: int = None + updated_by: str = None + + def as_dict(self) -> dict: + body = {} + if self.authentication_type: body['authentication_type'] = self.authentication_type.value + if self.cloud: body['cloud'] = self.cloud + if self.comment: body['comment'] = self.comment + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.data_provider_global_metastore_id: + body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id + if self.metastore_id: 
body['metastore_id'] = self.metastore_id + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + if self.recipient_profile: body['recipient_profile'] = self.recipient_profile.as_dict() + if self.recipient_profile_str: body['recipient_profile_str'] = self.recipient_profile_str + if self.region: body['region'] = self.region + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ProviderInfo': + return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), + cloud=d.get('cloud', None), + comment=d.get('comment', None), + created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + data_provider_global_metastore_id=d.get('data_provider_global_metastore_id', None), + metastore_id=d.get('metastore_id', None), + name=d.get('name', None), + owner=d.get('owner', None), + recipient_profile=_from_dict(d, 'recipient_profile', RecipientProfile), + recipient_profile_str=d.get('recipient_profile_str', None), + region=d.get('region', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None)) + + +@dataclass +class ProviderShare: + name: str = None + + def as_dict(self) -> dict: + body = {} + if self.name: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ProviderShare': + return cls(name=d.get('name', None)) + + +@dataclass +class RecipientInfo: + activated: bool = None + activation_url: str = None + authentication_type: 'AuthenticationType' = None + cloud: str = None + comment: str = None + created_at: int = None + created_by: str = None + data_recipient_global_metastore_id: Any = None + ip_access_list: 'IpAccessList' = None + metastore_id: str = None + name: str = None + owner: str = None + properties_kvpairs: Any = None + region: str = None + sharing_code: str = None + tokens: 
'List[RecipientTokenInfo]' = None + updated_at: int = None + updated_by: str = None + + def as_dict(self) -> dict: + body = {} + if self.activated: body['activated'] = self.activated + if self.activation_url: body['activation_url'] = self.activation_url + if self.authentication_type: body['authentication_type'] = self.authentication_type.value + if self.cloud: body['cloud'] = self.cloud + if self.comment: body['comment'] = self.comment + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.data_recipient_global_metastore_id: + body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.metastore_id: body['metastore_id'] = self.metastore_id + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.region: body['region'] = self.region + if self.sharing_code: body['sharing_code'] = self.sharing_code + if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens] + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RecipientInfo': + return cls(activated=d.get('activated', None), + activation_url=d.get('activation_url', None), + authentication_type=_enum(d, 'authentication_type', AuthenticationType), + cloud=d.get('cloud', None), + comment=d.get('comment', None), + created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), + ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), + metastore_id=d.get('metastore_id', None), + name=d.get('name', None), + owner=d.get('owner', None), + 
properties_kvpairs=d.get('properties_kvpairs', None), + region=d.get('region', None), + sharing_code=d.get('sharing_code', None), + tokens=_repeated(d, 'tokens', RecipientTokenInfo), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None)) + + +@dataclass +class RecipientProfile: + bearer_token: str = None + endpoint: str = None + share_credentials_version: int = None + + def as_dict(self) -> dict: + body = {} + if self.bearer_token: body['bearer_token'] = self.bearer_token + if self.endpoint: body['endpoint'] = self.endpoint + if self.share_credentials_version: body['share_credentials_version'] = self.share_credentials_version + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RecipientProfile': + return cls(bearer_token=d.get('bearer_token', None), + endpoint=d.get('endpoint', None), + share_credentials_version=d.get('share_credentials_version', None)) + + +@dataclass +class RecipientTokenInfo: + activation_url: str = None + created_at: int = None + created_by: str = None + expiration_time: int = None + id: str = None + updated_at: int = None + updated_by: str = None + + def as_dict(self) -> dict: + body = {} + if self.activation_url: body['activation_url'] = self.activation_url + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.expiration_time: body['expiration_time'] = self.expiration_time + if self.id: body['id'] = self.id + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RecipientTokenInfo': + return cls(activation_url=d.get('activation_url', None), + created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + expiration_time=d.get('expiration_time', None), + id=d.get('id', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None)) + + +@dataclass 
+class RetrieveTokenRequest: + """Get an access token""" + + activation_url: str + + +@dataclass +class RetrieveTokenResponse: + bearer_token: str = None + endpoint: str = None + expiration_time: str = None + share_credentials_version: int = None + + def as_dict(self) -> dict: + body = {} + if self.bearer_token: body['bearerToken'] = self.bearer_token + if self.endpoint: body['endpoint'] = self.endpoint + if self.expiration_time: body['expirationTime'] = self.expiration_time + if self.share_credentials_version: body['shareCredentialsVersion'] = self.share_credentials_version + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RetrieveTokenResponse': + return cls(bearer_token=d.get('bearerToken', None), + endpoint=d.get('endpoint', None), + expiration_time=d.get('expirationTime', None), + share_credentials_version=d.get('shareCredentialsVersion', None)) + + +@dataclass +class RotateRecipientToken: + existing_token_expire_in_seconds: int + name: str + + def as_dict(self) -> dict: + body = {} + if self.existing_token_expire_in_seconds: + body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds + if self.name: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RotateRecipientToken': + return cls(existing_token_expire_in_seconds=d.get('existing_token_expire_in_seconds', None), + name=d.get('name', None)) + + +@dataclass +class ShareInfo: + comment: str = None + created_at: int = None + created_by: str = None + name: str = None + objects: 'List[SharedDataObject]' = None + owner: str = None + updated_at: int = None + updated_by: str = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.created_at: body['created_at'] = self.created_at + if self.created_by: body['created_by'] = self.created_by + if self.name: body['name'] = self.name + if self.objects: body['objects'] = [v.as_dict() for v in self.objects] + if self.owner: 
body['owner'] = self.owner + if self.updated_at: body['updated_at'] = self.updated_at + if self.updated_by: body['updated_by'] = self.updated_by + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ShareInfo': + return cls(comment=d.get('comment', None), + created_at=d.get('created_at', None), + created_by=d.get('created_by', None), + name=d.get('name', None), + objects=_repeated(d, 'objects', SharedDataObject), + owner=d.get('owner', None), + updated_at=d.get('updated_at', None), + updated_by=d.get('updated_by', None)) + + +@dataclass +class SharePermissionsRequest: + """Get recipient share permissions""" + + name: str + + +@dataclass +class ShareToPrivilegeAssignment: + privilege_assignments: 'List[PrivilegeAssignment]' = None + share_name: str = None + + def as_dict(self) -> dict: + body = {} + if self.privilege_assignments: + body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] + if self.share_name: body['share_name'] = self.share_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ShareToPrivilegeAssignment': + return cls(privilege_assignments=_repeated(d, 'privilege_assignments', PrivilegeAssignment), + share_name=d.get('share_name', None)) + + +@dataclass +class SharedDataObject: + name: str + added_at: int = None + added_by: str = None + cdf_enabled: bool = None + comment: str = None + data_object_type: str = None + partitions: 'List[Partition]' = None + shared_as: str = None + start_version: int = None + status: 'SharedDataObjectStatus' = None + + def as_dict(self) -> dict: + body = {} + if self.added_at: body['added_at'] = self.added_at + if self.added_by: body['added_by'] = self.added_by + if self.cdf_enabled: body['cdf_enabled'] = self.cdf_enabled + if self.comment: body['comment'] = self.comment + if self.data_object_type: body['data_object_type'] = self.data_object_type + if self.name: body['name'] = self.name + if self.partitions: body['partitions'] = [v.as_dict() for v in 
self.partitions] + if self.shared_as: body['shared_as'] = self.shared_as + if self.start_version: body['start_version'] = self.start_version + if self.status: body['status'] = self.status.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SharedDataObject': + return cls(added_at=d.get('added_at', None), + added_by=d.get('added_by', None), + cdf_enabled=d.get('cdf_enabled', None), + comment=d.get('comment', None), + data_object_type=d.get('data_object_type', None), + name=d.get('name', None), + partitions=_repeated(d, 'partitions', Partition), + shared_as=d.get('shared_as', None), + start_version=d.get('start_version', None), + status=_enum(d, 'status', SharedDataObjectStatus)) + + +class SharedDataObjectStatus(Enum): + """One of: **ACTIVE**, **PERMISSION_DENIED**.""" + + ACTIVE = 'ACTIVE' + PERMISSION_DENIED = 'PERMISSION_DENIED' + + +@dataclass +class SharedDataObjectUpdate: + action: 'SharedDataObjectUpdateAction' = None + data_object: 'SharedDataObject' = None + + def as_dict(self) -> dict: + body = {} + if self.action: body['action'] = self.action.value + if self.data_object: body['data_object'] = self.data_object.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SharedDataObjectUpdate': + return cls(action=_enum(d, 'action', SharedDataObjectUpdateAction), + data_object=_from_dict(d, 'data_object', SharedDataObject)) + + +class SharedDataObjectUpdateAction(Enum): + """One of: **ADD**, **REMOVE**, **UPDATE**.""" + + ADD = 'ADD' + REMOVE = 'REMOVE' + UPDATE = 'UPDATE' + + +@dataclass +class UpdateProvider: + name: str + comment: str = None + owner: str = None + recipient_profile_str: str = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + if self.recipient_profile_str: body['recipient_profile_str'] = self.recipient_profile_str + return body + + @classmethod + def 
from_dict(cls, d: Dict[str, any]) -> 'UpdateProvider': + return cls(comment=d.get('comment', None), + name=d.get('name', None), + owner=d.get('owner', None), + recipient_profile_str=d.get('recipient_profile_str', None)) + + +@dataclass +class UpdateRecipient: + name: str + comment: str = None + ip_access_list: 'IpAccessList' = None + owner: str = None + properties_kvpairs: Any = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateRecipient': + return cls(comment=d.get('comment', None), + ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), + name=d.get('name', None), + owner=d.get('owner', None), + properties_kvpairs=d.get('properties_kvpairs', None)) + + +@dataclass +class UpdateShare: + name: str + comment: str = None + owner: str = None + updates: 'List[SharedDataObjectUpdate]' = None + + def as_dict(self) -> dict: + body = {} + if self.comment: body['comment'] = self.comment + if self.name: body['name'] = self.name + if self.owner: body['owner'] = self.owner + if self.updates: body['updates'] = [v.as_dict() for v in self.updates] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateShare': + return cls(comment=d.get('comment', None), + name=d.get('name', None), + owner=d.get('owner', None), + updates=_repeated(d, 'updates', SharedDataObjectUpdate)) + + +@dataclass +class UpdateSharePermissions: + name: str + changes: 'List[PermissionsChange]' = None + + def as_dict(self) -> dict: + body = {} + if self.changes: body['changes'] = [v for v in self.changes] + if self.name: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: 
Dict[str, any]) -> 'UpdateSharePermissions': + return cls(changes=d.get('changes', None), name=d.get('name', None)) + + +class ProvidersAPI: + """Databricks Providers REST API""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + name: str, + authentication_type: AuthenticationType, + *, + comment: str = None, + recipient_profile_str: str = None, + **kwargs) -> ProviderInfo: + """Create an auth provider. + + Creates a new authentication provider minimally based on a name and authentication type. The caller + must be an admin on the metastore.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateProvider(authentication_type=authentication_type, + comment=comment, + name=name, + recipient_profile_str=recipient_profile_str) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.1/unity-catalog/providers', body=body) + return ProviderInfo.from_dict(json) + + def delete(self, name: str, **kwargs): + """Delete a provider. + + Deletes an authentication provider, if the caller is a metastore admin or is the owner of the + provider.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteProviderRequest(name=name) + + self._api.do('DELETE', f'/api/2.1/unity-catalog/providers/{request.name}') + + def get(self, name: str, **kwargs) -> ProviderInfo: + """Get a provider. + + Gets a specific authentication provider. 
The caller must supply the name of the provider, and must + either be a metastore admin or the owner of the provider.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetProviderRequest(name=name) + + json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{request.name}') + return ProviderInfo.from_dict(json) + + def list(self, *, data_provider_global_metastore_id: str = None, **kwargs) -> Iterator[ProviderInfo]: + """List providers. + + Gets an array of available authentication providers. The caller must either be a metastore admin or + the owner of the providers. Providers not owned by the caller are not included in the response. There + is no guarantee of a specific ordering of the elements in the array.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListProvidersRequest( + data_provider_global_metastore_id=data_provider_global_metastore_id) + + query = {} + if data_provider_global_metastore_id: + query['data_provider_global_metastore_id'] = request.data_provider_global_metastore_id + + json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query) + return [ProviderInfo.from_dict(v) for v in json.get('providers', [])] + + def list_shares(self, name: str, **kwargs) -> ListProviderSharesResponse: + """List shares by Provider. 
+ + Gets an array of a specified provider's shares within the metastore where: + + * the caller is a metastore admin, or * the caller is the owner.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListSharesRequest(name=name) + + json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{request.name}/shares') + return ListProviderSharesResponse.from_dict(json) + + def update(self, + name: str, + *, + comment: str = None, + owner: str = None, + recipient_profile_str: str = None, + **kwargs) -> ProviderInfo: + """Update a provider. + + Updates the information for an authentication provider, if the caller is a metastore admin or is the + owner of the provider. If the update changes the provider name, the caller must be both a metastore + admin and the owner of the provider.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateProvider(comment=comment, + name=name, + owner=owner, + recipient_profile_str=recipient_profile_str) + body = request.as_dict() + + json = self._api.do('PATCH', f'/api/2.1/unity-catalog/providers/{request.name}', body=body) + return ProviderInfo.from_dict(json) + + +class RecipientActivationAPI: + """Databricks Recipient Activation REST API""" + + def __init__(self, api_client): + self._api = api_client + + def get_activation_url_info(self, activation_url: str, **kwargs): + """Get a share activation URL. + + Gets an activation URL for a share.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetActivationUrlInfoRequest(activation_url=activation_url) + + self._api.do('GET', + f'/api/2.1/unity-catalog/public/data_sharing_activation_info/{request.activation_url}') + + def retrieve_token(self, activation_url: str, **kwargs) -> RetrieveTokenResponse: + """Get an access token. + + Retrieve access token with an activation url. 
This is a public API without any authentication.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = RetrieveTokenRequest(activation_url=activation_url) + + json = self._api.do( + 'GET', f'/api/2.1/unity-catalog/public/data_sharing_activation/{request.activation_url}') + return RetrieveTokenResponse.from_dict(json) + + +class RecipientsAPI: + """Databricks Recipients REST API""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + name: str, + authentication_type: AuthenticationType, + *, + comment: str = None, + data_recipient_global_metastore_id: Any = None, + ip_access_list: IpAccessList = None, + owner: str = None, + properties_kvpairs: Any = None, + sharing_code: str = None, + **kwargs) -> RecipientInfo: + """Create a share recipient. + + Creates a new recipient with the delta sharing authentication type in the metastore. The caller must + be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateRecipient(authentication_type=authentication_type, + comment=comment, + data_recipient_global_metastore_id=data_recipient_global_metastore_id, + ip_access_list=ip_access_list, + name=name, + owner=owner, + properties_kvpairs=properties_kvpairs, + sharing_code=sharing_code) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.1/unity-catalog/recipients', body=body) + return RecipientInfo.from_dict(json) + + def delete(self, name: str, **kwargs): + """Delete a share recipient. + + Deletes the specified recipient from the metastore. 
The caller must be the owner of the recipient.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteRecipientRequest(name=name) + + self._api.do('DELETE', f'/api/2.1/unity-catalog/recipients/{request.name}') + + def get(self, name: str, **kwargs) -> RecipientInfo: + """Get a share recipient. + + Gets a share recipient from the metastore if: + + * the caller is the owner of the share recipient, or: * is a metastore admin""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetRecipientRequest(name=name) + + json = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{request.name}') + return RecipientInfo.from_dict(json) + + def list(self, *, data_recipient_global_metastore_id: str = None, **kwargs) -> Iterator[RecipientInfo]: + """List share recipients. + + Gets an array of all share recipients within the current metastore where: + + * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific + ordering of the elements in the array.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListRecipientsRequest( + data_recipient_global_metastore_id=data_recipient_global_metastore_id) + + query = {} + if data_recipient_global_metastore_id: + query['data_recipient_global_metastore_id'] = request.data_recipient_global_metastore_id + + json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query) + return [RecipientInfo.from_dict(v) for v in json.get('recipients', [])] + + def rotate_token(self, existing_token_expire_in_seconds: int, name: str, **kwargs) -> RecipientInfo: + """Rotate a token. + + Refreshes the specified recipient's delta sharing authentication token with the provided token info. 
+ The caller must be the owner of the recipient.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = RotateRecipientToken(existing_token_expire_in_seconds=existing_token_expire_in_seconds, + name=name) + body = request.as_dict() + + json = self._api.do('POST', + f'/api/2.1/unity-catalog/recipients/{request.name}/rotate-token', + body=body) + return RecipientInfo.from_dict(json) + + def share_permissions(self, name: str, **kwargs) -> GetRecipientSharePermissionsResponse: + """Get recipient share permissions. + + Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the + owner of the Recipient.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = SharePermissionsRequest(name=name) + + json = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{request.name}/share-permissions') + return GetRecipientSharePermissionsResponse.from_dict(json) + + def update(self, + name: str, + *, + comment: str = None, + ip_access_list: IpAccessList = None, + owner: str = None, + properties_kvpairs: Any = None, + **kwargs): + """Update a share recipient. + + Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of + the recipient. 
If the recipient name will be updated, the user must be both a metastore admin and the + owner of the recipient.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateRecipient(comment=comment, + ip_access_list=ip_access_list, + name=name, + owner=owner, + properties_kvpairs=properties_kvpairs) + body = request.as_dict() + self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{request.name}', body=body) + + +class SharesAPI: + """Databricks Shares REST API""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, name: str, *, comment: str = None, **kwargs) -> ShareInfo: + """Create a share. + + Creates a new share for data objects. Data objects can be added after creation with **update**. The + caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateShare(comment=comment, name=name) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.1/unity-catalog/shares', body=body) + return ShareInfo.from_dict(json) + + def delete(self, name: str, **kwargs): + """Delete a share. + + Deletes a data object share from the metastore. The caller must be an owner of the share.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteShareRequest(name=name) + + self._api.do('DELETE', f'/api/2.1/unity-catalog/shares/{request.name}') + + def get(self, name: str, *, include_shared_data: bool = None, **kwargs) -> ShareInfo: + """Get a share. + + Gets a data object share from the metastore. 
The caller must be a metastore admin or the owner of the + share.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetShareRequest(include_shared_data=include_shared_data, name=name) + + query = {} + if include_shared_data: query['include_shared_data'] = request.include_shared_data + + json = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{request.name}', query=query) + return ShareInfo.from_dict(json) + + def list(self) -> Iterator[ShareInfo]: + """List shares. + + Gets an array of data object shares from the metastore. The caller must be a metastore admin or the + owner of the share. There is no guarantee of a specific ordering of the elements in the array.""" + + json = self._api.do('GET', '/api/2.1/unity-catalog/shares') + return [ShareInfo.from_dict(v) for v in json.get('shares', [])] + + def share_permissions(self, name: str, **kwargs) -> PermissionsList: + """Get permissions. + + Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the + owner of the share.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = SharePermissionsRequest(name=name) + + json = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{request.name}/permissions') + return PermissionsList.from_dict(json) + + def update(self, + name: str, + *, + comment: str = None, + owner: str = None, + updates: List[SharedDataObjectUpdate] = None, + **kwargs) -> ShareInfo: + """Update a share. + + Updates the share with the changes and data objects in the request. The caller must be the owner of + the share or a metastore admin. + + When the caller is a metastore admin, only the __owner__ field can be updated. + + In the case that the share name is changed, **updateShare** requires that the caller is both the share + owner and a metastore admin. 
+ + For each table that is added through this method, the share owner must also have **SELECT** privilege + on the table. This privilege must be maintained indefinitely for recipients to be able to access the + table. Typically, you should use a group as the share owner. + + Table removals through **update** do not require additional privileges.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateShare(comment=comment, name=name, owner=owner, updates=updates) + body = request.as_dict() + + json = self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{request.name}', body=body) + return ShareInfo.from_dict(json) + + def update_permissions(self, name: str, *, changes: List[PermissionsChange] = None, **kwargs): + """Update permissions. + + Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an + owner of the share. + + For new recipient grants, the user must also be the owner of the recipients. recipient revocations do + not require additional privileges.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateSharePermissions(changes=changes, name=name) + body = request.as_dict() + self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{request.name}/permissions', body=body) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 9703f3a0b..fcdecb908 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -162,6 +162,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ChannelInfo': class ChannelName(Enum): + """Name of the channel""" CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' @@ -2971,13 +2972,10 @@ class StatementExecutionAPI: the final state. - Wait timeouts are approximate, occur server-side, and cannot account for caller delays, network latency from caller to service, and similarly. 
- After a statement has been submitted and a statement_id is returned, that statement's status and result will automatically close after either of 2 - conditions: - The last result chunk is fetched (or resolved to an external link). - Ten (10) minutes pass - with no calls to get status or fetch result data. Best practice: in asynchronous clients, poll for status - regularly (and with backoff) to keep the statement open and alive. - After a `CANCEL` or `CLOSE` - operation, the statement will no longer be visible from the API which means that a subsequent poll request - may return an HTTP 404 NOT FOUND error. - After fetching the last result chunk (including chunk_index=0), - the statement is closed; shortly after closure the statement will no longer be visible to the API and so, - further calls such as :method:statementexecution/getStatement may return an HTTP 404 NOT FOUND error. + conditions: - The last result chunk is fetched (or resolved to an external link). - One hour passes with + no calls to get the status or fetch the result. Best practice: in asynchronous clients, poll for status + regularly (and with backoff) to keep the statement open and alive. - After fetching the last result chunk + (including chunk_index=0) the statement is automatically closed. [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Public Preview]: https://docs.databricks.com/release-notes/release-types.html @@ -3031,8 +3029,11 @@ def execute_statement(self, def get_statement(self, statement_id: str, **kwargs) -> GetStatementResponse: """Get status, manifest, and result first chunk. - Polls for the statement's status; when `status.state=SUCCEEDED` it will also return the result - manifest and the first chunk of the result data. + This request can be used to poll for the statement's status. When the `status.state` field is + `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. 
When the + statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the + state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and + further calls will receive an HTTP 404 response. **NOTE** This call currently may take up to 5 seconds to get the latest status and result.""" request = kwargs.get('request', None) @@ -3045,12 +3046,11 @@ def get_statement(self, statement_id: str, **kwargs) -> GetStatementResponse: def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int, **kwargs) -> ResultData: """Get result chunk by index. - After statement execution has SUCCEEDED, result data can be fetched by chunks. - - The first chunk (`chunk_index=0`) is typically fetched through `getStatementResult`, and subsequent - chunks with this call. The response structure is identical to the nested `result` element described in - getStatementResult, and similarly includes `next_chunk_index` and `next_chunk_internal_link` for - simple iteration through the result set.""" + After the statement execution has `SUCCEEDED`, the result data can be fetched by chunks. Whereas the + first chunk with `chunk_index=0` is typically fetched through a `get status` request, subsequent + chunks can be fetched using a `get result` request. 
The response structure is identical to the nested + `result` element described in the `get status` request, and similarly includes the `next_chunk_index` + and `next_chunk_internal_link` fields for simple iteration through the result set.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args request = GetStatementResultChunkNRequest(chunk_index=chunk_index, statement_id=statement_id) diff --git a/databricks/sdk/service/tokenmanagement.py b/databricks/sdk/service/tokenmanagement.py deleted file mode 100755 index 0af3587aa..000000000 --- a/databricks/sdk/service/tokenmanagement.py +++ /dev/null @@ -1,182 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from typing import Dict, Iterator, List - -from ._internal import _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateOboTokenRequest: - application_id: str - lifetime_seconds: int - comment: str = None - - def as_dict(self) -> dict: - body = {} - if self.application_id: body['application_id'] = self.application_id - if self.comment: body['comment'] = self.comment - if self.lifetime_seconds: body['lifetime_seconds'] = self.lifetime_seconds - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateOboTokenRequest': - return cls(application_id=d.get('application_id', None), - comment=d.get('comment', None), - lifetime_seconds=d.get('lifetime_seconds', None)) - - -@dataclass -class CreateOboTokenResponse: - token_info: 'TokenInfo' = None - token_value: str = None - - def as_dict(self) -> dict: - body = {} - if self.token_info: body['token_info'] = self.token_info.as_dict() - if self.token_value: body['token_value'] = self.token_value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateOboTokenResponse': - return 
cls(token_info=_from_dict(d, 'token_info', TokenInfo), token_value=d.get('token_value', None)) - - -@dataclass -class Delete: - """Delete a token""" - - token_id: str - - -@dataclass -class Get: - """Get token info""" - - token_id: str - - -@dataclass -class ListRequest: - """List all tokens""" - - created_by_id: str = None - created_by_username: str = None - - -@dataclass -class ListTokensResponse: - token_infos: 'List[TokenInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListTokensResponse': - return cls(token_infos=_repeated(d, 'token_infos', TokenInfo)) - - -@dataclass -class TokenInfo: - comment: str = None - created_by_id: int = None - created_by_username: str = None - creation_time: int = None - expiry_time: int = None - owner_id: int = None - token_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.created_by_id: body['created_by_id'] = self.created_by_id - if self.created_by_username: body['created_by_username'] = self.created_by_username - if self.creation_time: body['creation_time'] = self.creation_time - if self.expiry_time: body['expiry_time'] = self.expiry_time - if self.owner_id: body['owner_id'] = self.owner_id - if self.token_id: body['token_id'] = self.token_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'TokenInfo': - return cls(comment=d.get('comment', None), - created_by_id=d.get('created_by_id', None), - created_by_username=d.get('created_by_username', None), - creation_time=d.get('creation_time', None), - expiry_time=d.get('expiry_time', None), - owner_id=d.get('owner_id', None), - token_id=d.get('token_id', None)) - - -class TokenManagementAPI: - """Enables administrators to get all tokens and delete tokens for other users. 
Admins can either get every - token, get a specific token by ID, or get all tokens for a particular user.""" - - def __init__(self, api_client): - self._api = api_client - - def create_obo_token(self, - application_id: str, - lifetime_seconds: int, - *, - comment: str = None, - **kwargs) -> CreateOboTokenResponse: - """Create on-behalf token. - - Creates a token on behalf of a service principal.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateOboTokenRequest(application_id=application_id, - comment=comment, - lifetime_seconds=lifetime_seconds) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/token-management/on-behalf-of/tokens', body=body) - return CreateOboTokenResponse.from_dict(json) - - def delete(self, token_id: str, **kwargs): - """Delete a token. - - Deletes a token, specified by its ID.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Delete(token_id=token_id) - - self._api.do('DELETE', f'/api/2.0/token-management/tokens/{request.token_id}') - - def get(self, token_id: str, **kwargs) -> TokenInfo: - """Get token info. - - Gets information about a token, specified by its ID.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = Get(token_id=token_id) - - json = self._api.do('GET', f'/api/2.0/token-management/tokens/{request.token_id}') - return TokenInfo.from_dict(json) - - def list(self, - *, - created_by_id: str = None, - created_by_username: str = None, - **kwargs) -> Iterator[TokenInfo]: - """List all tokens. 
- - Lists all tokens associated with the specified workspace or user.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = ListRequest(created_by_id=created_by_id, created_by_username=created_by_username) - - query = {} - if created_by_id: query['created_by_id'] = request.created_by_id - if created_by_username: query['created_by_username'] = request.created_by_username - - json = self._api.do('GET', '/api/2.0/token-management/tokens', query=query) - return [TokenInfo.from_dict(v) for v in json.get('token_infos', [])] diff --git a/databricks/sdk/service/tokens.py b/databricks/sdk/service/tokens.py deleted file mode 100755 index 46c40064a..000000000 --- a/databricks/sdk/service/tokens.py +++ /dev/null @@ -1,137 +0,0 @@ -# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -import logging -from dataclasses import dataclass -from typing import Dict, Iterator, List - -from ._internal import _from_dict, _repeated - -_LOG = logging.getLogger('databricks.sdk') - -# all definitions in this file are in alphabetical order - - -@dataclass -class CreateTokenRequest: - comment: str = None - lifetime_seconds: int = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.lifetime_seconds: body['lifetime_seconds'] = self.lifetime_seconds - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateTokenRequest': - return cls(comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None)) - - -@dataclass -class CreateTokenResponse: - token_info: 'PublicTokenInfo' = None - token_value: str = None - - def as_dict(self) -> dict: - body = {} - if self.token_info: body['token_info'] = self.token_info.as_dict() - if self.token_value: body['token_value'] = self.token_value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateTokenResponse': - return cls(token_info=_from_dict(d, 
'token_info', PublicTokenInfo), - token_value=d.get('token_value', None)) - - -@dataclass -class ListTokensResponse: - token_infos: 'List[PublicTokenInfo]' = None - - def as_dict(self) -> dict: - body = {} - if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ListTokensResponse': - return cls(token_infos=_repeated(d, 'token_infos', PublicTokenInfo)) - - -@dataclass -class PublicTokenInfo: - comment: str = None - creation_time: int = None - expiry_time: int = None - token_id: str = None - - def as_dict(self) -> dict: - body = {} - if self.comment: body['comment'] = self.comment - if self.creation_time: body['creation_time'] = self.creation_time - if self.expiry_time: body['expiry_time'] = self.expiry_time - if self.token_id: body['token_id'] = self.token_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'PublicTokenInfo': - return cls(comment=d.get('comment', None), - creation_time=d.get('creation_time', None), - expiry_time=d.get('expiry_time', None), - token_id=d.get('token_id', None)) - - -@dataclass -class RevokeTokenRequest: - token_id: str - - def as_dict(self) -> dict: - body = {} - if self.token_id: body['token_id'] = self.token_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'RevokeTokenRequest': - return cls(token_id=d.get('token_id', None)) - - -class TokensAPI: - """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access - Databricks REST APIs.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, *, comment: str = None, lifetime_seconds: int = None, **kwargs) -> CreateTokenResponse: - """Create a user token. - - Creates and returns a token for a user. If this call is made through token authentication, it creates - a token with the same client ID as the authenticated token. 
If the user's token quota is exceeded, - this call returns an error **QUOTA_EXCEEDED**.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = CreateTokenRequest(comment=comment, lifetime_seconds=lifetime_seconds) - body = request.as_dict() - - json = self._api.do('POST', '/api/2.0/token/create', body=body) - return CreateTokenResponse.from_dict(json) - - def delete(self, token_id: str, **kwargs): - """Revoke token. - - Revokes an access token. - - If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**.""" - request = kwargs.get('request', None) - if not request: # request is not given through keyed args - request = RevokeTokenRequest(token_id=token_id) - body = request.as_dict() - self._api.do('POST', '/api/2.0/token/delete', body=body) - - def list(self) -> Iterator[PublicTokenInfo]: - """List tokens. - - Lists all the valid tokens for a user-workspace pair.""" - - json = self._api.do('GET', '/api/2.0/token/list') - return [PublicTokenInfo.from_dict(v) for v in json.get('token_infos', [])] diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 15113198c..1604ce3b2 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -5,13 +5,158 @@ from enum import Enum from typing import Dict, Iterator, List -from ._internal import _enum, _repeated +from ._internal import _enum, _from_dict, _repeated _LOG = logging.getLogger('databricks.sdk') # all definitions in this file are in alphabetical order +@dataclass +class AclItem: + principal: str + permission: 'AclPermission' + + def as_dict(self) -> dict: + body = {} + if self.permission: body['permission'] = self.permission.value + if self.principal: body['principal'] = self.principal + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AclItem': + return cls(permission=_enum(d, 'permission', AclPermission), 
principal=d.get('principal', None)) + + +class AclPermission(Enum): + + MANAGE = 'MANAGE' + READ = 'READ' + WRITE = 'WRITE' + + +@dataclass +class AzureKeyVaultSecretScopeMetadata: + resource_id: str + dns_name: str + + def as_dict(self) -> dict: + body = {} + if self.dns_name: body['dns_name'] = self.dns_name + if self.resource_id: body['resource_id'] = self.resource_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AzureKeyVaultSecretScopeMetadata': + return cls(dns_name=d.get('dns_name', None), resource_id=d.get('resource_id', None)) + + +@dataclass +class CreateCredentials: + git_provider: str + git_username: str = None + personal_access_token: str = None + + def as_dict(self) -> dict: + body = {} + if self.git_provider: body['git_provider'] = self.git_provider + if self.git_username: body['git_username'] = self.git_username + if self.personal_access_token: body['personal_access_token'] = self.personal_access_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateCredentials': + return cls(git_provider=d.get('git_provider', None), + git_username=d.get('git_username', None), + personal_access_token=d.get('personal_access_token', None)) + + +@dataclass +class CreateCredentialsResponse: + credential_id: int = None + git_provider: str = None + git_username: str = None + + def as_dict(self) -> dict: + body = {} + if self.credential_id: body['credential_id'] = self.credential_id + if self.git_provider: body['git_provider'] = self.git_provider + if self.git_username: body['git_username'] = self.git_username + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateCredentialsResponse': + return cls(credential_id=d.get('credential_id', None), + git_provider=d.get('git_provider', None), + git_username=d.get('git_username', None)) + + +@dataclass +class CreateRepo: + url: str + provider: str + path: str = None + sparse_checkout: 'SparseCheckout' = None + + def as_dict(self) -> dict: + body 
= {} + if self.path: body['path'] = self.path + if self.provider: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.url: body['url'] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateRepo': + return cls(path=d.get('path', None), + provider=d.get('provider', None), + sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), + url=d.get('url', None)) + + +@dataclass +class CreateScope: + scope: str + initial_manage_principal: str = None + keyvault_metadata: 'AzureKeyVaultSecretScopeMetadata' = None + scope_backend_type: 'ScopeBackendType' = None + + def as_dict(self) -> dict: + body = {} + if self.initial_manage_principal: body['initial_manage_principal'] = self.initial_manage_principal + if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict() + if self.scope: body['scope'] = self.scope + if self.scope_backend_type: body['scope_backend_type'] = self.scope_backend_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateScope': + return cls(initial_manage_principal=d.get('initial_manage_principal', None), + keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata), + scope=d.get('scope', None), + scope_backend_type=_enum(d, 'scope_backend_type', ScopeBackendType)) + + +@dataclass +class CredentialInfo: + credential_id: int = None + git_provider: str = None + git_username: str = None + + def as_dict(self) -> dict: + body = {} + if self.credential_id: body['credential_id'] = self.credential_id + if self.git_provider: body['git_provider'] = self.git_provider + if self.git_username: body['git_username'] = self.git_username + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CredentialInfo': + return cls(credential_id=d.get('credential_id', None), + git_provider=d.get('git_provider', None), + git_username=d.get('git_username', 
None)) + + @dataclass class Delete: path: str @@ -29,18 +174,74 @@ def from_dict(cls, d: Dict[str, any]) -> 'Delete': @dataclass -class Export: - """Export a notebook""" + class DeleteAcl: + scope: str + principal: str - path: str - direct_download: bool = None - format: 'ExportFormat' = None + def as_dict(self) -> dict: + body = {} + if self.principal: body['principal'] = self.principal + if self.scope: body['scope'] = self.scope + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteAcl': + return cls(principal=d.get('principal', None), scope=d.get('scope', None)) + + +@dataclass +class DeleteGitCredentialRequest: + """Delete a credential""" + + credential_id: int + + +@dataclass +class DeleteRepoRequest: + """Delete a repo""" + + repo_id: int + + +@dataclass +class DeleteScope: + scope: str + + def as_dict(self) -> dict: + body = {} + if self.scope: body['scope'] = self.scope + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteScope': + return cls(scope=d.get('scope', None)) + + +@dataclass +class DeleteSecret: + scope: str + key: str + + def as_dict(self) -> dict: + body = {} + if self.key: body['key'] = self.key + if self.scope: body['scope'] = self.scope + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteSecret': + return cls(key=d.get('key', None), scope=d.get('scope', None)) class ExportFormat(Enum): - """This specifies the format of the file to be imported. By default, this is `SOURCE`. However it - may be one of: `SOURCE`, `HTML`, `JUPYTER`, `DBC`. The value is case sensitive.""" + """This specifies the format of the file to be imported. By default, this is `SOURCE`. + + If using `AUTO` the item is imported or exported as either a workspace file or a + notebook, depending on an analysis of the item’s extension and the header content provided in + the request. The value is case sensitive. 
In addition, if the item is imported as a notebook, + then the item’s extension is automatically removed.""" + AUTO = 'AUTO' DBC = 'DBC' HTML = 'HTML' JUPYTER = 'JUPYTER' @@ -48,6 +249,15 @@ class ExportFormat(Enum): SOURCE = 'SOURCE' +@dataclass +class ExportRequest: + """Export a workspace object""" + + path: str + direct_download: bool = None + format: 'ExportFormat' = None + + @dataclass class ExportResponse: content: str = None @@ -63,7 +273,43 @@ def from_dict(cls, d: Dict[str, any]) -> 'ExportResponse': @dataclass -class GetStatus: +class GetAclRequest: + """Get secret ACL details""" + + scope: str + principal: str + + +@dataclass +class GetCredentialsResponse: + credentials: 'List[CredentialInfo]' = None + + def as_dict(self) -> dict: + body = {} + if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetCredentialsResponse': + return cls(credentials=_repeated(d, 'credentials', CredentialInfo)) + + +@dataclass +class GetGitCredentialRequest: + """Get a credential entry""" + + credential_id: int + + +@dataclass +class GetRepoRequest: + """Get a repo""" + + repo_id: int + + +@dataclass +class GetStatusRequest: """Get status""" path: str @@ -105,11 +351,48 @@ class Language(Enum): @dataclass -class ListRequest: - """List contents""" +class ListAclsRequest: + """Lists ACLs""" - path: str - notebooks_modified_after: int = None + scope: str + + +@dataclass +class ListAclsResponse: + items: 'List[AclItem]' = None + + def as_dict(self) -> dict: + body = {} + if self.items: body['items'] = [v.as_dict() for v in self.items] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListAclsResponse': + return cls(items=_repeated(d, 'items', AclItem)) + + +@dataclass +class ListReposRequest: + """Get repos""" + + next_page_token: str = None + path_prefix: str = None + + +@dataclass +class ListReposResponse: + next_page_token: str = None + repos: 
'List[RepoInfo]' = None + + def as_dict(self) -> dict: + body = {} + if self.next_page_token: body['next_page_token'] = self.next_page_token + if self.repos: body['repos'] = [v.as_dict() for v in self.repos] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListReposResponse': + return cls(next_page_token=d.get('next_page_token', None), repos=_repeated(d, 'repos', RepoInfo)) @dataclass @@ -126,6 +409,49 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListResponse': return cls(objects=_repeated(d, 'objects', ObjectInfo)) +@dataclass +class ListScopesResponse: + scopes: 'List[SecretScope]' = None + + def as_dict(self) -> dict: + body = {} + if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListScopesResponse': + return cls(scopes=_repeated(d, 'scopes', SecretScope)) + + +@dataclass +class ListSecretsRequest: + """List secret keys""" + + scope: str + + +@dataclass +class ListSecretsResponse: + secrets: 'List[SecretMetadata]' = None + + def as_dict(self) -> dict: + body = {} + if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListSecretsResponse': + return cls(secrets=_repeated(d, 'secrets', SecretMetadata)) + + +@dataclass +class ListWorkspaceRequest: + """List contents""" + + path: str + notebooks_modified_after: int = None + + @dataclass class Mkdirs: path: str @@ -182,6 +508,575 @@ class ObjectType(Enum): REPO = 'REPO' +@dataclass +class PutAcl: + scope: str + principal: str + permission: 'AclPermission' + + def as_dict(self) -> dict: + body = {} + if self.permission: body['permission'] = self.permission.value + if self.principal: body['principal'] = self.principal + if self.scope: body['scope'] = self.scope + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PutAcl': + return cls(permission=_enum(d, 'permission', AclPermission), + 
principal=d.get('principal', None), + scope=d.get('scope', None)) + + +@dataclass +class PutSecret: + scope: str + key: str + bytes_value: str = None + string_value: str = None + + def as_dict(self) -> dict: + body = {} + if self.bytes_value: body['bytes_value'] = self.bytes_value + if self.key: body['key'] = self.key + if self.scope: body['scope'] = self.scope + if self.string_value: body['string_value'] = self.string_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'PutSecret': + return cls(bytes_value=d.get('bytes_value', None), + key=d.get('key', None), + scope=d.get('scope', None), + string_value=d.get('string_value', None)) + + +@dataclass +class RepoInfo: + branch: str = None + head_commit_id: str = None + id: int = None + path: str = None + provider: str = None + sparse_checkout: 'SparseCheckout' = None + url: str = None + + def as_dict(self) -> dict: + body = {} + if self.branch: body['branch'] = self.branch + if self.head_commit_id: body['head_commit_id'] = self.head_commit_id + if self.id: body['id'] = self.id + if self.path: body['path'] = self.path + if self.provider: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.url: body['url'] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'RepoInfo': + return cls(branch=d.get('branch', None), + head_commit_id=d.get('head_commit_id', None), + id=d.get('id', None), + path=d.get('path', None), + provider=d.get('provider', None), + sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), + url=d.get('url', None)) + + +class ScopeBackendType(Enum): + + AZURE_KEYVAULT = 'AZURE_KEYVAULT' + DATABRICKS = 'DATABRICKS' + + +@dataclass +class SecretMetadata: + key: str = None + last_updated_timestamp: int = None + + def as_dict(self) -> dict: + body = {} + if self.key: body['key'] = self.key + if self.last_updated_timestamp: body['last_updated_timestamp'] = 
self.last_updated_timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SecretMetadata': + return cls(key=d.get('key', None), last_updated_timestamp=d.get('last_updated_timestamp', None)) + + +@dataclass +class SecretScope: + backend_type: 'ScopeBackendType' = None + keyvault_metadata: 'AzureKeyVaultSecretScopeMetadata' = None + name: str = None + + def as_dict(self) -> dict: + body = {} + if self.backend_type: body['backend_type'] = self.backend_type.value + if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict() + if self.name: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SecretScope': + return cls(backend_type=_enum(d, 'backend_type', ScopeBackendType), + keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata), + name=d.get('name', None)) + + +@dataclass +class SparseCheckout: + patterns: 'List[str]' = None + + def as_dict(self) -> dict: + body = {} + if self.patterns: body['patterns'] = [v for v in self.patterns] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SparseCheckout': + return cls(patterns=d.get('patterns', None)) + + +@dataclass +class SparseCheckoutUpdate: + patterns: 'List[str]' = None + + def as_dict(self) -> dict: + body = {} + if self.patterns: body['patterns'] = [v for v in self.patterns] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'SparseCheckoutUpdate': + return cls(patterns=d.get('patterns', None)) + + +@dataclass +class UpdateCredentials: + credential_id: int + git_provider: str = None + git_username: str = None + personal_access_token: str = None + + def as_dict(self) -> dict: + body = {} + if self.credential_id: body['credential_id'] = self.credential_id + if self.git_provider: body['git_provider'] = self.git_provider + if self.git_username: body['git_username'] = self.git_username + if self.personal_access_token: 
body['personal_access_token'] = self.personal_access_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateCredentials': + return cls(credential_id=d.get('credential_id', None), + git_provider=d.get('git_provider', None), + git_username=d.get('git_username', None), + personal_access_token=d.get('personal_access_token', None)) + + +@dataclass +class UpdateRepo: + repo_id: int + branch: str = None + sparse_checkout: 'SparseCheckoutUpdate' = None + tag: str = None + + def as_dict(self) -> dict: + body = {} + if self.branch: body['branch'] = self.branch + if self.repo_id: body['repo_id'] = self.repo_id + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.tag: body['tag'] = self.tag + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'UpdateRepo': + return cls(branch=d.get('branch', None), + repo_id=d.get('repo_id', None), + sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate), + tag=d.get('tag', None)) + + +class GitCredentialsAPI: + """Registers personal access token for Databricks to do operations on behalf of the user. + + See [more info]. + + [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + git_provider: str, + *, + git_username: str = None, + personal_access_token: str = None, + **kwargs) -> CreateCredentialsResponse: + """Create a credential entry. + + Creates a Git credential entry for the user. Only one Git credential per user is supported, so any + attempts to create credentials if an entry already exists will fail. 
Use the PATCH endpoint to update + existing credentials, or the DELETE endpoint to delete existing credentials.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateCredentials(git_provider=git_provider, + git_username=git_username, + personal_access_token=personal_access_token) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/git-credentials', body=body) + return CreateCredentialsResponse.from_dict(json) + + def delete(self, credential_id: int, **kwargs): + """Delete a credential. + + Deletes the specified Git credential.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteGitCredentialRequest(credential_id=credential_id) + + self._api.do('DELETE', f'/api/2.0/git-credentials/{request.credential_id}') + + def get(self, credential_id: int, **kwargs) -> CredentialInfo: + """Get a credential entry. + + Gets the Git credential with the specified credential ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetGitCredentialRequest(credential_id=credential_id) + + json = self._api.do('GET', f'/api/2.0/git-credentials/{request.credential_id}') + return CredentialInfo.from_dict(json) + + def list(self) -> Iterator[CredentialInfo]: + """Get Git credentials. + + Lists the calling user's Git credentials. One credential per user is supported.""" + + json = self._api.do('GET', '/api/2.0/git-credentials') + return [CredentialInfo.from_dict(v) for v in json.get('credentials', [])] + + def update(self, + credential_id: int, + *, + git_provider: str = None, + git_username: str = None, + personal_access_token: str = None, + **kwargs): + """Update a credential. 
+ + Updates the specified Git credential.""" request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = UpdateCredentials(credential_id=credential_id, + git_provider=git_provider, + git_username=git_username, + personal_access_token=personal_access_token) + body = request.as_dict() + self._api.do('PATCH', f'/api/2.0/git-credentials/{request.credential_id}', body=body) + + +class ReposAPI: + """The Repos API allows users to manage their git repos. Users can use the API to access all repos that they + have manage permissions on. + + Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a + repository, committing and pushing, pulling, branch management, and visual comparison of diffs when + committing. + + Within Repos you can develop code in notebooks or other files and follow data science and engineering code + development best practices using Git for version control, collaboration, and CI/CD.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + url: str, + provider: str, + *, + path: str = None, + sparse_checkout: SparseCheckout = None, + **kwargs) -> RepoInfo: + """Create a repo. + + Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created + programmatically must be linked to a remote Git repo, unlike repos created in the browser.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = CreateRepo(path=path, provider=provider, sparse_checkout=sparse_checkout, url=url) + body = request.as_dict() + + json = self._api.do('POST', '/api/2.0/repos', body=body) + return RepoInfo.from_dict(json) + + def delete(self, repo_id: int, **kwargs): + """Delete a repo. 
+ + Deletes the specified repo.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = DeleteRepoRequest(repo_id=repo_id) + + self._api.do('DELETE', f'/api/2.0/repos/{request.repo_id}') + + def get(self, repo_id: int, **kwargs) -> RepoInfo: + """Get a repo. + + Returns the repo with the given repo ID.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = GetRepoRequest(repo_id=repo_id) + + json = self._api.do('GET', f'/api/2.0/repos/{request.repo_id}') + return RepoInfo.from_dict(json) + + def list(self, *, next_page_token: str = None, path_prefix: str = None, **kwargs) -> Iterator[RepoInfo]: + """Get repos. + + Returns repos that the calling user has Manage permissions on. Results are paginated with each page + containing twenty repos.""" + request = kwargs.get('request', None) + if not request: # request is not given through keyed args + request = ListReposRequest(next_page_token=next_page_token, path_prefix=path_prefix) + + query = {} + if next_page_token: query['next_page_token'] = request.next_page_token + if path_prefix: query['path_prefix'] = request.path_prefix + + while True: + json = self._api.do('GET', '/api/2.0/repos', query=query) + if 'repos' not in json or not json['repos']: + return + for v in json['repos']: + yield RepoInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['next_page_token'] = json['next_page_token'] + + def update(self, + repo_id: int, + *, + branch: str = None, + sparse_checkout: SparseCheckoutUpdate = None, + tag: str = None, + **kwargs): + """Update a repo. 
class SecretsAPI:
    """The Secrets API allows you to manage secrets, secret scopes, and access permissions.

    Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of
    directly entering your credentials into a notebook, use Databricks secrets to store your credentials and
    reference them in notebooks and jobs.

    Administrators, secret creators, and users granted permission can read Databricks secrets. While
    Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not
    possible to prevent such users from reading secrets."""

    def __init__(self, api_client):
        # low-level HTTP client; every method funnels through self._api.do(...)
        self._api = api_client

    def create_scope(self,
                     scope: str,
                     *,
                     initial_manage_principal: str = None,
                     keyvault_metadata: AzureKeyVaultSecretScopeMetadata = None,
                     scope_backend_type: ScopeBackendType = None,
                     **kwargs):
        """Create a new secret scope.

        The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not
        exceed 128 characters. The maximum number of scopes in a workspace is 100."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = CreateScope(initial_manage_principal=initial_manage_principal,
                                  keyvault_metadata=keyvault_metadata,
                                  scope=scope,
                                  scope_backend_type=scope_backend_type)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/secrets/scopes/create', body=body)

    def delete_acl(self, scope: str, principal: str, **kwargs):
        """Delete an ACL.

        Deletes the given ACL on the given scope.

        Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no
        such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have
        permission to make this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = DeleteAcl(principal=principal, scope=scope)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/secrets/acls/delete', body=body)

    def delete_scope(self, scope: str, **kwargs):
        """Delete a secret scope.

        Deletes a secret scope.

        Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user
        does not have permission to make this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = DeleteScope(scope=scope)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/secrets/scopes/delete', body=body)

    def delete_secret(self, scope: str, key: str, **kwargs):
        """Delete a secret.

        Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the
        secret scope.

        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED`
        if the user does not have permission to make this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = DeleteSecret(key=key, scope=scope)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/secrets/delete', body=body)

    def get_acl(self, scope: str, principal: str, **kwargs) -> AclItem:
        """Get secret ACL details.

        Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE`
        permission to invoke this API.

        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
        user does not have permission to make this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = GetAclRequest(principal=principal, scope=scope)

        query = {}
        # Build the query from the request object's fields so that callers who
        # supply a pre-built ``request=`` kwarg still get the required query
        # parameters (previously the locals were tested, yielding an empty query).
        if request.principal: query['principal'] = request.principal
        if request.scope: query['scope'] = request.scope

        json = self._api.do('GET', '/api/2.0/secrets/acls/get', query=query)
        return AclItem.from_dict(json)

    def list_acls(self, scope: str, **kwargs) -> Iterator[AclItem]:
        """Lists ACLs.

        List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.

        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
        user does not have permission to make this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = ListAclsRequest(scope=scope)

        query = {}
        # read from the request object so a passed-in ``request=`` is honoured
        if request.scope: query['scope'] = request.scope

        json = self._api.do('GET', '/api/2.0/secrets/acls/list', query=query)
        return [AclItem.from_dict(v) for v in json.get('items', [])]

    def list_scopes(self) -> Iterator[SecretScope]:
        """List all scopes.

        Lists all secret scopes available in the workspace.

        Throws `PERMISSION_DENIED` if the user does not have permission to make this API call."""

        json = self._api.do('GET', '/api/2.0/secrets/scopes/list')
        return [SecretScope.from_dict(v) for v in json.get('scopes', [])]

    def list_secrets(self, scope: str, **kwargs) -> Iterator[SecretMetadata]:
        """List secret keys.

        Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data
        cannot be retrieved using this API. Users need the READ permission to make this call.

        The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if
        no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make
        this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = ListSecretsRequest(scope=scope)

        query = {}
        # read from the request object so a passed-in ``request=`` is honoured
        if request.scope: query['scope'] = request.scope

        json = self._api.do('GET', '/api/2.0/secrets/list', query=query)
        return [SecretMetadata.from_dict(v) for v in json.get('secrets', [])]

    def put_acl(self, scope: str, principal: str, permission: AclPermission, **kwargs):
        """Create/update an ACL.

        Creates or overwrites the Access Control List (ACL) associated with the given principal (user or
        group) on the specified scope point.

        In general, a user or group will use the most powerful permission available to them, and permissions
        are ordered as follows:

        * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to
        read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what
        secrets are available.

        Note that in general, secret values can only be read from within a command on a cluster (for example,
        through a notebook). There is no API to read the actual secret value material outside of a cluster.
        However, the user's permission will be applied based on who is executing the command, and they must
        have at least READ permission.

        Users must have the `MANAGE` permission to invoke this API.

        The principal is a user or group name corresponding to an existing Databricks principal to be granted
        or revoked access.

        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a
        permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission is
        invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = PutAcl(permission=permission, principal=principal, scope=scope)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/secrets/acls/put', body=body)

    def put_secret(self,
                   scope: str,
                   key: str,
                   *,
                   bytes_value: str = None,
                   string_value: str = None,
                   **kwargs):
        """Add a secret.

        Inserts a secret under the provided scope with the given name. If a secret already exists with the
        same name, this command overwrites the existing secret's value. The server encrypts the secret using
        the secret scope's encryption settings before storing it.

        You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of
        alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The
        maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.

        The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine
        the value returned when the secret value is requested. Exactly one must be specified.

        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if
        maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or
        value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this
        API call."""
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = PutSecret(bytes_value=bytes_value, key=key, scope=scope, string_value=string_value)
        body = request.as_dict()
        self._api.do('POST', '/api/2.0/secrets/put', body=body)
@@ -212,9 +1107,9 @@ def export(self, direct_download: bool = None, format: ExportFormat = None, **kwargs) -> ExportResponse: - """Export a notebook. + """Export a workspace object. - Exports a notebook or the contents of an entire directory. + Exports an object or the contents of an entire directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. @@ -222,7 +1117,7 @@ def export(self, call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`. Currently, this API does not support exporting a library.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = Export(direct_download=direct_download, format=format, path=path) + request = ExportRequest(direct_download=direct_download, format=format, path=path) query = {} if direct_download: query['direct_download'] = request.direct_download @@ -239,7 +1134,7 @@ def get_status(self, path: str, **kwargs) -> ObjectInfo: `RESOURCE_DOES_NOT_EXIST`.""" request = kwargs.get('request', None) if not request: # request is not given through keyed args - request = GetStatus(path=path) + request = GetStatusRequest(path=path) query = {} if path: query['path'] = request.path @@ -255,11 +1150,11 @@ def import_(self, language: Language = None, overwrite: bool = None, **kwargs): - """Import a notebook. + """Import a workspace object. - Imports a notebook or the contents of an entire directory. If `path` already exists and `overwrite` is - set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. One can only use `DBC` format to - import a directory.""" + Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. + If `path` already exists and `overwrite` is set to `false`, this call returns an error + `RESOURCE_ALREADY_EXISTS`. 
@dataclass
class GetStatus:
    """Check configuration status"""

    # comma-separated list of workspace-conf keys to query
    keys: str


# The workspace-conf endpoint exchanges a flat string-to-string mapping; there
# is no generated dataclass for it, just this type alias.
WorkspaceConf = Dict[str, str]


class WorkspaceConfAPI:
    """This API allows updating known workspace settings for advanced users."""

    def __init__(self, api_client):
        # low-level HTTP client; every method funnels through self._api.do(...)
        self._api = api_client

    def get_status(self, keys: str, **kwargs) -> WorkspaceConf:
        """Check configuration status.

        Gets the configuration status for a workspace.

        :param keys: comma-separated list of configuration keys to fetch.
        :returns: mapping of configuration key to its current value.
        """
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            request = GetStatus(keys=keys)

        query = {}
        # read from the request object so a passed-in ``request=`` is honoured
        if request.keys: query['keys'] = request.keys

        json = self._api.do('GET', '/api/2.0/workspace-conf', query=query)
        # WorkspaceConf is a plain Dict[str, str] alias, not a generated
        # dataclass, so there is no ``from_dict`` constructor: the decoded JSON
        # payload already has the right shape and is returned as-is.  (The
        # previous ``WorkspaceConf.from_dict(json)`` raised AttributeError on
        # every call.)
        return json

    def set_status(self, contents: Dict[str, str] = None, **kwargs):
        """Enable/disable features.

        Sets the configuration status for a workspace, including enabling or disabling it.

        :param contents: mapping of configuration key to the value to set;
            ignored when a pre-built ``request=`` mapping is supplied.
        """
        request = kwargs.get('request', None)
        if not request:  # request is not given through keyed args
            # ``Dict[str, str]()`` (the previous default) is a typing alias and
            # not callable, so the old code raised TypeError whenever
            # ``request`` was not supplied; use a plain dict instead.
            request = contents or {}

        # actually transmit the settings: a body-less PATCH cannot change
        # anything on the server
        self._api.do('PATCH', '/api/2.0/workspace-conf', body=request)
mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.read', wraps=fake_read) - delete = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.delete') + mocker.patch('databricks.sdk.service.files.DbfsAPI.read', wraps=fake_read) + delete = mocker.patch('databricks.sdk.service.files.DbfsAPI.delete') w = WorkspaceClient(config=config) w.dbfs.move_('a', f'file:{tmp_path}', recursive=True) @@ -28,20 +28,20 @@ def fake_read(path: str, *, length: int = None, offset: int = None): def test_moving_local_dir_to_dbfs(config, tmp_path, mocker): from databricks.sdk import WorkspaceClient from databricks.sdk.core import DatabricksError - from databricks.sdk.service.dbfs import CreateResponse + from databricks.sdk.service.files import CreateResponse with (tmp_path / 'a').open('wb') as f: f.write(b'hello') - mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.create', return_value=CreateResponse(123)) + mocker.patch('databricks.sdk.service.files.DbfsAPI.create', return_value=CreateResponse(123)) def fake(path: str): assert path == 'a' raise DatabricksError('nope', error_code='RESOURCE_DOES_NOT_EXIST') - mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.get_status', wraps=fake) - add_block = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.add_block') - close = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.close') + mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status', wraps=fake) + add_block = mocker.patch('databricks.sdk.service.files.DbfsAPI.add_block') + close = mocker.patch('databricks.sdk.service.files.DbfsAPI.close') w = WorkspaceClient(config=config) w.dbfs.move_(f'file:{tmp_path}', 'a', recursive=True) diff --git a/tests/test_dbutils.py b/tests/test_dbutils.py index 51e73346a..415ba0cca 100644 --- a/tests/test_dbutils.py +++ b/tests/test_dbutils.py @@ -20,8 +20,8 @@ def test_fs_cp(dbutils, mocker): def test_fs_head(dbutils, mocker): - from databricks.sdk.service.dbfs import ReadResponse - inner = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.read', + from 
databricks.sdk.service.files import ReadResponse + inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.read', return_value=ReadResponse(data='aGVsbG8=')) result = dbutils.fs.head('a') @@ -31,7 +31,7 @@ def test_fs_head(dbutils, mocker): def test_fs_ls(dbutils, mocker): - from databricks.sdk.service.dbfs import FileInfo + from databricks.sdk.service.files import FileInfo inner = mocker.patch('databricks.sdk.mixins.dbfs.DbfsExt.list', return_value=[ FileInfo(path='b', file_size=10, modification_time=20), @@ -47,7 +47,7 @@ def test_fs_ls(dbutils, mocker): def test_fs_mkdirs(dbutils, mocker): - inner = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.mkdirs') + inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.mkdirs') dbutils.fs.mkdirs('a') @@ -63,7 +63,7 @@ def test_fs_mv(dbutils, mocker): def test_fs_put(dbutils, mocker): - inner = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.put') + inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.put') dbutils.fs.put('a', 'b') @@ -71,7 +71,7 @@ def test_fs_put(dbutils, mocker): def test_fs_rm(dbutils, mocker): - inner = mocker.patch('databricks.sdk.service.dbfs.DbfsAPI.delete') + inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.delete') dbutils.fs.rm('a') @@ -88,22 +88,21 @@ def dbutils_proxy(mocker): from databricks.sdk.core import Config from databricks.sdk.dbutils import RemoteDbUtils from databricks.sdk.service._internal import Wait - from databricks.sdk.service.clusters import ClusterInfo, State - from databricks.sdk.service.commands import (CommandStatus, - CommandStatusResponse, - Created, Language, Results) + from databricks.sdk.service.compute import (ClusterInfo, CommandStatus, + CommandStatusResponse, Created, + Language, Results, State) from .conftest import noop_credentials - cluster_get = mocker.patch('databricks.sdk.service.clusters.ClustersAPI.get', + cluster_get = mocker.patch('databricks.sdk.service.compute.ClustersAPI.get', return_value=ClusterInfo(state=State.RUNNING)) 
- context_create = mocker.patch('databricks.sdk.service.commands.CommandExecutionAPI.create', + context_create = mocker.patch('databricks.sdk.service.compute.CommandExecutionAPI.create', return_value=Wait(lambda **kwargs: Created('y'))) def inner(results_data: any, expect_command: str): import json command_execute = mocker.patch( - 'databricks.sdk.service.commands.CommandExecutionAPI.execute', + 'databricks.sdk.service.compute.CommandExecutionAPI.execute', return_value=Wait(lambda **kwargs: CommandStatusResponse( results=Results(data=json.dumps(results_data)), status=CommandStatus.Finished)))