From fbd24762e8df98389f77de3b3f0bba799995e16f Mon Sep 17 00:00:00 2001
From: Serge Smertin
Date: Mon, 17 Jul 2023 20:23:16 +0200
Subject: [PATCH] Release 0.2.0

* Synchronize auth permutation tests with Go SDK ([#108](https://github.com/databricks/databricks-sdk-java/pull/108)).
* Regenerated from OpenAPI spec ([#112](https://github.com/databricks/databricks-sdk-java/pull/112)).
* Add issue template ([#113](https://github.com/databricks/databricks-sdk-java/pull/113)).
* Add accounts-dod support in the Java SDK ([#114](https://github.com/databricks/databricks-sdk-java/pull/114)).
* Regenerate Java SDK using recent OpenAPI Specification ([#115](https://github.com/databricks/databricks-sdk-java/pull/115)).
* Rerun OpenAPI Generator ([#117](https://github.com/databricks/databricks-sdk-java/pull/117)).
* Integrate with auto-release toolchain ([#118](https://github.com/databricks/databricks-sdk-java/pull/118)).
* Updated CLI token source `parseExpiry` method to account for different time formats ([#116](https://github.com/databricks/databricks-sdk-java/pull/116)).

API Changes:

 * Changed `create()` method for `accountClient.metastoreAssignments()` service. New request type is `com.databricks.sdk.service.catalog.AccountsCreateMetastoreAssignment` class.
 * Changed `create()` method for `accountClient.metastoreAssignments()` service to return `com.databricks.sdk.service.catalog.CreateMetastoreAssignmentsResponseItemList` class.
 * Changed `get()` method for `accountClient.metastoreAssignments()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreAssignment` class.
 * Changed `update()` method for `accountClient.metastoreAssignments()` service. New request type is `com.databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignment` class.
 * Changed `update()` method for `accountClient.metastoreAssignments()` service to no longer return `com.databricks.sdk.service.catalog.MetastoreAssignment` class.
 * Changed `create()` method for `accountClient.metastores()` service. New request type is `com.databricks.sdk.service.catalog.AccountsCreateMetastore` class.
 * Changed `create()` method for `accountClient.metastores()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class.
 * Changed `get()` method for `accountClient.metastores()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class.
 * Changed `update()` method for `accountClient.metastores()` service. New request type is `com.databricks.sdk.service.catalog.AccountsUpdateMetastore` class.
 * Changed `update()` method for `accountClient.metastores()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class.
 * Changed `create()` method for `accountClient.storageCredentials()` service. New request type is `com.databricks.sdk.service.catalog.AccountsCreateStorageCredential` class.
 * Changed `update()` method for `accountClient.storageCredentials()` service. New request type is `com.databricks.sdk.service.catalog.AccountsUpdateStorageCredential` class.
 * Added `update()` method for `workspaceClient.tables()` service.
 * Changed `get()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.CurrentWorkspaceBindings` class.
 * Changed `update()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.CurrentWorkspaceBindings` class.
 * Added `workspaceClient.connections()` service.
* Added `workspaceClient.systemSchemas()` service. * Added `connectionName` field for `com.databricks.sdk.service.catalog.CatalogInfo`. * Added `options` field for `com.databricks.sdk.service.catalog.CatalogInfo`. * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. * Removed `metastoreId` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. * Removed `com.databricks.sdk.service.catalog.GcpServiceAccountKey` class. * Removed `schemas` field for `com.databricks.sdk.service.catalog.ListFunctionsResponse`. * Added `functions` field for `com.databricks.sdk.service.catalog.ListFunctionsResponse`. * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. * Removed `metastoreId` field for `com.databricks.sdk.service.catalog.UpdateMetastore`. * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. * Removed `metastoreId` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. * Changed `assignWorkspaces` field for `com.databricks.sdk.service.catalog.UpdateWorkspaceBindings` to `com.databricks.sdk.service.catalog.List` class. * Changed `unassignWorkspaces` field for `com.databricks.sdk.service.catalog.UpdateWorkspaceBindings` to `com.databricks.sdk.service.catalog.List` class. * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential`. * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential`. * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential`. * Removed `com.databricks.sdk.service.catalog.WorkspaceId` class. * Added `com.databricks.sdk.service.catalog.AccountsCreateMetastore` class. * Added `com.databricks.sdk.service.catalog.AccountsCreateMetastoreAssignment` class. * Added `com.databricks.sdk.service.catalog.AccountsCreateStorageCredential` class. * Added `com.databricks.sdk.service.catalog.AccountsMetastoreAssignment` class. * Added `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class. * Added `com.databricks.sdk.service.catalog.AccountsUpdateMetastore` class. * Added `com.databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignment` class. * Added `com.databricks.sdk.service.catalog.AccountsUpdateStorageCredential` class. * Added `com.databricks.sdk.service.catalog.AzureManagedIdentity` class. * Added `com.databricks.sdk.service.catalog.ConnectionInfo` class. * Added `com.databricks.sdk.service.catalog.ConnectionType` class. * Added `com.databricks.sdk.service.catalog.CreateConnection` class. * Added `com.databricks.sdk.service.catalog.CreateMetastoreAssignmentsResponseItem` class. * Added `com.databricks.sdk.service.catalog.CredentialType` class. * Added `com.databricks.sdk.service.catalog.CurrentWorkspaceBindings` class. 
* Added `com.databricks.sdk.service.catalog.DatabricksGcpServiceAccountResponse` class. * Added `com.databricks.sdk.service.catalog.DeleteConnectionRequest` class. * Added `com.databricks.sdk.service.catalog.DisableRequest` class. * Added `com.databricks.sdk.service.catalog.DisableSchemaName` class. * Added `com.databricks.sdk.service.catalog.EnableRequest` class. * Added `com.databricks.sdk.service.catalog.EnableSchemaName` class. * Added `com.databricks.sdk.service.catalog.GetConnectionRequest` class. * Added `com.databricks.sdk.service.catalog.ListConnectionsResponse` class. * Added `com.databricks.sdk.service.catalog.ListSystemSchemasRequest` class. * Added `com.databricks.sdk.service.catalog.ListSystemSchemasResponse` class. * Added `com.databricks.sdk.service.catalog.PropertiesKvPairs` class. * Added `com.databricks.sdk.service.catalog.SecurableOptionsMap` class. * Added `com.databricks.sdk.service.catalog.SystemSchemaInfo` class. * Added `com.databricks.sdk.service.catalog.SystemSchemaInfoState` class. * Added `com.databricks.sdk.service.catalog.UpdateConnection` class. * Added `com.databricks.sdk.service.catalog.UpdateTableRequest` class. * Changed `get()` method for `workspaceClient.clusters()` service to return `com.databricks.sdk.service.compute.ClusterDetails` class. * Removed `com.databricks.sdk.service.compute.BaseClusterInfo` class. * Added `dataSecurityMode` field for `com.databricks.sdk.service.compute.ClusterAttributes`. * Added `dockerImage` field for `com.databricks.sdk.service.compute.ClusterAttributes`. * Added `singleUserName` field for `com.databricks.sdk.service.compute.ClusterAttributes`. * Removed `com.databricks.sdk.service.compute.ClusterInfo` class. * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.CreateInstancePool`. * Added `dataSecurityMode` field for `com.databricks.sdk.service.compute.EditCluster`. * Added `dockerImage` field for `com.databricks.sdk.service.compute.EditCluster`. * Added `singleUserName` field for `com.databricks.sdk.service.compute.EditCluster`. * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.EditInstancePool`. * Added `localSsdCount` field for `com.databricks.sdk.service.compute.GcpAttributes`. * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.GetInstancePool`. * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.InstancePoolAndStats`. * Changed `clusters` field for `com.databricks.sdk.service.compute.ListClustersResponse` to `com.databricks.sdk.service.compute.ClusterDetailsList` class. * Added `com.databricks.sdk.service.compute.ClusterDetails` class. * Added `com.databricks.sdk.service.compute.ClusterSpec` class. * Added `com.databricks.sdk.service.compute.ComputeSpec` class. * Added `com.databricks.sdk.service.compute.ComputeSpecKind` class. * Added `com.databricks.sdk.service.compute.InstancePoolGcpAttributes` class. * Added `accountClient.accessControl()` service. * Added `workspaceClient.accessControlProxy()` service. * Added `meta` field for `com.databricks.sdk.service.iam.Group`. * Added `schema` field for `com.databricks.sdk.service.iam.PartialUpdate`. * Added `com.databricks.sdk.service.iam.GetAssignableRolesForResourceRequest` class. * Added `com.databricks.sdk.service.iam.GetAssignableRolesForResourceResponse` class. * Added `com.databricks.sdk.service.iam.GetRuleSetRequest` class. * Added `com.databricks.sdk.service.iam.GrantRule` class. * Added `com.databricks.sdk.service.iam.PatchSchema` class. 
* Added `com.databricks.sdk.service.iam.Principal` class. * Added `com.databricks.sdk.service.iam.ResourceMeta` class. * Added `com.databricks.sdk.service.iam.RuleSetResponse` class. * Added `com.databricks.sdk.service.iam.RuleSetUpdateRequest` class. * Added `com.databricks.sdk.service.iam.UpdateRuleSetRequest` class. * Added `jobParameters` field for `com.databricks.sdk.service.jobs.BaseRun`. * Added `triggerInfo` field for `com.databricks.sdk.service.jobs.BaseRun`. * Changed `newCluster` field for `com.databricks.sdk.service.jobs.ClusterSpec` to `com.databricks.sdk.service.compute.ClusterSpec` class. * Changed `pauseStatus` field for `com.databricks.sdk.service.jobs.Continuous` to `com.databricks.sdk.service.jobs.PauseStatus` class. * Removed `com.databricks.sdk.service.jobs.ContinuousPauseStatus` class. * Changed `format` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.Format` class. * Changed `tasks` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.TaskList` class. * Changed `webhookNotifications` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.WebhookNotifications` class. * Added `compute` field for `com.databricks.sdk.service.jobs.CreateJob`. * Added `health` field for `com.databricks.sdk.service.jobs.CreateJob`. * Added `parameters` field for `com.databricks.sdk.service.jobs.CreateJob`. * Added `runAs` field for `com.databricks.sdk.service.jobs.CreateJob`. * Removed `com.databricks.sdk.service.jobs.CreateJobFormat` class. * Changed `pauseStatus` field for `com.databricks.sdk.service.jobs.CronSchedule` to `com.databricks.sdk.service.jobs.PauseStatus` class. * Removed `com.databricks.sdk.service.jobs.CronSchedulePauseStatus` class. * Removed `com.databricks.sdk.service.jobs.FileArrivalTriggerSettings` class. * Changed `gitProvider` field for `com.databricks.sdk.service.jobs.GitSource` to `com.databricks.sdk.service.jobs.GitProvider` class. * Added `jobSource` field for `com.databricks.sdk.service.jobs.GitSource`. * Removed `com.databricks.sdk.service.jobs.GitSourceGitProvider` class. * Changed `newCluster` field for `com.databricks.sdk.service.jobs.JobCluster` to `com.databricks.sdk.service.compute.ClusterSpec` class. * Added `onDurationWarningThresholdExceeded` field for `com.databricks.sdk.service.jobs.JobEmailNotifications`. * Changed `format` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.Format` class. * Changed `tasks` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.TaskList` class. * Changed `webhookNotifications` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.WebhookNotifications` class. * Added `compute` field for `com.databricks.sdk.service.jobs.JobSettings`. * Added `health` field for `com.databricks.sdk.service.jobs.JobSettings`. * Added `parameters` field for `com.databricks.sdk.service.jobs.JobSettings`. * Added `runAs` field for `com.databricks.sdk.service.jobs.JobSettings`. * Removed `com.databricks.sdk.service.jobs.JobSettingsFormat` class. * Removed `com.databricks.sdk.service.jobs.JobTaskSettings` class. * Removed `com.databricks.sdk.service.jobs.JobWebhookNotifications` class. * Removed `com.databricks.sdk.service.jobs.JobWebhookNotificationsOnFailureItem` class. * Removed `com.databricks.sdk.service.jobs.JobWebhookNotificationsOnStartItem` class. 
* Removed `com.databricks.sdk.service.jobs.JobWebhookNotificationsOnSuccessItem` class. * Added `pageToken` field for `com.databricks.sdk.service.jobs.ListJobsRequest`. * Added `nextPageToken` field for `com.databricks.sdk.service.jobs.ListJobsResponse`. * Added `prevPageToken` field for `com.databricks.sdk.service.jobs.ListJobsResponse`. * Added `pageToken` field for `com.databricks.sdk.service.jobs.ListRunsRequest`. * Added `nextPageToken` field for `com.databricks.sdk.service.jobs.ListRunsResponse`. * Added `prevPageToken` field for `com.databricks.sdk.service.jobs.ListRunsResponse`. * Changed `source` field for `com.databricks.sdk.service.jobs.NotebookTask` to `com.databricks.sdk.service.jobs.Source` class. * Removed `com.databricks.sdk.service.jobs.NotebookTaskSource` class. * Added `rerunDependentTasks` field for `com.databricks.sdk.service.jobs.RepairRun`. * Added `jobParameters` field for `com.databricks.sdk.service.jobs.Run`. * Added `triggerInfo` field for `com.databricks.sdk.service.jobs.Run`. * Added `jobParameters` field for `com.databricks.sdk.service.jobs.RunNow`. * Added `conditionTask` field for `com.databricks.sdk.service.jobs.RunOutput`. * Added `runJobOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. * Removed `com.databricks.sdk.service.jobs.RunSubmitTaskSettings` class. * Changed `dependsOn` field for `com.databricks.sdk.service.jobs.RunTask` to `com.databricks.sdk.service.jobs.TaskDependencyList` class. * Changed `newCluster` field for `com.databricks.sdk.service.jobs.RunTask` to `com.databricks.sdk.service.compute.ClusterSpec` class. * Added `conditionTask` field for `com.databricks.sdk.service.jobs.RunTask`. * Added `resolvedValues` field for `com.databricks.sdk.service.jobs.RunTask`. * Added `runIf` field for `com.databricks.sdk.service.jobs.RunTask`. * Added `runJobTask` field for `com.databricks.sdk.service.jobs.RunTask`. * Changed `source` field for `com.databricks.sdk.service.jobs.SparkPythonTask` to `com.databricks.sdk.service.jobs.Source` class. * Removed `com.databricks.sdk.service.jobs.SparkPythonTaskSource` class. * Changed `widgets` field for `com.databricks.sdk.service.jobs.SqlDashboardOutput` to `com.databricks.sdk.service.jobs.SqlDashboardWidgetOutputList` class. * Changed `tasks` field for `com.databricks.sdk.service.jobs.SubmitRun` to `com.databricks.sdk.service.jobs.SubmitTaskList` class. * Changed `webhookNotifications` field for `com.databricks.sdk.service.jobs.SubmitRun` to `com.databricks.sdk.service.jobs.WebhookNotifications` class. * Added `emailNotifications` field for `com.databricks.sdk.service.jobs.SubmitRun`. * Added `health` field for `com.databricks.sdk.service.jobs.SubmitRun`. * Removed `com.databricks.sdk.service.jobs.TaskDependenciesItem` class. * Added `onDurationWarningThresholdExceeded` field for `com.databricks.sdk.service.jobs.TaskEmailNotifications`. * Changed `fileArrival` field for `com.databricks.sdk.service.jobs.TriggerSettings` to `com.databricks.sdk.service.jobs.FileArrivalTriggerConfiguration` class. * Changed `pauseStatus` field for `com.databricks.sdk.service.jobs.TriggerSettings` to `com.databricks.sdk.service.jobs.PauseStatus` class. * Removed `com.databricks.sdk.service.jobs.TriggerSettingsPauseStatus` class. * Added `com.databricks.sdk.service.jobs.ConditionTask` class. * Added `com.databricks.sdk.service.jobs.ConditionTaskOp` class. * Added `com.databricks.sdk.service.jobs.FileArrivalTriggerConfiguration` class. * Added `com.databricks.sdk.service.jobs.Format` class. 
* Added `com.databricks.sdk.service.jobs.GitProvider` class. * Added `com.databricks.sdk.service.jobs.JobCompute` class. * Added `com.databricks.sdk.service.jobs.JobParameter` class. * Added `com.databricks.sdk.service.jobs.JobParameterDefinition` class. * Added `com.databricks.sdk.service.jobs.JobRunAs` class. * Added `com.databricks.sdk.service.jobs.JobSource` class. * Added `com.databricks.sdk.service.jobs.JobSourceDirtyState` class. * Added `com.databricks.sdk.service.jobs.JobsHealthMetric` class. * Added `com.databricks.sdk.service.jobs.JobsHealthOperator` class. * Added `com.databricks.sdk.service.jobs.JobsHealthRule` class. * Added `com.databricks.sdk.service.jobs.JobsHealthRules` class. * Added `com.databricks.sdk.service.jobs.ParamPairs` class. * Added `com.databricks.sdk.service.jobs.PauseStatus` class. * Added `com.databricks.sdk.service.jobs.ResolvedConditionTaskValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedDbtTaskValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedNotebookTaskValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedParamPairValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedPythonWheelTaskValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedRunJobTaskValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedStringParamsValues` class. * Added `com.databricks.sdk.service.jobs.ResolvedValues` class. * Added `com.databricks.sdk.service.jobs.RunConditionTask` class. * Added `com.databricks.sdk.service.jobs.RunConditionTaskOp` class. * Added `com.databricks.sdk.service.jobs.RunIf` class. * Added `com.databricks.sdk.service.jobs.RunJobOutput` class. * Added `com.databricks.sdk.service.jobs.RunJobTask` class. * Added `com.databricks.sdk.service.jobs.Source` class. * Added `com.databricks.sdk.service.jobs.SubmitTask` class. * Added `com.databricks.sdk.service.jobs.Task` class. * Added `com.databricks.sdk.service.jobs.TaskDependency` class. * Added `com.databricks.sdk.service.jobs.TriggerInfo` class. * Added `com.databricks.sdk.service.jobs.Webhook` class. * Added `com.databricks.sdk.service.jobs.WebhookNotifications` class. * Added `com.databricks.sdk.service.jobs.WebhookNotificationsOnDurationWarningThresholdExceededItem` class. * Removed `registeredModel` field for `com.databricks.sdk.service.ml.GetModelResponse`. * Added `registeredModelDatabricks` field for `com.databricks.sdk.service.ml.GetModelResponse`. * Removed `whl` field for `com.databricks.sdk.service.pipelines.PipelineLibrary`. * Added `environmentVars` field for `com.databricks.sdk.service.serving.ServedModelInput`. * Added `environmentVars` field for `com.databricks.sdk.service.serving.ServedModelOutput`. * Added `accountClient.settings()` service. * Added `com.databricks.sdk.service.settings.DeletePersonalComputeSettingRequest` class. * Added `com.databricks.sdk.service.settings.DeletePersonalComputeSettingResponse` class. * Added `com.databricks.sdk.service.settings.PersonalComputeMessage` class. * Added `com.databricks.sdk.service.settings.PersonalComputeMessageEnum` class. * Added `com.databricks.sdk.service.settings.PersonalComputeSetting` class. * Added `com.databricks.sdk.service.settings.ReadPersonalComputeSettingRequest` class. * Added `com.databricks.sdk.service.settings.UpdatePersonalComputeSettingRequest` class. * Added `workspaceClient.cleanRooms()` service. * Added `historyDataSharingStatus` field for `com.databricks.sdk.service.sharing.SharedDataObject`. 
 * Added `com.databricks.sdk.service.sharing.CentralCleanRoomInfo` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomAssetInfo` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomCatalog` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomCatalogUpdate` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomCollaboratorInfo` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomInfo` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomNotebookInfo` class.
 * Added `com.databricks.sdk.service.sharing.CleanRoomTableInfo` class.
 * Added `com.databricks.sdk.service.sharing.ColumnInfo` class.
 * Added `com.databricks.sdk.service.sharing.ColumnMask` class.
 * Added `com.databricks.sdk.service.sharing.ColumnTypeName` class.
 * Added `com.databricks.sdk.service.sharing.CreateCleanRoom` class.
 * Added `com.databricks.sdk.service.sharing.DeleteCleanRoomRequest` class.
 * Added `com.databricks.sdk.service.sharing.GetCleanRoomRequest` class.
 * Added `com.databricks.sdk.service.sharing.ListCleanRoomsResponse` class.
 * Added `com.databricks.sdk.service.sharing.SharedDataObjectHistoryDataSharingStatus` class.
 * Added `com.databricks.sdk.service.sharing.UpdateCleanRoom` class.
 * Changed `query` field for `com.databricks.sdk.service.sql.Alert` to `com.databricks.sdk.service.sql.AlertQuery` class.
 * Changed `value` field for `com.databricks.sdk.service.sql.AlertOptions` to `Object` class.
 * Removed `isDbAdmin` field for `com.databricks.sdk.service.sql.User`.
 * Removed `profileImageUrl` field for `com.databricks.sdk.service.sql.User`.
 * Added `com.databricks.sdk.service.sql.AlertQuery` class.
 * Removed `keyvaultMetadata` field for `com.databricks.sdk.service.workspace.CreateScope`.
 * Added `backendAzureKeyvault` field for `com.databricks.sdk.service.workspace.CreateScope`.
 * Changed `format` field for `com.databricks.sdk.service.workspace.Import` to `com.databricks.sdk.service.workspace.ImportFormat` class.
 * Added `com.databricks.sdk.service.workspace.ImportFormat` class.

OpenAPI SHA: ca501b7365fb211d84051727fa3a11d4d38ab2a8, Date: 2023-07-17

Dependency updates:

 * Bump commons-io from 2.12.0 to 2.13.0 ([#105](https://github.com/databricks/databricks-sdk-java/pull/105)).
 * Bump jackson.version from 2.15.1 to 2.15.2 ([#101](https://github.com/databricks/databricks-sdk-java/pull/101)).
 * Bump maven-failsafe-plugin from 3.1.0 to 3.1.2 ([#103](https://github.com/databricks/databricks-sdk-java/pull/103)).
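Usage sketch for the new job health rules: the classes added in this release (`JobsHealthRule`, `JobsHealthRules`, `JobsHealthMetric`, `JobsHealthOperator`) and the new `health` field on `CreateJob` compose as shown below. This is a minimal sketch, not part of the generated changelog; `JobsHealthRules.setRules(...)`, `CreateJob.setName(...)`, and `jobs().create(...)` are assumed to follow the SDK's usual generated builder pattern, since they are not visible in this diff.

import java.util.Collections;

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.JobsHealthMetric;
import com.databricks.sdk.service.jobs.JobsHealthOperator;
import com.databricks.sdk.service.jobs.JobsHealthRule;
import com.databricks.sdk.service.jobs.JobsHealthRules;

public class JobHealthRulesSketch {
  public static void main(String[] args) {
    // Flag runs of this job that take longer than one hour (3600 seconds).
    // RUN_DURATION_SECONDS and GREATER_THAN are the only enum values shipped in 0.2.0.
    JobsHealthRule longRunRule =
        new JobsHealthRule()
            .setMetric(JobsHealthMetric.RUN_DURATION_SECONDS)
            .setOp(JobsHealthOperator.GREATER_THAN)
            .setValue(3600L);

    // Assumption: JobsHealthRules exposes setRules(Collection<JobsHealthRule>), matching the
    // generated builder pattern of the other jobs classes in this patch.
    JobsHealthRules health =
        new JobsHealthRules().setRules(Collections.singletonList(longRunRule));

    WorkspaceClient workspaceClient = new WorkspaceClient(); // reads auth from the environment

    // The `health` field is new on CreateJob in this release; setName(...) and jobs().create(...)
    // are assumed here rather than shown in this diff.
    workspaceClient.jobs().create(
        new CreateJob()
            .setName("nightly-etl") // hypothetical job name
            .setHealth(health));
  }
}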
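A related sketch for the new `onDurationWarningThresholdExceeded` field on `JobEmailNotifications`: per the generated javadoc in this patch, these addresses are notified only when the job's `health` field defines a `RUN_DURATION_SECONDS` rule. The setters used below are the ones added in this diff; attaching the object via `CreateJob.setEmailNotifications(...)` is assumed from the generated pattern.

import java.util.Arrays;

import com.databricks.sdk.service.jobs.JobEmailNotifications;

public class DurationWarningNotificationsSketch {
  public static void main(String[] args) {
    // Notified when a run exceeds the RUN_DURATION_SECONDS threshold from the job's `health`
    // rules; without such a rule, no duration-warning notification is sent.
    JobEmailNotifications notifications =
        new JobEmailNotifications()
            .setOnDurationWarningThresholdExceeded(Arrays.asList("oncall@example.com"))
            .setOnFailure(Arrays.asList("oncall@example.com"));

    // Attach with CreateJob.setEmailNotifications(notifications) or the JobSettings equivalent
    // (assumed setter names; not shown in this diff).
    System.out.println(notifications);
  }
}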
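The account-level catalog services listed above now take `Accounts*` request wrappers. The sketch below illustrates the shape of one such call for metastore assignments; the wrapper's setter names (`setWorkspaceId`, `setMetastoreId`, `setMetastoreAssignment`) and the inner `CreateMetastoreAssignment` payload are inferred from the generator's naming convention and are hypothetical here, since this diff does not show those classes.

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.catalog.AccountsCreateMetastoreAssignment;
import com.databricks.sdk.service.catalog.CreateMetastoreAssignment;

public class MetastoreAssignmentSketch {
  public static void main(String[] args) {
    AccountClient accountClient = new AccountClient(); // reads account-level auth from the environment

    // Hypothetical field names: the wrapper is assumed to carry the workspace id, metastore id,
    // and the original CreateMetastoreAssignment payload.
    AccountsCreateMetastoreAssignment request =
        new AccountsCreateMetastoreAssignment()
            .setWorkspaceId(1234567890123456L)
            .setMetastoreId("11111111-2222-3333-4444-555555555555")
            .setMetastoreAssignment(
                new CreateMetastoreAssignment()
                    .setMetastoreId("11111111-2222-3333-4444-555555555555")
                    .setDefaultCatalogName("main"));

    // Per the changelog above, create() now returns CreateMetastoreAssignmentsResponseItemList.
    accountClient.metastoreAssignments().create(request);
  }
}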
--- .gitattributes | 5 + CHANGELOG.md | 273 ++++++++++++++++++ databricks-sdk-java/pom.xml | 2 +- .../com/databricks/sdk/core/UserAgent.java | 2 +- .../databricks/sdk/service/jobs/BaseRun.java | 2 +- .../sdk/service/jobs/CreateJob.java | 18 +- .../sdk/service/jobs/GitSource.java | 8 +- .../service/jobs/JobEmailNotifications.java | 25 +- .../sdk/service/jobs/JobSettings.java | 18 +- .../sdk/service/jobs/JobsHealthMetric.java | 11 + .../sdk/service/jobs/JobsHealthOperator.java | 11 + .../sdk/service/jobs/JobsHealthRule.java | 78 +++++ .../sdk/service/jobs/JobsHealthRules.java | 44 +++ .../com/databricks/sdk/service/jobs/Run.java | 2 +- .../databricks/sdk/service/jobs/RunTask.java | 4 +- .../sdk/service/jobs/SubmitRun.java | 16 + .../sdk/service/jobs/SubmitTask.java | 18 +- .../com/databricks/sdk/service/jobs/Task.java | 20 +- .../service/jobs/TaskEmailNotifications.java | 26 +- .../service/jobs/WebhookNotifications.java | 28 +- ...nDurationWarningThresholdExceededItem.java | 45 +++ .../service/pipelines/PipelineLibrary.java | 19 +- .../sdk/service/sql/ChannelName.java | 1 - pom.xml | 2 +- 24 files changed, 637 insertions(+), 41 deletions(-) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthMetric.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthOperator.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsOnDurationWarningThresholdExceededItem.java diff --git a/.gitattributes b/.gitattributes index 03e954cc5..e8c1d7271 100755 --- a/.gitattributes +++ b/.gitattributes @@ -555,6 +555,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourceDirtyState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthMetric.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthOperator.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java linguist-generated=true @@ -634,6 +638,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewType.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsOnDurationWarningThresholdExceededItem.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityId.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 409833815..ad00a856c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,278 @@ # Version changelog +## 0.2.0 + +* Synchronize auth permutation tests with Go SDK ([#108](https://github.com/databricks/databricks-sdk-java/pull/108)). +* Regenerated from OpenAPI spec ([#112](https://github.com/databricks/databricks-sdk-java/pull/112)). +* Add issue template ([#113](https://github.com/databricks/databricks-sdk-java/pull/113)). +* Add accounts-dod support in the Java SDK ([#114](https://github.com/databricks/databricks-sdk-java/pull/114)). +* Regenerate Java SDK using recent OpenAPI Specification ([#115](https://github.com/databricks/databricks-sdk-java/pull/115)). +* Rerun OpenAPI Generator ([#117](https://github.com/databricks/databricks-sdk-java/pull/117)). +* Integrate with auto-release toolchain ([#118](https://github.com/databricks/databricks-sdk-java/pull/118)). +* Updated CLI token source `parseExpiry` method to account for different time format ([#116](https://github.com/databricks/databricks-sdk-java/pull/116)). + +API Changes: + + * Changed `create()` method for `accountClient.metastoreAssignments()` service . New request type is `com.databricks.sdk.service.catalog.AccountsCreateMetastoreAssignment` class. + * Changed `create()` method for `accountClient.metastoreAssignments()` service to return `com.databricks.sdk.service.catalog.CreateMetastoreAssignmentsResponseItemList` class. + * Changed `get()` method for `accountClient.metastoreAssignments()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreAssignment` class. + * Changed `update()` method for `accountClient.metastoreAssignments()` service . New request type is `com.databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignment` class. + * Changed `update()` method for `accountClient.metastoreAssignments()` service to no longer return `com.databricks.sdk.service.catalog.MetastoreAssignment` class. + * Changed `create()` method for `accountClient.metastores()` service . New request type is `com.databricks.sdk.service.catalog.AccountsCreateMetastore` class. + * Changed `create()` method for `accountClient.metastores()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class. + * Changed `get()` method for `accountClient.metastores()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class. + * Changed `update()` method for `accountClient.metastores()` service . New request type is `com.databricks.sdk.service.catalog.AccountsUpdateMetastore` class. + * Changed `update()` method for `accountClient.metastores()` service to return `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class. + * Changed `create()` method for `accountClient.storageCredentials()` service . New request type is `com.databricks.sdk.service.catalog.AccountsCreateStorageCredential` class. + * Changed `update()` method for `accountClient.storageCredentials()` service . 
New request type is `com.databricks.sdk.service.catalog.AccountsUpdateStorageCredential` class. + * Added `update()` method for `workspaceClient.tables()` service. + * Changed `get()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.CurrentWorkspaceBindings` class. + * Changed `update()` method for `workspaceClient.workspaceBindings()` service to return `com.databricks.sdk.service.catalog.CurrentWorkspaceBindings` class. + * Added `workspaceClient.connections()` service. + * Added `workspaceClient.systemSchemas()` service. + * Added `connectionName` field for `com.databricks.sdk.service.catalog.CatalogInfo`. + * Added `options` field for `com.databricks.sdk.service.catalog.CatalogInfo`. + * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. + * Removed `metastoreId` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. + * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. + * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.CreateStorageCredential`. + * Removed `com.databricks.sdk.service.catalog.GcpServiceAccountKey` class. + * Removed `schemas` field for `com.databricks.sdk.service.catalog.ListFunctionsResponse`. + * Added `functions` field for `com.databricks.sdk.service.catalog.ListFunctionsResponse`. + * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. + * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. + * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. + * Removed `metastoreId` field for `com.databricks.sdk.service.catalog.UpdateMetastore`. + * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. + * Removed `metastoreId` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. + * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. + * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential`. + * Changed `assignWorkspaces` field for `com.databricks.sdk.service.catalog.UpdateWorkspaceBindings` to `com.databricks.sdk.service.catalog.List` class. + * Changed `unassignWorkspaces` field for `com.databricks.sdk.service.catalog.UpdateWorkspaceBindings` to `com.databricks.sdk.service.catalog.List` class. + * Removed `gcpServiceAccountKey` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential`. + * Added `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential`. + * Added `databricksGcpServiceAccount` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential`. + * Removed `com.databricks.sdk.service.catalog.WorkspaceId` class. + * Added `com.databricks.sdk.service.catalog.AccountsCreateMetastore` class. + * Added `com.databricks.sdk.service.catalog.AccountsCreateMetastoreAssignment` class. + * Added `com.databricks.sdk.service.catalog.AccountsCreateStorageCredential` class. + * Added `com.databricks.sdk.service.catalog.AccountsMetastoreAssignment` class. + * Added `com.databricks.sdk.service.catalog.AccountsMetastoreInfo` class. + * Added `com.databricks.sdk.service.catalog.AccountsUpdateMetastore` class. + * Added `com.databricks.sdk.service.catalog.AccountsUpdateMetastoreAssignment` class. 
+ * Added `com.databricks.sdk.service.catalog.AccountsUpdateStorageCredential` class. + * Added `com.databricks.sdk.service.catalog.AzureManagedIdentity` class. + * Added `com.databricks.sdk.service.catalog.ConnectionInfo` class. + * Added `com.databricks.sdk.service.catalog.ConnectionType` class. + * Added `com.databricks.sdk.service.catalog.CreateConnection` class. + * Added `com.databricks.sdk.service.catalog.CreateMetastoreAssignmentsResponseItem` class. + * Added `com.databricks.sdk.service.catalog.CredentialType` class. + * Added `com.databricks.sdk.service.catalog.CurrentWorkspaceBindings` class. + * Added `com.databricks.sdk.service.catalog.DatabricksGcpServiceAccountResponse` class. + * Added `com.databricks.sdk.service.catalog.DeleteConnectionRequest` class. + * Added `com.databricks.sdk.service.catalog.DisableRequest` class. + * Added `com.databricks.sdk.service.catalog.DisableSchemaName` class. + * Added `com.databricks.sdk.service.catalog.EnableRequest` class. + * Added `com.databricks.sdk.service.catalog.EnableSchemaName` class. + * Added `com.databricks.sdk.service.catalog.GetConnectionRequest` class. + * Added `com.databricks.sdk.service.catalog.ListConnectionsResponse` class. + * Added `com.databricks.sdk.service.catalog.ListSystemSchemasRequest` class. + * Added `com.databricks.sdk.service.catalog.ListSystemSchemasResponse` class. + * Added `com.databricks.sdk.service.catalog.PropertiesKvPairs` class. + * Added `com.databricks.sdk.service.catalog.SecurableOptionsMap` class. + * Added `com.databricks.sdk.service.catalog.SystemSchemaInfo` class. + * Added `com.databricks.sdk.service.catalog.SystemSchemaInfoState` class. + * Added `com.databricks.sdk.service.catalog.UpdateConnection` class. + * Added `com.databricks.sdk.service.catalog.UpdateTableRequest` class. + * Changed `get()` method for `workspaceClient.clusters()` service to return `com.databricks.sdk.service.compute.ClusterDetails` class. + * Removed `com.databricks.sdk.service.compute.BaseClusterInfo` class. + * Added `dataSecurityMode` field for `com.databricks.sdk.service.compute.ClusterAttributes`. + * Added `dockerImage` field for `com.databricks.sdk.service.compute.ClusterAttributes`. + * Added `singleUserName` field for `com.databricks.sdk.service.compute.ClusterAttributes`. + * Removed `com.databricks.sdk.service.compute.ClusterInfo` class. + * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.CreateInstancePool`. + * Added `dataSecurityMode` field for `com.databricks.sdk.service.compute.EditCluster`. + * Added `dockerImage` field for `com.databricks.sdk.service.compute.EditCluster`. + * Added `singleUserName` field for `com.databricks.sdk.service.compute.EditCluster`. + * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.EditInstancePool`. + * Added `localSsdCount` field for `com.databricks.sdk.service.compute.GcpAttributes`. + * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.GetInstancePool`. + * Added `gcpAttributes` field for `com.databricks.sdk.service.compute.InstancePoolAndStats`. + * Changed `clusters` field for `com.databricks.sdk.service.compute.ListClustersResponse` to `com.databricks.sdk.service.compute.ClusterDetailsList` class. + * Added `com.databricks.sdk.service.compute.ClusterDetails` class. + * Added `com.databricks.sdk.service.compute.ClusterSpec` class. + * Added `com.databricks.sdk.service.compute.ComputeSpec` class. + * Added `com.databricks.sdk.service.compute.ComputeSpecKind` class. 
+ * Added `com.databricks.sdk.service.compute.InstancePoolGcpAttributes` class. + * Added `accountClient.accessControl()` service. + * Added `workspaceClient.accessControlProxy()` service. + * Added `meta` field for `com.databricks.sdk.service.iam.Group`. + * Added `schema` field for `com.databricks.sdk.service.iam.PartialUpdate`. + * Added `com.databricks.sdk.service.iam.GetAssignableRolesForResourceRequest` class. + * Added `com.databricks.sdk.service.iam.GetAssignableRolesForResourceResponse` class. + * Added `com.databricks.sdk.service.iam.GetRuleSetRequest` class. + * Added `com.databricks.sdk.service.iam.GrantRule` class. + * Added `com.databricks.sdk.service.iam.PatchSchema` class. + * Added `com.databricks.sdk.service.iam.Principal` class. + * Added `com.databricks.sdk.service.iam.ResourceMeta` class. + * Added `com.databricks.sdk.service.iam.RuleSetResponse` class. + * Added `com.databricks.sdk.service.iam.RuleSetUpdateRequest` class. + * Added `com.databricks.sdk.service.iam.UpdateRuleSetRequest` class. + * Added `jobParameters` field for `com.databricks.sdk.service.jobs.BaseRun`. + * Added `triggerInfo` field for `com.databricks.sdk.service.jobs.BaseRun`. + * Changed `newCluster` field for `com.databricks.sdk.service.jobs.ClusterSpec` to `com.databricks.sdk.service.compute.ClusterSpec` class. + * Changed `pauseStatus` field for `com.databricks.sdk.service.jobs.Continuous` to `com.databricks.sdk.service.jobs.PauseStatus` class. + * Removed `com.databricks.sdk.service.jobs.ContinuousPauseStatus` class. + * Changed `format` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.Format` class. + * Changed `tasks` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.TaskList` class. + * Changed `webhookNotifications` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.WebhookNotifications` class. + * Added `compute` field for `com.databricks.sdk.service.jobs.CreateJob`. + * Added `health` field for `com.databricks.sdk.service.jobs.CreateJob`. + * Added `parameters` field for `com.databricks.sdk.service.jobs.CreateJob`. + * Added `runAs` field for `com.databricks.sdk.service.jobs.CreateJob`. + * Removed `com.databricks.sdk.service.jobs.CreateJobFormat` class. + * Changed `pauseStatus` field for `com.databricks.sdk.service.jobs.CronSchedule` to `com.databricks.sdk.service.jobs.PauseStatus` class. + * Removed `com.databricks.sdk.service.jobs.CronSchedulePauseStatus` class. + * Removed `com.databricks.sdk.service.jobs.FileArrivalTriggerSettings` class. + * Changed `gitProvider` field for `com.databricks.sdk.service.jobs.GitSource` to `com.databricks.sdk.service.jobs.GitProvider` class. + * Added `jobSource` field for `com.databricks.sdk.service.jobs.GitSource`. + * Removed `com.databricks.sdk.service.jobs.GitSourceGitProvider` class. + * Changed `newCluster` field for `com.databricks.sdk.service.jobs.JobCluster` to `com.databricks.sdk.service.compute.ClusterSpec` class. + * Added `onDurationWarningThresholdExceeded` field for `com.databricks.sdk.service.jobs.JobEmailNotifications`. + * Changed `format` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.Format` class. + * Changed `tasks` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.TaskList` class. 
+ * Changed `webhookNotifications` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.WebhookNotifications` class. + * Added `compute` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Added `health` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Added `parameters` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Added `runAs` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Removed `com.databricks.sdk.service.jobs.JobSettingsFormat` class. + * Removed `com.databricks.sdk.service.jobs.JobTaskSettings` class. + * Removed `com.databricks.sdk.service.jobs.JobWebhookNotifications` class. + * Removed `com.databricks.sdk.service.jobs.JobWebhookNotificationsOnFailureItem` class. + * Removed `com.databricks.sdk.service.jobs.JobWebhookNotificationsOnStartItem` class. + * Removed `com.databricks.sdk.service.jobs.JobWebhookNotificationsOnSuccessItem` class. + * Added `pageToken` field for `com.databricks.sdk.service.jobs.ListJobsRequest`. + * Added `nextPageToken` field for `com.databricks.sdk.service.jobs.ListJobsResponse`. + * Added `prevPageToken` field for `com.databricks.sdk.service.jobs.ListJobsResponse`. + * Added `pageToken` field for `com.databricks.sdk.service.jobs.ListRunsRequest`. + * Added `nextPageToken` field for `com.databricks.sdk.service.jobs.ListRunsResponse`. + * Added `prevPageToken` field for `com.databricks.sdk.service.jobs.ListRunsResponse`. + * Changed `source` field for `com.databricks.sdk.service.jobs.NotebookTask` to `com.databricks.sdk.service.jobs.Source` class. + * Removed `com.databricks.sdk.service.jobs.NotebookTaskSource` class. + * Added `rerunDependentTasks` field for `com.databricks.sdk.service.jobs.RepairRun`. + * Added `jobParameters` field for `com.databricks.sdk.service.jobs.Run`. + * Added `triggerInfo` field for `com.databricks.sdk.service.jobs.Run`. + * Added `jobParameters` field for `com.databricks.sdk.service.jobs.RunNow`. + * Added `conditionTask` field for `com.databricks.sdk.service.jobs.RunOutput`. + * Added `runJobOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. + * Removed `com.databricks.sdk.service.jobs.RunSubmitTaskSettings` class. + * Changed `dependsOn` field for `com.databricks.sdk.service.jobs.RunTask` to `com.databricks.sdk.service.jobs.TaskDependencyList` class. + * Changed `newCluster` field for `com.databricks.sdk.service.jobs.RunTask` to `com.databricks.sdk.service.compute.ClusterSpec` class. + * Added `conditionTask` field for `com.databricks.sdk.service.jobs.RunTask`. + * Added `resolvedValues` field for `com.databricks.sdk.service.jobs.RunTask`. + * Added `runIf` field for `com.databricks.sdk.service.jobs.RunTask`. + * Added `runJobTask` field for `com.databricks.sdk.service.jobs.RunTask`. + * Changed `source` field for `com.databricks.sdk.service.jobs.SparkPythonTask` to `com.databricks.sdk.service.jobs.Source` class. + * Removed `com.databricks.sdk.service.jobs.SparkPythonTaskSource` class. + * Changed `widgets` field for `com.databricks.sdk.service.jobs.SqlDashboardOutput` to `com.databricks.sdk.service.jobs.SqlDashboardWidgetOutputList` class. + * Changed `tasks` field for `com.databricks.sdk.service.jobs.SubmitRun` to `com.databricks.sdk.service.jobs.SubmitTaskList` class. + * Changed `webhookNotifications` field for `com.databricks.sdk.service.jobs.SubmitRun` to `com.databricks.sdk.service.jobs.WebhookNotifications` class. + * Added `emailNotifications` field for `com.databricks.sdk.service.jobs.SubmitRun`. 
+ * Added `health` field for `com.databricks.sdk.service.jobs.SubmitRun`. + * Removed `com.databricks.sdk.service.jobs.TaskDependenciesItem` class. + * Added `onDurationWarningThresholdExceeded` field for `com.databricks.sdk.service.jobs.TaskEmailNotifications`. + * Changed `fileArrival` field for `com.databricks.sdk.service.jobs.TriggerSettings` to `com.databricks.sdk.service.jobs.FileArrivalTriggerConfiguration` class. + * Changed `pauseStatus` field for `com.databricks.sdk.service.jobs.TriggerSettings` to `com.databricks.sdk.service.jobs.PauseStatus` class. + * Removed `com.databricks.sdk.service.jobs.TriggerSettingsPauseStatus` class. + * Added `com.databricks.sdk.service.jobs.ConditionTask` class. + * Added `com.databricks.sdk.service.jobs.ConditionTaskOp` class. + * Added `com.databricks.sdk.service.jobs.FileArrivalTriggerConfiguration` class. + * Added `com.databricks.sdk.service.jobs.Format` class. + * Added `com.databricks.sdk.service.jobs.GitProvider` class. + * Added `com.databricks.sdk.service.jobs.JobCompute` class. + * Added `com.databricks.sdk.service.jobs.JobParameter` class. + * Added `com.databricks.sdk.service.jobs.JobParameterDefinition` class. + * Added `com.databricks.sdk.service.jobs.JobRunAs` class. + * Added `com.databricks.sdk.service.jobs.JobSource` class. + * Added `com.databricks.sdk.service.jobs.JobSourceDirtyState` class. + * Added `com.databricks.sdk.service.jobs.JobsHealthMetric` class. + * Added `com.databricks.sdk.service.jobs.JobsHealthOperator` class. + * Added `com.databricks.sdk.service.jobs.JobsHealthRule` class. + * Added `com.databricks.sdk.service.jobs.JobsHealthRules` class. + * Added `com.databricks.sdk.service.jobs.ParamPairs` class. + * Added `com.databricks.sdk.service.jobs.PauseStatus` class. + * Added `com.databricks.sdk.service.jobs.ResolvedConditionTaskValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedDbtTaskValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedNotebookTaskValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedParamPairValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedPythonWheelTaskValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedRunJobTaskValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedStringParamsValues` class. + * Added `com.databricks.sdk.service.jobs.ResolvedValues` class. + * Added `com.databricks.sdk.service.jobs.RunConditionTask` class. + * Added `com.databricks.sdk.service.jobs.RunConditionTaskOp` class. + * Added `com.databricks.sdk.service.jobs.RunIf` class. + * Added `com.databricks.sdk.service.jobs.RunJobOutput` class. + * Added `com.databricks.sdk.service.jobs.RunJobTask` class. + * Added `com.databricks.sdk.service.jobs.Source` class. + * Added `com.databricks.sdk.service.jobs.SubmitTask` class. + * Added `com.databricks.sdk.service.jobs.Task` class. + * Added `com.databricks.sdk.service.jobs.TaskDependency` class. + * Added `com.databricks.sdk.service.jobs.TriggerInfo` class. + * Added `com.databricks.sdk.service.jobs.Webhook` class. + * Added `com.databricks.sdk.service.jobs.WebhookNotifications` class. + * Added `com.databricks.sdk.service.jobs.WebhookNotificationsOnDurationWarningThresholdExceededItem` class. + * Removed `registeredModel` field for `com.databricks.sdk.service.ml.GetModelResponse`. + * Added `registeredModelDatabricks` field for `com.databricks.sdk.service.ml.GetModelResponse`. + * Removed `whl` field for `com.databricks.sdk.service.pipelines.PipelineLibrary`. 
+ * Added `environmentVars` field for `com.databricks.sdk.service.serving.ServedModelInput`. + * Added `environmentVars` field for `com.databricks.sdk.service.serving.ServedModelOutput`. + * Added `accountClient.settings()` service. + * Added `com.databricks.sdk.service.settings.DeletePersonalComputeSettingRequest` class. + * Added `com.databricks.sdk.service.settings.DeletePersonalComputeSettingResponse` class. + * Added `com.databricks.sdk.service.settings.PersonalComputeMessage` class. + * Added `com.databricks.sdk.service.settings.PersonalComputeMessageEnum` class. + * Added `com.databricks.sdk.service.settings.PersonalComputeSetting` class. + * Added `com.databricks.sdk.service.settings.ReadPersonalComputeSettingRequest` class. + * Added `com.databricks.sdk.service.settings.UpdatePersonalComputeSettingRequest` class. + * Added `workspaceClient.cleanRooms()` service. + * Added `historyDataSharingStatus` field for `com.databricks.sdk.service.sharing.SharedDataObject`. + * Added `com.databricks.sdk.service.sharing.CentralCleanRoomInfo` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomAssetInfo` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomCatalog` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomCatalogUpdate` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomCollaboratorInfo` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomInfo` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomNotebookInfo` class. + * Added `com.databricks.sdk.service.sharing.CleanRoomTableInfo` class. + * Added `com.databricks.sdk.service.sharing.ColumnInfo` class. + * Added `com.databricks.sdk.service.sharing.ColumnMask` class. + * Added `com.databricks.sdk.service.sharing.ColumnTypeName` class. + * Added `com.databricks.sdk.service.sharing.CreateCleanRoom` class. + * Added `com.databricks.sdk.service.sharing.DeleteCleanRoomRequest` class. + * Added `com.databricks.sdk.service.sharing.GetCleanRoomRequest` class. + * Added `com.databricks.sdk.service.sharing.ListCleanRoomsResponse` class. + * Added `com.databricks.sdk.service.sharing.SharedDataObjectHistoryDataSharingStatus` class. + * Added `com.databricks.sdk.service.sharing.UpdateCleanRoom` class. + * Changed `query` field for `com.databricks.sdk.service.sql.Alert` to `com.databricks.sdk.service.sql.AlertQuery` class. + * Changed `value` field for `com.databricks.sdk.service.sql.AlertOptions` to `Object` class. + * Removed `isDbAdmin` field for `com.databricks.sdk.service.sql.User`. + * Removed `profileImageUrl` field for `com.databricks.sdk.service.sql.User`. + * Added `com.databricks.sdk.service.sql.AlertQuery` class. + * Removed `keyvaultMetadata` field for `com.databricks.sdk.service.workspace.CreateScope`. + * Added `backendAzureKeyvault` field for `com.databricks.sdk.service.workspace.CreateScope`. + * Changed `format` field for `com.databricks.sdk.service.workspace.Import` to `com.databricks.sdk.service.workspace.ImportFormat` class. + * Added `com.databricks.sdk.service.workspace.ImportFormat` class. + +OpenAPI SHA: ca501b7365fb211d84051727fa3a11d4d38ab2a8, Date: 2023-07-17 + +Dependency updates: + + * Bump commons-io from 2.12.0 to 2.13.0 ([#105](https://github.com/databricks/databricks-sdk-java/pull/105)). + * Bump jackson.version from 2.15.1 to 2.15.2 ([#101](https://github.com/databricks/databricks-sdk-java/pull/101)). + * Bump maven-failsafe-plugin from 3.1.0 to 3.1.2 ([#103](https://github.com/databricks/databricks-sdk-java/pull/103)). 
+ ## 0.1.1 * Usable version \ No newline at end of file diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index 383944a73..7052a8db4 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.1.1 + 0.2.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index 85affa416..8efbdfc11 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -13,7 +13,7 @@ public class UserAgent { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties // or getClass().getPackage().getImplementationVersion() is enough. - private static final String version = "0.1.1"; + private static final String version = "0.2.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index b2d038f4b..568834ee0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -114,7 +114,7 @@ public class BaseRun { @JsonProperty("run_id") private Long runId; - /** An optional name for the run. The maximum allowed length is 4096 bytes in UTF-8 encoding. */ + /** An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding. */ @JsonProperty("run_name") private String runName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index ddb5384f8..5b7b7ee53 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -47,6 +47,10 @@ public class CreateJob { @JsonProperty("git_source") private GitSource gitSource; + /** An optional set of health rules that can be defined for this job. */ + @JsonProperty("health") + private JobsHealthRules health; + /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in @@ -74,7 +78,7 @@ public class CreateJob { @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; - /** An optional name for the job. */ + /** An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding. 
*/ @JsonProperty("name") private String name; @@ -196,6 +200,15 @@ public GitSource getGitSource() { return gitSource; } + public CreateJob setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + public CreateJob setJobClusters(Collection jobClusters) { this.jobClusters = jobClusters; return this; @@ -315,6 +328,7 @@ public boolean equals(Object o) { && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(format, that.format) && Objects.equals(gitSource, that.gitSource) + && Objects.equals(health, that.health) && Objects.equals(jobClusters, that.jobClusters) && Objects.equals(maxConcurrentRuns, that.maxConcurrentRuns) && Objects.equals(name, that.name) @@ -338,6 +352,7 @@ public int hashCode() { emailNotifications, format, gitSource, + health, jobClusters, maxConcurrentRuns, name, @@ -361,6 +376,7 @@ public String toString() { .add("emailNotifications", emailNotifications) .add("format", format) .add("gitSource", gitSource) + .add("health", health) .add("jobClusters", jobClusters) .add("maxConcurrentRuns", maxConcurrentRuns) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java index 235847c01..89324f37e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java @@ -16,15 +16,13 @@ public class GitSource { /** * Name of the branch to be checked out and used by this job. This field cannot be specified in * conjunction with git_tag or git_commit. - * - *

The maximum length is 255 characters. */ @JsonProperty("git_branch") private String gitBranch; /** * Commit to be checked out and used by this job. This field cannot be specified in conjunction - * with git_branch or git_tag. The maximum length is 64 characters. + * with git_branch or git_tag. */ @JsonProperty("git_commit") private String gitCommit; @@ -46,13 +44,11 @@ public class GitSource { /** * Name of the tag to be checked out and used by this job. This field cannot be specified in * conjunction with git_branch or git_commit. - * - *

The maximum length is 255 characters. */ @JsonProperty("git_tag") private String gitTag; - /** URL of the repository to be cloned by this job. The maximum length is 300 characters. */ + /** URL of the repository to be cloned by this job. */ @JsonProperty("git_url") private String gitUrl; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java index 7529109ec..f723580f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java @@ -14,6 +14,15 @@ public class JobEmailNotifications { @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; + /** + * A list of email addresses to be notified when the duration of a run exceeds the threshold + * specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the + * `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are + * not sent. + */ + @JsonProperty("on_duration_warning_threshold_exceeded") + private Collection onDurationWarningThresholdExceeded; + /** * A list of email addresses to be notified when a run unsuccessfully completes. A run is * considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` @@ -48,6 +57,16 @@ public Boolean getNoAlertForSkippedRuns() { return noAlertForSkippedRuns; } + public JobEmailNotifications setOnDurationWarningThresholdExceeded( + Collection onDurationWarningThresholdExceeded) { + this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; + return this; + } + + public Collection getOnDurationWarningThresholdExceeded() { + return onDurationWarningThresholdExceeded; + } + public JobEmailNotifications setOnFailure(Collection onFailure) { this.onFailure = onFailure; return this; @@ -81,6 +100,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; JobEmailNotifications that = (JobEmailNotifications) o; return Objects.equals(noAlertForSkippedRuns, that.noAlertForSkippedRuns) + && Objects.equals( + onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) && Objects.equals(onFailure, that.onFailure) && Objects.equals(onStart, that.onStart) && Objects.equals(onSuccess, that.onSuccess); @@ -88,13 +109,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(noAlertForSkippedRuns, onFailure, onStart, onSuccess); + return Objects.hash( + noAlertForSkippedRuns, onDurationWarningThresholdExceeded, onFailure, onStart, onSuccess); } @Override public String toString() { return new ToStringer(JobEmailNotifications.class) .add("noAlertForSkippedRuns", noAlertForSkippedRuns) + .add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) .add("onFailure", onFailure) .add("onStart", onStart) .add("onSuccess", onSuccess) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index 9e9eae753..6ed7c2765 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -43,6 +43,10 @@ public class JobSettings { @JsonProperty("git_source") private GitSource 
gitSource; + /** An optional set of health rules that can be defined for this job. */ + @JsonProperty("health") + private JobsHealthRules health; + /** * A list of job cluster specifications that can be shared and reused by tasks of this job. * Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in @@ -70,7 +74,7 @@ public class JobSettings { @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; - /** An optional name for the job. */ + /** An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding. */ @JsonProperty("name") private String name; @@ -182,6 +186,15 @@ public GitSource getGitSource() { return gitSource; } + public JobSettings setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + public JobSettings setJobClusters(Collection jobClusters) { this.jobClusters = jobClusters; return this; @@ -300,6 +313,7 @@ public boolean equals(Object o) { && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(format, that.format) && Objects.equals(gitSource, that.gitSource) + && Objects.equals(health, that.health) && Objects.equals(jobClusters, that.jobClusters) && Objects.equals(maxConcurrentRuns, that.maxConcurrentRuns) && Objects.equals(name, that.name) @@ -322,6 +336,7 @@ public int hashCode() { emailNotifications, format, gitSource, + health, jobClusters, maxConcurrentRuns, name, @@ -344,6 +359,7 @@ public String toString() { .add("emailNotifications", emailNotifications) .add("format", format) .add("gitSource", gitSource) + .add("health", health) .add("jobClusters", jobClusters) .add("maxConcurrentRuns", maxConcurrentRuns) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthMetric.java new file mode 100755 index 000000000..2989698b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthMetric.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** Specifies the health metric that is being evaluated for a particular health rule. */ +@Generated +public enum JobsHealthMetric { + RUN_DURATION_SECONDS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthOperator.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthOperator.java new file mode 100755 index 000000000..eccf82f29 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthOperator.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** Specifies the operator used to compare the health metric value with the specified threshold. 
*/ +@Generated +public enum JobsHealthOperator { + GREATER_THAN, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java new file mode 100755 index 000000000..a7c9589f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class JobsHealthRule { + /** Specifies the health metric that is being evaluated for a particular health rule. */ + @JsonProperty("metric") + private JobsHealthMetric metric; + + /** + * Specifies the operator used to compare the health metric value with the specified threshold. + */ + @JsonProperty("op") + private JobsHealthOperator op; + + /** + * Specifies the threshold value that the health metric should obey to satisfy the health rule. + */ + @JsonProperty("value") + private Long value; + + public JobsHealthRule setMetric(JobsHealthMetric metric) { + this.metric = metric; + return this; + } + + public JobsHealthMetric getMetric() { + return metric; + } + + public JobsHealthRule setOp(JobsHealthOperator op) { + this.op = op; + return this; + } + + public JobsHealthOperator getOp() { + return op; + } + + public JobsHealthRule setValue(Long value) { + this.value = value; + return this; + } + + public Long getValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobsHealthRule that = (JobsHealthRule) o; + return Objects.equals(metric, that.metric) + && Objects.equals(op, that.op) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(metric, op, value); + } + + @Override + public String toString() { + return new ToStringer(JobsHealthRule.class) + .add("metric", metric) + .add("op", op) + .add("value", value) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java new file mode 100755 index 000000000..45f3e3015 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRules.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** An optional set of health rules that can be defined for this job. 
*/ +@Generated +public class JobsHealthRules { + /** */ + @JsonProperty("rules") + private Collection rules; + + public JobsHealthRules setRules(Collection rules) { + this.rules = rules; + return this; + } + + public Collection getRules() { + return rules; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobsHealthRules that = (JobsHealthRules) o; + return Objects.equals(rules, that.rules); + } + + @Override + public int hashCode() { + return Objects.hash(rules); + } + + @Override + public String toString() { + return new ToStringer(JobsHealthRules.class).add("rules", rules).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index 9680ccf40..983369616 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -118,7 +118,7 @@ public class Run { @JsonProperty("run_id") private Long runId; - /** An optional name for the run. The maximum allowed length is 4096 bytes in UTF-8 encoding. */ + /** An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding. */ @JsonProperty("run_name") private String runName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index 6155bbe44..7e72b1825 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -59,7 +59,7 @@ public class RunTask { @JsonProperty("depends_on") private Collection dependsOn; - /** An optional description for this task. The maximum length is 4096 bytes. */ + /** An optional description for this task. */ @JsonProperty("description") private String description; @@ -185,7 +185,7 @@ public class RunTask { /** * A unique name for the task. This field is used to refer to this task from other tasks. This * field is required and must be unique within its parent job. On Update or Reset, this field is - * used to reference the tasks to be updated or reset. The maximum length is 100 characters. + * used to reference the tasks to be updated or reset. */ @JsonProperty("task_key") private String taskKey; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java index f60a0ca0e..b96bd3e1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java @@ -28,6 +28,10 @@ public class SubmitRun { @JsonProperty("git_source") private GitSource gitSource; + /** An optional set of health rules that can be defined for this job. */ + @JsonProperty("health") + private JobsHealthRules health; + /** * An optional token that can be used to guarantee the idempotency of job run requests. 
If a run * with the provided token already exists, the request does not create a new run but returns the @@ -103,6 +107,15 @@ public GitSource getGitSource() { return gitSource; } + public SubmitRun setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + public SubmitRun setIdempotencyToken(String idempotencyToken) { this.idempotencyToken = idempotencyToken; return this; @@ -165,6 +178,7 @@ public boolean equals(Object o) { return Objects.equals(accessControlList, that.accessControlList) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(gitSource, that.gitSource) + && Objects.equals(health, that.health) && Objects.equals(idempotencyToken, that.idempotencyToken) && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(runName, that.runName) @@ -179,6 +193,7 @@ public int hashCode() { accessControlList, emailNotifications, gitSource, + health, idempotencyToken, notificationSettings, runName, @@ -193,6 +208,7 @@ public String toString() { .add("accessControlList", accessControlList) .add("emailNotifications", emailNotifications) .add("gitSource", gitSource) + .add("health", health) .add("idempotencyToken", idempotencyToken) .add("notificationSettings", notificationSettings) .add("runName", runName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 6cc711113..03b367849 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -41,6 +41,10 @@ public class SubmitTask { @JsonProperty("existing_cluster_id") private String existingClusterId; + /** An optional set of health rules that can be defined for this job. */ + @JsonProperty("health") + private JobsHealthRules health; + /** * An optional list of libraries to be installed on the cluster that executes the task. The * default value is an empty list. @@ -96,7 +100,7 @@ public class SubmitTask { /** * A unique name for the task. This field is used to refer to this task from other tasks. This * field is required and must be unique within its parent job. On Update or Reset, this field is - * used to reference the tasks to be updated or reset. The maximum length is 100 characters. + * used to reference the tasks to be updated or reset. 
*/ @JsonProperty("task_key") private String taskKey; @@ -144,6 +148,15 @@ public String getExistingClusterId() { return existingClusterId; } + public SubmitTask setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + public SubmitTask setLibraries(Collection libraries) { this.libraries = libraries; return this; @@ -261,6 +274,7 @@ public boolean equals(Object o) { && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(health, that.health) && Objects.equals(libraries, that.libraries) && Objects.equals(newCluster, that.newCluster) && Objects.equals(notebookTask, that.notebookTask) @@ -282,6 +296,7 @@ public int hashCode() { dependsOn, emailNotifications, existingClusterId, + health, libraries, newCluster, notebookTask, @@ -303,6 +318,7 @@ public String toString() { .add("dependsOn", dependsOn) .add("emailNotifications", emailNotifications) .add("existingClusterId", existingClusterId) + .add("health", health) .add("libraries", libraries) .add("newCluster", newCluster) .add("notebookTask", notebookTask) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index df23143f3..93ce0f1ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -40,7 +40,7 @@ public class Task { @JsonProperty("depends_on") private Collection dependsOn; - /** An optional description for this task. The maximum length is 4096 bytes. */ + /** An optional description for this task. */ @JsonProperty("description") private String description; @@ -59,6 +59,10 @@ public class Task { @JsonProperty("existing_cluster_id") private String existingClusterId; + /** An optional set of health rules that can be defined for this job. */ + @JsonProperty("health") + private JobsHealthRules health; + /** * If job_cluster_key, this task is executed reusing the cluster specified in * `job.settings.job_clusters`. @@ -161,7 +165,7 @@ public class Task { /** * A unique name for the task. This field is used to refer to this task from other tasks. This * field is required and must be unique within its parent job. On Update or Reset, this field is - * used to reference the tasks to be updated or reset. The maximum length is 100 characters. + * used to reference the tasks to be updated or reset. 
*/ @JsonProperty("task_key") private String taskKey; @@ -236,6 +240,15 @@ public String getExistingClusterId() { return existingClusterId; } + public Task setHealth(JobsHealthRules health) { + this.health = health; + return this; + } + + public JobsHealthRules getHealth() { + return health; + } + public Task setJobClusterKey(String jobClusterKey) { this.jobClusterKey = jobClusterKey; return this; @@ -410,6 +423,7 @@ public boolean equals(Object o) { && Objects.equals(description, that.description) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(health, that.health) && Objects.equals(jobClusterKey, that.jobClusterKey) && Objects.equals(libraries, that.libraries) && Objects.equals(maxRetries, that.maxRetries) @@ -440,6 +454,7 @@ public int hashCode() { description, emailNotifications, existingClusterId, + health, jobClusterKey, libraries, maxRetries, @@ -470,6 +485,7 @@ public String toString() { .add("description", description) .add("emailNotifications", emailNotifications) .add("existingClusterId", existingClusterId) + .add("health", health) .add("jobClusterKey", jobClusterKey) .add("libraries", libraries) .add("maxRetries", maxRetries) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java index dce726a50..fd2a2f76d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java @@ -10,6 +10,15 @@ @Generated public class TaskEmailNotifications { + /** + * A list of email addresses to be notified when the duration of a run exceeds the threshold + * specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the + * `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are + * not sent. + */ + @JsonProperty("on_duration_warning_threshold_exceeded") + private Collection onDurationWarningThresholdExceeded; + /** * A list of email addresses to be notified when a run unsuccessfully completes. 
A run is * considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` @@ -35,6 +44,16 @@ public class TaskEmailNotifications { @JsonProperty("on_success") private Collection onSuccess; + public TaskEmailNotifications setOnDurationWarningThresholdExceeded( + Collection onDurationWarningThresholdExceeded) { + this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; + return this; + } + + public Collection getOnDurationWarningThresholdExceeded() { + return onDurationWarningThresholdExceeded; + } + public TaskEmailNotifications setOnFailure(Collection onFailure) { this.onFailure = onFailure; return this; @@ -67,19 +86,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TaskEmailNotifications that = (TaskEmailNotifications) o; - return Objects.equals(onFailure, that.onFailure) + return Objects.equals( + onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) + && Objects.equals(onFailure, that.onFailure) && Objects.equals(onStart, that.onStart) && Objects.equals(onSuccess, that.onSuccess); } @Override public int hashCode() { - return Objects.hash(onFailure, onStart, onSuccess); + return Objects.hash(onDurationWarningThresholdExceeded, onFailure, onStart, onSuccess); } @Override public String toString() { return new ToStringer(TaskEmailNotifications.class) + .add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) .add("onFailure", onFailure) .add("onStart", onStart) .add("onSuccess", onSuccess) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java index a1bfaee3a..67bc5a199 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java @@ -10,6 +10,15 @@ @Generated public class WebhookNotifications { + /** + * An optional list of system notification IDs to call when the duration of a run exceeds the + * threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 + * destinations can be specified for the `on_duration_warning_threshold_exceeded` property. + */ + @JsonProperty("on_duration_warning_threshold_exceeded") + private Collection + onDurationWarningThresholdExceeded; + /** * An optional list of system notification IDs to call when the run fails. A maximum of 3 * destinations can be specified for the `on_failure` property. 
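Illustrative usage, not generated code: a minimal sketch of how the new health-rule and duration-warning classes compose. The job name, email address, and webhook destination ID are placeholders, and attaching the webhooks to JobSettings assumes the usual generated setWebhookNotifications setter, which lies outside the hunks shown here.

import com.databricks.sdk.service.jobs.JobEmailNotifications;
import com.databricks.sdk.service.jobs.JobSettings;
import com.databricks.sdk.service.jobs.JobsHealthMetric;
import com.databricks.sdk.service.jobs.JobsHealthOperator;
import com.databricks.sdk.service.jobs.JobsHealthRule;
import com.databricks.sdk.service.jobs.JobsHealthRules;
import com.databricks.sdk.service.jobs.WebhookNotifications;
import com.databricks.sdk.service.jobs.WebhookNotificationsOnDurationWarningThresholdExceededItem;
import java.util.Arrays;

public class HealthRulesExample {
  public static void main(String[] args) {
    // A single health rule: warn when a run exceeds one hour of wall-clock time.
    JobsHealthRules health =
        new JobsHealthRules()
            .setRules(
                Arrays.asList(
                    new JobsHealthRule()
                        .setMetric(JobsHealthMetric.RUN_DURATION_SECONDS)
                        .setOp(JobsHealthOperator.GREATER_THAN)
                        .setValue(3600L)));

    // Email recipients for the RUN_DURATION_SECONDS warning (placeholder address).
    JobEmailNotifications emails =
        new JobEmailNotifications()
            .setOnDurationWarningThresholdExceeded(Arrays.asList("ops@example.com"));

    // Up to three webhook destinations can be attached to the same event (placeholder ID).
    WebhookNotifications webhooks =
        new WebhookNotifications()
            .setOnDurationWarningThresholdExceeded(
                Arrays.asList(
                    new WebhookNotificationsOnDurationWarningThresholdExceededItem()
                        .setId("0123456789abcdef")));

    // The assembled settings object would then be passed to the Jobs API, e.g. when
    // creating or resetting a job. setWebhookNotifications is assumed here, as noted above.
    JobSettings settings =
        new JobSettings()
            .setName("nightly-etl")
            .setHealth(health)
            .setEmailNotifications(emails)
            .setWebhookNotifications(webhooks);
    System.out.println(settings);
  }
}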
@@ -31,6 +40,18 @@ public class WebhookNotifications { @JsonProperty("on_success") private Collection onSuccess; + public WebhookNotifications setOnDurationWarningThresholdExceeded( + Collection + onDurationWarningThresholdExceeded) { + this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; + return this; + } + + public Collection + getOnDurationWarningThresholdExceeded() { + return onDurationWarningThresholdExceeded; + } + public WebhookNotifications setOnFailure(Collection onFailure) { this.onFailure = onFailure; return this; @@ -63,19 +84,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; WebhookNotifications that = (WebhookNotifications) o; - return Objects.equals(onFailure, that.onFailure) + return Objects.equals( + onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) + && Objects.equals(onFailure, that.onFailure) && Objects.equals(onStart, that.onStart) && Objects.equals(onSuccess, that.onSuccess); } @Override public int hashCode() { - return Objects.hash(onFailure, onStart, onSuccess); + return Objects.hash(onDurationWarningThresholdExceeded, onFailure, onStart, onSuccess); } @Override public String toString() { return new ToStringer(WebhookNotifications.class) + .add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) .add("onFailure", onFailure) .add("onStart", onStart) .add("onSuccess", onSuccess) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsOnDurationWarningThresholdExceededItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsOnDurationWarningThresholdExceededItem.java new file mode 100755 index 000000000..826783328 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsOnDurationWarningThresholdExceededItem.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class WebhookNotificationsOnDurationWarningThresholdExceededItem { + /** */ + @JsonProperty("id") + private String id; + + public WebhookNotificationsOnDurationWarningThresholdExceededItem setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WebhookNotificationsOnDurationWarningThresholdExceededItem that = + (WebhookNotificationsOnDurationWarningThresholdExceededItem) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(WebhookNotificationsOnDurationWarningThresholdExceededItem.class) + .add("id", id) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java index 90ff810b3..b01a0851a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java @@ -25,10 +25,6 @@ public class PipelineLibrary { @JsonProperty("notebook") private NotebookLibrary notebook; - /** URI of the wheel to be installed. */ - @JsonProperty("whl") - private String whl; - public PipelineLibrary setFile(FileLibrary file) { this.file = file; return this; @@ -65,15 +61,6 @@ public NotebookLibrary getNotebook() { return notebook; } - public PipelineLibrary setWhl(String whl) { - this.whl = whl; - return this; - } - - public String getWhl() { - return whl; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -82,13 +69,12 @@ public boolean equals(Object o) { return Objects.equals(file, that.file) && Objects.equals(jar, that.jar) && Objects.equals(maven, that.maven) - && Objects.equals(notebook, that.notebook) - && Objects.equals(whl, that.whl); + && Objects.equals(notebook, that.notebook); } @Override public int hashCode() { - return Objects.hash(file, jar, maven, notebook, whl); + return Objects.hash(file, jar, maven, notebook); } @Override @@ -98,7 +84,6 @@ public String toString() { .add("jar", jar) .add("maven", maven) .add("notebook", notebook) - .add("whl", whl) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java index 8e0f647a1..3b9d4cbb0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Name of the channel */ @Generated public enum ChannelName { CHANNEL_NAME_CURRENT, diff --git a/pom.xml b/pom.xml index 4d48a625f..0628aebb5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.1.1 + 0.2.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for