diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ffd6f58dd..2d9cb6d86 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -6f6b1371e640f2dfeba72d365ac566368656f6b6 \ No newline at end of file +cf9c61453990df0f9453670f2fe68e1b128647a2 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 94c51cb9e..ce3319564 100755 --- a/.gitattributes +++ b/.gitattributes @@ -36,6 +36,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionL databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobJobPermission.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretSecretPermission.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointServingEndpointPermission.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouseSqlWarehousePermission.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java linguist-generated=true @@ -1518,6 +1527,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineT databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java linguist-generated=true @@ -1765,6 +1775,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefa databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java linguist-generated=true @@ -1783,6 +1795,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLeg databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java linguist-generated=true @@ -1810,6 +1826,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplia databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequest.java linguist-generated=true @@ -1903,6 +1920,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComp databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java linguist-generated=true @@ -2282,17 +2300,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Vector 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java linguist-generated=true @@ -2302,12 +2323,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFor databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetAclRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionLevelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoPermissionsRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetSecretResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetStatusRequest.java linguist-generated=true @@ -2323,6 +2345,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportRes databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListResponse.java linguist-generated=true @@ -2357,9 +2380,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsIm databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 517400403..c2d7bf2c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,63 @@ # Version changelog +## [Release] Release v0.33.0 + +### Bug Fixes + + * Fix Ubuntu unit tests on Github Actions ([#366](https://github.com/databricks/databricks-sdk-java/pull/366)). + + +### API Changes: + + * Added `workspaceClient.disableLegacyDbfs()` service. 
+ * Added `com.databricks.sdk.service.apps.AppResource`, `com.databricks.sdk.service.apps.AppResourceJob`, `com.databricks.sdk.service.apps.AppResourceJobJobPermission`, `com.databricks.sdk.service.apps.AppResourceSecret`, `com.databricks.sdk.service.apps.AppResourceSecretSecretPermission`, `com.databricks.sdk.service.apps.AppResourceServingEndpoint`, `com.databricks.sdk.service.apps.AppResourceServingEndpointServingEndpointPermission`, `com.databricks.sdk.service.apps.AppResourceSqlWarehouse` and `com.databricks.sdk.service.apps.AppResourceSqlWarehouseSqlWarehousePermission` classes. + * Added `com.databricks.sdk.service.pipelines.ReportSpec` class. + * Added `com.databricks.sdk.service.settings.DeleteDisableLegacyDbfsRequest`, `com.databricks.sdk.service.settings.DeleteDisableLegacyDbfsResponse`, `com.databricks.sdk.service.settings.DisableLegacyDbfs`, `com.databricks.sdk.service.settings.GetDisableLegacyDbfsRequest` and `com.databricks.sdk.service.settings.UpdateDisableLegacyDbfsRequest` classes. + * Added `com.databricks.sdk.service.workspace.CreateCredentialsRequest`, `com.databricks.sdk.service.workspace.CreateRepoRequest`, `com.databricks.sdk.service.workspace.CreateRepoResponse`, `com.databricks.sdk.service.workspace.DeleteCredentialsRequest`, `com.databricks.sdk.service.workspace.DeleteCredentialsResponse`, `com.databricks.sdk.service.workspace.DeleteRepoResponse`, `com.databricks.sdk.service.workspace.GetCredentialsRequest`, `com.databricks.sdk.service.workspace.GetRepoResponse`, `com.databricks.sdk.service.workspace.ListCredentialsResponse`, `com.databricks.sdk.service.workspace.UpdateCredentialsRequest`, `com.databricks.sdk.service.workspace.UpdateCredentialsResponse`, `com.databricks.sdk.service.workspace.UpdateRepoRequest` and `com.databricks.sdk.service.workspace.UpdateRepoResponse` classes. + * Added `defaultSourceCodePath` and `resources` fields for `com.databricks.sdk.service.apps.App`. + * Added `resources` field for `com.databricks.sdk.service.apps.CreateAppRequest`. + * Added `resources` field for `com.databricks.sdk.service.apps.UpdateAppRequest`. + * Added `unityCatalogProvisioningState` field for `com.databricks.sdk.service.catalog.OnlineTable`. 
+ * Added `isTruncated` field for `com.databricks.sdk.service.dashboards.Result`. + * Added `effectiveBudgetPolicyId` field for `com.databricks.sdk.service.jobs.BaseJob`. + * Added `budgetPolicyId` field for `com.databricks.sdk.service.jobs.CreateJob`. + * Added `effectiveBudgetPolicyId` field for `com.databricks.sdk.service.jobs.Job`. + * Added `budgetPolicyId` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Added `budgetPolicyId` field for `com.databricks.sdk.service.jobs.SubmitRun`. + * Added `schema` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. + * Added `schema` field for `com.databricks.sdk.service.pipelines.EditPipeline`. + * Added `report` field for `com.databricks.sdk.service.pipelines.IngestionConfig`. + * Added `schema` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. + * Added `sequenceBy` field for `com.databricks.sdk.service.pipelines.TableSpecificConfig`. + * Added `notifyOnOk` field for `com.databricks.sdk.service.sql.Alert`. + * Added `notifyOnOk` field for `com.databricks.sdk.service.sql.CreateAlertRequestAlert`. + * Added `notifyOnOk` field for `com.databricks.sdk.service.sql.ListAlertsResponseAlert`. + * Added `notifyOnOk` field for `com.databricks.sdk.service.sql.UpdateAlertRequestAlert`. + * Added `credentialId`, `gitProvider` and `gitUsername` fields for `com.databricks.sdk.service.workspace.GetCredentialsResponse`. + * Changed `create()` method for `workspaceClient.gitCredentials()` service . New request type is `com.databricks.sdk.service.workspace.CreateCredentialsRequest` class. + * Changed `delete()` method for `workspaceClient.gitCredentials()` service to return `Object` class. + * Changed `delete()` method for `workspaceClient.gitCredentials()` service . New request type is `com.databricks.sdk.service.workspace.DeleteCredentialsRequest` class. 
+ * Changed `get()` method for `workspaceClient.gitCredentials()` service to return `com.databricks.sdk.service.workspace.GetCredentialsResponse` class. + * Changed `get()` method for `workspaceClient.gitCredentials()` service . New request type is `com.databricks.sdk.service.workspace.GetCredentialsRequest` class. + * Changed `list()` method for `workspaceClient.gitCredentials()` service to return `com.databricks.sdk.service.workspace.ListCredentialsResponse` class. + * Changed `update()` method for `workspaceClient.gitCredentials()` service to return `Object` class. + * Changed `update()` method for `workspaceClient.gitCredentials()` service . New request type is `com.databricks.sdk.service.workspace.UpdateCredentialsRequest` class. + * Changed `create()` method for `workspaceClient.repos()` service . New request type is `com.databricks.sdk.service.workspace.CreateRepoRequest` class. + * Changed `create()` method for `workspaceClient.repos()` service to return `com.databricks.sdk.service.workspace.CreateRepoResponse` class. + * Changed `delete()` method for `workspaceClient.repos()` service to return `Object` class. + * Changed `get()` method for `workspaceClient.repos()` service to return `com.databricks.sdk.service.workspace.GetRepoResponse` class. + * Changed `update()` method for `workspaceClient.repos()` service to return `Object` class. + * Changed `update()` method for `workspaceClient.repos()` service . New request type is `com.databricks.sdk.service.workspace.UpdateRepoRequest` class. + * Changed `credentialId` and `gitProvider` fields for `com.databricks.sdk.service.workspace.CreateCredentialsResponse` to be required. + * Changed `credentialId` field for `com.databricks.sdk.service.workspace.CredentialInfo` to be required. + * Changed `patterns` field for `com.databricks.sdk.service.workspace.SparseCheckout` to `com.databricks.sdk.service.workspace.List` class. 
+ * Changed `patterns` field for `com.databricks.sdk.service.workspace.SparseCheckoutUpdate` to `com.databricks.sdk.service.workspace.List` class. + * Removed `com.databricks.sdk.service.workspace.CreateCredentials`, `com.databricks.sdk.service.workspace.CreateRepo`, `com.databricks.sdk.service.workspace.DeleteGitCredentialRequest`, `com.databricks.sdk.service.workspace.GetGitCredentialRequest`, `com.databricks.sdk.service.workspace.SparseCheckoutPattern`, `com.databricks.sdk.service.workspace.UpdateCredentials`, `com.databricks.sdk.service.workspace.UpdateRepo` and `Object` classes. + * Removed `credentials` field for `com.databricks.sdk.service.workspace.GetCredentialsResponse`. + +OpenAPI SHA: cf9c61453990df0f9453670f2fe68e1b128647a2, Date: 2024-10-14 + + ## [Release] Release v0.32.2 ### Bug Fixes diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index 1ca5d4bfb..977d46677 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.32.2 + 0.33.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index 482a0ac08..13b056bb5 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -32,7 +32,7 @@ public String getValue() { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties // or getClass().getPackage().getImplementationVersion() is enough. 
- private static final String version = "0.32.2"; + private static final String version = "0.33.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index 77357102d..dce0c0594 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -5,11 +5,15 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated public class App { - /** The active deployment of the app. */ + /** + * The active deployment of the app. A deployment is considered active when it has been deployed + * to the app compute. + */ @JsonProperty("active_deployment") private AppDeployment activeDeployment; @@ -29,6 +33,13 @@ public class App { @JsonProperty("creator") private String creator; + /** + * The default workspace file system path of the source code from which app deployment are + * created. This field tracks the workspace source code path of the last active deployment. + */ + @JsonProperty("default_source_code_path") + private String defaultSourceCodePath; + /** The description of the app. */ @JsonProperty("description") private String description; @@ -40,10 +51,17 @@ public class App { @JsonProperty("name") private String name; - /** The pending deployment of the app. */ + /** + * The pending deployment of the app. A deployment is considered pending when it is being prepared + * for deployment to the app compute. + */ @JsonProperty("pending_deployment") private AppDeployment pendingDeployment; + /** Resources for the app. 
*/ + @JsonProperty("resources") + private Collection resources; + /** */ @JsonProperty("service_principal_id") private Long servicePrincipalId; @@ -109,6 +127,15 @@ public String getCreator() { return creator; } + public App setDefaultSourceCodePath(String defaultSourceCodePath) { + this.defaultSourceCodePath = defaultSourceCodePath; + return this; + } + + public String getDefaultSourceCodePath() { + return defaultSourceCodePath; + } + public App setDescription(String description) { this.description = description; return this; @@ -136,6 +163,15 @@ public AppDeployment getPendingDeployment() { return pendingDeployment; } + public App setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + public App setServicePrincipalId(Long servicePrincipalId) { this.servicePrincipalId = servicePrincipalId; return this; @@ -191,9 +227,11 @@ public boolean equals(Object o) { && Objects.equals(computeStatus, that.computeStatus) && Objects.equals(createTime, that.createTime) && Objects.equals(creator, that.creator) + && Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath) && Objects.equals(description, that.description) && Objects.equals(name, that.name) && Objects.equals(pendingDeployment, that.pendingDeployment) + && Objects.equals(resources, that.resources) && Objects.equals(servicePrincipalId, that.servicePrincipalId) && Objects.equals(servicePrincipalName, that.servicePrincipalName) && Objects.equals(updateTime, that.updateTime) @@ -209,9 +247,11 @@ public int hashCode() { computeStatus, createTime, creator, + defaultSourceCodePath, description, name, pendingDeployment, + resources, servicePrincipalId, servicePrincipalName, updateTime, @@ -227,9 +267,11 @@ public String toString() { .add("computeStatus", computeStatus) .add("createTime", createTime) .add("creator", creator) + .add("defaultSourceCodePath", defaultSourceCodePath) .add("description", description) .add("name", 
name) .add("pendingDeployment", pendingDeployment) + .add("resources", resources) .add("servicePrincipalId", servicePrincipalId) .add("servicePrincipalName", servicePrincipalName) .add("updateTime", updateTime) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java new file mode 100755 index 000000000..a0728e385 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java @@ -0,0 +1,119 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResource { + /** Description of the App Resource. */ + @JsonProperty("description") + private String description; + + /** */ + @JsonProperty("job") + private AppResourceJob job; + + /** Name of the App Resource. 
*/ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("secret") + private AppResourceSecret secret; + + /** */ + @JsonProperty("serving_endpoint") + private AppResourceServingEndpoint servingEndpoint; + + /** */ + @JsonProperty("sql_warehouse") + private AppResourceSqlWarehouse sqlWarehouse; + + public AppResource setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public AppResource setJob(AppResourceJob job) { + this.job = job; + return this; + } + + public AppResourceJob getJob() { + return job; + } + + public AppResource setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppResource setSecret(AppResourceSecret secret) { + this.secret = secret; + return this; + } + + public AppResourceSecret getSecret() { + return secret; + } + + public AppResource setServingEndpoint(AppResourceServingEndpoint servingEndpoint) { + this.servingEndpoint = servingEndpoint; + return this; + } + + public AppResourceServingEndpoint getServingEndpoint() { + return servingEndpoint; + } + + public AppResource setSqlWarehouse(AppResourceSqlWarehouse sqlWarehouse) { + this.sqlWarehouse = sqlWarehouse; + return this; + } + + public AppResourceSqlWarehouse getSqlWarehouse() { + return sqlWarehouse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResource that = (AppResource) o; + return Objects.equals(description, that.description) + && Objects.equals(job, that.job) + && Objects.equals(name, that.name) + && Objects.equals(secret, that.secret) + && Objects.equals(servingEndpoint, that.servingEndpoint) + && Objects.equals(sqlWarehouse, that.sqlWarehouse); + } + + @Override + public int hashCode() { + return Objects.hash(description, job, name, secret, servingEndpoint, sqlWarehouse); + } + + 
@Override + public String toString() { + return new ToStringer(AppResource.class) + .add("description", description) + .add("job", job) + .add("name", name) + .add("secret", secret) + .add("servingEndpoint", servingEndpoint) + .add("sqlWarehouse", sqlWarehouse) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java new file mode 100755 index 000000000..e9f546520 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceJob { + /** Id of the job to grant permission on. */ + @JsonProperty("id") + private String id; + + /** + * Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", + * "CAN_MANAGE_RUN", "CAN_VIEW". 
+ */ + @JsonProperty("permission") + private AppResourceJobJobPermission permission; + + public AppResourceJob setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AppResourceJob setPermission(AppResourceJobJobPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceJobJobPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceJob that = (AppResourceJob) o; + return Objects.equals(id, that.id) && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(id, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceJob.class) + .add("id", id) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobJobPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobJobPermission.java new file mode 100755 index 000000000..dbf2bc074 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJobJobPermission.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceJobJobPermission { + CAN_MANAGE, + CAN_MANAGE_RUN, + CAN_VIEW, + IS_OWNER, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java new file mode 100755 index 000000000..4f32fb90a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecret.java @@ -0,0 +1,77 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceSecret { + /** Key of the secret to grant permission on. */ + @JsonProperty("key") + private String key; + + /** + * Permission to grant on the secret scope. For secrets, only one permission is allowed. + * Permission must be one of: "READ", "WRITE", "MANAGE". + */ + @JsonProperty("permission") + private AppResourceSecretSecretPermission permission; + + /** Scope of the secret to grant permission on. 
*/ + @JsonProperty("scope") + private String scope; + + public AppResourceSecret setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public AppResourceSecret setPermission(AppResourceSecretSecretPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceSecretSecretPermission getPermission() { + return permission; + } + + public AppResourceSecret setScope(String scope) { + this.scope = scope; + return this; + } + + public String getScope() { + return scope; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceSecret that = (AppResourceSecret) o; + return Objects.equals(key, that.key) + && Objects.equals(permission, that.permission) + && Objects.equals(scope, that.scope); + } + + @Override + public int hashCode() { + return Objects.hash(key, permission, scope); + } + + @Override + public String toString() { + return new ToStringer(AppResourceSecret.class) + .add("key", key) + .add("permission", permission) + .add("scope", scope) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretSecretPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretSecretPermission.java new file mode 100755 index 000000000..950b5f3d5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSecretSecretPermission.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +/** + * Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE". 
+ */ +@Generated +public enum AppResourceSecretSecretPermission { + MANAGE, + READ, + WRITE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java new file mode 100755 index 000000000..f52937c59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpoint.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceServingEndpoint { + /** Name of the serving endpoint to grant permission on. */ + @JsonProperty("name") + private String name; + + /** + * Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", + * "CAN_QUERY", "CAN_VIEW". 
+ */ + @JsonProperty("permission") + private AppResourceServingEndpointServingEndpointPermission permission; + + public AppResourceServingEndpoint setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppResourceServingEndpoint setPermission( + AppResourceServingEndpointServingEndpointPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceServingEndpointServingEndpointPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceServingEndpoint that = (AppResourceServingEndpoint) o; + return Objects.equals(name, that.name) && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(name, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceServingEndpoint.class) + .add("name", name) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointServingEndpointPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointServingEndpointPermission.java new file mode 100755 index 000000000..0d67aec6c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceServingEndpointServingEndpointPermission.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceServingEndpointServingEndpointPermission { + CAN_MANAGE, + CAN_QUERY, + CAN_VIEW, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java new file mode 100755 index 000000000..f654d2285 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouse.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceSqlWarehouse { + /** Id of the SQL warehouse to grant permission on. */ + @JsonProperty("id") + private String id; + + /** + * Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE", + * "IS_OWNER". 
+ */ + @JsonProperty("permission") + private AppResourceSqlWarehouseSqlWarehousePermission permission; + + public AppResourceSqlWarehouse setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public AppResourceSqlWarehouse setPermission( + AppResourceSqlWarehouseSqlWarehousePermission permission) { + this.permission = permission; + return this; + } + + public AppResourceSqlWarehouseSqlWarehousePermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceSqlWarehouse that = (AppResourceSqlWarehouse) o; + return Objects.equals(id, that.id) && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(id, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceSqlWarehouse.class) + .add("id", id) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouseSqlWarehousePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouseSqlWarehousePermission.java new file mode 100755 index 000000000..39f05643a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSqlWarehouseSqlWarehousePermission.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceSqlWarehouseSqlWarehousePermission { + CAN_MANAGE, + CAN_USE, + IS_OWNER, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java index e63da38c3..e835442a9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated @@ -20,6 +21,10 @@ public class CreateAppRequest { @JsonProperty("name") private String name; + /** Resources for the app. */ + @JsonProperty("resources") + private Collection resources; + public CreateAppRequest setDescription(String description) { this.description = description; return this; @@ -38,17 +43,28 @@ public String getName() { return name; } + public CreateAppRequest setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAppRequest that = (CreateAppRequest) o; - return Objects.equals(description, that.description) && Objects.equals(name, that.name); + return Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(resources, that.resources); } @Override public int hashCode() { - return Objects.hash(description, name); + return Objects.hash(description, name, resources); } @Override @@ -56,6 +72,7 @@ public String toString() { return new 
ToStringer(CreateAppRequest.class) .add("description", description) .add("name", name) + .add("resources", resources) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java index edbe547cb..da8b8c4ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated @@ -20,6 +21,10 @@ public class UpdateAppRequest { @JsonProperty("name") private String name; + /** Resources for the app. */ + @JsonProperty("resources") + private Collection resources; + public UpdateAppRequest setDescription(String description) { this.description = description; return this; @@ -38,17 +43,28 @@ public String getName() { return name; } + public UpdateAppRequest setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateAppRequest that = (UpdateAppRequest) o; - return Objects.equals(description, that.description) && Objects.equals(name, that.name); + return Objects.equals(description, that.description) + && Objects.equals(name, that.name) + && Objects.equals(resources, that.resources); } @Override public int hashCode() { - return Objects.hash(description, name); + return Objects.hash(description, name, resources); } @Override @@ -56,6 +72,7 @@ public String toString() { return new ToStringer(UpdateAppRequest.class) .add("description", description) .add("name", 
name) + .add("resources", resources) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java index 329030107..19e00f79a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfoSecurableKind.java @@ -12,6 +12,7 @@ public enum ConnectionInfoSecurableKind { CONNECTION_DATABRICKS, CONNECTION_EXTERNAL_HIVE_METASTORE, CONNECTION_GLUE, + CONNECTION_HTTP_BEARER, CONNECTION_MYSQL, CONNECTION_ONLINE_CATALOG, CONNECTION_POSTGRESQL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index 72638e1a3..02d89b8ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -11,6 +11,7 @@ public enum ConnectionType { DATABRICKS, GLUE, HIVE_METASTORE, + HTTP, MYSQL, POSTGRESQL, REDSHIFT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java index 1466e5b91..7f8868e05 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java @@ -7,5 +7,6 @@ /** The type of credential. 
*/ @Generated public enum CredentialType { + BEARER_TOKEN, USERNAME_PASSWORD, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java index 76e7ca205..a18c68dbc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java @@ -24,8 +24,8 @@ public class GenerateTemporaryTableCredentialResponse { private AzureUserDelegationSas azureUserDelegationSas; /** - * Server time when the credential will expire, in unix epoch milliseconds since January 1, 1970 - * at 00:00:00 UTC. The API client is advised to cache the credential given this expiration time. + * Server time when the credential will expire, in epoch milliseconds. The API client is advised + * to cache the credential given this expiration time. */ @JsonProperty("expiration_time") private Long expirationTime; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java index 1e65a14ee..d31041dde 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java @@ -18,7 +18,7 @@ public class OnlineTable { @JsonProperty("spec") private OnlineTableSpec spec; - /** Online Table status */ + /** Online Table data synchronization status */ @JsonProperty("status") private OnlineTableStatus status; @@ -26,6 +26,14 @@ public class OnlineTable { @JsonProperty("table_serving_url") private String tableServingUrl; + /** + * The provisioning state of the online table entity in Unity Catalog. 
This is distinct from the + * state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline + * may be in "PROVISIONING" as it runs asynchronously). + */ + @JsonProperty("unity_catalog_provisioning_state") + private ProvisioningInfoState unityCatalogProvisioningState; + public OnlineTable setName(String name) { this.name = name; return this; @@ -62,6 +70,16 @@ public String getTableServingUrl() { return tableServingUrl; } + public OnlineTable setUnityCatalogProvisioningState( + ProvisioningInfoState unityCatalogProvisioningState) { + this.unityCatalogProvisioningState = unityCatalogProvisioningState; + return this; + } + + public ProvisioningInfoState getUnityCatalogProvisioningState() { + return unityCatalogProvisioningState; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -70,12 +88,13 @@ public boolean equals(Object o) { return Objects.equals(name, that.name) && Objects.equals(spec, that.spec) && Objects.equals(status, that.status) - && Objects.equals(tableServingUrl, that.tableServingUrl); + && Objects.equals(tableServingUrl, that.tableServingUrl) + && Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState); } @Override public int hashCode() { - return Objects.hash(name, spec, status, tableServingUrl); + return Objects.hash(name, spec, status, tableServingUrl, unityCatalogProvisioningState); } @Override @@ -85,6 +104,7 @@ public String toString() { .add("spec", spec) .add("status", status) .add("tableServingUrl", tableServingUrl) + .add("unityCatalogProvisioningState", unityCatalogProvisioningState) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java index 445828f9c..c68ebfa80 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfoState.java @@ -10,5 +10,5 @@ public enum ProvisioningInfoState { DELETING, FAILED, PROVISIONING, - STATE_UNSPECIFIED, + UPDATING, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java index 1944e4928..62d74c45d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java @@ -22,7 +22,11 @@ public class CreateDashboardRequest { /** * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. + * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which + * includes the `serialized_dashboard` field. This field provides the structure of the JSON string + * that represents the dashboard's layout and components. + * + *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get */ @JsonProperty("serialized_dashboard") private String serializedDashboard; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java index e5f1c4484..d94cee027 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java @@ -40,15 +40,19 @@ public class Dashboard { private String parentPath; /** - * The workspace path of the dashboard asset, including the file name. This field is excluded in - * List Dashboards responses. + * The workspace path of the dashboard asset, including the file name. Exported dashboards always + * have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses. */ @JsonProperty("path") private String path; /** * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. + * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which + * includes the `serialized_dashboard` field. This field provides the structure of the JSON string + * that represents the dashboard's layout and components. + * + *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get */ @JsonProperty("serialized_dashboard") private String serializedDashboard; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java index 6737439ea..15c434997 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java @@ -48,8 +48,9 @@ public class GenieMessage { /** * MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data - * sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * - * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling + * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * + * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: + * Executing AI provided SQL query. Get the SQL query result by calling * [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message * status will stay in the `EXECUTING_QUERY` until a client calls * [getMessageQueryResult](:method:genie/getMessageQueryResult)**. 
* `FAILED`: Generating a diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index cfdd2b9f8..5e1c94580 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -31,6 +31,7 @@ public enum MessageErrorType { LOCAL_CONTEXT_EXCEEDED_EXCEPTION, MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION, MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION, + NO_QUERY_TO_VISUALIZE_EXCEPTION, NO_TABLES_TO_QUERY_EXCEPTION, RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION, RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java index 81985d08c..972f44191 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java @@ -6,8 +6,9 @@ /** * MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data - * sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * - * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling + * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * + * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing + * AI provided SQL query. Get the SQL query result by calling * [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status * will stay in the `EXECUTING_QUERY` until a client calls * [getMessageQueryResult](:method:genie/getMessageQueryResult)**. 
* `FAILED`: Generating a response @@ -31,6 +32,7 @@ public enum MessageStatus { FAILED, // Generating a response or the executing the query failed. Please see `error` // field. FETCHING_METADATA, // Fetching metadata from the data sources. + FILTERING_CONTEXT, // Running smart context step to determine relevant context. QUERY_RESULT_EXPIRED, // SQL result is not available anymore. The user needs to execute the query // again. SUBMITTED, // Message has been submitted. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java index d36303462..de3b6451a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java @@ -9,6 +9,10 @@ @Generated public class Result { + /** If result is truncated */ + @JsonProperty("is_truncated") + private Boolean isTruncated; + /** Row count of the result */ @JsonProperty("row_count") private Long rowCount; @@ -20,6 +24,15 @@ public class Result { @JsonProperty("statement_id") private String statementId; + public Result setIsTruncated(Boolean isTruncated) { + this.isTruncated = isTruncated; + return this; + } + + public Boolean getIsTruncated() { + return isTruncated; + } + public Result setRowCount(Long rowCount) { this.rowCount = rowCount; return this; @@ -43,17 +56,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Result that = (Result) o; - return Objects.equals(rowCount, that.rowCount) && Objects.equals(statementId, that.statementId); + return Objects.equals(isTruncated, that.isTruncated) + && Objects.equals(rowCount, that.rowCount) + && Objects.equals(statementId, that.statementId); } @Override public int hashCode() { - return Objects.hash(rowCount, statementId); + return Objects.hash(isTruncated, rowCount, 
statementId); } @Override public String toString() { return new ToStringer(Result.class) + .add("isTruncated", isTruncated) .add("rowCount", rowCount) .add("statementId", statementId) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java index d4a12c274..f9821b02a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java @@ -26,7 +26,11 @@ public class UpdateDashboardRequest { /** * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. + * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which + * includes the `serialized_dashboard` field. This field provides the structure of the JSON string + * that represents the dashboard's layout and components. + * + *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get */ @JsonProperty("serialized_dashboard") private String serializedDashboard; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java index fa4700fc4..4f335aaa0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java @@ -22,6 +22,15 @@ public class BaseJob { @JsonProperty("creator_user_name") private String creatorUserName; + /** + * The id of the budget policy used by this job for cost attribution purposes. This may be set + * through (in order of precedence): 1. Budget admins through the account or workspace console 2. + * Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based + * on accessible budget policies of the run_as identity on job creation or modification. + */ + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + /** The canonical identifier for this job. 
*/ @JsonProperty("job_id") private Long jobId; @@ -51,6 +60,15 @@ public String getCreatorUserName() { return creatorUserName; } + public BaseJob setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + public BaseJob setJobId(Long jobId) { this.jobId = jobId; return this; @@ -76,13 +94,14 @@ public boolean equals(Object o) { BaseJob that = (BaseJob) o; return Objects.equals(createdTime, that.createdTime) && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) && Objects.equals(jobId, that.jobId) && Objects.equals(settings, that.settings); } @Override public int hashCode() { - return Objects.hash(createdTime, creatorUserName, jobId, settings); + return Objects.hash(createdTime, creatorUserName, effectiveBudgetPolicyId, jobId, settings); } @Override @@ -90,6 +109,7 @@ public String toString() { return new ToStringer(BaseJob.class) .add("createdTime", createdTime) .add("creatorUserName", creatorUserName) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) .add("jobId", jobId) .add("settings", settings) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index 040a695d3..e69adebee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -15,6 +15,14 @@ public class CreateJob { @JsonProperty("access_control_list") private Collection accessControlList; + /** + * The id of the user specified budget policy to use for this job. If not specified, a default + * budget policy may be applied when creating or modifying the job. 
See + * `effective_budget_policy_id` for the budget policy used by this workload. + */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** * An optional continuous property for this job. The continuous property will ensure that there is * always one run executing. Only one of `schedule` and `continuous` can be used. @@ -177,6 +185,15 @@ public Collection getAccessControlList() { return accessControlList; } + public CreateJob setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public CreateJob setContinuous(Continuous continuous) { this.continuous = continuous; return this; @@ -381,6 +398,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateJob that = (CreateJob) o; return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(continuous, that.continuous) && Objects.equals(deployment, that.deployment) && Objects.equals(description, that.description) @@ -409,6 +427,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessControlList, + budgetPolicyId, continuous, deployment, description, @@ -437,6 +456,7 @@ public int hashCode() { public String toString() { return new ToStringer(CreateJob.class) .add("accessControlList", accessControlList) + .add("budgetPolicyId", budgetPolicyId) .add("continuous", continuous) .add("deployment", deployment) .add("description", description) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index bbee1c6fa..6d6342874 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -23,6 +23,15 @@ public class Job { 
@JsonProperty("creator_user_name") private String creatorUserName; + /** + * The id of the budget policy used by this job for cost attribution purposes. This may be set + * through (in order of precedence): 1. Budget admins through the account or workspace console 2. + * Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based + * on accessible budget policies of the run_as identity on job creation or modification. + */ + @JsonProperty("effective_budget_policy_id") + private String effectiveBudgetPolicyId; + /** The canonical identifier for this job. */ @JsonProperty("job_id") private Long jobId; @@ -64,6 +73,15 @@ public String getCreatorUserName() { return creatorUserName; } + public Job setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) { + this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; + return this; + } + + public String getEffectiveBudgetPolicyId() { + return effectiveBudgetPolicyId; + } + public Job setJobId(Long jobId) { this.jobId = jobId; return this; @@ -98,6 +116,7 @@ public boolean equals(Object o) { Job that = (Job) o; return Objects.equals(createdTime, that.createdTime) && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) && Objects.equals(jobId, that.jobId) && Objects.equals(runAsUserName, that.runAsUserName) && Objects.equals(settings, that.settings); @@ -105,7 +124,8 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(createdTime, creatorUserName, jobId, runAsUserName, settings); + return Objects.hash( + createdTime, creatorUserName, effectiveBudgetPolicyId, jobId, runAsUserName, settings); } @Override @@ -113,6 +133,7 @@ public String toString() { return new ToStringer(Job.class) .add("createdTime", createdTime) .add("creatorUserName", creatorUserName) + .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) .add("jobId", jobId) .add("runAsUserName", runAsUserName) 
.add("settings", settings) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java index 41345592f..6c56a80cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java @@ -10,7 +10,10 @@ @Generated public class JobEmailNotifications { - /** If true, do not send email to recipients specified in `on_failure` if the run is skipped. */ + /** + * If true, do not send email to recipients specified in `on_failure` if the run is skipped. This + * field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + */ @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index a50318ab2..e86566571 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -11,6 +11,14 @@ @Generated public class JobSettings { + /** + * The id of the user specified budget policy to use for this job. If not specified, a default + * budget policy may be applied when creating or modifying the job. See + * `effective_budget_policy_id` for the budget policy used by this workload. + */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** * An optional continuous property for this job. The continuous property will ensure that there is * always one run executing. Only one of `schedule` and `continuous` can be used. 
@@ -164,6 +172,15 @@ public class JobSettings { @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; + public JobSettings setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public JobSettings setContinuous(Continuous continuous) { this.continuous = continuous; return this; @@ -367,7 +384,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; JobSettings that = (JobSettings) o; - return Objects.equals(continuous, that.continuous) + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(continuous, that.continuous) && Objects.equals(deployment, that.deployment) && Objects.equals(description, that.description) && Objects.equals(editMode, that.editMode) @@ -394,6 +412,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + budgetPolicyId, continuous, deployment, description, @@ -421,6 +440,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(JobSettings.class) + .add("budgetPolicyId", budgetPolicyId) .add("continuous", continuous) .add("deployment", deployment) .add("description", description) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java index 7fb7b82fe..d79a76992 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java @@ -62,7 +62,7 @@ public class RepairRun { @JsonProperty("notebook_params") private Map notebookParams; - /** */ + /** Controls whether the pipeline should perform a full refresh */ @JsonProperty("pipeline_params") private PipelineParams pipelineParams; diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java index 260c470d1..eb1f27f01 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java @@ -59,7 +59,7 @@ public class RunJobTask { @JsonProperty("notebook_params") private Map notebookParams; - /** */ + /** Controls whether the pipeline should perform a full refresh */ @JsonProperty("pipeline_params") private PipelineParams pipelineParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index 71278e71f..fce617b43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -76,7 +76,7 @@ public class RunNow { @JsonProperty("notebook_params") private Map notebookParams; - /** */ + /** Controls whether the pipeline should perform a full refresh */ @JsonProperty("pipeline_params") private PipelineParams pipelineParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java index 7bc6de248..b76cd8392 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java @@ -51,7 +51,7 @@ public class RunParameters { @JsonProperty("notebook_params") private Map notebookParams; - /** */ + /** Controls whether the pipeline should perform a full refresh */ @JsonProperty("pipeline_params") private PipelineParams pipelineParams; diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java index 1eeb07b24..694a4df67 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java @@ -14,6 +14,13 @@ public class SubmitRun { @JsonProperty("access_control_list") private Collection accessControlList; + /** + * The user specified id of the budget policy to use for this one-time run. If not specified, the + * run will not be attributed to any budget policy. + */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** An optional set of email addresses notified when the run begins or completes. */ @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; @@ -104,6 +111,15 @@ public Collection getAccessControlList() { return accessControlList; } + public SubmitRun setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public SubmitRun setEmailNotifications(JobEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -218,6 +234,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; SubmitRun that = (SubmitRun) o; return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(environments, that.environments) && Objects.equals(gitSource, that.gitSource) @@ -236,6 +253,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessControlList, + budgetPolicyId, emailNotifications, environments, gitSource, @@ -254,6 +272,7 @@ public int hashCode() { public String
toString() { return new ToStringer(SubmitRun.class) .add("accessControlList", accessControlList) + .add("budgetPolicyId", budgetPolicyId) .add("emailNotifications", emailNotifications) .add("environments", environments) .add("gitSource", gitSource) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java index 88b847f07..440ad8cf8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java @@ -10,7 +10,10 @@ @Generated public class TaskEmailNotifications { - /** If true, do not send email to recipients specified in `on_failure` if the run is skipped. */ + /** + * If true, do not send email to recipients specified in `on_failure` if the run is skipped. This + * field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + */ @JsonProperty("no_alert_for_skipped_runs") private Boolean noAlertForSkippedRuns; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java index fb93d1dbb..5f99e706f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java @@ -6,16 +6,17 @@ /** * The code indicates why the run was terminated. Additional codes might be introduced in future - * releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled - * during execution by the Databricks platform; for example, if the maximum run duration was - * exceeded. 
* `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the - * dependency type condition was not met, or there were no material tasks to execute. * - * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further - * details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark - * Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for - * further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error - * when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed - * because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The + * releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was + * successfully canceled during execution by a user. * `CANCELED`: The run was canceled during + * execution by the Databricks platform; for example, if the maximum run duration was exceeded. * + * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency + * type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The + * run encountered an unexpected error. Refer to the state message for further details. * + * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * + * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further + * details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when + * communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because + * it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The * workspace has reached the quota for the maximum number of concurrent active runs. Consider * scheduling the runs over a larger time frame. 
* `FEATURE_DISABLED`: The run failed because it * tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The @@ -95,6 +96,7 @@ public enum TerminationCodeCode { UNAUTHORIZED_ERROR, // The run failed due to a permission issue while accessing a resource. Refer // to // the state message for further details. + USER_CANCELED, // The run was successfully canceled during execution by a user. WORKSPACE_RUN_LIMIT_EXCEEDED, // The workspace has reached the quota for the maximum number of // concurrent // active runs. Consider scheduling the runs over a larger time frame. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java index 37cfeccf0..ec0aa3fbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java @@ -11,38 +11,39 @@ public class TerminationDetails { /** * The code indicates why the run was terminated. Additional codes might be introduced in future - * releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled - * during execution by the Databricks platform; for example, if the maximum run duration was - * exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, - * the dependency type condition was not met, or there were no material tasks to execute. * - * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for - * further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the - * Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state - * message for further details. 
* `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout - * due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: - * The run failed because it issued an invalid request to start the cluster. * - * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of - * concurrent active runs. Consider scheduling the runs over a larger time frame. * - * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the - * workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and - * upsize requests have exceeded the allotted rate limit. Consider spreading the run execution - * over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when - * accessing the customer blob storage. Refer to the state message for further details. * - * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the - * state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue - * while accessing a resource. Refer to the state message for further details. * - * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer - * to the state message for further details. The causes might include, but are not limited to: The - * provided library is invalid, there are insufficient permissions to install the library, and so - * forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum - * concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a - * cluster that has already reached the maximum number of contexts it is configured to create. - * See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. - * Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed - * due to an invalid configuration. 
Refer to the state message for further details. * - * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for - * further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job - * level queue size limit. + * releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was + * successfully canceled during execution by a user. * `CANCELED`: The run was canceled during + * execution by the Databricks platform; for example, if the maximum run duration was exceeded. * + * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency + * type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The + * run encountered an unexpected error. Refer to the state message for further details. * + * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * + * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further + * details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when + * communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because + * it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The + * workspace has reached the quota for the maximum number of concurrent active runs. Consider + * scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it + * tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: + * The number of cluster creation, start, and upsize requests have exceeded the allotted rate + * limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: + * The run failed due to an error when accessing the customer blob storage. Refer to the state + * message for further details. 
* `RUN_EXECUTION_ERROR`: The run was completed with task failures. + * For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run + * failed due to a permission issue while accessing a resource. Refer to the state message for + * further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the + * user-requested library. Refer to the state message for further details. The causes might + * include, but are not limited to: The provided library is invalid, there are insufficient + * permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The + * scheduled run exceeds the limit of maximum concurrent runs set for the job. * + * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the + * maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A + * resource necessary for run execution does not exist. Refer to the state message for further + * details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer + * to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud + * provider issue. Refer to the state message for further details. * + * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size + * limit. * *

[Link]: * https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index cd5180a13..e51d9ff19 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -95,6 +95,13 @@ public class CreatePipeline { @JsonProperty("photon") private Boolean photon; + /** + * The default schema (database) where tables are read from or published to. The presence of this + * field implies that the pipeline is in direct publishing mode. + */ + @JsonProperty("schema") + private String schema; + /** Whether serverless compute is enabled for this pipeline. */ @JsonProperty("serverless") private Boolean serverless; @@ -286,6 +293,15 @@ public Boolean getPhoton() { return photon; } + public CreatePipeline setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + public CreatePipeline setServerless(Boolean serverless) { this.serverless = serverless; return this; @@ -346,6 +362,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) + && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) && Objects.equals(target, that.target) @@ -374,6 +391,7 @@ public int hashCode() { name, notifications, photon, + schema, serverless, storage, target, @@ -402,6 +420,7 @@ public String toString() { .add("name", name) .add("notifications", notifications) .add("photon", photon) + .add("schema", schema) .add("serverless", serverless) .add("storage", storage) .add("target", target) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 0ea12b587..52cbcee7c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -104,6 +104,13 @@ public class EditPipeline { @JsonProperty("pipeline_id") private String pipelineId; + /** + * The default schema (database) where tables are read from or published to. The presence of this + * field implies that the pipeline is in direct publishing mode. + */ + @JsonProperty("schema") + private String schema; + /** Whether serverless compute is enabled for this pipeline. */ @JsonProperty("serverless") private Boolean serverless; @@ -304,6 +311,15 @@ public String getPipelineId() { return pipelineId; } + public EditPipeline setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + public EditPipeline setServerless(Boolean serverless) { this.serverless = serverless; return this; @@ -365,6 +381,7 @@ public boolean equals(Object o) { && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) && Objects.equals(target, that.target) @@ -394,6 +411,7 @@ public int hashCode() { notifications, photon, pipelineId, + schema, serverless, storage, target, @@ -423,6 +441,7 @@ public String toString() { .add("notifications", notifications) .add("photon", photon) .add("pipelineId", pipelineId) + .add("schema", schema) .add("serverless", serverless) .add("storage", storage) .add("target", target) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java index 82ef6c4a7..92f853aed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java @@ -9,6 +9,10 @@ @Generated public class IngestionConfig { + /** Select tables from a specific source report. */ + @JsonProperty("report") + private ReportSpec report; + /** Select tables from a specific source schema. */ @JsonProperty("schema") private SchemaSpec schema; @@ -17,6 +21,15 @@ public class IngestionConfig { @JsonProperty("table") private TableSpec table; + public IngestionConfig setReport(ReportSpec report) { + this.report = report; + return this; + } + + public ReportSpec getReport() { + return report; + } + public IngestionConfig setSchema(SchemaSpec schema) { this.schema = schema; return this; @@ -40,17 +53,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; IngestionConfig that = (IngestionConfig) o; - return Objects.equals(schema, that.schema) && Objects.equals(table, that.table); + return Objects.equals(report, that.report) + && Objects.equals(schema, that.schema) + && Objects.equals(table, that.table); } @Override public int hashCode() { - return Objects.hash(schema, table); + return Objects.hash(report, schema, table); } @Override public String toString() { return new ToStringer(IngestionConfig.class) + .add("report", report) .add("schema", schema) .add("table", table) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index 8561d2586..c880ee65e 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -87,6 +87,13 @@ public class PipelineSpec { @JsonProperty("photon") private Boolean photon; + /** + * The default schema (database) where tables are read from or published to. The presence of this + * field implies that the pipeline is in direct publishing mode. + */ + @JsonProperty("schema") + private String schema; + /** Whether serverless compute is enabled for this pipeline. */ @JsonProperty("serverless") private Boolean serverless; @@ -260,6 +267,15 @@ public Boolean getPhoton() { return photon; } + public PipelineSpec setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + public PipelineSpec setServerless(Boolean serverless) { this.serverless = serverless; return this; @@ -318,6 +334,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) + && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) && Objects.equals(target, that.target) @@ -344,6 +361,7 @@ public int hashCode() { name, notifications, photon, + schema, serverless, storage, target, @@ -370,6 +388,7 @@ public String toString() { .add("name", name) .add("notifications", notifications) .add("photon", photon) + .add("schema", schema) .add("serverless", serverless) .add("storage", storage) .add("target", target) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java new file mode 100755 index 000000000..b737fbd9a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java @@ -0,0 +1,110 @@ +// 
Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ReportSpec { + /** Required. Destination catalog to store table. */ + @JsonProperty("destination_catalog") + private String destinationCatalog; + + /** Required. Destination schema to store table. */ + @JsonProperty("destination_schema") + private String destinationSchema; + + /** + * Required. Destination table name. The pipeline fails if a table with that name already exists. + */ + @JsonProperty("destination_table") + private String destinationTable; + + /** Required. Report URL in the source system. */ + @JsonProperty("source_url") + private String sourceUrl; + + /** + * Configuration settings to control the ingestion of tables. These settings override the + * table_configuration defined in the IngestionPipelineDefinition object. 
+ */ + @JsonProperty("table_configuration") + private TableSpecificConfig tableConfiguration; + + public ReportSpec setDestinationCatalog(String destinationCatalog) { + this.destinationCatalog = destinationCatalog; + return this; + } + + public String getDestinationCatalog() { + return destinationCatalog; + } + + public ReportSpec setDestinationSchema(String destinationSchema) { + this.destinationSchema = destinationSchema; + return this; + } + + public String getDestinationSchema() { + return destinationSchema; + } + + public ReportSpec setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + public String getDestinationTable() { + return destinationTable; + } + + public ReportSpec setSourceUrl(String sourceUrl) { + this.sourceUrl = sourceUrl; + return this; + } + + public String getSourceUrl() { + return sourceUrl; + } + + public ReportSpec setTableConfiguration(TableSpecificConfig tableConfiguration) { + this.tableConfiguration = tableConfiguration; + return this; + } + + public TableSpecificConfig getTableConfiguration() { + return tableConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ReportSpec that = (ReportSpec) o; + return Objects.equals(destinationCatalog, that.destinationCatalog) + && Objects.equals(destinationSchema, that.destinationSchema) + && Objects.equals(destinationTable, that.destinationTable) + && Objects.equals(sourceUrl, that.sourceUrl) + && Objects.equals(tableConfiguration, that.tableConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash( + destinationCatalog, destinationSchema, destinationTable, sourceUrl, tableConfiguration); + } + + @Override + public String toString() { + return new ToStringer(ReportSpec.class) + .add("destinationCatalog", destinationCatalog) + .add("destinationSchema", destinationSchema) + .add("destinationTable", 
destinationTable) + .add("sourceUrl", sourceUrl) + .add("tableConfiguration", tableConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java index b7cf1ccb5..619922530 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java @@ -18,7 +18,7 @@ public class TableSpec { private String destinationSchema; /** - * Optional. Destination table name. The pipeline fails If a table with that name already exists. + * Optional. Destination table name. The pipeline fails if a table with that name already exists. * If not set, the source table name is used. */ @JsonProperty("destination_table") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java index ffec3bdeb..e3dff8b3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java @@ -25,6 +25,13 @@ public class TableSpecificConfig { @JsonProperty("scd_type") private TableSpecificConfigScdType scdType; + /** + * The column names specifying the logical order of events in the source data. Delta Live Tables + * uses this sequencing to handle change events that arrive out of order. 
+ */ + @JsonProperty("sequence_by") + private Collection sequenceBy; + public TableSpecificConfig setPrimaryKeys(Collection primaryKeys) { this.primaryKeys = primaryKeys; return this; @@ -53,6 +60,15 @@ public TableSpecificConfigScdType getScdType() { return scdType; } + public TableSpecificConfig setSequenceBy(Collection sequenceBy) { + this.sequenceBy = sequenceBy; + return this; + } + + public Collection getSequenceBy() { + return sequenceBy; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -60,12 +76,13 @@ public boolean equals(Object o) { TableSpecificConfig that = (TableSpecificConfig) o; return Objects.equals(primaryKeys, that.primaryKeys) && Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields) - && Objects.equals(scdType, that.scdType); + && Objects.equals(scdType, that.scdType) + && Objects.equals(sequenceBy, that.sequenceBy); } @Override public int hashCode() { - return Objects.hash(primaryKeys, salesforceIncludeFormulaFields, scdType); + return Objects.hash(primaryKeys, salesforceIncludeFormulaFields, scdType, sequenceBy); } @Override @@ -74,6 +91,7 @@ public String toString() { .add("primaryKeys", primaryKeys) .add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields) .add("scdType", scdType) + .add("sequenceBy", sequenceBy) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java new file mode 100755 index 000000000..51e3b14b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java @@ -0,0 +1,52 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete the disable legacy DBFS setting */ +@Generated +public class DeleteDisableLegacyDbfsRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public DeleteDisableLegacyDbfsRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyDbfsRequest that = (DeleteDisableLegacyDbfsRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyDbfsRequest.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java new file mode 100755 index 000000000..0bb408d38 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsResponse.java 
@@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The etag is returned. */ +@Generated +public class DeleteDisableLegacyDbfsResponse { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonProperty("etag") + private String etag; + + public DeleteDisableLegacyDbfsResponse setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDisableLegacyDbfsResponse that = (DeleteDisableLegacyDbfsResponse) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(DeleteDisableLegacyDbfsResponse.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java new file mode 100755 index 000000000..71ee5867c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfs.java @@ -0,0 +1,86 
@@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DisableLegacyDbfs { + /** */ + @JsonProperty("disable_legacy_dbfs") + private BooleanMessage disableLegacyDbfs; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. 
+ */ + @JsonProperty("setting_name") + private String settingName; + + public DisableLegacyDbfs setDisableLegacyDbfs(BooleanMessage disableLegacyDbfs) { + this.disableLegacyDbfs = disableLegacyDbfs; + return this; + } + + public BooleanMessage getDisableLegacyDbfs() { + return disableLegacyDbfs; + } + + public DisableLegacyDbfs setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public DisableLegacyDbfs setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DisableLegacyDbfs that = (DisableLegacyDbfs) o; + return Objects.equals(disableLegacyDbfs, that.disableLegacyDbfs) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(disableLegacyDbfs, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(DisableLegacyDbfs.class) + .add("disableLegacyDbfs", disableLegacyDbfs) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java new file mode 100755 index 000000000..821acb2b5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsAPI.java @@ -0,0 +1,68 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation + * of new mounts). When the setting is off, all DBFS functionality is enabled + */ +@Generated +public class DisableLegacyDbfsAPI { + private static final Logger LOG = LoggerFactory.getLogger(DisableLegacyDbfsAPI.class); + + private final DisableLegacyDbfsService impl; + + /** Regular-use constructor */ + public DisableLegacyDbfsAPI(ApiClient apiClient) { + impl = new DisableLegacyDbfsImpl(apiClient); + } + + /** Constructor for mocks */ + public DisableLegacyDbfsAPI(DisableLegacyDbfsService mock) { + impl = mock; + } + + /** + * Delete the disable legacy DBFS setting. + * + *

Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. + */ + public DeleteDisableLegacyDbfsResponse delete(DeleteDisableLegacyDbfsRequest request) { + return impl.delete(request); + } + + /** + * Get the disable legacy DBFS setting. + * + *

Gets the disable legacy DBFS setting. + */ + public DisableLegacyDbfs get(GetDisableLegacyDbfsRequest request) { + return impl.get(request); + } + + public DisableLegacyDbfs update( + boolean allowMissing, DisableLegacyDbfs setting, String fieldMask) { + return update( + new UpdateDisableLegacyDbfsRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the disable legacy DBFS setting. + * + *

Updates the disable legacy DBFS setting for the workspace. + */ + public DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest request) { + return impl.update(request); + } + + public DisableLegacyDbfsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java new file mode 100755 index 000000000..856c1d61d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of DisableLegacyDbfs */ +@Generated +class DisableLegacyDbfsImpl implements DisableLegacyDbfsService { + private final ApiClient apiClient; + + public DisableLegacyDbfsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DeleteDisableLegacyDbfsResponse delete(DeleteDisableLegacyDbfsRequest request) { + String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.DELETE(path, request, DeleteDisableLegacyDbfsResponse.class, headers); + } + + @Override + public DisableLegacyDbfs get(GetDisableLegacyDbfsRequest request) { + String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, DisableLegacyDbfs.class, headers); + } + + @Override + public DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest request) { + String path = 
"/api/2.0/settings/types/disable_legacy_dbfs/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, DisableLegacyDbfs.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java new file mode 100755 index 000000000..6a4aa74fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsService.java @@ -0,0 +1,37 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation + * of new mounts). When the setting is off, all DBFS functionality is enabled + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface DisableLegacyDbfsService { + /** + * Delete the disable legacy DBFS setting. + * + *

Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. + */ + DeleteDisableLegacyDbfsResponse delete( + DeleteDisableLegacyDbfsRequest deleteDisableLegacyDbfsRequest); + + /** + * Get the disable legacy DBFS setting. + * + *

Gets the disable legacy DBFS setting. + */ + DisableLegacyDbfs get(GetDisableLegacyDbfsRequest getDisableLegacyDbfsRequest); + + /** + * Update the disable legacy DBFS setting. + * + *

Updates the disable legacy DBFS setting for the workspace. + */ + DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest updateDisableLegacyDbfsRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java new file mode 100755 index 000000000..d3f3545f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java @@ -0,0 +1,52 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get the disable legacy DBFS setting */ +@Generated +public class GetDisableLegacyDbfsRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. 
+ */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetDisableLegacyDbfsRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDisableLegacyDbfsRequest that = (GetDisableLegacyDbfsRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetDisableLegacyDbfsRequest.class).add("etag", etag).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index 1fde63913..e7f1c92a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -21,6 +21,8 @@ public class SettingsAPI { private DisableLegacyAccessAPI disableLegacyAccessAPI; + private DisableLegacyDbfsAPI disableLegacyDbfsAPI; + private EnhancedSecurityMonitoringAPI enhancedSecurityMonitoringAPI; private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI; @@ -37,6 +39,8 @@ public SettingsAPI(ApiClient apiClient) { disableLegacyAccessAPI = new DisableLegacyAccessAPI(apiClient); + disableLegacyDbfsAPI = new DisableLegacyDbfsAPI(apiClient); + enhancedSecurityMonitoringAPI = new EnhancedSecurityMonitoringAPI(apiClient); restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient); @@ -70,6 +74,14 @@ public DisableLegacyAccessAPI DisableLegacyAccess() { return disableLegacyAccessAPI; } + /** + * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation + * of new mounts). 
+ */ + public DisableLegacyDbfsAPI DisableLegacyDbfs() { + return disableLegacyDbfsAPI; + } + /** Controls whether enhanced security monitoring is enabled for the current workspace. */ public EnhancedSecurityMonitoringAPI EnhancedSecurityMonitoring() { return enhancedSecurityMonitoringAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java new file mode 100755 index 000000000..6c657d6b3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateDisableLegacyDbfsRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * Field mask is required to be passed into the PATCH request. Field mask specifies which fields + * of the setting payload will be updated. The field mask needs to be supplied as single string. + * To specify multiple fields in the field mask, use comma as the separator (no space). 
+ */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private DisableLegacyDbfs setting; + + public UpdateDisableLegacyDbfsRequest setAllowMissing(Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateDisableLegacyDbfsRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateDisableLegacyDbfsRequest setSetting(DisableLegacyDbfs setting) { + this.setting = setting; + return this; + } + + public DisableLegacyDbfs getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDisableLegacyDbfsRequest that = (UpdateDisableLegacyDbfsRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateDisableLegacyDbfsRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java index 7fa890a48..7f916cf5f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java @@ -46,6 +46,10 @@ public class Alert { @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; + /** Whether to notify alert subscribers when alert returns back to normal. 
*/ + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ @JsonProperty("owner_user_name") private String ownerUserName; @@ -143,6 +147,15 @@ public LifecycleState getLifecycleState() { return lifecycleState; } + public Alert setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + public Alert setOwnerUserName(String ownerUserName) { this.ownerUserName = ownerUserName; return this; @@ -218,6 +231,7 @@ public boolean equals(Object o) { && Objects.equals(displayName, that.displayName) && Objects.equals(id, that.id) && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(notifyOnOk, that.notifyOnOk) && Objects.equals(ownerUserName, that.ownerUserName) && Objects.equals(parentPath, that.parentPath) && Objects.equals(queryId, that.queryId) @@ -237,6 +251,7 @@ public int hashCode() { displayName, id, lifecycleState, + notifyOnOk, ownerUserName, parentPath, queryId, @@ -256,6 +271,7 @@ public String toString() { .add("displayName", displayName) .add("id", id) .add("lifecycleState", lifecycleState) + .add("notifyOnOk", notifyOnOk) .add("ownerUserName", ownerUserName) .add("parentPath", parentPath) .add("queryId", queryId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java index 5abee9767..4ed901e2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java @@ -7,6 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be + * chosen only when `dbsql_version` is specified. 
+ */ @Generated public class Channel { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java index 3b9d4cbb0..82eb48e5c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java @@ -9,6 +9,5 @@ public enum ChannelName { CHANNEL_NAME_CURRENT, CHANNEL_NAME_CUSTOM, CHANNEL_NAME_PREVIEW, - CHANNEL_NAME_PREVIOUS, CHANNEL_NAME_UNSPECIFIED, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java index aa0508b0e..80af13302 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java @@ -34,6 +34,10 @@ public class CreateAlertRequestAlert { @JsonProperty("display_name") private String displayName; + /** Whether to notify alert subscribers when alert returns back to normal. */ + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + /** The workspace path of the folder containing the alert. 
*/ @JsonProperty("parent_path") private String parentPath; @@ -85,6 +89,15 @@ public String getDisplayName() { return displayName; } + public CreateAlertRequestAlert setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + public CreateAlertRequestAlert setParentPath(String parentPath) { this.parentPath = parentPath; return this; @@ -121,6 +134,7 @@ public boolean equals(Object o) { && Objects.equals(customBody, that.customBody) && Objects.equals(customSubject, that.customSubject) && Objects.equals(displayName, that.displayName) + && Objects.equals(notifyOnOk, that.notifyOnOk) && Objects.equals(parentPath, that.parentPath) && Objects.equals(queryId, that.queryId) && Objects.equals(secondsToRetrigger, that.secondsToRetrigger); @@ -129,7 +143,14 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - condition, customBody, customSubject, displayName, parentPath, queryId, secondsToRetrigger); + condition, + customBody, + customSubject, + displayName, + notifyOnOk, + parentPath, + queryId, + secondsToRetrigger); } @Override @@ -139,6 +160,7 @@ public String toString() { .add("customBody", customBody) .add("customSubject", customSubject) .add("displayName", displayName) + .add("notifyOnOk", notifyOnOk) .add("parentPath", parentPath) .add("queryId", queryId) .add("secondsToRetrigger", secondsToRetrigger) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java index c976b5779..af89d90bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java @@ -13,7 +13,8 @@ public class CreateWarehouseRequest { * The amount of time in minutes that a SQL warehouse must 
be idle (i.e., no RUNNING queries) * before it is automatically stopped. * - *

Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. + *

Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins + * for non-serverless warehouses - 0 indicates no autostop. * *

Defaults to 120 mins */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java index bcca330c1..9489f808c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java @@ -46,6 +46,10 @@ public class ListAlertsResponseAlert { @JsonProperty("lifecycle_state") private LifecycleState lifecycleState; + /** Whether to notify alert subscribers when alert returns back to normal. */ + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ @JsonProperty("owner_user_name") private String ownerUserName; @@ -139,6 +143,15 @@ public LifecycleState getLifecycleState() { return lifecycleState; } + public ListAlertsResponseAlert setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + public ListAlertsResponseAlert setOwnerUserName(String ownerUserName) { this.ownerUserName = ownerUserName; return this; @@ -205,6 +218,7 @@ public boolean equals(Object o) { && Objects.equals(displayName, that.displayName) && Objects.equals(id, that.id) && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(notifyOnOk, that.notifyOnOk) && Objects.equals(ownerUserName, that.ownerUserName) && Objects.equals(queryId, that.queryId) && Objects.equals(secondsToRetrigger, that.secondsToRetrigger) @@ -223,6 +237,7 @@ public int hashCode() { displayName, id, lifecycleState, + notifyOnOk, ownerUserName, queryId, secondsToRetrigger, @@ -241,6 +256,7 @@ public String toString() { .add("displayName", displayName) .add("id", id) .add("lifecycleState", lifecycleState) + .add("notifyOnOk", notifyOnOk) 
.add("ownerUserName", ownerUserName) .add("queryId", queryId) .add("secondsToRetrigger", secondsToRetrigger) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java index 1f5502d4d..cae7b393b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java @@ -34,6 +34,10 @@ public class UpdateAlertRequestAlert { @JsonProperty("display_name") private String displayName; + /** Whether to notify alert subscribers when alert returns back to normal. */ + @JsonProperty("notify_on_ok") + private Boolean notifyOnOk; + /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */ @JsonProperty("owner_user_name") private String ownerUserName; @@ -85,6 +89,15 @@ public String getDisplayName() { return displayName; } + public UpdateAlertRequestAlert setNotifyOnOk(Boolean notifyOnOk) { + this.notifyOnOk = notifyOnOk; + return this; + } + + public Boolean getNotifyOnOk() { + return notifyOnOk; + } + public UpdateAlertRequestAlert setOwnerUserName(String ownerUserName) { this.ownerUserName = ownerUserName; return this; @@ -121,6 +134,7 @@ public boolean equals(Object o) { && Objects.equals(customBody, that.customBody) && Objects.equals(customSubject, that.customSubject) && Objects.equals(displayName, that.displayName) + && Objects.equals(notifyOnOk, that.notifyOnOk) && Objects.equals(ownerUserName, that.ownerUserName) && Objects.equals(queryId, that.queryId) && Objects.equals(secondsToRetrigger, that.secondsToRetrigger); @@ -133,6 +147,7 @@ public int hashCode() { customBody, customSubject, displayName, + notifyOnOk, ownerUserName, queryId, secondsToRetrigger); @@ -145,6 +160,7 @@ public String toString() { .add("customBody", customBody) .add("customSubject", 
customSubject) .add("displayName", displayName) + .add("notifyOnOk", notifyOnOk) .add("ownerUserName", ownerUserName) .add("queryId", queryId) .add("secondsToRetrigger", secondsToRetrigger) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java similarity index 80% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java index b30d35e8f..fb02f7bf7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java @@ -8,11 +8,11 @@ import java.util.Objects; @Generated -public class CreateCredentials { +public class CreateCredentialsRequest { /** - * Git provider. This field is case-insensitive. The available Git providers are gitHub, - * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, - * gitLabEnterpriseEdition and awsCodeCommit. + * Git provider. This field is case-insensitive. The available Git providers are `gitHub`, + * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, + * `gitLabEnterpriseEdition` and `awsCodeCommit`. */ @JsonProperty("git_provider") private String gitProvider; @@ -30,15 +30,14 @@ public class CreateCredentials { /** * The personal access token used to authenticate to the corresponding Git provider. For certain - * providers, support may exist for other types of scoped access tokens. [Learn more]. The - * personal access token used to authenticate to the corresponding Git + * providers, support may exist for other types of scoped access tokens. [Learn more]. * *

[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html */ @JsonProperty("personal_access_token") private String personalAccessToken; - public CreateCredentials setGitProvider(String gitProvider) { + public CreateCredentialsRequest setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; } @@ -47,7 +46,7 @@ public String getGitProvider() { return gitProvider; } - public CreateCredentials setGitUsername(String gitUsername) { + public CreateCredentialsRequest setGitUsername(String gitUsername) { this.gitUsername = gitUsername; return this; } @@ -56,7 +55,7 @@ public String getGitUsername() { return gitUsername; } - public CreateCredentials setPersonalAccessToken(String personalAccessToken) { + public CreateCredentialsRequest setPersonalAccessToken(String personalAccessToken) { this.personalAccessToken = personalAccessToken; return this; } @@ -69,7 +68,7 @@ public String getPersonalAccessToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CreateCredentials that = (CreateCredentials) o; + CreateCredentialsRequest that = (CreateCredentialsRequest) o; return Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) && Objects.equals(personalAccessToken, that.personalAccessToken); @@ -82,7 +81,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CreateCredentials.class) + return new ToStringer(CreateCredentialsRequest.class) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) .add("personalAccessToken", personalAccessToken) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java index 5f693ebf0..d50aae0cb 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java @@ -13,21 +13,13 @@ public class CreateCredentialsResponse { @JsonProperty("credential_id") private Long credentialId; - /** - * Git provider. This field is case-insensitive. The available Git providers are gitHub, - * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, - * gitLabEnterpriseEdition and awsCodeCommit. - */ + /** The Git provider associated with the credential. */ @JsonProperty("git_provider") private String gitProvider; /** - * The username or email provided with your Git provider account, depending on which provider you - * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers - * please see your provider's Personal Access Token authentication documentation to see what is - * supported. + * The username or email provided with your Git provider account and associated with the + * credential. 
*/ @JsonProperty("git_username") private String gitUsername; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java similarity index 76% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java index 6d4a0863c..84cd6aa77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java @@ -8,18 +8,18 @@ import java.util.Objects; @Generated -public class CreateRepo { +public class CreateRepoRequest { /** * Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If - * repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}. + * repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`. */ @JsonProperty("path") private String path; /** - * Git provider. This field is case-insensitive. The available Git providers are gitHub, - * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, - * gitLabEnterpriseEdition and awsCodeCommit. + * Git provider. This field is case-insensitive. The available Git providers are `gitHub`, + * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, + * `gitLabEnterpriseEdition` and `awsCodeCommit`. 
*/ @JsonProperty("provider") private String provider; @@ -35,7 +35,7 @@ public class CreateRepo { @JsonProperty("url") private String url; - public CreateRepo setPath(String path) { + public CreateRepoRequest setPath(String path) { this.path = path; return this; } @@ -44,7 +44,7 @@ public String getPath() { return path; } - public CreateRepo setProvider(String provider) { + public CreateRepoRequest setProvider(String provider) { this.provider = provider; return this; } @@ -53,7 +53,7 @@ public String getProvider() { return provider; } - public CreateRepo setSparseCheckout(SparseCheckout sparseCheckout) { + public CreateRepoRequest setSparseCheckout(SparseCheckout sparseCheckout) { this.sparseCheckout = sparseCheckout; return this; } @@ -62,7 +62,7 @@ public SparseCheckout getSparseCheckout() { return sparseCheckout; } - public CreateRepo setUrl(String url) { + public CreateRepoRequest setUrl(String url) { this.url = url; return this; } @@ -75,7 +75,7 @@ public String getUrl() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CreateRepo that = (CreateRepo) o; + CreateRepoRequest that = (CreateRepoRequest) o; return Objects.equals(path, that.path) && Objects.equals(provider, that.provider) && Objects.equals(sparseCheckout, that.sparseCheckout) @@ -89,7 +89,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(CreateRepo.class) + return new ToStringer(CreateRepoRequest.class) .add("path", path) .add("provider", provider) .add("sparseCheckout", sparseCheckout) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java new file mode 100755 index 000000000..cb86465a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java @@ -0,0 +1,134 @@ +// Code generated 
from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateRepoResponse { + /** Branch that the Git folder (repo) is checked out to. */ + @JsonProperty("branch") + private String branch; + + /** SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo). */ + @JsonProperty("head_commit_id") + private String headCommitId; + + /** ID of the Git folder (repo) object in the workspace. */ + @JsonProperty("id") + private Long id; + + /** Path of the Git folder (repo) in the workspace. */ + @JsonProperty("path") + private String path; + + /** Git provider of the linked Git repository. */ + @JsonProperty("provider") + private String provider; + + /** Sparse checkout settings for the Git folder (repo). */ + @JsonProperty("sparse_checkout") + private SparseCheckout sparseCheckout; + + /** URL of the linked Git repository. 
*/ + @JsonProperty("url") + private String url; + + public CreateRepoResponse setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public CreateRepoResponse setHeadCommitId(String headCommitId) { + this.headCommitId = headCommitId; + return this; + } + + public String getHeadCommitId() { + return headCommitId; + } + + public CreateRepoResponse setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + return id; + } + + public CreateRepoResponse setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public CreateRepoResponse setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public CreateRepoResponse setSparseCheckout(SparseCheckout sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckout getSparseCheckout() { + return sparseCheckout; + } + + public CreateRepoResponse setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateRepoResponse that = (CreateRepoResponse) o; + return Objects.equals(branch, that.branch) + && Objects.equals(headCommitId, that.headCommitId) + && Objects.equals(id, that.id) + && Objects.equals(path, that.path) + && Objects.equals(provider, that.provider) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(branch, headCommitId, id, path, provider, sparseCheckout, url); + } + + @Override + public String toString() { + return new ToStringer(CreateRepoResponse.class) + .add("branch", branch) + .add("headCommitId", headCommitId) + .add("id", id) + .add("path", path) 
+ .add("provider", provider) + .add("sparseCheckout", sparseCheckout) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java index 5df03a4b4..ebb736500 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java @@ -13,21 +13,13 @@ public class CredentialInfo { @JsonProperty("credential_id") private Long credentialId; - /** - * Git provider. This field is case-insensitive. The available Git providers are gitHub, - * gitHubOAuth, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, - * gitLabEnterpriseEdition and awsCodeCommit. - */ + /** The Git provider associated with the credential. */ @JsonProperty("git_provider") private String gitProvider; /** - * The username or email provided with your Git provider account, depending on which provider you - * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers - * please see your provider's Personal Access Token authentication documentation to see what is - * supported. + * The username or email provided with your Git provider account and associated with the + * credential. 
*/ @JsonProperty("git_username") private String gitUsername; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java similarity index 79% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java index 728bef583..103c730f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java @@ -9,11 +9,11 @@ /** Delete a credential */ @Generated -public class DeleteGitCredentialRequest { +public class DeleteCredentialsRequest { /** The ID for the corresponding credential to access. */ @JsonIgnore private Long credentialId; - public DeleteGitCredentialRequest setCredentialId(Long credentialId) { + public DeleteCredentialsRequest setCredentialId(Long credentialId) { this.credentialId = credentialId; return this; } @@ -26,7 +26,7 @@ public Long getCredentialId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - DeleteGitCredentialRequest that = (DeleteGitCredentialRequest) o; + DeleteCredentialsRequest that = (DeleteCredentialsRequest) o; return Objects.equals(credentialId, that.credentialId); } @@ -37,7 +37,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(DeleteGitCredentialRequest.class) + return new ToStringer(DeleteCredentialsRequest.class) .add("credentialId", credentialId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java new file mode 100755 index 000000000..3b1fb2ec7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteCredentialsResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialsResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java index 0d53417be..72e27bbe5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java @@ -10,7 +10,7 @@ /** Delete a repo */ @Generated public class DeleteRepoRequest { - /** The ID for the corresponding repo to access. */ + /** ID of the Git folder (repo) object in the workspace. 
*/ @JsonIgnore private Long repoId; public DeleteRepoRequest setRepoId(Long repoId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java similarity index 84% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java index f4dcbc7ea..2fafce6f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class UpdateResponse { +public class DeleteRepoResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(UpdateResponse.class).toString(); + return new ToStringer(DeleteRepoResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java similarity index 75% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java index 1539a99d9..2dea34f1e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java @@ -9,11 +9,11 @@ /** Get a credential entry */ @Generated -public class GetGitCredentialRequest { +public class GetCredentialsRequest { /** The ID for the corresponding credential to 
access. */ @JsonIgnore private Long credentialId; - public GetGitCredentialRequest setCredentialId(Long credentialId) { + public GetCredentialsRequest setCredentialId(Long credentialId) { this.credentialId = credentialId; return this; } @@ -26,7 +26,7 @@ public Long getCredentialId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - GetGitCredentialRequest that = (GetGitCredentialRequest) o; + GetCredentialsRequest that = (GetCredentialsRequest) o; return Objects.equals(credentialId, that.credentialId); } @@ -37,8 +37,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetGitCredentialRequest.class) - .add("credentialId", credentialId) - .toString(); + return new ToStringer(GetCredentialsRequest.class).add("credentialId", credentialId).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java index 674ebd734..ef4da2906 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java @@ -5,22 +5,50 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; @Generated public class GetCredentialsResponse { - /** */ - @JsonProperty("credentials") - private Collection credentials; + /** ID of the credential object in the workspace. */ + @JsonProperty("credential_id") + private Long credentialId; - public GetCredentialsResponse setCredentials(Collection credentials) { - this.credentials = credentials; + /** The Git provider associated with the credential. 
*/ + @JsonProperty("git_provider") + private String gitProvider; + + /** + * The username or email provided with your Git provider account and associated with the + * credential. + */ + @JsonProperty("git_username") + private String gitUsername; + + public GetCredentialsResponse setCredentialId(Long credentialId) { + this.credentialId = credentialId; + return this; + } + + public Long getCredentialId() { + return credentialId; + } + + public GetCredentialsResponse setGitProvider(String gitProvider) { + this.gitProvider = gitProvider; + return this; + } + + public String getGitProvider() { + return gitProvider; + } + + public GetCredentialsResponse setGitUsername(String gitUsername) { + this.gitUsername = gitUsername; return this; } - public Collection getCredentials() { - return credentials; + public String getGitUsername() { + return gitUsername; } @Override @@ -28,16 +56,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetCredentialsResponse that = (GetCredentialsResponse) o; - return Objects.equals(credentials, that.credentials); + return Objects.equals(credentialId, that.credentialId) + && Objects.equals(gitProvider, that.gitProvider) + && Objects.equals(gitUsername, that.gitUsername); } @Override public int hashCode() { - return Objects.hash(credentials); + return Objects.hash(credentialId, gitProvider, gitUsername); } @Override public String toString() { - return new ToStringer(GetCredentialsResponse.class).add("credentials", credentials).toString(); + return new ToStringer(GetCredentialsResponse.class) + .add("credentialId", credentialId) + .add("gitProvider", gitProvider) + .add("gitUsername", gitUsername) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java index b46e8bd16..a241caa4d 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoRequest.java @@ -10,7 +10,7 @@ /** Get a repo */ @Generated public class GetRepoRequest { - /** The ID for the corresponding repo to access. */ + /** ID of the Git folder (repo) object in the workspace. */ @JsonIgnore private Long repoId; public GetRepoRequest setRepoId(Long repoId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java new file mode 100755 index 000000000..a34c0cc98 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetRepoResponse.java @@ -0,0 +1,134 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetRepoResponse { + /** Branch that the local version of the repo is checked out to. */ + @JsonProperty("branch") + private String branch; + + /** SHA-1 hash representing the commit ID of the current HEAD of the repo. */ + @JsonProperty("head_commit_id") + private String headCommitId; + + /** ID of the Git folder (repo) object in the workspace. */ + @JsonProperty("id") + private Long id; + + /** Path of the Git folder (repo) in the workspace. */ + @JsonProperty("path") + private String path; + + /** Git provider of the linked Git repository. */ + @JsonProperty("provider") + private String provider; + + /** Sparse checkout settings for the Git folder (repo). */ + @JsonProperty("sparse_checkout") + private SparseCheckout sparseCheckout; + + /** URL of the linked Git repository. 
*/ + @JsonProperty("url") + private String url; + + public GetRepoResponse setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public GetRepoResponse setHeadCommitId(String headCommitId) { + this.headCommitId = headCommitId; + return this; + } + + public String getHeadCommitId() { + return headCommitId; + } + + public GetRepoResponse setId(Long id) { + this.id = id; + return this; + } + + public Long getId() { + return id; + } + + public GetRepoResponse setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public GetRepoResponse setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public GetRepoResponse setSparseCheckout(SparseCheckout sparseCheckout) { + this.sparseCheckout = sparseCheckout; + return this; + } + + public SparseCheckout getSparseCheckout() { + return sparseCheckout; + } + + public GetRepoResponse setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetRepoResponse that = (GetRepoResponse) o; + return Objects.equals(branch, that.branch) + && Objects.equals(headCommitId, that.headCommitId) + && Objects.equals(id, that.id) + && Objects.equals(path, that.path) + && Objects.equals(provider, that.provider) + && Objects.equals(sparseCheckout, that.sparseCheckout) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(branch, headCommitId, id, path, provider, sparseCheckout, url); + } + + @Override + public String toString() { + return new ToStringer(GetRepoResponse.class) + .add("branch", branch) + .add("headCommitId", headCommitId) + .add("id", id) + .add("path", path) + .add("provider", provider) + 
.add("sparseCheckout", sparseCheckout) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java index 7f5405076..a660838ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java @@ -31,7 +31,7 @@ public GitCredentialsAPI(GitCredentialsService mock) { } public CreateCredentialsResponse create(String gitProvider) { - return create(new CreateCredentials().setGitProvider(gitProvider)); + return create(new CreateCredentialsRequest().setGitProvider(gitProvider)); } /** @@ -41,12 +41,12 @@ public CreateCredentialsResponse create(String gitProvider) { * so any attempts to create credentials if an entry already exists will fail. Use the PATCH * endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. */ - public CreateCredentialsResponse create(CreateCredentials request) { + public CreateCredentialsResponse create(CreateCredentialsRequest request) { return impl.create(request); } public void delete(long credentialId) { - delete(new DeleteGitCredentialRequest().setCredentialId(credentialId)); + delete(new DeleteCredentialsRequest().setCredentialId(credentialId)); } /** @@ -54,12 +54,12 @@ public void delete(long credentialId) { * *

Deletes the specified Git credential. */ - public void delete(DeleteGitCredentialRequest request) { + public void delete(DeleteCredentialsRequest request) { impl.delete(request); } - public CredentialInfo get(long credentialId) { - return get(new GetGitCredentialRequest().setCredentialId(credentialId)); + public GetCredentialsResponse get(long credentialId) { + return get(new GetCredentialsRequest().setCredentialId(credentialId)); } /** @@ -67,7 +67,7 @@ public CredentialInfo get(long credentialId) { * *

Gets the Git credential with the specified credential ID. */ - public CredentialInfo get(GetGitCredentialRequest request) { + public GetCredentialsResponse get(GetCredentialsRequest request) { return impl.get(request); } @@ -78,11 +78,12 @@ public CredentialInfo get(GetGitCredentialRequest request) { */ public Iterable list() { return new Paginator<>( - null, (Void v) -> impl.list(), GetCredentialsResponse::getCredentials, response -> null); + null, (Void v) -> impl.list(), ListCredentialsResponse::getCredentials, response -> null); } - public void update(long credentialId) { - update(new UpdateCredentials().setCredentialId(credentialId)); + public void update(long credentialId, String gitProvider) { + update( + new UpdateCredentialsRequest().setCredentialId(credentialId).setGitProvider(gitProvider)); } /** @@ -90,7 +91,7 @@ public void update(long credentialId) { * *

Updates the specified Git credential. */ - public void update(UpdateCredentials request) { + public void update(UpdateCredentialsRequest request) { impl.update(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java index f17d5055a..8837469e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java @@ -16,7 +16,7 @@ public GitCredentialsImpl(ApiClient apiClient) { } @Override - public CreateCredentialsResponse create(CreateCredentials request) { + public CreateCredentialsResponse create(CreateCredentialsRequest request) { String path = "/api/2.0/git-credentials"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); @@ -25,34 +25,35 @@ public CreateCredentialsResponse create(CreateCredentials request) { } @Override - public void delete(DeleteGitCredentialRequest request) { + public void delete(DeleteCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteCredentialsResponse.class, headers); } @Override - public CredentialInfo get(GetGitCredentialRequest request) { + public GetCredentialsResponse get(GetCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CredentialInfo.class, headers); + return apiClient.GET(path, request, GetCredentialsResponse.class, headers); } @Override - public GetCredentialsResponse list() { + public 
ListCredentialsResponse list() { String path = "/api/2.0/git-credentials"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, GetCredentialsResponse.class, headers); + return apiClient.GET(path, ListCredentialsResponse.class, headers); } @Override - public void update(UpdateCredentials request) { + public void update(UpdateCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + apiClient.PATCH(path, request, UpdateCredentialsResponse.class, headers); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java index 6a8feeb3a..823232088 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java @@ -23,33 +23,33 @@ public interface GitCredentialsService { * so any attempts to create credentials if an entry already exists will fail. Use the PATCH * endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. */ - CreateCredentialsResponse create(CreateCredentials createCredentials); + CreateCredentialsResponse create(CreateCredentialsRequest createCredentialsRequest); /** * Delete a credential. * *

Deletes the specified Git credential. */ - void delete(DeleteGitCredentialRequest deleteGitCredentialRequest); + void delete(DeleteCredentialsRequest deleteCredentialsRequest); /** * Get a credential entry. * *

Gets the Git credential with the specified credential ID. */ - CredentialInfo get(GetGitCredentialRequest getGitCredentialRequest); + GetCredentialsResponse get(GetCredentialsRequest getCredentialsRequest); /** * Get Git credentials. * *

Lists the calling user's Git credentials. One credential per user is supported. */ - GetCredentialsResponse list(); + ListCredentialsResponse list(); /** * Update a credential. * *

Updates the specified Git credential. */ - void update(UpdateCredentials updateCredentials); + void update(UpdateCredentialsRequest updateCredentialsRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java new file mode 100755 index 000000000..6f3c22c24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListCredentialsResponse { + /** List of credentials. */ + @JsonProperty("credentials") + private Collection credentials; + + public ListCredentialsResponse setCredentials(Collection credentials) { + this.credentials = credentials; + return this; + } + + public Collection getCredentials() { + return credentials; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCredentialsResponse that = (ListCredentialsResponse) o; + return Objects.equals(credentials, that.credentials); + } + + @Override + public int hashCode() { + return Objects.hash(credentials); + } + + @Override + public String toString() { + return new ToStringer(ListCredentialsResponse.class).add("credentials", credentials).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java index e2bdc0e82..e8806105f 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposRequest.java @@ -20,8 +20,9 @@ public class ListReposRequest { private String nextPageToken; /** - * Filters repos that have paths starting with the given path prefix. If not provided repos from - * /Repos will be served. + * Filters repos that have paths starting with the given path prefix. If not provided or when + * provided an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from + * `/Workspace/Repos` will be served. */ @JsonIgnore @QueryParam("path_prefix") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java index 8157c4858..70fd94ae8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListReposResponse.java @@ -11,13 +11,13 @@ @Generated public class ListReposResponse { /** - * Token that can be specified as a query parameter to the GET /repos endpoint to retrieve the + * Token that can be specified as a query parameter to the `GET /repos` endpoint to retrieve the * next page of results. */ @JsonProperty("next_page_token") private String nextPageToken; - /** */ + /** List of Git folders (repos). 
*/ @JsonProperty("repos") private Collection repos; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java index b2042cd53..403fd581c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java @@ -7,40 +7,34 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Git folder (repo) information. */ @Generated public class RepoInfo { - /** Branch that the local version of the repo is checked out to. */ + /** Name of the current git branch of the git folder (repo). */ @JsonProperty("branch") private String branch; - /** SHA-1 hash representing the commit ID of the current HEAD of the repo. */ + /** Current git commit id of the git folder (repo). */ @JsonProperty("head_commit_id") private String headCommitId; - /** ID of the repo object in the workspace. */ + /** Id of the git folder (repo) in the Workspace. */ @JsonProperty("id") private Long id; - /** - * Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If - * repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}. - */ + /** Root path of the git folder (repo) in the Workspace. */ @JsonProperty("path") private String path; - /** - * Git provider. This field is case-insensitive. The available Git providers are gitHub, - * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, - * gitLabEnterpriseEdition and awsCodeCommit. - */ + /** Git provider of the remote git repository, e.g. `gitHub`. */ @JsonProperty("provider") private String provider; - /** */ + /** Sparse checkout config for the git folder (repo). */ @JsonProperty("sparse_checkout") private SparseCheckout sparseCheckout; - /** URL of the Git repository to be linked. 
*/ + /** URL of the remote git repository. */ @JsonProperty("url") private String url; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java index b1a1271e5..dab9e912e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java @@ -35,8 +35,8 @@ public ReposAPI(ReposService mock) { impl = mock; } - public RepoInfo create(String url, String provider) { - return create(new CreateRepo().setUrl(url).setProvider(provider)); + public CreateRepoResponse create(String url, String provider) { + return create(new CreateRepoRequest().setUrl(url).setProvider(provider)); } /** @@ -46,7 +46,7 @@ public RepoInfo create(String url, String provider) { * repos created programmatically must be linked to a remote Git repo, unlike repos created in the * browser. */ - public RepoInfo create(CreateRepo request) { + public CreateRepoResponse create(CreateRepoRequest request) { return impl.create(request); } @@ -63,7 +63,7 @@ public void delete(DeleteRepoRequest request) { impl.delete(request); } - public RepoInfo get(long repoId) { + public GetRepoResponse get(long repoId) { return get(new GetRepoRequest().setRepoId(repoId)); } @@ -72,7 +72,7 @@ public RepoInfo get(long repoId) { * *

Returns the repo with the given repo ID. */ - public RepoInfo get(GetRepoRequest request) { + public GetRepoResponse get(GetRepoRequest request) { return impl.get(request); } @@ -106,8 +106,8 @@ public RepoPermissions getPermissions(GetRepoPermissionsRequest request) { /** * Get repos. * - *

Returns repos that the calling user has Manage permissions on. Results are paginated with - * each page containing twenty repos. + *

Returns repos that the calling user has Manage permissions on. Use `next_page_token` to + * iterate through additional pages. */ public Iterable list(ListReposRequest request) { return new Paginator<>( @@ -137,7 +137,7 @@ public RepoPermissions setPermissions(RepoPermissionsRequest request) { } public void update(long repoId) { - update(new UpdateRepo().setRepoId(repoId)); + update(new UpdateRepoRequest().setRepoId(repoId)); } /** @@ -146,7 +146,7 @@ public void update(long repoId) { *

Updates the repo to a different branch or tag, or updates the repo to the latest commit on * the same branch. */ - public void update(UpdateRepo request) { + public void update(UpdateRepoRequest request) { impl.update(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java index d3a1f9090..03def8f93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java @@ -16,27 +16,28 @@ public ReposImpl(ApiClient apiClient) { } @Override - public RepoInfo create(CreateRepo request) { + public CreateRepoResponse create(CreateRepoRequest request) { String path = "/api/2.0/repos"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, RepoInfo.class, headers); + return apiClient.POST(path, request, CreateRepoResponse.class, headers); } @Override public void delete(DeleteRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteRepoResponse.class, headers); } @Override - public RepoInfo get(GetRepoRequest request) { + public GetRepoResponse get(GetRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RepoInfo.class, headers); + return apiClient.GET(path, request, GetRepoResponse.class, headers); } @Override @@ -75,12 +76,12 @@ public RepoPermissions setPermissions(RepoPermissionsRequest request) { } @Override - public void update(UpdateRepo 
request) { + public void update(UpdateRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + apiClient.PATCH(path, request, UpdateRepoResponse.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java index 757e0d527..313477542 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java @@ -28,7 +28,7 @@ public interface ReposService { * repos created programmatically must be linked to a remote Git repo, unlike repos created in the * browser. */ - RepoInfo create(CreateRepo createRepo); + CreateRepoResponse create(CreateRepoRequest createRepoRequest); /** * Delete a repo. @@ -42,7 +42,7 @@ public interface ReposService { * *

Returns the repo with the given repo ID. */ - RepoInfo get(GetRepoRequest getRepoRequest); + GetRepoResponse get(GetRepoRequest getRepoRequest); /** * Get repo permission levels. @@ -62,8 +62,8 @@ GetRepoPermissionLevelsResponse getPermissionLevels( /** * Get repos. * - *

Returns repos that the calling user has Manage permissions on. Results are paginated with - * each page containing twenty repos. + *

Returns repos that the calling user has Manage permissions on. Use `next_page_token` to + * iterate through additional pages. */ ListReposResponse list(ListReposRequest listReposRequest); @@ -80,7 +80,7 @@ GetRepoPermissionLevelsResponse getPermissionLevels( *

Updates the repo to a different branch or tag, or updates the repo to the latest commit on * the same branch. */ - void update(UpdateRepo updateRepo); + void update(UpdateRepoRequest updateRepoRequest); /** * Update repo permissions. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java index 4736bb3b9..eb1201e43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java @@ -8,9 +8,15 @@ import java.util.Collection; import java.util.Objects; +/** Sparse checkout configuration, it contains options like cone patterns. */ @Generated public class SparseCheckout { - /** List of patterns to include for sparse checkout. */ + /** + * List of sparse checkout cone patterns, see [cone mode handling] for details. + * + *

[cone mode handling]: + * https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling + */ @JsonProperty("patterns") private Collection patterns; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java index 59b9fc67c..644efa487 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java @@ -8,9 +8,15 @@ import java.util.Collection; import java.util.Objects; +/** Sparse checkout configuration, it contains options like cone patterns. */ @Generated public class SparseCheckoutUpdate { - /** List of patterns to include for sparse checkout. */ + /** + * List of sparse checkout cone patterns, see [cone mode handling] for details. + * + *

[cone mode handling]: + * https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling + */ @JsonProperty("patterns") private Collection patterns; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java similarity index 80% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java index 51e997459..620795bf8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java @@ -9,14 +9,14 @@ import java.util.Objects; @Generated -public class UpdateCredentials { +public class UpdateCredentialsRequest { /** The ID for the corresponding credential to access. */ @JsonIgnore private Long credentialId; /** - * Git provider. This field is case-insensitive. The available Git providers are gitHub, - * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, - * gitLabEnterpriseEdition and awsCodeCommit. + * Git provider. This field is case-insensitive. The available Git providers are `gitHub`, + * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, + * `gitLabEnterpriseEdition` and `awsCodeCommit`. */ @JsonProperty("git_provider") private String gitProvider; @@ -34,15 +34,14 @@ public class UpdateCredentials { /** * The personal access token used to authenticate to the corresponding Git provider. For certain - * providers, support may exist for other types of scoped access tokens. [Learn more]. The - * personal access token used to authenticate to the corresponding Git + * providers, support may exist for other types of scoped access tokens. 
[Learn more]. * *

[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html */ @JsonProperty("personal_access_token") private String personalAccessToken; - public UpdateCredentials setCredentialId(Long credentialId) { + public UpdateCredentialsRequest setCredentialId(Long credentialId) { this.credentialId = credentialId; return this; } @@ -51,7 +50,7 @@ public Long getCredentialId() { return credentialId; } - public UpdateCredentials setGitProvider(String gitProvider) { + public UpdateCredentialsRequest setGitProvider(String gitProvider) { this.gitProvider = gitProvider; return this; } @@ -60,7 +59,7 @@ public String getGitProvider() { return gitProvider; } - public UpdateCredentials setGitUsername(String gitUsername) { + public UpdateCredentialsRequest setGitUsername(String gitUsername) { this.gitUsername = gitUsername; return this; } @@ -69,7 +68,7 @@ public String getGitUsername() { return gitUsername; } - public UpdateCredentials setPersonalAccessToken(String personalAccessToken) { + public UpdateCredentialsRequest setPersonalAccessToken(String personalAccessToken) { this.personalAccessToken = personalAccessToken; return this; } @@ -82,7 +81,7 @@ public String getPersonalAccessToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - UpdateCredentials that = (UpdateCredentials) o; + UpdateCredentialsRequest that = (UpdateCredentialsRequest) o; return Objects.equals(credentialId, that.credentialId) && Objects.equals(gitProvider, that.gitProvider) && Objects.equals(gitUsername, that.gitUsername) @@ -96,7 +95,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(UpdateCredentials.class) + return new ToStringer(UpdateCredentialsRequest.class) .add("credentialId", credentialId) .add("gitProvider", gitProvider) .add("gitUsername", gitUsername) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java new file mode 100755 index 000000000..20e001bd3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class UpdateCredentialsResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateCredentialsResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java index 205b1f7ea..107125ef8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java @@ -9,12 +9,12 @@ import java.util.Objects; @Generated -public class UpdateRepo { +public class UpdateRepoRequest { /** Branch that the local version of the repo is checked out to. */ @JsonProperty("branch") private String branch; - /** The ID for the corresponding repo to access. 
*/ + /** ID of the Git folder (repo) object in the workspace. */ @JsonIgnore private Long repoId; /** @@ -32,7 +32,7 @@ public class UpdateRepo { @JsonProperty("tag") private String tag; - public UpdateRepo setBranch(String branch) { + public UpdateRepoRequest setBranch(String branch) { this.branch = branch; return this; } @@ -41,7 +41,7 @@ public String getBranch() { return branch; } - public UpdateRepo setRepoId(Long repoId) { + public UpdateRepoRequest setRepoId(Long repoId) { this.repoId = repoId; return this; } @@ -50,7 +50,7 @@ public Long getRepoId() { return repoId; } - public UpdateRepo setSparseCheckout(SparseCheckoutUpdate sparseCheckout) { + public UpdateRepoRequest setSparseCheckout(SparseCheckoutUpdate sparseCheckout) { this.sparseCheckout = sparseCheckout; return this; } @@ -59,7 +59,7 @@ public SparseCheckoutUpdate getSparseCheckout() { return sparseCheckout; } - public UpdateRepo setTag(String tag) { + public UpdateRepoRequest setTag(String tag) { this.tag = tag; return this; } @@ -72,7 +72,7 @@ public String getTag() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - UpdateRepo that = (UpdateRepo) o; + UpdateRepoRequest that = (UpdateRepoRequest) o; return Objects.equals(branch, that.branch) && Objects.equals(repoId, that.repoId) && Objects.equals(sparseCheckout, that.sparseCheckout) @@ -86,7 +86,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(UpdateRepo.class) + return new ToStringer(UpdateRepoRequest.class) .add("branch", branch) .add("repoId", repoId) .add("sparseCheckout", sparseCheckout) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java new file mode 100755 index 000000000..c7d596164 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.workspace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class UpdateRepoResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UpdateRepoResponse.class).toString(); + } +} diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index 49e56d1ee..504a5cb2f 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.32.2 + 0.33.0 diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index 12df67b0e..780d08acf 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.32.2 + 0.33.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index 48f053b80..7c9ffddf8 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.32.2 + 0.33.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for diff --git a/shaded/pom.xml b/shaded/pom.xml index 334c2fd20..17ca3fa42 100644 --- a/shaded/pom.xml +++ b/shaded/pom.xml @@ -4,7 +4,7 @@ 4.0.0 - 0.32.2 + 0.33.0 com.databricks