From 11eaeb7da7cdbe7db98b7ab500af2320379b9284 Mon Sep 17 00:00:00 2001 From: Satvik Ramaprasad Date: Fri, 18 Oct 2024 18:45:47 +0530 Subject: [PATCH 01/12] [Fix] CommonHttpsClient Builder - set timeout correctly (#362) ## Changes The issue was that `makeRequestConfig` was using `timeout` before it was initialized, I have refactored the code to both fix and make it more error proof in the future ## Tests Not possible to write UT for this as its too encapsulated, will need major refactors to be able to test via UT Co-authored-by: hectorcast-db --- .../databricks/sdk/core/commons/CommonsHttpClient.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java index 0a4a81f3..0af1d223 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java @@ -104,11 +104,8 @@ public CommonsHttpClient build() { private static final Logger LOG = LoggerFactory.getLogger(CommonsHttpClient.class); private final CloseableHttpClient hc; - private int timeout; private CommonsHttpClient(Builder builder) { - HttpClientBuilder httpClientBuilder = - HttpClientBuilder.create().setDefaultRequestConfig(makeRequestConfig()); int timeoutSeconds = 300; if (builder.databricksConfig != null && builder.databricksConfig.getHttpTimeoutSeconds() != null) { @@ -117,7 +114,9 @@ private CommonsHttpClient(Builder builder) { if (builder.timeoutSeconds != null) { timeoutSeconds = builder.timeoutSeconds; } - timeout = timeoutSeconds * 1000; + int timeout = timeoutSeconds * 1000; + HttpClientBuilder httpClientBuilder = + HttpClientBuilder.create().setDefaultRequestConfig(makeRequestConfig(timeout)); if (builder.proxyConfig != null) { ProxyUtils.setupProxy(builder.proxyConfig, 
httpClientBuilder); } @@ -135,7 +134,7 @@ private CommonsHttpClient(Builder builder) { hc = httpClientBuilder.build(); } - private RequestConfig makeRequestConfig() { + private RequestConfig makeRequestConfig(int timeout) { return RequestConfig.custom() .setConnectionRequestTimeout(timeout) .setConnectTimeout(timeout) From a33907e974ee14c6bae03270e17b407c52427530 Mon Sep 17 00:00:00 2001 From: Avijeet Gupta Date: Mon, 21 Oct 2024 19:39:09 +0530 Subject: [PATCH 02/12] [Feature] Experimental: Make retry strategy configurable (#363) Adds support to configure Retry Strategy in HttpClient. Currently only the default retry strategy is used. This strategy retries 3 times and does not have any sleep interval in between. For our use case, we would prefer using the ExponentialBackOffStrategy. This does not affect the default behavior but gives additional options to the users. --- .../sdk/core/commons/CommonsHttpClient.java | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java index 0af1d223..fb481732 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java @@ -23,6 +23,7 @@ import java.util.stream.Collectors; import org.apache.commons.io.IOUtils; import org.apache.http.*; +import org.apache.http.client.HttpRequestRetryHandler; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.*; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; @@ -47,6 +48,7 @@ public static class Builder { private ProxyConfig proxyConfig; private SSLConnectionSocketFactory sslSocketFactory; private PoolingHttpClientConnectionManager connectionManager; + private HttpRequestRetryHandler requestRetryHandler; /** * @param 
databricksConfig The DatabricksConfig to use for the HttpClient. If the @@ -96,6 +98,17 @@ public Builder withConnectionManager(PoolingHttpClientConnectionManager connecti return this; } + /** + * @param requestRetryHandler the HttpRequestRetryHandler to use for the HttpClient. + * @return This builder. + *

Note: This API is experimental and may change or be removed in future releases + * without notice. + */ + public Builder withRequestRetryHandler(HttpRequestRetryHandler requestRetryHandler) { + this.requestRetryHandler = requestRetryHandler; + return this; + } + /** Builds a new instance of CommonsHttpClient with the configured parameters. */ public CommonsHttpClient build() { return new CommonsHttpClient(this); @@ -131,6 +144,9 @@ private CommonsHttpClient(Builder builder) { connectionManager.setMaxTotal(100); httpClientBuilder.setConnectionManager(connectionManager); } + if (builder.requestRetryHandler != null) { + httpClientBuilder.setRetryHandler(builder.requestRetryHandler); + } hc = httpClientBuilder.build(); } From 7b32354a3c75b917221cfe23f258d94cbc8ccfec Mon Sep 17 00:00:00 2001 From: Omer Lachish <289488+rauchy@users.noreply.github.com> Date: Tue, 22 Oct 2024 14:38:09 +0200 Subject: [PATCH 03/12] [Release] Release v0.34.0 (#368) ### New Features and Improvements * Experimental: Make retry strategy configurable ([#363](https://github.com/databricks/databricks-sdk-java/pull/363)). ### Bug Fixes * CommonHttpsClient Builder - set timeout correctly ([#362](https://github.com/databricks/databricks-sdk-java/pull/362)). Co-authored-by: Omer Lachish --- CHANGELOG.md | 14 ++++++++++++++ databricks-sdk-java/pom.xml | 2 +- .../java/com/databricks/sdk/core/UserAgent.java | 2 +- examples/docs/pom.xml | 2 +- examples/spring-boot-oauth-u2m-demo/pom.xml | 2 +- pom.xml | 2 +- shaded/pom.xml | 2 +- 7 files changed, 20 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c2d7bf2c..2af228aa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Version changelog +## [Release] Release v0.34.0 + +### New Features and Improvements + + * Experimental: Make retry strategy configurable ([#363](https://github.com/databricks/databricks-sdk-java/pull/363)). 
+ + +### Bug Fixes + + * CommonHttpsClient Builder - set timeout correctly ([#362](https://github.com/databricks/databricks-sdk-java/pull/362)). + + + + ## [Release] Release v0.33.0 ### Bug Fixes diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index 977d4667..cba1f785 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.33.0 + 0.34.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index 13b056bb..56c817d7 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -32,7 +32,7 @@ public String getValue() { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties // or getClass().getPackage().getImplementationVersion() is enough. 
- private static final String version = "0.33.0"; + private static final String version = "0.34.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index 504a5cb2..32731d87 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.33.0 + 0.34.0 diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index 780d08ac..4a89d103 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.33.0 + 0.34.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index 7c9ffddf..789662ed 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.33.0 + 0.34.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for diff --git a/shaded/pom.xml b/shaded/pom.xml index 17ca3fa4..21713f8e 100644 --- a/shaded/pom.xml +++ b/shaded/pom.xml @@ -4,7 +4,7 @@ 4.0.0 - 0.33.0 + 0.34.0 com.databricks From 779b6e3d63288d038c064613080baab73acab706 Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Thu, 24 Oct 2024 10:49:26 +0200 Subject: [PATCH 04/12] [Internal] Automatically trigger integration tests on PR (#369) ## Changes Automatically trigger integration tests when a PR is opened or updated ## Tests Workflow below. 
--- .github/workflows/integration-tests.yml | 57 +++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 .github/workflows/integration-tests.yml diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 00000000..57e93b13 --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,57 @@ +name: Integration Tests + +on: + pull_request: + types: [opened, synchronize] + + merge_group: + +jobs: + trigger-tests: + if: github.event_name == 'pull_request' + name: Trigger Tests + runs-on: ubuntu-latest + environment: "test-trigger-is" + + steps: + - uses: actions/checkout@v3 + + - name: Generate GitHub App Token + id: generate-token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }} + private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }} + owner: ${{ secrets.ORG_NAME }} + repositories: ${{secrets.REPO_NAME}} + + - name: Trigger Workflow in Another Repo + env: + GH_TOKEN: ${{ steps.generate-token.outputs.token }} + run: | + gh workflow run sdk-java-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \ + --ref main \ + -f pull_request_number=${{ github.event.pull_request.number }} \ + -f commit_sha=${{ github.event.pull_request.head.sha }} + + # Statuses and checks apply to specific commits (by hash). + # Enforcement of required checks is done both at the PR level and the merge queue level. + # In case of multiple commits in a single PR, the hash of the squashed commit + # will not match the one for the latest (approved) commit in the PR. + # We auto approve the check for the merge queue for two reasons: + # * Queue times out due to duration of tests. + # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing. 
+ auto-approve: + if: github.event_name == 'merge_group' + runs-on: ubuntu-latest + steps: + - name: Mark Check + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + gh api -X POST -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + /repos/${{ github.repository }}/statuses/${{ github.sha }} \ + -f 'state=success' \ + -f 'context=Integration Tests Check' From 2f08742fad2c773f771359cdfeb9ca8f8c86e6f0 Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Wed, 30 Oct 2024 10:15:18 +0100 Subject: [PATCH 05/12] [Internal] Add test instructions for external contributors (#370) ## Changes Add test instructions for external contributors ## Tests See Go Changes https://github.com/databricks/databricks-sdk-go/pull/1073 --- .github/workflows/external-message.yml | 114 ++++++++++++++++++++++++ .github/workflows/integration-tests.yml | 21 ++++- 2 files changed, 133 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/external-message.yml diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml new file mode 100644 index 00000000..065f5663 --- /dev/null +++ b/.github/workflows/external-message.yml @@ -0,0 +1,114 @@ +name: PR Comment + +# WARNING: +# THIS WORKFLOW ALWAYS RUNS FOR EXTERNAL CONTRIBUTORS WITHOUT ANY APPROVAL. +# THIS WORKFLOW RUNS FROM MAIN BRANCH, NOT FROM THE PR BRANCH. +# DO NOT PULL THE PR OR EXECUTE ANY CODE FROM THE PR. + +on: + pull_request_target: + types: [opened, reopened, synchronize] + branches: + - main + + +jobs: + comment-on-pr: + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + # NOTE: The following checks may not be accurate depending on Org or Repo settings. 
+ - name: Check user and potential secret access + id: check-secrets-access + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + USER_LOGIN="${{ github.event.pull_request.user.login }}" + REPO_OWNER="${{ github.repository_owner }}" + REPO_NAME="${{ github.event.repository.name }}" + + echo "Pull request opened by: $USER_LOGIN" + + # Check if PR is from a fork + IS_FORK=$([[ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]] && echo "true" || echo "false") + + HAS_ACCESS="false" + + # Check user's permission level on the repository + USER_PERMISSION=$(gh api repos/$REPO_OWNER/$REPO_NAME/collaborators/$USER_LOGIN/permission --jq '.permission') + + if [[ "$USER_PERMISSION" == "admin" || "$USER_PERMISSION" == "write" ]]; then + HAS_ACCESS="true" + elif [[ "$USER_PERMISSION" == "read" ]]; then + # For read access, we need to check if the user has been explicitly granted secret access + # This information is not directly available via API, so we'll make an assumption + # that read access does not imply secret access + HAS_ACCESS="false" + fi + + # Check if repo owner is an organization + IS_ORG=$(gh api users/$REPO_OWNER --jq '.type == "Organization"') + + if [[ "$IS_ORG" == "true" && "$HAS_ACCESS" == "false" ]]; then + # Check if user is a member of any team with write or admin access to the repo + TEAMS_WITH_ACCESS=$(gh api repos/$REPO_OWNER/$REPO_NAME/teams --jq '.[] | select(.permission == "push" or .permission == "admin") | .slug') + for team in $TEAMS_WITH_ACCESS; do + IS_TEAM_MEMBER=$(gh api orgs/$REPO_OWNER/teams/$team/memberships/$USER_LOGIN --silent && echo "true" || echo "false") + if [[ "$IS_TEAM_MEMBER" == "true" ]]; then + HAS_ACCESS="true" + break + fi + done + fi + + # If it's a fork, set HAS_ACCESS to false regardless of other checks + if [[ "$IS_FORK" == "true" ]]; then + HAS_ACCESS="false" + fi + + echo "has_secrets_access=$HAS_ACCESS" >> $GITHUB_OUTPUT + if [[ "$HAS_ACCESS" == "true" ]]; then + echo "User 
$USER_LOGIN likely has access to secrets" + else + echo "User $USER_LOGIN likely does not have access to secrets" + fi + + + - uses: actions/checkout@v4 + + - name: Delete old comments + if: steps.check-secrets-access.outputs.has_secrets_access != 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Delete previous comment if it exists + previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \ + --jq '.[] | select(.body | startswith("")) | .id') + echo "Previous comment IDs: $previous_comment_ids" + # Iterate over each comment ID and delete the comment + if [ ! -z "$previous_comment_ids" ]; then + echo "$previous_comment_ids" | while read -r comment_id; do + echo "Deleting comment with ID: $comment_id" + gh api "repos/${{ github.repository }}/issues/comments/$comment_id" -X DELETE + done + fi + + - name: Comment on PR + if: steps.check-secrets-access.outputs.has_secrets_access != 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COMMIT_SHA: ${{ github.event.pull_request.head.sha }} + run: | + gh pr comment ${{ github.event.pull_request.number }} --body \ + " + Run integration tests manually: + [go/deco-tests-run/sdk-java](https://go/deco-tests-run/sdk-java) + + Inputs: + * PR number: ${{github.event.pull_request.number}} + * Commit SHA: \`${{ env.COMMIT_SHA }}\` + + Checks will be approved automatically on success. + " diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 57e93b13..e7dc1f1d 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -7,12 +7,29 @@ on: merge_group: jobs: + check-token: + name: Check secrets access + runs-on: ubuntu-latest + outputs: + has_token: ${{ steps.set-token-status.outputs.has_token }} + steps: + - name: Check if GITHUB_TOKEN is set + id: set-token-status + run: | + if [ -z "${{ secrets.GITHUB_TOKEN }}" ]; then + echo "GITHUB_TOKEN is empty. 
User has no access to tokens." + echo "::set-output name=has_token::false" + else + echo "GITHUB_TOKEN is set. User has no access to tokens." + echo "::set-output name=has_token::true" + fi + trigger-tests: - if: github.event_name == 'pull_request' name: Trigger Tests runs-on: ubuntu-latest + needs: check-token + if: github.event_name == 'pull_request' && needs.check-token.outputs.has_token == 'true' environment: "test-trigger-is" - steps: - uses: actions/checkout@v3 From a239caee037fe57327a9ea42302162e41ed880ff Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Thu, 31 Oct 2024 12:27:32 +0100 Subject: [PATCH 06/12] [Internal] Always write message for manual test integration (#374) ## Changes Old script could not be run from master due to security restrictions and there is no reliable way to detect if a user as secrets. ## Tests Run as pull_request. --- .github/workflows/external-message.yml | 68 ++----------------------- .github/workflows/integration-tests.yml | 9 ++-- 2 files changed, 10 insertions(+), 67 deletions(-) diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml index 065f5663..fdb3347f 100644 --- a/.github/workflows/external-message.yml +++ b/.github/workflows/external-message.yml @@ -11,7 +11,6 @@ on: branches: - main - jobs: comment-on-pr: runs-on: ubuntu-latest @@ -19,73 +18,15 @@ jobs: pull-requests: write steps: - # NOTE: The following checks may not be accurate depending on Org or Repo settings. 
- - name: Check user and potential secret access - id: check-secrets-access - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - USER_LOGIN="${{ github.event.pull_request.user.login }}" - REPO_OWNER="${{ github.repository_owner }}" - REPO_NAME="${{ github.event.repository.name }}" - - echo "Pull request opened by: $USER_LOGIN" - - # Check if PR is from a fork - IS_FORK=$([[ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]] && echo "true" || echo "false") - - HAS_ACCESS="false" - - # Check user's permission level on the repository - USER_PERMISSION=$(gh api repos/$REPO_OWNER/$REPO_NAME/collaborators/$USER_LOGIN/permission --jq '.permission') - - if [[ "$USER_PERMISSION" == "admin" || "$USER_PERMISSION" == "write" ]]; then - HAS_ACCESS="true" - elif [[ "$USER_PERMISSION" == "read" ]]; then - # For read access, we need to check if the user has been explicitly granted secret access - # This information is not directly available via API, so we'll make an assumption - # that read access does not imply secret access - HAS_ACCESS="false" - fi - - # Check if repo owner is an organization - IS_ORG=$(gh api users/$REPO_OWNER --jq '.type == "Organization"') - - if [[ "$IS_ORG" == "true" && "$HAS_ACCESS" == "false" ]]; then - # Check if user is a member of any team with write or admin access to the repo - TEAMS_WITH_ACCESS=$(gh api repos/$REPO_OWNER/$REPO_NAME/teams --jq '.[] | select(.permission == "push" or .permission == "admin") | .slug') - for team in $TEAMS_WITH_ACCESS; do - IS_TEAM_MEMBER=$(gh api orgs/$REPO_OWNER/teams/$team/memberships/$USER_LOGIN --silent && echo "true" || echo "false") - if [[ "$IS_TEAM_MEMBER" == "true" ]]; then - HAS_ACCESS="true" - break - fi - done - fi - - # If it's a fork, set HAS_ACCESS to false regardless of other checks - if [[ "$IS_FORK" == "true" ]]; then - HAS_ACCESS="false" - fi - - echo "has_secrets_access=$HAS_ACCESS" >> $GITHUB_OUTPUT - if [[ "$HAS_ACCESS" == "true" ]]; then - echo "User 
$USER_LOGIN likely has access to secrets" - else - echo "User $USER_LOGIN likely does not have access to secrets" - fi - - - uses: actions/checkout@v4 - name: Delete old comments - if: steps.check-secrets-access.outputs.has_secrets_access != 'true' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | # Delete previous comment if it exists previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \ - --jq '.[] | select(.body | startswith("")) | .id') + --jq '.[] | select(.body | startswith("")) | .id') echo "Previous comment IDs: $previous_comment_ids" # Iterate over each comment ID and delete the comment if [ ! -z "$previous_comment_ids" ]; then @@ -96,14 +37,15 @@ jobs: fi - name: Comment on PR - if: steps.check-secrets-access.outputs.has_secrets_access != 'true' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} COMMIT_SHA: ${{ github.event.pull_request.head.sha }} run: | gh pr comment ${{ github.event.pull_request.number }} --body \ - " - Run integration tests manually: + " + If integration tests don't run automatically, an authorized user can run them manually by following the instructions below: + + Trigger: [go/deco-tests-run/sdk-java](https://go/deco-tests-run/sdk-java) Inputs: diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index e7dc1f1d..c2893aa4 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -10,17 +10,18 @@ jobs: check-token: name: Check secrets access runs-on: ubuntu-latest + environment: "test-trigger-is" outputs: has_token: ${{ steps.set-token-status.outputs.has_token }} steps: - - name: Check if GITHUB_TOKEN is set + - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set id: set-token-status run: | - if [ -z "${{ secrets.GITHUB_TOKEN }}" ]; then - echo "GITHUB_TOKEN is empty. User has no access to tokens." 
+ if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then + echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets." echo "::set-output name=has_token::false" else - echo "GITHUB_TOKEN is set. User has no access to tokens." + echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets." echo "::set-output name=has_token::true" fi From a1460c561c13fd3005080edf2392c8c3a6fb0e95 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Thu, 31 Oct 2024 12:37:54 +0100 Subject: [PATCH 07/12] [Internal] Move templates in the code generator (#373) ## Changes Move templates in the code generator. ## Tests The generator is working correctly with the new configuration. --- .codegen.json | 24 +--- .codegen/account.java.tmpl | 75 ---------- .codegen/api.java.tmpl | 219 ----------------------------- .codegen/error-mapper.java.tmpl | 15 -- .codegen/error-overrides.java.tmpl | 24 ---- .codegen/exception.java.tmpl | 25 ---- .codegen/impl.java.tmpl | 79 ----------- .codegen/interface.java.tmpl | 25 ---- .codegen/lib.tmpl | 26 ---- .codegen/model.java.tmpl | 87 ------------ .codegen/workspace.java.tmpl | 84 ----------- 11 files changed, 2 insertions(+), 681 deletions(-) delete mode 100644 .codegen/account.java.tmpl delete mode 100644 .codegen/api.java.tmpl delete mode 100644 .codegen/error-mapper.java.tmpl delete mode 100644 .codegen/error-overrides.java.tmpl delete mode 100644 .codegen/exception.java.tmpl delete mode 100644 .codegen/impl.java.tmpl delete mode 100644 .codegen/interface.java.tmpl delete mode 100644 .codegen/lib.tmpl delete mode 100644 .codegen/model.java.tmpl delete mode 100644 .codegen/workspace.java.tmpl diff --git a/.codegen.json b/.codegen.json index 9a8816c6..b823e4a2 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1,26 +1,6 @@ { - "formatter": "mvn spotless:apply", + "mode": "java_v0", "changelog_config": ".codegen/changelog_config.yml", - "template_libraries": [ - ".codegen/lib.tmpl" - ], - "types": { - ".codegen/model.java.tmpl": 
"databricks-sdk-java/src/main/java/com/databricks/sdk/service/{{.Package.Name}}/{{.PascalName}}.java" - }, - "services": { - ".codegen/interface.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/service/{{.Package.Name}}/{{.PascalName}}Service.java", - ".codegen/api.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/service/{{.Package.Name}}/{{.PascalName}}API.java", - ".codegen/impl.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/service/{{.Package.Name}}/{{.PascalName}}Impl.java" - }, - "exception_types": { - ".codegen/exception.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/{{.PascalName}}.java" - }, - "batch": { - ".codegen/workspace.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java", - ".codegen/account.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java", - ".codegen/error-mapper.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java", - ".codegen/error-overrides.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java" - }, "version": { "pom.xml": "databricks-sdk-parent\n $VERSION", "databricks-sdk-java/pom.xml": "databricks-sdk-parent\n $VERSION", @@ -32,6 +12,6 @@ "toolchain": { "require": ["mvn", "java"], "setup": ["rm -rf databricks-sdk-java/src/main/java/com/databricks/sdk/service"], - "post_generate": ["mvn --errors clean test"] + "post_generate": ["mvn spotless:apply","mvn --errors clean test"] } } diff --git a/.codegen/account.java.tmpl b/.codegen/account.java.tmpl deleted file mode 100644 index 71307f9a..00000000 --- a/.codegen/account.java.tmpl +++ /dev/null @@ -1,75 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.ConfigLoader; -import com.databricks.sdk.core.DatabricksConfig; -import com.databricks.sdk.core.utils.AzureUtils; -import com.databricks.sdk.service.provisioning.*; -{{range .Services}}{{if and .IsAccounts (not .IsDataPlane)}} -import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}API; -import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}Service;{{end}}{{end}} -import com.databricks.sdk.support.Generated; - -/** - * Entry point for accessing Databricks account-level APIs - */ -@Generated -public class AccountClient { - private final ApiClient apiClient; - private final DatabricksConfig config; - {{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} - private {{.PascalName}}API {{(.TrimPrefix "account").CamelName}}API;{{end}}{{end}} - - public AccountClient() { - this(ConfigLoader.getDefault()); - } - - public AccountClient(DatabricksConfig config) { - this.config = config; - apiClient = new ApiClient(config); - {{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} - {{(.TrimPrefix "account").CamelName}}API = new {{.PascalName}}API(apiClient);{{end}}{{end}} - } - - /** Constructor for mocks */ - public AccountClient(boolean mock) { - apiClient = null; - config = null; - } - {{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} - {{if .Description}}/** - {{.Comment " * " 80}} - */{{end}} - public {{.PascalName}}API {{(.TrimPrefix "account").CamelName}}() { - return {{(.TrimPrefix "account").CamelName}}API; - } - {{end}}{{end}} - {{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} - /** Replace the default {{.PascalName}}Service with a custom implementation. 
*/ - public AccountClient with{{(.TrimPrefix "account").PascalName}}Impl({{.PascalName}}Service {{.CamelName}}) { - return this.with{{(.TrimPrefix "account").PascalName}}API(new {{.PascalName}}API({{.CamelName}})); - } - - /** Replace the default {{.PascalName}}API with a custom implementation. */ - public AccountClient with{{(.TrimPrefix "account").PascalName}}API({{.PascalName}}API {{.CamelName}}) { - this.{{(.TrimPrefix "account").CamelName}}API = {{.CamelName}}; - return this; - } - {{end}}{{end}} - public ApiClient apiClient() { - return apiClient; - } - - public DatabricksConfig config() { - return config; - } - - public WorkspaceClient getWorkspaceClient(Workspace workspace) { - String host = this.config.getDatabricksEnvironment().getDeploymentUrl(workspace.getDeploymentName()); - DatabricksConfig config = this.config.newWithWorkspaceHost(host); - AzureUtils.getAzureWorkspaceResourceId(workspace).map(config::setAzureWorkspaceResourceId); - return new WorkspaceClient(config); - } -} diff --git a/.codegen/api.java.tmpl b/.codegen/api.java.tmpl deleted file mode 100644 index 72f03b50..00000000 --- a/.codegen/api.java.tmpl +++ /dev/null @@ -1,219 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.{{.Package.Name}}; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.Map; -import java.time.Duration; -import java.util.Arrays; -import java.util.concurrent.TimeoutException; -import java.util.function.Consumer; -import java.util.function.Function; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Paginator; -import com.databricks.sdk.support.Wait; - -{{range .Package.ImportedEntities}} -import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}};{{end}} - -{{if .Description}}/** - {{.Comment " * " 80}} - */{{end}} -@Generated -public class {{.PascalName}}API { - private static final Logger LOG = LoggerFactory.getLogger({{.PascalName}}API.class); - - private final {{.PascalName}}Service impl; - - {{range .Subservices}} - private {{.PascalName}}API {{.CamelName}}API; - {{end}} - - /** Regular-use constructor */ - public {{.PascalName}}API(ApiClient apiClient) { - impl = new {{.PascalName}}Impl(apiClient); - {{range .Subservices}} - {{.CamelName}}API = new {{.PascalName}}API(apiClient); - {{end}} - } - - /** Constructor for mocks */ - public {{.PascalName}}API({{.PascalName}}Service mock) { - impl = mock; - } - {{range .Waits}} - public {{.Poll.Response.PascalName}} {{.CamelName}}({{range $i, $x := .Binding}}{{if $i}}, {{end}}{{template "type" .PollField.Entity}} {{.PollField.CamelName}}{{end}}) - throws TimeoutException { - return {{.CamelName}}({{range .Binding}}{{.PollField.CamelName}}, {{end}}Duration.ofMinutes({{.Timeout}}), null); - } - - public {{.Poll.Response.PascalName}} {{.CamelName}}({{range .Binding}}{{template "type" .PollField.Entity}} {{.PollField.CamelName}}, {{end}} - Duration timeout, Consumer<{{.Poll.Response.PascalName}}> callback) throws TimeoutException { - 
long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List<{{.Status.Entity.PascalName}}> targetStates = Arrays.asList({{range $i,$x := .Success}}{{if $i}}, {{end}}{{.Entity.PascalName}}.{{.ConstantName}}{{end}});{{if .Failure}} - java.util.List<{{.Status.Entity.PascalName}}> failureStates = Arrays.asList({{range $i,$x := .Failure}}{{if $i}}, {{end}}{{.Entity.PascalName}}.{{.ConstantName}}{{end}});{{end}} - String statusMessage = "polling..."; - int attempt = 1; - while (System.currentTimeMillis() < deadline) { - {{.Poll.Response.PascalName}} poll = {{template "java-name" .Poll}}(new {{.Poll.Request.PascalName}}(){{range .Binding}}.set{{.PollField.PascalName}}({{.PollField.CamelName}}){{- end}}); - {{.Status.Entity.PascalName}} status = poll{{range .StatusPath}}.get{{.PascalName}}(){{end}}; - {{if .ComplexMessagePath -}} - statusMessage = String.format("current status: %s", status); - if (poll.get{{.MessagePathHead.PascalName}}() != null) { - statusMessage = poll{{range .MessagePath}}.get{{.PascalName}}(){{end}}; - } - {{- else if .MessagePath -}} - statusMessage = poll{{range .MessagePath}}.get{{.PascalName}}(){{end}}; - {{- else -}} - statusMessage = String.format("current status: %s", status); - {{- end}} - if (targetStates.contains(status)) { - return poll; - } - if (callback != null) { - callback.accept(poll); - }{{if .Failure -}} - if (failureStates.contains(status)) { - String msg = String.format("failed to reach {{range $i, $e := .Success}}{{if $i}} or {{end}}{{$e.ConstantName}}{{end}}, got %s: %s", status, statusMessage); - throw new IllegalStateException(msg); - } - {{end}} - String prefix = String.format("{{range $i, $b := .Binding}}{{if $i}}, {{end -}}{{.PollField.CamelName}}=%s{{- end}}"{{range .Binding}}, {{.PollField.CamelName}}{{- end}}); - int sleep = attempt; - if (sleep > 10) { - // sleep 10s max per attempt - sleep = 10; - } - LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep); - try { - 
Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new DatabricksException("Current thread was interrupted", e); - } - attempt++; - } - throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); - } - {{end}} - - {{range .Methods}} - {{if and .Request .Request.RequiredFields}} - public {{template "method-return-type" .}} {{template "java-name" .}}({{range $i, $p := .Request.RequiredFields -}} - {{if $i}}, {{end}}{{template "type-unboxed" .Entity }} {{.CamelName}}{{if .IsNameReserved}}Value{{end}} - {{- end}}) { - {{if or (not .Response.IsEmpty) .Wait -}}return {{end}}{{template "java-name" .}}(new {{.Request.PascalName}}(){{range .Request.RequiredFields}} - .set{{.PascalName}}({{.CamelName}}{{if .IsNameReserved}}Value{{end}}){{end}}); - } - {{end}} - - {{if .Description}}/** - {{.Comment " * " 80}} - */{{end}} - public {{template "method-return-type" .}} {{template "java-name" .}}({{if .Request}}{{template "type" .Request}} request{{end}}) { - {{template "method-call" .}} - } - {{end}} - - {{range .Subservices}} - /** - * {{.Summary}} - */ - public {{.PascalName}}API {{.PascalName}}() { - return {{.CamelName}}API; - } - {{end}} - - public {{.PascalName}}Service impl() { - return impl; - } -} - -{{define "java-name" -}} -{{.CamelName}}{{if .IsNameReserved}}Content{{end}} -{{- end}} - -{{define "method-call" -}} - {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}{{template "method-call-retried" .}} - {{- else if .Pagination -}}{{template "method-call-paginated" .}} - {{- else}}{{template "method-call-default" .}}{{end}} -{{- end}} - -{{define "method-call-paginated" -}} - {{- if and .Pagination.Offset (not (eq .Path "/api/2.1/clusters/events")) -}} - request.set{{.Pagination.Offset.PascalName}}( - {{- if eq .Pagination.Increment 1 -}} - 1 - {{- else if contains .Path "/scim/v2/" -}} - 1 - {{- else -}} - 0 - {{- 
end}}L);{{end -}} - {{if and .Pagination.Limit (contains .Path "/scim/v2/")}} - if (request.get{{.Pagination.Limit.PascalName}}() == null) { - request.set{{.Pagination.Limit.PascalName}}(100L); - }{{end -}} - return new Paginator<>( - {{ if .Request }}request{{ else }}null{{ end }}, - {{ if .Request }}impl::{{template "java-name" .}}{{ else }}(Void v) -> impl.{{template "java-name" .}}(){{ end }}, - {{template "type" .Response}}::get{{.Pagination.Results.PascalName}}, - response -> - {{ if not .Pagination.MultiRequest }} - null - {{- else if eq .Path "/api/2.1/clusters/events" -}} - response.getNextPage() - {{- else if .Pagination.Token -}} - { - String token = response.get{{.Pagination.Token.Bind.PascalName}}(); - if (token == null || token.isEmpty()) { - return null; - } - return request.set{{.Pagination.Token.PollField.PascalName}}(token); - } - {{- else if eq .Pagination.Increment 1 -}} - { - Long page = request.get{{.Pagination.Offset.PascalName}}(); - if (page == null) { - page = 1L; // redash uses 1-based pagination - } - return request.set{{.Pagination.Offset.PascalName}}(page+1L); - } - {{- else -}} - { - Long offset = request.get{{.Pagination.Offset.PascalName}}(); - if (offset == null) { - offset = 0L; - } - offset += response.get{{.Pagination.Results.PascalName}}().size(); - return request.set{{.Pagination.Offset.PascalName}}(offset); - } - {{- end}} - ){{if .NeedsOffsetDedupe -}}.withDedupe({{.Pagination.Entity.PascalName}}::get{{.IdentifierField.PascalName}}){{end}}; -{{- end}} - -{{define "method-call-retried" -}} - {{if not .Response.IsEmpty}}{{.Response.PascalName}} response = {{end}}impl.{{template "java-name" .}}(request); - return new Wait<>((timeout, callback) -> {{.Wait.CamelName}}({{range .Wait.Binding}}{{if .IsResponseBind}}response{{else}}request{{end}}.get{{.Bind.PascalName}}(), {{end}}timeout, callback){{if not .Response.IsEmpty}}, response{{end}}); -{{- end}} - -{{define "method-call-default" -}} - {{if not .Response.IsEmpty -}}return 
{{end}}impl.{{template "java-name" .}}({{if .Request}}request{{end}}); -{{- end}} - -{{define "method-return-type" -}} - {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }}Wait<{{.Wait.Poll.Response.PascalName}},{{if not .Response.IsEmpty}}{{.Response.PascalName}}{{else}}Void{{end}}> - {{- else if not .Response.IsEmpty }}{{if .Response.ArrayValue -}} - Iterable<{{ template "type" .Response.ArrayValue }}> - {{- else if .Pagination -}} - Iterable<{{ template "type" .Pagination.Entity }}> - {{- else -}} - {{template "type" .Response}} - {{- end}}{{else}}void{{end}} -{{- end}} diff --git a/.codegen/error-mapper.java.tmpl b/.codegen/error-mapper.java.tmpl deleted file mode 100644 index 0e605663..00000000 --- a/.codegen/error-mapper.java.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.core.error; - -import com.databricks.sdk.support.Generated; - -@Generated -class ErrorMapper extends AbstractErrorMapper { - public ErrorMapper() { - {{range .ErrorStatusCodeMapping}}statusCode({{.StatusCode}}, com.databricks.sdk.core.error.platform.{{.PascalName}}::new); - {{end}} - {{range .ErrorCodeMapping}}errorCode("{{.ErrorCode}}", com.databricks.sdk.core.error.platform.{{.PascalName}}::new); - {{end}} - } -} diff --git a/.codegen/error-overrides.java.tmpl b/.codegen/error-overrides.java.tmpl deleted file mode 100644 index fb9cff73..00000000 --- a/.codegen/error-overrides.java.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.core.error; - -import java.util.Arrays; -import java.util.List; - -import com.databricks.sdk.support.Generated; - -@Generated -class ErrorOverrides { - static final List> ALL_OVERRIDES = Arrays.asList( -{{- range $i, $x := .ErrorOverrides }} - {{if not (eq $i 0)}}, {{end}}new ErrorOverride<>( - "{{$x.Name}}", - "{{ replaceAll "\\" "\\\\" $x.PathRegex}}", - "{{$x.Verb}}", - "{{ replaceAll "\\" "\\\\" $x.StatusCodeMatcher}}", - "{{ replaceAll "\\" "\\\\" $x.ErrorCodeMatcher}}", - "{{ replaceAll "\\" "\\\\" $x.MessageMatcher}}", - com.databricks.sdk.core.error.platform.{{$x.OverrideErrorCode.PascalName}}.class) -{{- end}} - ); -} diff --git a/.codegen/exception.java.tmpl b/.codegen/exception.java.tmpl deleted file mode 100644 index 2e285c3f..00000000 --- a/.codegen/exception.java.tmpl +++ /dev/null @@ -1,25 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.core.error.platform; - -import com.databricks.sdk.core.DatabricksError; -import com.databricks.sdk.core.error.ErrorDetail; -import com.databricks.sdk.support.Generated; - -import java.util.List; - -/** - {{.Comment " * " 80}} - */ -@Generated -public class {{.PascalName}} extends {{if .Inherit -}}{{.Inherit.PascalName}}{{else}}DatabricksError{{end}} { - public {{.PascalName}}(String message, List details) { - super("{{.Name}}", message, {{if not .Inherit}}{{.StatusCode}}, {{end}}details); - } - - {{if not .Inherit}} - public {{.PascalName}}(String errorCode, String message, List details) { - super(errorCode, message, {{.StatusCode}}, details); - } - {{end}} -} diff --git a/.codegen/impl.java.tmpl b/.codegen/impl.java.tmpl deleted file mode 100644 index 3a0ecd41..00000000 --- a/.codegen/impl.java.tmpl +++ /dev/null @@ -1,79 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.{{.Package.Name}}; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.Map; -import java.util.HashMap; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.http.Encoding; -import com.databricks.sdk.support.Generated; - -{{range .Package.ImportedEntities}} -import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}};{{end}} - -/** Package-local implementation of {{.PascalName}} */ -@Generated -class {{.PascalName}}Impl implements {{.PascalName}}Service { - private final ApiClient apiClient; - - public {{.PascalName}}Impl(ApiClient apiClient) { - this.apiClient = apiClient; - } - {{range .Methods}} - @Override - public {{if not .Response.IsEmpty -}}{{template "type" .Response}}{{else}}void{{end}} {{.CamelName}}{{if .IsNameReserved}}Content{{end}}({{if .Request}}{{template "type" .Request}} request{{end}}) { - String path = {{ template "path" . }}; - {{ template "headers" . -}} - {{ if .Response.IsEmpty -}} - {{ template "api-call" . }} - {{- else if .Response.ArrayValue -}} return apiClient.getCollection(path, null, {{template "type" .Response.ArrayValue}}.class, headers); - {{- else if .Response.MapValue -}} return apiClient.getStringMap(path, {{ template "request-param" .}}, headers); - {{- else }}return {{ template "api-call" . 
}} - {{- end}} - } - {{end}} -} - -{{- define "path" -}} -{{- if .PathParts -}} - String.format("{{range .PathParts -}} - {{- .Prefix -}} - {{- if or .Field .IsAccountId -}}%s{{- end -}} - {{- end -}}" - {{- range .PathParts -}} - {{- if and .Field .Field.IsPathMultiSegment -}}, Encoding.encodeMultiSegmentPathParameter(request.get{{.Field.PascalName}}()) - {{- else if .Field -}}, request.get{{.Field.PascalName}}() - {{- else if .IsAccountId -}}, apiClient.configuredAccountID() - {{- end -}} - {{- end -}}) -{{- else -}} - "{{.Path}}" -{{- end -}} -{{- end -}} - -{{ define "api-call" }} -apiClient.{{.Verb}}(path - {{- if .Request}}, {{ template "request-param" .}}{{end}} - , {{ if not .Response -}}Void - {{- else}}{{template "type" .Response}}{{- end -}}.class - , headers); -{{- end }} - -{{ define "request-param" -}} - {{- if or (and .Operation .Operation.RequestBody) (eq .Verb "GET") (eq .Verb "DELETE") (eq .Verb "HEAD") -}} - request{{ if .RequestBodyField }}.get{{.RequestBodyField.PascalName}}(){{end}} - {{- else -}} - null - {{- end -}} -{{- end }} - -{{ define "headers" -}} - Map headers = new HashMap<>(); - {{- range $key, $value := .FixedRequestHeaders }} - headers.put("{{$key}}", "{{$value}}"); - {{- end -}} -{{- end }} diff --git a/.codegen/interface.java.tmpl b/.codegen/interface.java.tmpl deleted file mode 100644 index 794c79b2..00000000 --- a/.codegen/interface.java.tmpl +++ /dev/null @@ -1,25 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.{{.Package.Name}}; - -import java.util.Collection; -import java.util.Map; -import java.io.InputStream; - -import com.databricks.sdk.support.Generated; - -/** - {{.Comment " * " 80}} - * - * This is the high-level interface, that contains generated methods. - * - * Evolving: this interface is under development. Method signatures may change. 
- */ -@Generated -public interface {{.PascalName}}Service { - {{- range .Methods}} - {{if .Description}}/** - {{.Comment " * " 80}} - */{{end}} - {{if not .Response.IsEmpty -}}{{template "type" .Response}}{{else}}void{{end}} {{.CamelName}}{{if .IsNameReserved}}Content{{end}}({{if .Request}}{{template "type" .Request}} {{.Request.CamelName}}{{if .IsNameReserved}}Content{{end}}{{end}}); - {{end}} -} diff --git a/.codegen/lib.tmpl b/.codegen/lib.tmpl deleted file mode 100644 index 88b42123..00000000 --- a/.codegen/lib.tmpl +++ /dev/null @@ -1,26 +0,0 @@ -{{- define "type-unboxed" -}} - {{- if .IsBool}}boolean - {{- else if .IsInt64}}long - {{- else if .IsFloat64}}double - {{- else if .IsInt}}long - {{- else }}{{template "type" .}}{{end}} -{{- end -}} - -{{- define "type" -}} - {{- if not . }}Object /* ERROR */ - {{- else if .IsExternal }}com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}} - {{- else if .IsAny}}Object - {{- else if .IsEmpty}}{{.PascalName}} - {{- else if .IsString}}String - {{- else if .IsBool}}Boolean - {{- else if .IsInt64}}Long - {{- else if .IsFloat64}}Double - {{- else if .IsInt}}Long - {{- else if .ArrayValue }}Collection<{{template "type" .ArrayValue}}> - {{- else if .MapValue }}Map - {{- else if .IsByteStream}}InputStream - {{- else if .IsObject }}{{.PascalName}} - {{- else if .Enum }}{{.PascalName}} - {{- else}}Object /* MISSING TYPE */ - {{- end -}} -{{- end -}} diff --git a/.codegen/model.java.tmpl b/.codegen/model.java.tmpl deleted file mode 100644 index 98028fe9..00000000 --- a/.codegen/model.java.tmpl +++ /dev/null @@ -1,87 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.{{.Package.Name}}; - -import java.util.Map; -import java.io.InputStream; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Header; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; - -{{if .IsExternal}} -There is a small issue with the code generator causing this class to be generated twice. This should be fixed upstream -in the code generator, after which this block can be removed. For now, we skip generating the model for all external -types. -{{skipThisFile}} -{{end}} -{{if .Description}}/** - {{.Comment " * " 80}} - */{{end}} -{{if or .Fields .IsEmpty -}} -@Generated -public class {{.PascalName}} { - {{- range .Fields}} - /** - {{.Comment " * " 80}} - */ - {{if .IsJson}}@JsonProperty("{{.Name}}"){{else}}@JsonIgnore{{end}}{{if .IsQuery}}@QueryParam("{{.Name}}"){{end}}{{if .IsHeader}}@Header("{{.Name}}"){{end}} - private {{template "type" .Entity }} {{.CamelName}}{{if .IsNameReserved}}Value{{end}}; - {{end}} - - {{- range .Fields}} - public {{template "type" .Of}} set{{.PascalName}}({{template "type" .Entity }} {{.CamelName}}{{if .IsNameReserved}}Value{{end}}) { - this.{{.CamelName}}{{if .IsNameReserved}}Value{{end}} = {{.CamelName}}{{if .IsNameReserved}}Value{{end}}; - return this; - } - - public {{template "type" .Entity }} get{{.PascalName}}() { - return {{.CamelName}}{{if .IsNameReserved}}Value{{end}}; - } - {{end}} - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - {{if .IsEmpty -}} - return true; - {{- else -}} - {{.PascalName}} that = ({{.PascalName}}) o; - return {{range $i, $x := .Fields}}{{if $i}}&& {{end}}Objects.equals({{.CamelName}}{{if .IsNameReserved}}Value{{end}}, 
that.{{.CamelName}}{{if .IsNameReserved}}Value{{end}}) - {{end}}; - {{- end }} - } - - @Override - public int hashCode() { - return Objects.hash({{range $i, $x := .Fields}}{{if $i}}, {{end}}{{.CamelName}}{{if .IsNameReserved}}Value{{end}}{{end}}); - } - - @Override - public String toString() { - return new ToStringer({{.PascalName}}.class){{range .Fields}} - .add("{{.CamelName}}{{if .IsNameReserved}}Value{{end}}", {{.CamelName}}{{if .IsNameReserved}}Value{{end}}) - {{- end}}.toString(); - } -} -{{else if .Enum -}} -@Generated -public enum {{.PascalName}}{ - {{range .Enum -}} - {{ if not (eq .Content .ConstantName) }} - @JsonProperty("{{.Content}}") {{end}}{{.ConstantName}},{{.Comment "// " 80}} - {{ end }} -} -{{ else }} -Any types that have no fields (i.e. primitives or array types) or are not enums are not represented by distinct Java -types in the Java SDK. -{{skipThisFile}} -{{- end}} - diff --git a/.codegen/workspace.java.tmpl b/.codegen/workspace.java.tmpl deleted file mode 100644 index a191ef14..00000000 --- a/.codegen/workspace.java.tmpl +++ /dev/null @@ -1,84 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.ConfigLoader; -import com.databricks.sdk.core.DatabricksConfig; -{{- /* The mapping of mixin replacements should be defined in the .codegen.json file somehow, so that it can be used - in multiple pipelines in the template itself. Pipelines in go template are not able to reference variables - defined in an outer scope (https://github.com/golang/go/issues/17454). 
*/ -}} -import com.databricks.sdk.mixin.ClustersExt; -import com.databricks.sdk.mixin.DbfsExt; -import com.databricks.sdk.mixin.SecretsExt; -{{range .Services}}{{if and (not .IsAccounts) (not .IsDataPlane)}} -import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}API; -import com.databricks.sdk.service.{{.Package.Name}}.{{.PascalName}}Service; -{{end}}{{end}} -import com.databricks.sdk.support.Generated; - -{{- define "api" -}} - {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "SecretsAPI" "SecretsExt" -}} - {{- $genApi := concat .PascalName "API" -}} - {{- getOrDefault $mixins $genApi $genApi -}} -{{- end -}} - -/** - * Entry point for accessing Databricks workspace-level APIs - */ -@Generated -public class WorkspaceClient { - private final ApiClient apiClient; - private final DatabricksConfig config; - {{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} - private {{template "api" .}} {{.CamelName}}API;{{end}}{{end}} - - public WorkspaceClient() { - this(ConfigLoader.getDefault()); - } - - public WorkspaceClient(DatabricksConfig config) { - this.config = config; - apiClient = new ApiClient(config); - {{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} - {{.CamelName}}API = new {{template "api" .}}(apiClient);{{end}}{{end}} - } - - /** Constructor for mocks */ - public WorkspaceClient(boolean mock) { - this(mock, null /* apiClient */); - } - - /** Constructor for mocks */ - public WorkspaceClient(boolean mock, ApiClient apiClient) { - this.apiClient = apiClient; - this.config = null; - } - {{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} - {{if .Description}}/** - {{.Comment " * " 80}} - */{{end}} - public {{template "api" .}} {{.CamelName}}() { - return {{.CamelName}}API; - } - {{end}}{{end}} - {{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} - /** Replace the default {{.PascalName}}Service with a 
custom implementation. */ - public WorkspaceClient with{{.PascalName}}Impl({{.PascalName}}Service {{.CamelName}}) { - return this.with{{.PascalName}}API(new {{template "api" .}}({{.CamelName}})); - } - - /** Replace the default {{.PascalName}}API with a custom implementation. */ - public WorkspaceClient with{{.PascalName}}API({{template "api" .}} {{.CamelName}}) { - this.{{.CamelName}}API = {{.CamelName}}; - return this; - } - {{end}}{{end}} - public ApiClient apiClient() { - return apiClient; - } - - public DatabricksConfig config() { - return config; - } -} From f04f6cbb233ac294798414f777921b49f402e907 Mon Sep 17 00:00:00 2001 From: Satvik Ramaprasad Date: Wed, 6 Nov 2024 12:58:48 +0530 Subject: [PATCH 08/12] [Feature] DatabricksConfig: Add clone() support (#376) ## Changes Adds support for cloning DatabricksConfig(), this is needed because we need a way to set configurations per API call such as 1. timeout 2. httpClient configuration 3. debugHeaders However, we still want to use the cached oauth token etc and we would like the header factory to be a common object across workspace clients and databricks configs Ideally this should be supported by the SDK itself natively, however since its not supported and it will take significant migration to achieve that, this is a work around to achieve the same. 
## Tests Added UT --- .../databricks/sdk/core/DatabricksConfig.java | 33 +++++++++++-------- .../sdk/core/DatabricksConfigTest.java | 18 ++++++++++ 2 files changed, 38 insertions(+), 13 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 3b692860..20e7f883 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -631,6 +631,25 @@ public DatabricksEnvironment getDatabricksEnvironment() { return DatabricksEnvironment.getEnvironmentFromHostname(this.host); } + private DatabricksConfig clone(Set fieldsToSkip) { + DatabricksConfig newConfig = new DatabricksConfig(); + for (Field f : DatabricksConfig.class.getDeclaredFields()) { + if (fieldsToSkip.contains(f.getName())) { + continue; + } + try { + f.set(newConfig, f.get(this)); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + return newConfig; + } + + public DatabricksConfig clone() { + return clone(new HashSet<>()); + } + public DatabricksConfig newWithWorkspaceHost(String host) { Set fieldsToSkip = new HashSet<>( @@ -645,18 +664,6 @@ public DatabricksConfig newWithWorkspaceHost(String host) { // don't cache the // header factory. 
"headerFactory")); - DatabricksConfig newConfig = new DatabricksConfig(); - for (Field f : DatabricksConfig.class.getDeclaredFields()) { - if (fieldsToSkip.contains(f.getName())) { - continue; - } - try { - f.set(newConfig, f.get(this)); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - } - newConfig.setHost(host); - return newConfig; + return clone(fieldsToSkip).setHost(host); } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java index 3ec891c6..e552a142 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DatabricksConfigTest.java @@ -177,4 +177,22 @@ public void testNewWithWorkspaceHost() { assert newWorkspaceConfig.getClientId().equals("my-client-id"); assert newWorkspaceConfig.getClientSecret().equals("my-client-secret"); } + + @Test + public void testClone() { + DatabricksConfig config = + new DatabricksConfig() + .setAuthType("oauth-m2m") + .setClientId("my-client-id") + .setClientSecret("my-client-secret") + .setAccountId("account-id") + .setHost("https://account.cloud.databricks.com"); + + DatabricksConfig newWorkspaceConfig = config.clone(); + + assert newWorkspaceConfig.getHost().equals("https://account.cloud.databricks.com"); + assert newWorkspaceConfig.getAuthType().equals("oauth-m2m"); + assert newWorkspaceConfig.getClientId().equals("my-client-id"); + assert newWorkspaceConfig.getClientSecret().equals("my-client-secret"); + } } From 5deef7de05c52154cb2a74af68538cb6513c36a9 Mon Sep 17 00:00:00 2001 From: Renaud Hartert Date: Wed, 6 Nov 2024 15:19:03 +0100 Subject: [PATCH 09/12] [Internal] Refresh PR template (#381) ## What changes are proposed in this pull request? This PR updates the PR template to remove outdated check boxes and emphasize testing. ## How is this tested? 
N/A --- .github/PULL_REQUEST_TEMPLATE.md | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 5be08701..91e519ed 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,6 +1,28 @@ -## Changes - +## What changes are proposed in this pull request? -## Tests - +Provide the readers and reviewers with the information they need to understand +this PR in a comprehensive manner. +Specifically, try to answer the two following questions: + +- **WHAT** changes are being made in the PR? This should be a summary of the + major changes to allow the reader to quickly understand the PR without having + to look at the code. +- **WHY** are these changes needed? This should provide the context that the + reader might be missing. For example, were there any decisions behind the + change that are not reflected in the code itself? + +The “why part” is the most important of the two as it usually cannot be +inferred from the code itself. A well-written PR description will help future +developers (including your future self) to know how to interact and update your +code. + +## How is this tested? + +Describe any tests you have done; especially if test tests are not part of +the unit tests (e.g. local tests). + +**ALWAYS ANSWER THIS QUESTION:** Answer with "N/A" if tests are not applicable +to your PR (e.g. if the PR only modifies comments). Do not be afraid of +answering "Not tested" if the PR has not been tested. Being clear about what +has been done and not done provides important context to the reviewers. 
\ No newline at end of file From 849cd3dad8fd1cd551a4b7e43987ff2c3ecb2b0b Mon Sep 17 00:00:00 2001 From: Samikshya Chand <148681192+samikshya-db@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:35:23 +0530 Subject: [PATCH 10/12] [Fix] Fix vulnerabilities in the present SDK version (#383) ## What changes are proposed in this pull request? - **What** : - Update commons.io to fix the [CVE in the present version](https://mvnrepository.com/artifact/com.databricks/databricks-sdk-java/0.34.0). Looks like depandabot PRs are no longer being created/merged. [[Link](https://github.com/databricks/databricks-sdk-java/pull/261/files)] - Change ini4j configuration because of vulnerability. - **Why** - ini4j 0.5.4 version has an infinite loop situation in the following piece of code. This loop can cause excessive memory and CPU usage, potentially crashing the application. Alternate libraries like Apache Commons Configuration gracefully handle the situation (by limiting the recursions internally). I will raise a PR on SDK later today to replace the ini4j library. Moreover : the official site of ini4j [is up for sale](http://www.ini4j.org/) and the last update to this maven package was done in [2015](https://mvnrepository.com/artifact/org.ini4j/ini4j). There is no reason we should continue to use this package. ``` Ini ini = new Ini(); ini.load(new ByteArrayInputStream(""" [deploy] a = ${test/a} b = ${doc/b} [test] a = ${deploy/a} b = ${deploy/b} [doc] a = 15 b = 45 """.getBytes(StandardCharsets.UTF_8))); // Will cause stack overflow ini.get("deploy").fetch("a"); ``` ## How is this tested? - The existing unit tests run fine. 
--- databricks-sdk-java/pom.xml | 8 ++-- .../com/databricks/sdk/core/ConfigLoader.java | 41 ++++++++++--------- 2 files changed, 25 insertions(+), 24 deletions(-) diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index cba1f785..a49f2205 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -49,9 +49,9 @@ provided - org.ini4j - ini4j - 0.5.4 + org.apache.commons + commons-configuration2 + 2.11.0 compile @@ -67,7 +67,7 @@ commons-io commons-io - 2.13.0 + 2.14.0 org.junit.jupiter diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigLoader.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigLoader.java index 933fa50a..47779d0e 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigLoader.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigLoader.java @@ -1,16 +1,17 @@ package com.databricks.sdk.core; import com.databricks.sdk.core.utils.Environment; -import java.io.File; import java.io.FileNotFoundException; +import java.io.FileReader; import java.io.IOException; import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Paths; import java.util.*; -import org.ini4j.Ini; -import org.ini4j.Profile; +import org.apache.commons.configuration2.INIConfiguration; +import org.apache.commons.configuration2.SubnodeConfiguration; +import org.apache.commons.configuration2.ex.ConfigurationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,7 +41,7 @@ public static DatabricksConfig resolve(DatabricksConfig cfg) throws DatabricksEx } } - static void loadFromEnvironmentVariables(DatabricksConfig cfg) throws IllegalAccessException { + static void loadFromEnvironmentVariables(DatabricksConfig cfg) { if (cfg.getEnv() == null) { return; } @@ -57,7 +58,7 @@ static void loadFromEnvironmentVariables(DatabricksConfig cfg) throws IllegalAcc } accessor.setValueOnConfig(cfg, env); } - } 
catch (DatabricksException e) { + } catch (DatabricksException | IllegalAccessException e) { String msg = String.format("%s auth: %s", cfg.getCredentialsProvider().authType(), e.getMessage()); throw new DatabricksException(msg, e); @@ -86,27 +87,27 @@ static void loadFromConfig(DatabricksConfig cfg) throws IllegalAccessException { configFile = configFile.replaceFirst("^~", userHome); } - Ini ini = parseDatabricksCfg(configFile, isDefaultConfig); + INIConfiguration ini = parseDatabricksCfg(configFile, isDefaultConfig); if (ini == null) return; + String profile = cfg.getProfile(); boolean hasExplicitProfile = !isNullOrEmpty(profile); if (!hasExplicitProfile) { profile = "DEFAULT"; } - - Profile.Section section = ini.get(profile); - if (section == null && !hasExplicitProfile) { + SubnodeConfiguration section = ini.getSection(profile); + boolean sectionNotPresent = section == null || section.isEmpty(); + if (sectionNotPresent && !hasExplicitProfile) { LOG.info("{} has no {} profile configured", configFile, profile); return; } - - if (section == null) { + if (sectionNotPresent) { String msg = String.format("resolve: %s has no %s profile configured", configFile, profile); throw new DatabricksException(msg); } for (ConfigAttributeAccessor accessor : accessors) { - String value = section.get(accessor.getName()); + String value = section.getString(accessor.getName()); if (!isNullOrEmpty(accessor.getValueFromConfig(cfg))) { continue; } @@ -114,18 +115,18 @@ static void loadFromConfig(DatabricksConfig cfg) throws IllegalAccessException { } } - private static Ini parseDatabricksCfg(String configFile, boolean isDefaultConfig) { - Ini ini = new Ini(); - try { - ini.load(new File(configFile)); + private static INIConfiguration parseDatabricksCfg(String configFile, boolean isDefaultConfig) { + INIConfiguration iniConfig = new INIConfiguration(); + try (FileReader reader = new FileReader(configFile)) { + iniConfig.read(reader); } catch (FileNotFoundException e) { if 
(isDefaultConfig) { return null; } - } catch (IOException e) { + } catch (IOException | ConfigurationException e) { throw new DatabricksException("Cannot load " + configFile, e); } - return ini; + return iniConfig; } public static void fixHostIfNeeded(DatabricksConfig cfg) { @@ -230,12 +231,12 @@ public static String debugString(DatabricksConfig cfg) { if (!attrsUsed.isEmpty()) { buf.add(String.format("Config: %s", String.join(", ", attrsUsed))); } else { - buf.add(String.format("Config: ")); + buf.add("Config: "); } if (!envsUsed.isEmpty()) { buf.add(String.format("Env: %s", String.join(", ", envsUsed))); } else { - buf.add(String.format("Env: ")); + buf.add("Env: "); } return String.join(". ", buf); } catch (IllegalAccessException e) { From 9b7ca5dede4f89c4f3a126bf07f9a1b8eef25e6d Mon Sep 17 00:00:00 2001 From: Renaud Hartert Date: Thu, 7 Nov 2024 16:50:49 +0100 Subject: [PATCH 11/12] [Release] Release v0.35.0 (#385) ### New Features and Improvements * DatabricksConfig: Add clone() support ([#376](https://github.com/databricks/databricks-sdk-java/pull/376)). ### Bug Fixes * Fix vulnerabilities in the present SDK version ([#383](https://github.com/databricks/databricks-sdk-java/pull/383)). ### Internal Changes * Add test instructions for external contributors ([#370](https://github.com/databricks/databricks-sdk-java/pull/370)). * Always write message for manual test integration ([#374](https://github.com/databricks/databricks-sdk-java/pull/374)). * Automatically trigger integration tests on PR ([#369](https://github.com/databricks/databricks-sdk-java/pull/369)). * Move templates in the code generator ([#373](https://github.com/databricks/databricks-sdk-java/pull/373)). * Refresh PR template ([#381](https://github.com/databricks/databricks-sdk-java/pull/381)). ### API Changes: * Added `workspaceClient.aibiDashboardEmbeddingAccessPolicy()` service and `workspaceClient.aibiDashboardEmbeddingApprovedDomains()` service. 
* Added `workspaceClient.credentials()` service. * Added `appDeployment` field for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. * Added `app` field for `com.databricks.sdk.service.apps.CreateAppRequest`. * Added `app` field for `com.databricks.sdk.service.apps.UpdateAppRequest`. * Added `table` field for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. * Added `azureAad` field for `com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`. * Added `omitUsername` field for `com.databricks.sdk.service.catalog.ListTablesRequest`. * Added `fullName` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. * Added `dashboard` field for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. * Added `schedule` field for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. * Added `subscription` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. * Added `warehouseId` field for `com.databricks.sdk.service.dashboards.Schedule`. * Added `dashboard` field for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. * Added `schedule` field for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. * Added `only` field for `com.databricks.sdk.service.jobs.RunNow`. * Added `pageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`. * Added `nextPageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`. * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.EditPipeline`. * Added `connectionName` field for `com.databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`. * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. * Added `isNoPublicIpEnabled` field for `com.databricks.sdk.service.provisioning.CreateWorkspaceRequest`. 
* Added `privateAccessSettingsId` field for `com.databricks.sdk.service.provisioning.UpdateWorkspaceRequest`. * Added `externalCustomerInfo` and `isNoPublicIpEnabled` fields for `com.databricks.sdk.service.provisioning.Workspace`. * Added `lastUsedDay` field for `com.databricks.sdk.service.settings.TokenInfo`. * Changed `create()` method for `workspaceClient.apps()` service with new required argument order. * Changed `executeMessageQuery()` method for `workspaceClient.genie()` service . New request type is `com.databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` class. * Changed `executeMessageQuery()` method for `workspaceClient.genie()` service to type `executeMessageQuery()` method for `workspaceClient.genie()` service. * Changed `create()`, `createSchedule()`, `createSubscription()` and `updateSchedule()` methods for `workspaceClient.lakeview()` service with new required argument order. * Removed `workspaceClient.cleanRooms()` service. * Removed `deploymentId`, `mode` and `sourceCodePath` fields for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. * Removed `description`, `name` and `resources` fields for `com.databricks.sdk.service.apps.CreateAppRequest`. * Removed `description` and `resources` fields for `com.databricks.sdk.service.apps.UpdateAppRequest`. * Removed `name` and `spec` fields for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. * Removed `displayName`, `parentPath`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. * Removed `cronSchedule`, `displayName` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. * Removed `subscriber` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. * Removed `displayName`, `etag`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. 
* Removed `cronSchedule`, `displayName`, `etag` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. * Removed `prevPageToken` field for `com.databricks.sdk.service.jobs.Run`. OpenAPI SHA: d25296d2f4aa7bd6195c816fdf82e0f960f775da, Date: 2024-11-07 --- .codegen/_openapi_sha | 2 +- .gitattributes | 62 +++-- CHANGELOG.md | 68 +++++ databricks-sdk-java/pom.xml | 2 +- .../com/databricks/sdk/WorkspaceClient.java | 65 ++--- .../com/databricks/sdk/core/UserAgent.java | 2 +- .../databricks/sdk/service/apps/AppsAPI.java | 60 ++-- .../databricks/sdk/service/apps/AppsImpl.java | 6 +- .../sdk/service/apps/AppsService.java | 4 +- .../apps/CreateAppDeploymentRequest.java | 67 ++--- .../sdk/service/apps/CreateAppRequest.java | 57 +--- .../sdk/service/apps/UpdateAppRequest.java | 50 +--- .../sdk/service/catalog/AwsIamRole.java | 78 ++++++ .../catalog/AzureActiveDirectoryToken.java | 50 ++++ .../service/catalog/AzureManagedIdentity.java | 88 ++++++ .../sdk/service/catalog/ColumnTypeName.java | 1 + .../catalog/CreateCredentialRequest.java | 125 +++++++++ .../catalog/CreateFunctionSecurityType.java | 2 +- .../catalog/CreateOnlineTableRequest.java | 38 +-- .../sdk/service/catalog/CredentialInfo.java | 259 ++++++++++++++++++ .../service/catalog/CredentialPurpose.java | 10 + .../catalog/CredentialValidationResult.java | 58 ++++ .../sdk/service/catalog/CredentialsAPI.java | 138 ++++++++++ .../sdk/service/catalog/CredentialsImpl.java | 78 ++++++ .../service/catalog/CredentialsService.java | 85 ++++++ .../catalog/DeleteCredentialRequest.java | 60 ++++ .../catalog/DeleteCredentialResponse.java | 28 ++ .../service/catalog/ExternalLocationInfo.java | 5 +- .../catalog/FunctionInfoSecurityType.java | 2 +- ...emporaryServiceCredentialAzureOptions.java | 51 ++++ ...rateTemporaryServiceCredentialRequest.java | 60 ++++ ...erateTemporaryTableCredentialResponse.java | 20 ++ .../catalog/GetBindingsSecurableType.java | 3 + 
.../service/catalog/GetCredentialRequest.java | 42 +++ .../sdk/service/catalog/IsolationMode.java | 3 - .../catalog/ListCredentialsRequest.java | 84 ++++++ .../ListCredentialsResponse.java} | 30 +- .../service/catalog/ListTablesRequest.java | 20 ++ .../sdk/service/catalog/OnlineTablesAPI.java | 59 +++- .../sdk/service/catalog/OnlineTablesImpl.java | 2 +- .../sdk/service/catalog/SecurableType.java | 3 + .../catalog/StorageCredentialInfo.java | 21 +- .../service/catalog/TemporaryCredentials.java | 84 ++++++ .../catalog/UpdateBindingsSecurableType.java | 3 + .../catalog/UpdateCredentialRequest.java | 177 ++++++++++++ .../catalog/UpdateExternalLocation.java | 5 +- .../catalog/UpdateStorageCredential.java | 5 +- .../catalog/ValidateCredentialRequest.java | 90 ++++++ .../catalog/ValidateCredentialResponse.java | 43 +++ .../catalog/ValidateCredentialResult.java | 13 + .../service/compute/ClusterPoliciesAPI.java | 5 +- .../compute/ClusterPoliciesService.java | 5 +- .../sdk/service/compute/ClustersAPI.java | 4 +- .../sdk/service/compute/ClustersService.java | 4 +- .../service/compute/CommandExecutionAPI.java | 72 ++--- .../sdk/service/compute/EditCluster.java | 2 +- .../sdk/service/compute/InstancePoolsAPI.java | 3 +- .../service/compute/InstancePoolsService.java | 3 +- .../dashboards/CreateDashboardRequest.java | 78 +----- .../dashboards/CreateScheduleRequest.java | 52 +--- .../dashboards/CreateSubscriptionRequest.java | 21 +- .../sdk/service/dashboards/GenieAPI.java | 4 +- ...a => GenieExecuteMessageQueryRequest.java} | 12 +- .../sdk/service/dashboards/GenieImpl.java | 2 +- .../sdk/service/dashboards/GenieService.java | 2 +- .../sdk/service/dashboards/LakeviewAPI.java | 24 +- .../sdk/service/dashboards/LakeviewImpl.java | 10 +- .../service/dashboards/MessageErrorType.java | 1 + .../sdk/service/dashboards/Schedule.java | 20 +- .../dashboards/UpdateDashboardRequest.java | 86 ++---- .../dashboards/UpdateScheduleRequest.java | 72 +---- .../sdk/service/iam/PermissionsAPI.java 
| 5 +- .../sdk/service/iam/PermissionsService.java | 5 +- .../databricks/sdk/service/iam/UsersAPI.java | 4 +- .../sdk/service/iam/UsersService.java | 4 +- .../sdk/service/jobs/GetRunRequest.java | 4 +- .../databricks/sdk/service/jobs/JobsAPI.java | 4 +- .../sdk/service/jobs/JobsService.java | 4 +- .../sdk/service/jobs/RepairRun.java | 5 +- .../com/databricks/sdk/service/jobs/Run.java | 16 -- .../sdk/service/jobs/RunJobTask.java | 5 +- .../databricks/sdk/service/jobs/RunNow.java | 24 +- .../sdk/service/jobs/RunParameters.java | 5 +- .../databricks/sdk/service/jobs/RunTask.java | 42 +-- .../sdk/service/jobs/SubmitTask.java | 42 +-- .../com/databricks/sdk/service/jobs/Task.java | 42 +-- .../sdk/service/marketplace/AssetType.java | 1 + .../sdk/service/ml/ExperimentsAPI.java | 3 +- .../sdk/service/ml/ExperimentsService.java | 3 +- .../sdk/service/ml/ModelRegistryAPI.java | 5 +- .../sdk/service/ml/ModelRegistryService.java | 5 +- .../ListServicePrincipalSecretsRequest.java | 29 +- .../ListServicePrincipalSecretsResponse.java | 19 +- .../oauth2/ServicePrincipalSecretsAPI.java | 11 +- .../sdk/service/pipelines/CreatePipeline.java | 18 +- .../sdk/service/pipelines/EditPipeline.java | 18 +- .../service/pipelines/IngestionConfig.java | 6 +- .../IngestionGatewayPipelineDefinition.java | 28 +- .../IngestionPipelineDefinition.java | 8 +- .../sdk/service/pipelines/PipelineSpec.java | 18 +- .../sdk/service/pipelines/PipelinesAPI.java | 24 +- .../service/pipelines/PipelinesService.java | 4 +- .../sdk/service/pipelines/RestartWindow.java | 84 ++++++ .../pipelines/RestartWindowDaysOfWeek.java | 20 ++ .../provisioning/CreateWorkspaceRequest.java | 16 ++ .../provisioning/ExternalCustomerInfo.java | 74 +++++ .../provisioning/UpdateWorkspaceRequest.java | 19 ++ .../sdk/service/provisioning/Workspace.java | 35 +++ .../service/serving/ServingEndpointsAPI.java | 5 +- .../serving/ServingEndpointsService.java | 5 +- .../AibiDashboardEmbeddingAccessPolicy.java | 45 +++ 
...AibiDashboardEmbeddingAccessPolicyAPI.java | 64 +++++ ...EmbeddingAccessPolicyAccessPolicyType.java | 12 + ...ibiDashboardEmbeddingAccessPolicyImpl.java | 36 +++ ...DashboardEmbeddingAccessPolicyService.java | 35 +++ ...DashboardEmbeddingAccessPolicySetting.java | 88 ++++++ ...AibiDashboardEmbeddingApprovedDomains.java | 46 ++++ ...iDashboardEmbeddingApprovedDomainsAPI.java | 66 +++++ ...DashboardEmbeddingApprovedDomainsImpl.java | 39 +++ ...hboardEmbeddingApprovedDomainsService.java | 34 +++ ...hboardEmbeddingApprovedDomainsSetting.java | 89 ++++++ ...rdEmbeddingAccessPolicySettingRequest.java | 55 ++++ ...mbeddingApprovedDomainsSettingRequest.java | 55 ++++ .../sdk/service/settings/SettingsAPI.java | 22 ++ .../sdk/service/settings/TokenInfo.java | 16 ++ .../service/settings/TokenManagementAPI.java | 4 +- .../settings/TokenManagementService.java | 4 +- ...rdEmbeddingAccessPolicySettingRequest.java | 82 ++++++ ...mbeddingApprovedDomainsSettingRequest.java | 82 ++++++ .../service/sharing/CentralCleanRoomInfo.java | 109 -------- .../service/sharing/CleanRoomAssetInfo.java | 104 ------- .../sdk/service/sharing/CleanRoomCatalog.java | 75 ----- .../sharing/CleanRoomCatalogUpdate.java | 58 ---- .../sharing/CleanRoomCollaboratorInfo.java | 65 ----- .../sdk/service/sharing/CleanRoomInfo.java | 174 ------------ .../sharing/CleanRoomNotebookInfo.java | 59 ---- .../service/sharing/CleanRoomTableInfo.java | 105 ------- .../sdk/service/sharing/CleanRoomsAPI.java | 125 --------- .../sdk/service/sharing/CleanRoomsImpl.java | 59 ---- .../service/sharing/CleanRoomsService.java | 71 ----- .../sdk/service/sharing/ColumnInfo.java | 221 --------------- .../sdk/service/sharing/ColumnMask.java | 64 ----- .../sdk/service/sharing/ColumnTypeName.java | 31 --- .../sdk/service/sharing/CreateCleanRoom.java | 74 ----- .../sharing/DeleteCleanRoomRequest.java | 42 --- .../service/sharing/GetCleanRoomRequest.java | 61 ----- .../sharing/ListCleanRoomsRequest.java | 68 ----- 
.../sdk/service/sharing/UpdateCleanRoom.java | 90 ------ .../sdk/service/sql/ChannelName.java | 2 +- .../service/sql/StatementExecutionAPI.java | 10 +- .../sql/StatementExecutionService.java | 10 +- .../sdk/service/sql/WarehousesAPI.java | 3 +- .../sdk/service/sql/WarehousesService.java | 3 +- .../sdk/service/workspace/ImportFormat.java | 1 + .../sdk/service/workspace/ReposAPI.java | 4 +- .../sdk/service/workspace/ReposService.java | 4 +- .../sdk/service/workspace/WorkspaceAPI.java | 5 +- .../service/workspace/WorkspaceService.java | 5 +- examples/docs/pom.xml | 2 +- examples/spring-boot-oauth-u2m-demo/pom.xml | 2 +- pom.xml | 2 +- shaded/pom.xml | 2 +- 162 files changed, 3677 insertions(+), 2426 deletions(-) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java create mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java rename databricks-sdk-java/src/main/java/com/databricks/sdk/service/{sharing/ListCleanRoomsResponse.java => catalog/ListCredentialsResponse.java} (58%) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java rename databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/{ExecuteMessageQueryRequest.java => GenieExecuteMessageQueryRequest.java} (78%) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java create mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java delete mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java delete mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 2d9cb6d8..5f4b5086 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -cf9c61453990df0f9453670f2fe68e1b128647a2 \ No newline at end of file +d25296d2f4aa7bd6195c816fdf82e0f960f775da \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index ce331956..a1050873 100755 --- a/.gitattributes +++ b/.gitattributes @@ -152,8 +152,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMat databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true @@ -180,6 +183,7 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Connections databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -197,7 +201,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchem databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true @@ -209,6 +219,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAlias databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -257,6 +269,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAP databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java linguist-generated=true @@ -268,6 +282,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindings databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true @@ -298,6 +313,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalog databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true @@ -419,6 +436,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true @@ -429,6 +447,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssig 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true @@ -445,6 +464,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java linguist-generated=true @@ -710,11 +732,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSc databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true @@ -1528,6 +1550,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Pipelines databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java linguist-generated=true @@ -1578,6 +1602,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Encryp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java linguist-generated=true @@ -1729,6 +1754,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpA databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java linguist-generated=true @@ -1821,6 +1857,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java linguist-generated=true @@ -1915,6 +1953,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java linguist-generated=true @@ -1933,39 +1973,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceC databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java linguist-generated=true @@ -2008,7 +2029,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataO databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 2af228aa..5bb9782c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,73 @@ # Version changelog +## [Release] Release v0.35.0 + +### New Features and Improvements + + * DatabricksConfig: Add clone() support ([#376](https://github.com/databricks/databricks-sdk-java/pull/376)). + + +### Bug Fixes + + * Fix vulnerabilities in the present SDK version ([#383](https://github.com/databricks/databricks-sdk-java/pull/383)). + + +### Internal Changes + + * Add test instructions for external contributors ([#370](https://github.com/databricks/databricks-sdk-java/pull/370)). + * Always write message for manual test integration ([#374](https://github.com/databricks/databricks-sdk-java/pull/374)). + * Automatically trigger integration tests on PR ([#369](https://github.com/databricks/databricks-sdk-java/pull/369)). + * Move templates in the code generator ([#373](https://github.com/databricks/databricks-sdk-java/pull/373)). + * Refresh PR template ([#381](https://github.com/databricks/databricks-sdk-java/pull/381)). + + +### API Changes: + + * Added `workspaceClient.aibiDashboardEmbeddingAccessPolicy()` service and `workspaceClient.aibiDashboardEmbeddingApprovedDomains()` service. 
+ * Added `workspaceClient.credentials()` service. + * Added `appDeployment` field for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Added `app` field for `com.databricks.sdk.service.apps.CreateAppRequest`. + * Added `app` field for `com.databricks.sdk.service.apps.UpdateAppRequest`. + * Added `table` field for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. + * Added `azureAad` field for `com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`. + * Added `omitUsername` field for `com.databricks.sdk.service.catalog.ListTablesRequest`. + * Added `fullName` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. + * Added `dashboard` field for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. + * Added `schedule` field for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. + * Added `subscription` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. + * Added `warehouseId` field for `com.databricks.sdk.service.dashboards.Schedule`. + * Added `dashboard` field for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. + * Added `schedule` field for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. + * Added `only` field for `com.databricks.sdk.service.jobs.RunNow`. + * Added `pageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`. + * Added `nextPageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`. + * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. + * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.EditPipeline`. + * Added `connectionName` field for `com.databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`. + * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. 
+ * Added `isNoPublicIpEnabled` field for `com.databricks.sdk.service.provisioning.CreateWorkspaceRequest`. + * Added `privateAccessSettingsId` field for `com.databricks.sdk.service.provisioning.UpdateWorkspaceRequest`. + * Added `externalCustomerInfo` and `isNoPublicIpEnabled` fields for `com.databricks.sdk.service.provisioning.Workspace`. + * Added `lastUsedDay` field for `com.databricks.sdk.service.settings.TokenInfo`. + * Changed `create()` method for `workspaceClient.apps()` service with new required argument order. + * Changed `executeMessageQuery()` method for `workspaceClient.genie()` service . New request type is `com.databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` class. + * Changed `executeMessageQuery()` method for `workspaceClient.genie()` service to type `executeMessageQuery()` method for `workspaceClient.genie()` service. + * Changed `create()`, `createSchedule()`, `createSubscription()` and `updateSchedule()` methods for `workspaceClient.lakeview()` service with new required argument order. + * Removed `workspaceClient.cleanRooms()` service. + * Removed `deploymentId`, `mode` and `sourceCodePath` fields for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Removed `description`, `name` and `resources` fields for `com.databricks.sdk.service.apps.CreateAppRequest`. + * Removed `description` and `resources` fields for `com.databricks.sdk.service.apps.UpdateAppRequest`. + * Removed `name` and `spec` fields for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. + * Removed `displayName`, `parentPath`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. + * Removed `cronSchedule`, `displayName` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. + * Removed `subscriber` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. 
+ * Removed `displayName`, `etag`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. + * Removed `cronSchedule`, `displayName`, `etag` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. + * Removed `prevPageToken` field for `com.databricks.sdk.service.jobs.Run`. + +OpenAPI SHA: d25296d2f4aa7bd6195c816fdf82e0f960f775da, Date: 2024-11-07 + + ## [Release] Release v0.34.0 ### New Features and Improvements diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index a49f2205..8a531596 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.34.0 + 0.35.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index c9a11f3f..655080d9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -16,6 +16,8 @@ import com.databricks.sdk.service.catalog.CatalogsService; import com.databricks.sdk.service.catalog.ConnectionsAPI; import com.databricks.sdk.service.catalog.ConnectionsService; +import com.databricks.sdk.service.catalog.CredentialsAPI; +import com.databricks.sdk.service.catalog.CredentialsService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; import com.databricks.sdk.service.catalog.FunctionsAPI; @@ -138,8 +140,6 @@ import com.databricks.sdk.service.settings.TokensService; import com.databricks.sdk.service.settings.WorkspaceConfAPI; import com.databricks.sdk.service.settings.WorkspaceConfService; -import com.databricks.sdk.service.sharing.CleanRoomsAPI; -import com.databricks.sdk.service.sharing.CleanRoomsService; import 
com.databricks.sdk.service.sharing.ProvidersAPI; import com.databricks.sdk.service.sharing.ProvidersService; import com.databricks.sdk.service.sharing.RecipientActivationAPI; @@ -199,7 +199,6 @@ public class WorkspaceClient { private AppsAPI appsAPI; private ArtifactAllowlistsAPI artifactAllowlistsAPI; private CatalogsAPI catalogsAPI; - private CleanRoomsAPI cleanRoomsAPI; private ClusterPoliciesAPI clusterPoliciesAPI; private ClustersExt clustersAPI; private CommandExecutionAPI commandExecutionAPI; @@ -209,6 +208,7 @@ public class WorkspaceClient { private ConsumerListingsAPI consumerListingsAPI; private ConsumerPersonalizationRequestsAPI consumerPersonalizationRequestsAPI; private ConsumerProvidersAPI consumerProvidersAPI; + private CredentialsAPI credentialsAPI; private CredentialsManagerAPI credentialsManagerAPI; private CurrentUserAPI currentUserAPI; private DashboardWidgetsAPI dashboardWidgetsAPI; @@ -298,7 +298,6 @@ public WorkspaceClient(DatabricksConfig config) { appsAPI = new AppsAPI(apiClient); artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient); catalogsAPI = new CatalogsAPI(apiClient); - cleanRoomsAPI = new CleanRoomsAPI(apiClient); clusterPoliciesAPI = new ClusterPoliciesAPI(apiClient); clustersAPI = new ClustersExt(apiClient); commandExecutionAPI = new CommandExecutionAPI(apiClient); @@ -308,6 +307,7 @@ public WorkspaceClient(DatabricksConfig config) { consumerListingsAPI = new ConsumerListingsAPI(apiClient); consumerPersonalizationRequestsAPI = new ConsumerPersonalizationRequestsAPI(apiClient); consumerProvidersAPI = new ConsumerProvidersAPI(apiClient); + credentialsAPI = new CredentialsAPI(apiClient); credentialsManagerAPI = new CredentialsManagerAPI(apiClient); currentUserAPI = new CurrentUserAPI(apiClient); dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); @@ -459,18 +459,6 @@ public CatalogsAPI catalogs() { return catalogsAPI; } - /** - * A clean room is a secure, privacy-protecting environment where two or more parties can 
share - * sensitive enterprise data, including customer data, for measurements, insights, activation and - * other use cases. - * - *

To create clean rooms, you must be a metastore admin or a user with the - * **CREATE_CLEAN_ROOM** privilege. - */ - public CleanRoomsAPI cleanRooms() { - return cleanRoomsAPI; - } - /** * You can use cluster policies to control users' ability to configure clusters based on a set of * rules. These rules specify which attributes or attribute values can be used during cluster @@ -580,6 +568,19 @@ public ConsumerProvidersAPI consumerProviders() { return consumerProvidersAPI; } + /** + * A credential represents an authentication and authorization mechanism for accessing services on + * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that + * control which users and groups can access the credential. + * + *

To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE + * CREDENTIAL privilege. The user who creates the credential can delegate ownership to another + * user or group to manage permissions on it + */ + public CredentialsAPI credentials() { + return credentialsAPI; + } + /** * Credentials manager interacts with with Identity Providers to to perform token exchanges using * stored credentials and refresh tokens. @@ -1453,11 +1454,11 @@ public SharesAPI shares() { * might have already completed execution when the cancel request arrives. Polling for status * until a terminal state is reached is a reliable way to determine the final state. - Wait * timeouts are approximate, occur server-side, and cannot account for things such as caller - * delays and network latency from caller to service. - The system will auto-close a statement - * after one hour if the client stops polling and thus you must poll at least once an hour. - The - * results are only available for one hour after success; polling does not extend this. - The SQL - * Execution API must be used for the entire lifecycle of the statement. For example, you cannot - * use the Jobs API to execute the command, and then the SQL Execution API to cancel it. + * delays and network latency from caller to service. - To guarantee that the statement is kept + * alive, you must poll at least once every 15 minutes. - The results are only available for one + * hour after success; polling does not extend this. - The SQL Execution API must be used for the + * entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the + * command, and then the SQL Execution API to cancel it. * *

[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement * Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html @@ -1721,17 +1722,6 @@ public WorkspaceClient withCatalogsAPI(CatalogsAPI catalogs) { return this; } - /** Replace the default CleanRoomsService with a custom implementation. */ - public WorkspaceClient withCleanRoomsImpl(CleanRoomsService cleanRooms) { - return this.withCleanRoomsAPI(new CleanRoomsAPI(cleanRooms)); - } - - /** Replace the default CleanRoomsAPI with a custom implementation. */ - public WorkspaceClient withCleanRoomsAPI(CleanRoomsAPI cleanRooms) { - this.cleanRoomsAPI = cleanRooms; - return this; - } - /** Replace the default ClusterPoliciesService with a custom implementation. */ public WorkspaceClient withClusterPoliciesImpl(ClusterPoliciesService clusterPolicies) { return this.withClusterPoliciesAPI(new ClusterPoliciesAPI(clusterPolicies)); @@ -1837,6 +1827,17 @@ public WorkspaceClient withConsumerProvidersAPI(ConsumerProvidersAPI consumerPro return this; } + /** Replace the default CredentialsService with a custom implementation. */ + public WorkspaceClient withCredentialsImpl(CredentialsService credentials) { + return this.withCredentialsAPI(new CredentialsAPI(credentials)); + } + + /** Replace the default CredentialsAPI with a custom implementation. */ + public WorkspaceClient withCredentialsAPI(CredentialsAPI credentials) { + this.credentialsAPI = credentials; + return this; + } + /** Replace the default CredentialsManagerService with a custom implementation. 
*/ public WorkspaceClient withCredentialsManagerImpl(CredentialsManagerService credentialsManager) { return this.withCredentialsManagerAPI(new CredentialsManagerAPI(credentialsManager)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index 56c817d7..4d9698f8 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -32,7 +32,7 @@ public String getValue() { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties // or getClass().getPackage().getImplementationVersion() is enough. - private static final String version = "0.34.0"; + private static final String version = "0.35.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java index 35d1b609..4b611f21 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java @@ -81,23 +81,27 @@ public App waitGetAppActive(String name, Duration timeout, Consumer callbac throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public App waitGetAppStopped(String name) throws TimeoutException { - return waitGetAppStopped(name, Duration.ofMinutes(20), null); + public AppDeployment waitGetDeploymentAppSucceeded(String appName, String deploymentId) + throws TimeoutException { + return waitGetDeploymentAppSucceeded(appName, deploymentId, Duration.ofMinutes(20), null); } - public App waitGetAppStopped(String name, Duration timeout, Consumer callback) + public AppDeployment waitGetDeploymentAppSucceeded( 
+ String appName, String deploymentId, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(ComputeState.STOPPED); - java.util.List failureStates = Arrays.asList(ComputeState.ERROR); + java.util.List targetStates = Arrays.asList(AppDeploymentState.SUCCEEDED); + java.util.List failureStates = Arrays.asList(AppDeploymentState.FAILED); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - App poll = get(new GetAppRequest().setName(name)); - ComputeState status = poll.getComputeStatus().getState(); + AppDeployment poll = + getDeployment( + new GetAppDeploymentRequest().setAppName(appName).setDeploymentId(deploymentId)); + AppDeploymentState status = poll.getStatus().getState(); statusMessage = String.format("current status: %s", status); - if (poll.getComputeStatus() != null) { - statusMessage = poll.getComputeStatus().getMessage(); + if (poll.getStatus() != null) { + statusMessage = poll.getStatus().getMessage(); } if (targetStates.contains(status)) { return poll; @@ -106,11 +110,11 @@ public App waitGetAppStopped(String name, Duration timeout, Consumer callba callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach STOPPED, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = String.format("name=%s", name); + String prefix = String.format("appName=%s, deploymentId=%s", appName, deploymentId); int sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt @@ -128,27 +132,23 @@ public App waitGetAppStopped(String name, Duration timeout, Consumer callba throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public AppDeployment waitGetDeploymentAppSucceeded(String 
appName, String deploymentId) - throws TimeoutException { - return waitGetDeploymentAppSucceeded(appName, deploymentId, Duration.ofMinutes(20), null); + public App waitGetAppStopped(String name) throws TimeoutException { + return waitGetAppStopped(name, Duration.ofMinutes(20), null); } - public AppDeployment waitGetDeploymentAppSucceeded( - String appName, String deploymentId, Duration timeout, Consumer callback) + public App waitGetAppStopped(String name, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(AppDeploymentState.SUCCEEDED); - java.util.List failureStates = Arrays.asList(AppDeploymentState.FAILED); + java.util.List targetStates = Arrays.asList(ComputeState.STOPPED); + java.util.List failureStates = Arrays.asList(ComputeState.ERROR); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - AppDeployment poll = - getDeployment( - new GetAppDeploymentRequest().setAppName(appName).setDeploymentId(deploymentId)); - AppDeploymentState status = poll.getStatus().getState(); + App poll = get(new GetAppRequest().setName(name)); + ComputeState status = poll.getComputeStatus().getState(); statusMessage = String.format("current status: %s", status); - if (poll.getStatus() != null) { - statusMessage = poll.getStatus().getMessage(); + if (poll.getComputeStatus() != null) { + statusMessage = poll.getComputeStatus().getMessage(); } if (targetStates.contains(status)) { return poll; @@ -157,11 +157,11 @@ public AppDeployment waitGetDeploymentAppSucceeded( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach STOPPED, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = String.format("appName=%s, deploymentId=%s", 
appName, deploymentId); + String prefix = String.format("name=%s", name); int sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt @@ -179,10 +179,6 @@ public AppDeployment waitGetDeploymentAppSucceeded( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public Wait create(String name) { - return create(new CreateAppRequest().setName(name)); - } - /** * Create an app. * @@ -327,7 +323,9 @@ public AppPermissions setPermissions(String appName) { /** * Set app permissions. * - *

Sets permissions on an app. Apps can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public AppPermissions setPermissions(AppPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java index f6936a13..e4530664 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java @@ -21,7 +21,7 @@ public App create(CreateAppRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, App.class, headers); + return apiClient.POST(path, request.getApp(), App.class, headers); } @Override @@ -38,7 +38,7 @@ public AppDeployment deploy(CreateAppDeploymentRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, AppDeployment.class, headers); + return apiClient.POST(path, request.getAppDeployment(), AppDeployment.class, headers); } @Override @@ -125,7 +125,7 @@ public App update(UpdateAppRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, App.class, headers); + return apiClient.PATCH(path, request.getApp(), App.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java index 26e0310e..d5909455 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java @@ -80,7 +80,9 @@ GetAppPermissionLevelsResponse getPermissionLevels( /** * Set app permissions. * - *

Sets permissions on an app. Apps can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ AppPermissions setPermissions(AppPermissionsRequest appPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java index 3952d58b..1d042567 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java @@ -8,28 +8,24 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create an app deployment */ @Generated public class CreateAppDeploymentRequest { + /** */ + @JsonProperty("app_deployment") + private AppDeployment appDeployment; + /** The name of the app. */ @JsonIgnore private String appName; - /** The unique id of the deployment. */ - @JsonProperty("deployment_id") - private String deploymentId; - - /** The mode of which the deployment will manage the source code. */ - @JsonProperty("mode") - private AppDeploymentMode mode; + public CreateAppDeploymentRequest setAppDeployment(AppDeployment appDeployment) { + this.appDeployment = appDeployment; + return this; + } - /** - * The workspace file system path of the source code used to create the app deployment. This is - * different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - * app. The former refers to the original source code location of the app in the workspace during - * deployment creation, whereas the latter provides a system generated stable snapshotted source - * code path used by the deployment. 
- */ - @JsonProperty("source_code_path") - private String sourceCodePath; + public AppDeployment getAppDeployment() { + return appDeployment; + } public CreateAppDeploymentRequest setAppName(String appName) { this.appName = appName; @@ -40,56 +36,25 @@ public String getAppName() { return appName; } - public CreateAppDeploymentRequest setDeploymentId(String deploymentId) { - this.deploymentId = deploymentId; - return this; - } - - public String getDeploymentId() { - return deploymentId; - } - - public CreateAppDeploymentRequest setMode(AppDeploymentMode mode) { - this.mode = mode; - return this; - } - - public AppDeploymentMode getMode() { - return mode; - } - - public CreateAppDeploymentRequest setSourceCodePath(String sourceCodePath) { - this.sourceCodePath = sourceCodePath; - return this; - } - - public String getSourceCodePath() { - return sourceCodePath; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAppDeploymentRequest that = (CreateAppDeploymentRequest) o; - return Objects.equals(appName, that.appName) - && Objects.equals(deploymentId, that.deploymentId) - && Objects.equals(mode, that.mode) - && Objects.equals(sourceCodePath, that.sourceCodePath); + return Objects.equals(appDeployment, that.appDeployment) + && Objects.equals(appName, that.appName); } @Override public int hashCode() { - return Objects.hash(appName, deploymentId, mode, sourceCodePath); + return Objects.hash(appDeployment, appName); } @Override public String toString() { return new ToStringer(CreateAppDeploymentRequest.class) + .add("appDeployment", appDeployment) .add("appName", appName) - .add("deploymentId", deploymentId) - .add("mode", mode) - .add("sourceCodePath", sourceCodePath) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java index 
e835442a..7d1076bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java @@ -5,51 +5,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; +/** Create an app */ @Generated public class CreateAppRequest { - /** The description of the app. */ - @JsonProperty("description") - private String description; + /** */ + @JsonProperty("app") + private App app; - /** - * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - * It must be unique within the workspace. - */ - @JsonProperty("name") - private String name; - - /** Resources for the app. */ - @JsonProperty("resources") - private Collection resources; - - public CreateAppRequest setDescription(String description) { - this.description = description; - return this; - } - - public String getDescription() { - return description; - } - - public CreateAppRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CreateAppRequest setResources(Collection resources) { - this.resources = resources; + public CreateAppRequest setApp(App app) { + this.app = app; return this; } - public Collection getResources() { - return resources; + public App getApp() { + return app; } @Override @@ -57,22 +28,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAppRequest that = (CreateAppRequest) o; - return Objects.equals(description, that.description) - && Objects.equals(name, that.name) - && Objects.equals(resources, that.resources); + return Objects.equals(app, that.app); } @Override public int hashCode() { - return Objects.hash(description, name, resources); + 
return Objects.hash(app); } @Override public String toString() { - return new ToStringer(CreateAppRequest.class) - .add("description", description) - .add("name", name) - .add("resources", resources) - .toString(); + return new ToStringer(CreateAppRequest.class).add("app", app).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java index da8b8c4c..4727ea97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java @@ -4,34 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; +/** Update an app */ @Generated public class UpdateAppRequest { - /** The description of the app. */ - @JsonProperty("description") - private String description; + /** */ + @JsonProperty("app") + private App app; - /** - * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - * It must be unique within the workspace. - */ - @JsonProperty("name") - private String name; + /** The name of the app. */ + @JsonIgnore private String name; - /** Resources for the app. 
*/ - @JsonProperty("resources") - private Collection resources; - - public UpdateAppRequest setDescription(String description) { - this.description = description; + public UpdateAppRequest setApp(App app) { + this.app = app; return this; } - public String getDescription() { - return description; + public App getApp() { + return app; } public UpdateAppRequest setName(String name) { @@ -43,36 +36,21 @@ public String getName() { return name; } - public UpdateAppRequest setResources(Collection resources) { - this.resources = resources; - return this; - } - - public Collection getResources() { - return resources; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateAppRequest that = (UpdateAppRequest) o; - return Objects.equals(description, that.description) - && Objects.equals(name, that.name) - && Objects.equals(resources, that.resources); + return Objects.equals(app, that.app) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(description, name, resources); + return Objects.hash(app, name); } @Override public String toString() { - return new ToStringer(UpdateAppRequest.class) - .add("description", description) - .add("name", name) - .add("resources", resources) - .toString(); + return new ToStringer(UpdateAppRequest.class).add("app", app).add("name", name).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java new file mode 100755 index 00000000..628bed84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The AWS IAM role configuration */ +@Generated +public class AwsIamRole { + /** The external ID used in role assumption to prevent the confused deputy problem. */ + @JsonProperty("external_id") + private String externalId; + + /** The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials. */ + @JsonProperty("role_arn") + private String roleArn; + + /** + * The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity + * that is going to assume the AWS IAM role. + */ + @JsonProperty("unity_catalog_iam_arn") + private String unityCatalogIamArn; + + public AwsIamRole setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String getExternalId() { + return externalId; + } + + public AwsIamRole setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + public AwsIamRole setUnityCatalogIamArn(String unityCatalogIamArn) { + this.unityCatalogIamArn = unityCatalogIamArn; + return this; + } + + public String getUnityCatalogIamArn() { + return unityCatalogIamArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsIamRole that = (AwsIamRole) o; + return Objects.equals(externalId, that.externalId) + && Objects.equals(roleArn, that.roleArn) + && Objects.equals(unityCatalogIamArn, that.unityCatalogIamArn); + } + + @Override + public int hashCode() { + return Objects.hash(externalId, roleArn, unityCatalogIamArn); + } + + @Override + public String toString() { + return new ToStringer(AwsIamRole.class) + .add("externalId", externalId) + .add("roleArn", roleArn) + 
.add("unityCatalogIamArn", unityCatalogIamArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java new file mode 100755 index 00000000..b545ea99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + * Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ +@Generated +public class AzureActiveDirectoryToken { + /** + * Opaque token that contains claims that you can use in Azure Active Directory to access cloud + * services. 
+ */ + @JsonProperty("aad_token") + private String aadToken; + + public AzureActiveDirectoryToken setAadToken(String aadToken) { + this.aadToken = aadToken; + return this; + } + + public String getAadToken() { + return aadToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureActiveDirectoryToken that = (AzureActiveDirectoryToken) o; + return Objects.equals(aadToken, that.aadToken); + } + + @Override + public int hashCode() { + return Objects.hash(aadToken); + } + + @Override + public String toString() { + return new ToStringer(AzureActiveDirectoryToken.class).add("aadToken", aadToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java new file mode 100755 index 00000000..be399782 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The Azure managed identity configuration. */ +@Generated +public class AzureManagedIdentity { + /** + * The Azure resource ID of the Azure Databricks Access Connector. Use the format + * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`. + */ + @JsonProperty("access_connector_id") + private String accessConnectorId; + + /** + * The Databricks internal ID that represents this managed identity. 
This field is only used to + * persist the credential_id once it is fetched from the credentials manager - as we only use the + * protobuf serializer to store credentials, this ID gets persisted to the database. . + */ + @JsonProperty("credential_id") + private String credentialId; + + /** + * The Azure resource ID of the managed identity. Use the format, + * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}` + * This is only available for user-assgined identities. For system-assigned identities, the + * access_connector_id is used to identify the identity. If this field is not provided, then we + * assume the AzureManagedIdentity is using the system-assigned identity. + */ + @JsonProperty("managed_identity_id") + private String managedIdentityId; + + public AzureManagedIdentity setAccessConnectorId(String accessConnectorId) { + this.accessConnectorId = accessConnectorId; + return this; + } + + public String getAccessConnectorId() { + return accessConnectorId; + } + + public AzureManagedIdentity setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public AzureManagedIdentity setManagedIdentityId(String managedIdentityId) { + this.managedIdentityId = managedIdentityId; + return this; + } + + public String getManagedIdentityId() { + return managedIdentityId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureManagedIdentity that = (AzureManagedIdentity) o; + return Objects.equals(accessConnectorId, that.accessConnectorId) + && Objects.equals(credentialId, that.credentialId) + && Objects.equals(managedIdentityId, that.managedIdentityId); + } + + @Override + public int hashCode() { + return Objects.hash(accessConnectorId, credentialId, managedIdentityId); + } + + @Override + public String 
toString() { + return new ToStringer(AzureManagedIdentity.class) + .add("accessConnectorId", accessConnectorId) + .add("credentialId", credentialId) + .add("managedIdentityId", managedIdentityId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java index 62916683..ff2e8f11 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java @@ -28,4 +28,5 @@ public enum ColumnTypeName { TIMESTAMP, TIMESTAMP_NTZ, USER_DEFINED_TYPE, + VARIANT, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java new file mode 100755 index 00000000..b4a9bbe2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java @@ -0,0 +1,125 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateCredentialRequest { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** + * The credential name. The name must be unique among storage and service credentials within the + * metastore. 
+ */ + @JsonProperty("name") + private String name; + + /** Indicates the purpose of the credential. */ + @JsonProperty("purpose") + private CredentialPurpose purpose; + + /** + * Optional. Supplying true to this argument skips validation of the created set of credentials. + */ + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public CreateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public CreateCredentialRequest setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CreateCredentialRequest setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateCredentialRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateCredentialRequest setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public CreateCredentialRequest setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialRequest that = (CreateCredentialRequest) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(purpose, that.purpose) + && Objects.equals(skipValidation, 
that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash(awsIamRole, azureManagedIdentity, comment, name, purpose, skipValidation); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialRequest.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("comment", comment) + .add("name", name) + .add("purpose", purpose) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java index ef46d6a4..a0b13a4e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Function security type. */ +/** The security type of the function. */ @Generated public enum CreateFunctionSecurityType { DEFINER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java index 4b77e9b1..7f3a0730 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java @@ -7,33 +7,20 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Online Table information. */ +/** Create an Online Table */ @Generated public class CreateOnlineTableRequest { - /** Full three-part (catalog, schema, table) name of the table. */ - @JsonProperty("name") - private String name; + /** Online Table information. 
*/ + @JsonProperty("table") + private OnlineTable table; - /** Specification of the online table. */ - @JsonProperty("spec") - private OnlineTableSpec spec; - - public CreateOnlineTableRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CreateOnlineTableRequest setSpec(OnlineTableSpec spec) { - this.spec = spec; + public CreateOnlineTableRequest setTable(OnlineTable table) { + this.table = table; return this; } - public OnlineTableSpec getSpec() { - return spec; + public OnlineTable getTable() { + return table; } @Override @@ -41,19 +28,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateOnlineTableRequest that = (CreateOnlineTableRequest) o; - return Objects.equals(name, that.name) && Objects.equals(spec, that.spec); + return Objects.equals(table, that.table); } @Override public int hashCode() { - return Objects.hash(name, spec); + return Objects.hash(table); } @Override public String toString() { - return new ToStringer(CreateOnlineTableRequest.class) - .add("name", name) - .add("spec", spec) - .toString(); + return new ToStringer(CreateOnlineTableRequest.class).add("table", table).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java new file mode 100755 index 00000000..8945b6c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java @@ -0,0 +1,259 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CredentialInfo { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** Time at which this credential was created, in epoch milliseconds. */ + @JsonProperty("created_at") + private Long createdAt; + + /** Username of credential creator. */ + @JsonProperty("created_by") + private String createdBy; + + /** The full name of the credential. */ + @JsonProperty("full_name") + private String fullName; + + /** The unique identifier of the credential. */ + @JsonProperty("id") + private String id; + + /** + * Whether the current securable is accessible from all workspaces or a specific set of + * workspaces. + */ + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + /** Unique identifier of the parent metastore. */ + @JsonProperty("metastore_id") + private String metastoreId; + + /** + * The credential name. The name must be unique among storage and service credentials within the + * metastore. + */ + @JsonProperty("name") + private String name; + + /** Username of current owner of credential. */ + @JsonProperty("owner") + private String owner; + + /** Indicates the purpose of the credential. */ + @JsonProperty("purpose") + private CredentialPurpose purpose; + + /** Time at which this credential was last modified, in epoch milliseconds. */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** Username of user who last modified the credential. 
*/ + @JsonProperty("updated_by") + private String updatedBy; + + public CredentialInfo setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public CredentialInfo setAzureManagedIdentity(AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CredentialInfo setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CredentialInfo setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public CredentialInfo setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public CredentialInfo setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public CredentialInfo setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CredentialInfo setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public CredentialInfo setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public CredentialInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CredentialInfo setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public CredentialInfo setPurpose(CredentialPurpose 
purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public CredentialInfo setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public CredentialInfo setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialInfo that = (CredentialInfo) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(fullName, that.fullName) + && Objects.equals(id, that.id) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(purpose, that.purpose) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + comment, + createdAt, + createdBy, + fullName, + id, + isolationMode, + metastoreId, + name, + owner, + purpose, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(CredentialInfo.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("fullName", fullName) + .add("id", id) + .add("isolationMode", isolationMode) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("purpose", purpose) 
+ .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java new file mode 100755 index 00000000..ec09daad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum CredentialPurpose { + SERVICE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java new file mode 100755 index 00000000..a823a534 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CredentialValidationResult { + /** Error message would exist when the result does not equal to **PASS**. */ + @JsonProperty("message") + private String message; + + /** The results of the tested operation. 
*/ + @JsonProperty("result") + private ValidateCredentialResult result; + + public CredentialValidationResult setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public CredentialValidationResult setResult(ValidateCredentialResult result) { + this.result = result; + return this; + } + + public ValidateCredentialResult getResult() { + return result; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialValidationResult that = (CredentialValidationResult) o; + return Objects.equals(message, that.message) && Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(message, result); + } + + @Override + public String toString() { + return new ToStringer(CredentialValidationResult.class) + .add("message", message) + .add("result", result) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java new file mode 100755 index 00000000..659f078a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java @@ -0,0 +1,138 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A credential represents an authentication and authorization mechanism for accessing services on + * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that + * control which users and groups can access the credential. + * + *

To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user + * or group to manage permissions on it. + */ +@Generated +public class CredentialsAPI { + private static final Logger LOG = LoggerFactory.getLogger(CredentialsAPI.class); + + private final CredentialsService impl; + + /** Regular-use constructor */ + public CredentialsAPI(ApiClient apiClient) { + impl = new CredentialsImpl(apiClient); + } + + /** Constructor for mocks */ + public CredentialsAPI(CredentialsService mock) { + impl = mock; + } + + /** + * Create a credential. + * + *

Creates a new credential. + */ + public CredentialInfo createCredential(CreateCredentialRequest request) { + return impl.createCredential(request); + } + + public void deleteCredential(String nameArg) { + deleteCredential(new DeleteCredentialRequest().setNameArg(nameArg)); + } + + /** + * Delete a credential. + * + *

Deletes a credential from the metastore. The caller must be an owner of the credential. + */ + public void deleteCredential(DeleteCredentialRequest request) { + impl.deleteCredential(request); + } + + /** + * Generate a temporary service credential. + * + *

Returns a set of temporary credentials generated using the specified service credential. The + * caller must be a metastore admin or have the metastore privilege **ACCESS** on the service + * credential. + */ + public TemporaryCredentials generateTemporaryServiceCredential( + GenerateTemporaryServiceCredentialRequest request) { + return impl.generateTemporaryServiceCredential(request); + } + + public CredentialInfo getCredential(String nameArg) { + return getCredential(new GetCredentialRequest().setNameArg(nameArg)); + } + + /** + * Get a credential. + * + *

Gets a credential from the metastore. The caller must be a metastore admin, the owner of the + * credential, or have any permission on the credential. + */ + public CredentialInfo getCredential(GetCredentialRequest request) { + return impl.getCredential(request); + } + + /** + * List credentials. + * + *

Gets an array of credentials (as __CredentialInfo__ objects). + * + *

The array is limited to only the credentials that the caller has permission to access. If + * the caller is a metastore admin, retrieval of credentials is unrestricted. There is no + * guarantee of a specific ordering of the elements in the array. + */ + public Iterable listCredentials(ListCredentialsRequest request) { + return new Paginator<>( + request, + impl::listCredentials, + ListCredentialsResponse::getCredentials, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public CredentialInfo updateCredential(String nameArg) { + return updateCredential(new UpdateCredentialRequest().setNameArg(nameArg)); + } + + /** + * Update a credential. + * + *

Updates a credential on the metastore. + * + *

The caller must be the owner of the credential or a metastore admin or have the `MANAGE` + * permission. If the caller is a metastore admin, only the __owner__ field can be changed. + */ + public CredentialInfo updateCredential(UpdateCredentialRequest request) { + return impl.updateCredential(request); + } + + /** + * Validate a credential. + * + *

Validates a credential. + * + *

Either the __credential_name__ or the cloud-specific credential must be provided. + * + *

The caller must be a metastore admin or the credential owner. + */ + public ValidateCredentialResponse validateCredential(ValidateCredentialRequest request) { + return impl.validateCredential(request); + } + + public CredentialsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java new file mode 100755 index 00000000..b2aad264 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of Credentials */ +@Generated +class CredentialsImpl implements CredentialsService { + private final ApiClient apiClient; + + public CredentialsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CredentialInfo createCredential(CreateCredentialRequest request) { + String path = "/api/2.1/unity-catalog/credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CredentialInfo.class, headers); + } + + @Override + public void deleteCredential(DeleteCredentialRequest request) { + String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteCredentialResponse.class, headers); + } + + @Override + public TemporaryCredentials generateTemporaryServiceCredential( + GenerateTemporaryServiceCredentialRequest request) { + String path = 
"/api/2.1/unity-catalog/temporary-service-credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, TemporaryCredentials.class, headers); + } + + @Override + public CredentialInfo getCredential(GetCredentialRequest request) { + String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, CredentialInfo.class, headers); + } + + @Override + public ListCredentialsResponse listCredentials(ListCredentialsRequest request) { + String path = "/api/2.1/unity-catalog/credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListCredentialsResponse.class, headers); + } + + @Override + public CredentialInfo updateCredential(UpdateCredentialRequest request) { + String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, CredentialInfo.class, headers); + } + + @Override + public ValidateCredentialResponse validateCredential(ValidateCredentialRequest request) { + String path = "/api/2.1/unity-catalog/validate-credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, ValidateCredentialResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java new file mode 100755 index 00000000..2317a707 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * A credential represents an authentication and authorization mechanism for accessing services on + * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that + * control which users and groups can access the credential. + * + *

To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user + * or group to manage permissions on it. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface CredentialsService { + /** + * Create a credential. + * + *

Creates a new credential. + */ + CredentialInfo createCredential(CreateCredentialRequest createCredentialRequest); + + /** + * Delete a credential. + * + *

Deletes a credential from the metastore. The caller must be an owner of the credential. + */ + void deleteCredential(DeleteCredentialRequest deleteCredentialRequest); + + /** + * Generate a temporary service credential. + * + *

Returns a set of temporary credentials generated using the specified service credential. The + * caller must be a metastore admin or have the metastore privilege **ACCESS** on the service + * credential. + */ + TemporaryCredentials generateTemporaryServiceCredential( + GenerateTemporaryServiceCredentialRequest generateTemporaryServiceCredentialRequest); + + /** + * Get a credential. + * + *

Gets a credential from the metastore. The caller must be a metastore admin, the owner of the + * credential, or have any permission on the credential. + */ + CredentialInfo getCredential(GetCredentialRequest getCredentialRequest); + + /** + * List credentials. + * + *

Gets an array of credentials (as __CredentialInfo__ objects). + * + *

The array is limited to only the credentials that the caller has permission to access. If + * the caller is a metastore admin, retrieval of credentials is unrestricted. There is no + * guarantee of a specific ordering of the elements in the array. + */ + ListCredentialsResponse listCredentials(ListCredentialsRequest listCredentialsRequest); + + /** + * Update a credential. + * + *

Updates a credential on the metastore. + * + *

The caller must be the owner of the credential or a metastore admin or have the `MANAGE` + * permission. If the caller is a metastore admin, only the __owner__ field can be changed. + */ + CredentialInfo updateCredential(UpdateCredentialRequest updateCredentialRequest); + + /** + * Validate a credential. + * + *

Validates a credential. + * + *

Either the __credential_name__ or the cloud-specific credential must be provided. + * + *

The caller must be a metastore admin or the credential owner. + */ + ValidateCredentialResponse validateCredential( + ValidateCredentialRequest validateCredentialRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java new file mode 100755 index 00000000..a3549cb5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a credential */ +@Generated +public class DeleteCredentialRequest { + /** Force deletion even if there are dependent services. */ + @JsonIgnore + @QueryParam("force") + private Boolean force; + + /** Name of the credential. 
*/ + @JsonIgnore private String nameArg; + + public DeleteCredentialRequest setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteCredentialRequest setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCredentialRequest that = (DeleteCredentialRequest) o; + return Objects.equals(force, that.force) && Objects.equals(nameArg, that.nameArg); + } + + @Override + public int hashCode() { + return Objects.hash(force, nameArg); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialRequest.class) + .add("force", force) + .add("nameArg", nameArg) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java new file mode 100755 index 00000000..1ad27875 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteCredentialResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java index 051a796a..3fe7a365 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java @@ -52,10 +52,7 @@ public class ExternalLocationInfo { @JsonProperty("fallback") private Boolean fallback; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. - */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java index d4ca791f..5b45675b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Function security type. */ +/** The security type of the function. 
*/ @Generated public enum FunctionInfoSecurityType { DEFINER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java new file mode 100755 index 00000000..31dca4b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Options to customize the requested temporary credential */ +@Generated +public class GenerateTemporaryServiceCredentialAzureOptions { + /** + * The resources to which the temporary Azure credential should apply. 
These resources are the + * scopes that are passed to the token provider (see + * https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python) + */ + @JsonProperty("resources") + private Collection resources; + + public GenerateTemporaryServiceCredentialAzureOptions setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialAzureOptions that = + (GenerateTemporaryServiceCredentialAzureOptions) o; + return Objects.equals(resources, that.resources); + } + + @Override + public int hashCode() { + return Objects.hash(resources); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryServiceCredentialAzureOptions.class) + .add("resources", resources) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java new file mode 100755 index 00000000..3e1d8fc4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenerateTemporaryServiceCredentialRequest { + /** Options to customize the requested temporary credential */ + @JsonProperty("azure_options") + private GenerateTemporaryServiceCredentialAzureOptions azureOptions; + + /** The name of the service credential used to generate a temporary credential */ + @JsonProperty("credential_name") + private String credentialName; + + public GenerateTemporaryServiceCredentialRequest setAzureOptions( + GenerateTemporaryServiceCredentialAzureOptions azureOptions) { + this.azureOptions = azureOptions; + return this; + } + + public GenerateTemporaryServiceCredentialAzureOptions getAzureOptions() { + return azureOptions; + } + + public GenerateTemporaryServiceCredentialRequest setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialRequest that = (GenerateTemporaryServiceCredentialRequest) o; + return Objects.equals(azureOptions, that.azureOptions) + && Objects.equals(credentialName, that.credentialName); + } + + @Override + public int hashCode() { + return Objects.hash(azureOptions, credentialName); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryServiceCredentialRequest.class) + .add("azureOptions", azureOptions) + .add("credentialName", credentialName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java index a18c68db..be752eec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java @@ -16,6 +16,14 @@ public class GenerateTemporaryTableCredentialResponse { @JsonProperty("aws_temp_credentials") private AwsCredentials awsTempCredentials; + /** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or + * Managed Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ + @JsonProperty("azure_aad") + private AzureActiveDirectoryToken azureAad; + /** * Azure temporary credentials for API authentication. Read more at * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas @@ -58,6 +66,15 @@ public AwsCredentials getAwsTempCredentials() { return awsTempCredentials; } + public GenerateTemporaryTableCredentialResponse setAzureAad(AzureActiveDirectoryToken azureAad) { + this.azureAad = azureAad; + return this; + } + + public AzureActiveDirectoryToken getAzureAad() { + return azureAad; + } + public GenerateTemporaryTableCredentialResponse setAzureUserDelegationSas( AzureUserDelegationSas azureUserDelegationSas) { this.azureUserDelegationSas = azureUserDelegationSas; @@ -111,6 +128,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GenerateTemporaryTableCredentialResponse that = (GenerateTemporaryTableCredentialResponse) o; return Objects.equals(awsTempCredentials, that.awsTempCredentials) + && Objects.equals(azureAad, that.azureAad) && Objects.equals(azureUserDelegationSas, that.azureUserDelegationSas) && Objects.equals(expirationTime, that.expirationTime) && 
Objects.equals(gcpOauthToken, that.gcpOauthToken) @@ -122,6 +140,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( awsTempCredentials, + azureAad, azureUserDelegationSas, expirationTime, gcpOauthToken, @@ -133,6 +152,7 @@ public int hashCode() { public String toString() { return new ToStringer(GenerateTemporaryTableCredentialResponse.class) .add("awsTempCredentials", awsTempCredentials) + .add("azureAad", azureAad) .add("azureUserDelegationSas", azureUserDelegationSas) .add("expirationTime", expirationTime) .add("gcpOauthToken", gcpOauthToken) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java index 59200e27..a90291dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java @@ -13,6 +13,9 @@ public enum GetBindingsSecurableType { @JsonProperty("external_location") EXTERNAL_LOCATION, + @JsonProperty("service_credential") + SERVICE_CREDENTIAL, + @JsonProperty("storage_credential") STORAGE_CREDENTIAL, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java new file mode 100755 index 00000000..cfb1de4f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a credential */ +@Generated +public class GetCredentialRequest { + /** Name of the credential. */ + @JsonIgnore private String nameArg; + + public GetCredentialRequest setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialRequest that = (GetCredentialRequest) o; + return Objects.equals(nameArg, that.nameArg); + } + + @Override + public int hashCode() { + return Objects.hash(nameArg); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialRequest.class).add("nameArg", nameArg).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java index 1c6e3168..db13d61b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java @@ -4,9 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Whether the current securable is accessible from all workspaces or a specific set of workspaces. 
- */ @Generated public enum IsolationMode { ISOLATION_MODE_ISOLATED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java new file mode 100755 index 00000000..775a697e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List credentials */ +@Generated +public class ListCredentialsRequest { + /** + * Maximum number of credentials to return. - If not set, the default max page size is used. - + * When set to a value greater than 0, the page length is the minimum of this value and a + * server-configured value. - When set to 0, the page length is set to a server-configured value + * (recommended). - When set to a value less than 0, an invalid parameter error is returned. + */ + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + /** Opaque token to retrieve the next page of results. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** Return only credentials for the specified purpose. 
*/ + @JsonIgnore + @QueryParam("purpose") + private CredentialPurpose purpose; + + public ListCredentialsRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListCredentialsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListCredentialsRequest setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCredentialsRequest that = (ListCredentialsRequest) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(purpose, that.purpose); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, purpose); + } + + @Override + public String toString() { + return new ToStringer(ListCredentialsRequest.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("purpose", purpose) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java similarity index 58% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java index cadcaa6f..79ebd190 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java @@ -1,6 +1,6 @@ // Code generated from 
OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; +package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,10 +9,10 @@ import java.util.Objects; @Generated -public class ListCleanRoomsResponse { - /** An array of clean rooms. Remote details (central) are not included. */ - @JsonProperty("clean_rooms") - private Collection cleanRooms; +public class ListCredentialsResponse { + /** */ + @JsonProperty("credentials") + private Collection credentials; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. @@ -21,16 +21,16 @@ public class ListCleanRoomsResponse { @JsonProperty("next_page_token") private String nextPageToken; - public ListCleanRoomsResponse setCleanRooms(Collection cleanRooms) { - this.cleanRooms = cleanRooms; + public ListCredentialsResponse setCredentials(Collection credentials) { + this.credentials = credentials; return this; } - public Collection getCleanRooms() { - return cleanRooms; + public Collection getCredentials() { + return credentials; } - public ListCleanRoomsResponse setNextPageToken(String nextPageToken) { + public ListCredentialsResponse setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } @@ -43,20 +43,20 @@ public String getNextPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListCleanRoomsResponse that = (ListCleanRoomsResponse) o; - return Objects.equals(cleanRooms, that.cleanRooms) + ListCredentialsResponse that = (ListCredentialsResponse) o; + return Objects.equals(credentials, that.credentials) && Objects.equals(nextPageToken, that.nextPageToken); } @Override public int hashCode() { - return Objects.hash(cleanRooms, nextPageToken); + return Objects.hash(credentials, nextPageToken); } @Override public String toString() { - return new 
ToStringer(ListCleanRoomsResponse.class) - .add("cleanRooms", cleanRooms) + return new ToStringer(ListCredentialsResponse.class) + .add("credentials", credentials) .add("nextPageToken", nextPageToken) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java index 8a53c278..f5d9fd2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java @@ -54,6 +54,14 @@ public class ListTablesRequest { @QueryParam("omit_properties") private Boolean omitProperties; + /** + * Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the + * response or not. + */ + @JsonIgnore + @QueryParam("omit_username") + private Boolean omitUsername; + /** Opaque token to send for the next page of results (pagination). 
*/ @JsonIgnore @QueryParam("page_token") @@ -127,6 +135,15 @@ public Boolean getOmitProperties() { return omitProperties; } + public ListTablesRequest setOmitUsername(Boolean omitUsername) { + this.omitUsername = omitUsername; + return this; + } + + public Boolean getOmitUsername() { + return omitUsername; + } + public ListTablesRequest setPageToken(String pageToken) { this.pageToken = pageToken; return this; @@ -157,6 +174,7 @@ public boolean equals(Object o) { && Objects.equals(maxResults, that.maxResults) && Objects.equals(omitColumns, that.omitColumns) && Objects.equals(omitProperties, that.omitProperties) + && Objects.equals(omitUsername, that.omitUsername) && Objects.equals(pageToken, that.pageToken) && Objects.equals(schemaName, that.schemaName); } @@ -171,6 +189,7 @@ public int hashCode() { maxResults, omitColumns, omitProperties, + omitUsername, pageToken, schemaName); } @@ -185,6 +204,7 @@ public String toString() { .add("maxResults", maxResults) .add("omitColumns", omitColumns) .add("omitProperties", omitProperties) + .add("omitUsername", omitUsername) .add("pageToken", pageToken) .add("schemaName", schemaName) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java index 32f99a52..0a3f6886 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java @@ -2,7 +2,13 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Wait; +import java.time.Duration; +import java.util.Arrays; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -23,13 +29,62 @@ public OnlineTablesAPI(OnlineTablesService mock) { impl = mock; } + public OnlineTable waitGetOnlineTableActive(String name) throws TimeoutException { + return waitGetOnlineTableActive(name, Duration.ofMinutes(20), null); + } + + public OnlineTable waitGetOnlineTableActive( + String name, Duration timeout, Consumer callback) throws TimeoutException { + long deadline = System.currentTimeMillis() + timeout.toMillis(); + java.util.List targetStates = + Arrays.asList(ProvisioningInfoState.ACTIVE); + java.util.List failureStates = + Arrays.asList(ProvisioningInfoState.FAILED); + String statusMessage = "polling..."; + int attempt = 1; + while (System.currentTimeMillis() < deadline) { + OnlineTable poll = get(new GetOnlineTableRequest().setName(name)); + ProvisioningInfoState status = poll.getUnityCatalogProvisioningState(); + statusMessage = String.format("current status: %s", status); + if (targetStates.contains(status)) { + return poll; + } + if (callback != null) { + callback.accept(poll); + } + if (failureStates.contains(status)) { + String msg = String.format("failed to reach ACTIVE, got %s: %s", status, statusMessage); + throw new IllegalStateException(msg); + } + + String prefix = String.format("name=%s", name); + int sleep = attempt; + if (sleep > 10) { + // sleep 10s max per attempt + sleep = 10; + } + LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep); + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); + } + /** * Create an Online Table. * *

Create a new Online Table. */ - public OnlineTable create(CreateOnlineTableRequest request) { - return impl.create(request); + public Wait create(CreateOnlineTableRequest request) { + OnlineTable response = impl.create(request); + return new Wait<>( + (timeout, callback) -> waitGetOnlineTableActive(response.getName(), timeout, callback), + response); } public void delete(String name) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java index 3b29957f..a1d482fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java @@ -21,7 +21,7 @@ public OnlineTable create(CreateOnlineTableRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, OnlineTable.class, headers); + return apiClient.POST(path, request.getTable(), OnlineTable.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java index ccbf21e3..76c85e2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java @@ -14,6 +14,9 @@ public enum SecurableType { @JsonProperty("connection") CONNECTION, + @JsonProperty("credential") + CREDENTIAL, + @JsonProperty("external_location") EXTERNAL_LOCATION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java index 
12b687e6..b3d32add 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java @@ -41,14 +41,15 @@ public class StorageCredentialInfo { @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccountResponse databricksGcpServiceAccount; + /** The full name of the credential. */ + @JsonProperty("full_name") + private String fullName; + /** The unique identifier of the credential. */ @JsonProperty("id") private String id; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. - */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; @@ -155,6 +156,15 @@ public DatabricksGcpServiceAccountResponse getDatabricksGcpServiceAccount() { return databricksGcpServiceAccount; } + public StorageCredentialInfo setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + public StorageCredentialInfo setId(String id) { this.id = id; return this; @@ -249,6 +259,7 @@ public boolean equals(Object o) { && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(fullName, that.fullName) && Objects.equals(id, that.id) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(metastoreId, that.metastoreId) @@ -271,6 +282,7 @@ public int hashCode() { createdAt, createdBy, databricksGcpServiceAccount, + fullName, id, isolationMode, metastoreId, @@ -293,6 +305,7 @@ public String toString() { .add("createdAt", createdAt) .add("createdBy", createdBy) .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("fullName", fullName) .add("id", id) .add("isolationMode", isolationMode) .add("metastoreId", metastoreId) diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java new file mode 100755 index 00000000..a42b2572 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TemporaryCredentials { + /** + * AWS temporary credentials for API authentication. Read more at + * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. + */ + @JsonProperty("aws_temp_credentials") + private AwsCredentials awsTempCredentials; + + /** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or + * Managed Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ + @JsonProperty("azure_aad") + private AzureActiveDirectoryToken azureAad; + + /** + * Server time when the credential will expire, in epoch milliseconds. The API client is advised + * to cache the credential given this expiration time. 
+ */ + @JsonProperty("expiration_time") + private Long expirationTime; + + public TemporaryCredentials setAwsTempCredentials(AwsCredentials awsTempCredentials) { + this.awsTempCredentials = awsTempCredentials; + return this; + } + + public AwsCredentials getAwsTempCredentials() { + return awsTempCredentials; + } + + public TemporaryCredentials setAzureAad(AzureActiveDirectoryToken azureAad) { + this.azureAad = azureAad; + return this; + } + + public AzureActiveDirectoryToken getAzureAad() { + return azureAad; + } + + public TemporaryCredentials setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TemporaryCredentials that = (TemporaryCredentials) o; + return Objects.equals(awsTempCredentials, that.awsTempCredentials) + && Objects.equals(azureAad, that.azureAad) + && Objects.equals(expirationTime, that.expirationTime); + } + + @Override + public int hashCode() { + return Objects.hash(awsTempCredentials, azureAad, expirationTime); + } + + @Override + public String toString() { + return new ToStringer(TemporaryCredentials.class) + .add("awsTempCredentials", awsTempCredentials) + .add("azureAad", azureAad) + .add("expirationTime", expirationTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java index 1b462a9b..ab1a503f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java @@ -13,6 +13,9 @@ public enum UpdateBindingsSecurableType { 
@JsonProperty("external_location") EXTERNAL_LOCATION, + @JsonProperty("service_credential") + SERVICE_CREDENTIAL, + @JsonProperty("storage_credential") STORAGE_CREDENTIAL, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java new file mode 100755 index 00000000..fdaf1643 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java @@ -0,0 +1,177 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateCredentialRequest { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** Force update even if there are dependent services. */ + @JsonProperty("force") + private Boolean force; + + /** + * Whether the current securable is accessible from all workspaces or a specific set of + * workspaces. + */ + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + /** Name of the credential. */ + @JsonIgnore private String nameArg; + + /** New name of credential. */ + @JsonProperty("new_name") + private String newName; + + /** Username of current owner of credential. */ + @JsonProperty("owner") + private String owner; + + /** Supply true to this argument to skip validation of the updated credential. 
*/ + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public UpdateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public UpdateCredentialRequest setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public UpdateCredentialRequest setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateCredentialRequest setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public UpdateCredentialRequest setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateCredentialRequest setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + public UpdateCredentialRequest setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateCredentialRequest setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateCredentialRequest setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCredentialRequest that = (UpdateCredentialRequest) o; + return Objects.equals(awsIamRole, 
that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(comment, that.comment) + && Objects.equals(force, that.force) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(nameArg, that.nameArg) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + comment, + force, + isolationMode, + nameArg, + newName, + owner, + skipValidation); + } + + @Override + public String toString() { + return new ToStringer(UpdateCredentialRequest.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("comment", comment) + .add("force", force) + .add("isolationMode", isolationMode) + .add("nameArg", nameArg) + .add("newName", newName) + .add("owner", owner) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java index 3c749ac0..c4017fb4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java @@ -38,10 +38,7 @@ public class UpdateExternalLocation { @JsonProperty("force") private Boolean force; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. 
- */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java index 14f92001..50415150 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java @@ -38,10 +38,7 @@ public class UpdateStorageCredential { @JsonProperty("force") private Boolean force; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. - */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java new file mode 100755 index 00000000..8bb5ff66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ValidateCredentialRequest { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Required. The name of an existing credential or long-lived cloud credential to validate. 
*/ + @JsonProperty("credential_name") + private String credentialName; + + /** The purpose of the credential. This should only be used when the credential is specified. */ + @JsonProperty("purpose") + private CredentialPurpose purpose; + + public ValidateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public ValidateCredentialRequest setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public ValidateCredentialRequest setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public ValidateCredentialRequest setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateCredentialRequest that = (ValidateCredentialRequest) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(purpose, that.purpose); + } + + @Override + public int hashCode() { + return Objects.hash(awsIamRole, azureManagedIdentity, credentialName, purpose); + } + + @Override + public String toString() { + return new ToStringer(ValidateCredentialRequest.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("credentialName", credentialName) + .add("purpose", purpose) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java new file mode 100755 index 00000000..ef23a12c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ValidateCredentialResponse { + /** The results of the validation check. */ + @JsonProperty("results") + private Collection results; + + public ValidateCredentialResponse setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateCredentialResponse that = (ValidateCredentialResponse) o; + return Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(results); + } + + @Override + public String toString() { + return new ToStringer(ValidateCredentialResponse.class).add("results", results).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java new file mode 100755 index 00000000..7fa55d34 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** A enum represents the result of the file operation */ +@Generated +public enum ValidateCredentialResult { + FAIL, + PASS, + SKIP, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java index 936bbaf0..f42ce8e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java @@ -142,8 +142,9 @@ public ClusterPolicyPermissions setPermissions(String clusterPolicyId) { /** * Set cluster policy permissions. * - *

Sets permissions on a cluster policy. Cluster policies can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public ClusterPolicyPermissions setPermissions(ClusterPolicyPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java index 10654b89..64f2a13f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java @@ -87,8 +87,9 @@ ClusterPolicyPermissions getPermissions( /** * Set cluster policy permissions. * - *

Sets permissions on a cluster policy. Cluster policies can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ ClusterPolicyPermissions setPermissions( ClusterPolicyPermissionsRequest clusterPolicyPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java index 2fe2801c..285d9349 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java @@ -394,7 +394,9 @@ public ClusterPermissions setPermissions(String clusterId) { /** * Set cluster permissions. * - *

Sets permissions on a cluster. Clusters can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public ClusterPermissions setPermissions(ClusterPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java index f257b5f6..b85b439b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java @@ -180,7 +180,9 @@ GetClusterPermissionLevelsResponse getPermissionLevels( /** * Set cluster permissions. * - *

Sets permissions on a cluster. Clusters can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ ClusterPermissions setPermissions(ClusterPermissionsRequest clusterPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java index ff2644f7..886970e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java @@ -93,34 +93,27 @@ public CommandStatusResponse waitCommandStatusCommandExecutionCancelled( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( - String clusterId, String commandId, String contextId) throws TimeoutException { - return waitCommandStatusCommandExecutionFinishedOrError( - clusterId, commandId, contextId, Duration.ofMinutes(20), null); + public ContextStatusResponse waitContextStatusCommandExecutionRunning( + String clusterId, String contextId) throws TimeoutException { + return waitContextStatusCommandExecutionRunning( + clusterId, contextId, Duration.ofMinutes(20), null); } - public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( + public ContextStatusResponse waitContextStatusCommandExecutionRunning( String clusterId, - String commandId, String contextId, Duration timeout, - Consumer callback) + Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = - Arrays.asList(CommandStatus.FINISHED, CommandStatus.ERROR); - java.util.List failureStates = - Arrays.asList(CommandStatus.CANCELLED, CommandStatus.CANCELLING); + 
java.util.List targetStates = Arrays.asList(ContextStatus.RUNNING); + java.util.List failureStates = Arrays.asList(ContextStatus.ERROR); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - CommandStatusResponse poll = - commandStatus( - new CommandStatusRequest() - .setClusterId(clusterId) - .setCommandId(commandId) - .setContextId(contextId)); - CommandStatus status = poll.getStatus(); + ContextStatusResponse poll = + contextStatus(new ContextStatusRequest().setClusterId(clusterId).setContextId(contextId)); + ContextStatus status = poll.getStatus(); statusMessage = String.format("current status: %s", status); if (targetStates.contains(status)) { return poll; @@ -129,14 +122,11 @@ public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = - String.format("failed to reach FINISHED or ERROR, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = - String.format( - "clusterId=%s, commandId=%s, contextId=%s", clusterId, commandId, contextId); + String prefix = String.format("clusterId=%s, contextId=%s", clusterId, contextId); int sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt @@ -154,27 +144,34 @@ public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public ContextStatusResponse waitContextStatusCommandExecutionRunning( - String clusterId, String contextId) throws TimeoutException { - return waitContextStatusCommandExecutionRunning( - clusterId, contextId, Duration.ofMinutes(20), null); + public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( + String clusterId, String commandId, String contextId) throws TimeoutException { + return 
waitCommandStatusCommandExecutionFinishedOrError( + clusterId, commandId, contextId, Duration.ofMinutes(20), null); } - public ContextStatusResponse waitContextStatusCommandExecutionRunning( + public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( String clusterId, + String commandId, String contextId, Duration timeout, - Consumer callback) + Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(ContextStatus.RUNNING); - java.util.List failureStates = Arrays.asList(ContextStatus.ERROR); + java.util.List targetStates = + Arrays.asList(CommandStatus.FINISHED, CommandStatus.ERROR); + java.util.List failureStates = + Arrays.asList(CommandStatus.CANCELLED, CommandStatus.CANCELLING); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - ContextStatusResponse poll = - contextStatus(new ContextStatusRequest().setClusterId(clusterId).setContextId(contextId)); - ContextStatus status = poll.getStatus(); + CommandStatusResponse poll = + commandStatus( + new CommandStatusRequest() + .setClusterId(clusterId) + .setCommandId(commandId) + .setContextId(contextId)); + CommandStatus status = poll.getStatus(); statusMessage = String.format("current status: %s", status); if (targetStates.contains(status)) { return poll; @@ -183,11 +180,14 @@ public ContextStatusResponse waitContextStatusCommandExecutionRunning( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); + String msg = + String.format("failed to reach FINISHED or ERROR, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = String.format("clusterId=%s, contextId=%s", clusterId, contextId); + String prefix = + String.format( + "clusterId=%s, commandId=%s, contextId=%s", clusterId, commandId, contextId); int sleep = 
attempt; if (sleep > 10) { // sleep 10s max per attempt diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index 8fb986e4..794e1f59 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -48,7 +48,7 @@ public class EditCluster { @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; - /** ID of the cluser */ + /** ID of the cluster */ @JsonProperty("cluster_id") private String clusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java index 11431a3c..435127f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java @@ -145,7 +145,8 @@ public InstancePoolPermissions setPermissions(String instancePoolId) { /** * Set instance pool permissions. * - *

Sets permissions on an instance pool. Instance pools can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ public InstancePoolPermissions setPermissions(InstancePoolPermissionsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java index ae5e5525..0a7d03ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java @@ -82,7 +82,8 @@ InstancePoolPermissions getPermissions( /** * Set instance pool permissions. * - *

Sets permissions on an instance pool. Instance pools can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ InstancePoolPermissions setPermissions( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java index 62d74c45..9c6ddecd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java @@ -7,68 +7,20 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create dashboard */ @Generated public class CreateDashboardRequest { - /** The display name of the dashboard. */ - @JsonProperty("display_name") - private String displayName; + /** */ + @JsonProperty("dashboard") + private Dashboard dashboard; - /** - * The workspace path of the folder containing the dashboard. Includes leading slash and no - * trailing slash. This field is excluded in List Dashboards responses. - */ - @JsonProperty("parent_path") - private String parentPath; - - /** - * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which - * includes the `serialized_dashboard` field. This field provides the structure of the JSON string - * that represents the dashboard's layout and components. - * - *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - */ - @JsonProperty("serialized_dashboard") - private String serializedDashboard; - - /** The warehouse ID used to run the dashboard. */ - @JsonProperty("warehouse_id") - private String warehouseId; - - public CreateDashboardRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public CreateDashboardRequest setParentPath(String parentPath) { - this.parentPath = parentPath; - return this; - } - - public String getParentPath() { - return parentPath; - } - - public CreateDashboardRequest setSerializedDashboard(String serializedDashboard) { - this.serializedDashboard = serializedDashboard; - return this; - } - - public String getSerializedDashboard() { - return serializedDashboard; - } - - public CreateDashboardRequest setWarehouseId(String warehouseId) { - this.warehouseId = warehouseId; + public CreateDashboardRequest setDashboard(Dashboard dashboard) { + this.dashboard = dashboard; return this; } - public String getWarehouseId() { - return warehouseId; + public Dashboard getDashboard() { + return dashboard; } @Override @@ -76,24 +28,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateDashboardRequest that = (CreateDashboardRequest) o; - return Objects.equals(displayName, that.displayName) - && Objects.equals(parentPath, that.parentPath) - && Objects.equals(serializedDashboard, that.serializedDashboard) - && Objects.equals(warehouseId, that.warehouseId); + return Objects.equals(dashboard, that.dashboard); } @Override public int hashCode() { - return Objects.hash(displayName, parentPath, serializedDashboard, warehouseId); + return Objects.hash(dashboard); } @Override public String toString() { - return new ToStringer(CreateDashboardRequest.class) - .add("displayName", displayName) - .add("parentPath", 
parentPath) - .add("serializedDashboard", serializedDashboard) - .add("warehouseId", warehouseId) - .toString(); + return new ToStringer(CreateDashboardRequest.class).add("dashboard", dashboard).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java index 8e1d5716..1c364865 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java @@ -8,31 +8,15 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create dashboard schedule */ @Generated public class CreateScheduleRequest { - /** The cron expression describing the frequency of the periodic refresh for this schedule. */ - @JsonProperty("cron_schedule") - private CronSchedule cronSchedule; - /** UUID identifying the dashboard to which the schedule belongs. */ @JsonIgnore private String dashboardId; - /** The display name for schedule. */ - @JsonProperty("display_name") - private String displayName; - - /** The status indicates whether this schedule is paused or not. 
*/ - @JsonProperty("pause_status") - private SchedulePauseStatus pauseStatus; - - public CreateScheduleRequest setCronSchedule(CronSchedule cronSchedule) { - this.cronSchedule = cronSchedule; - return this; - } - - public CronSchedule getCronSchedule() { - return cronSchedule; - } + /** */ + @JsonProperty("schedule") + private Schedule schedule; public CreateScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -43,22 +27,13 @@ public String getDashboardId() { return dashboardId; } - public CreateScheduleRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public CreateScheduleRequest setPauseStatus(SchedulePauseStatus pauseStatus) { - this.pauseStatus = pauseStatus; + public CreateScheduleRequest setSchedule(Schedule schedule) { + this.schedule = schedule; return this; } - public SchedulePauseStatus getPauseStatus() { - return pauseStatus; + public Schedule getSchedule() { + return schedule; } @Override @@ -66,24 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateScheduleRequest that = (CreateScheduleRequest) o; - return Objects.equals(cronSchedule, that.cronSchedule) - && Objects.equals(dashboardId, that.dashboardId) - && Objects.equals(displayName, that.displayName) - && Objects.equals(pauseStatus, that.pauseStatus); + return Objects.equals(dashboardId, that.dashboardId) && Objects.equals(schedule, that.schedule); } @Override public int hashCode() { - return Objects.hash(cronSchedule, dashboardId, displayName, pauseStatus); + return Objects.hash(dashboardId, schedule); } @Override public String toString() { return new ToStringer(CreateScheduleRequest.class) - .add("cronSchedule", cronSchedule) .add("dashboardId", dashboardId) - .add("displayName", displayName) - .add("pauseStatus", pauseStatus) + .add("schedule", schedule) 
.toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java index 9ece761b..66ce0422 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java @@ -8,6 +8,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create schedule subscription */ @Generated public class CreateSubscriptionRequest { /** UUID identifying the dashboard to which the subscription belongs. */ @@ -16,9 +17,9 @@ public class CreateSubscriptionRequest { /** UUID identifying the schedule to which the subscription belongs. */ @JsonIgnore private String scheduleId; - /** Subscriber details for users and destinations to be added as subscribers to the schedule. 
*/ - @JsonProperty("subscriber") - private Subscriber subscriber; + /** */ + @JsonProperty("subscription") + private Subscription subscription; public CreateSubscriptionRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -38,13 +39,13 @@ public String getScheduleId() { return scheduleId; } - public CreateSubscriptionRequest setSubscriber(Subscriber subscriber) { - this.subscriber = subscriber; + public CreateSubscriptionRequest setSubscription(Subscription subscription) { + this.subscription = subscription; return this; } - public Subscriber getSubscriber() { - return subscriber; + public Subscription getSubscription() { + return subscription; } @Override @@ -54,12 +55,12 @@ public boolean equals(Object o) { CreateSubscriptionRequest that = (CreateSubscriptionRequest) o; return Objects.equals(dashboardId, that.dashboardId) && Objects.equals(scheduleId, that.scheduleId) - && Objects.equals(subscriber, that.subscriber); + && Objects.equals(subscription, that.subscription); } @Override public int hashCode() { - return Objects.hash(dashboardId, scheduleId, subscriber); + return Objects.hash(dashboardId, scheduleId, subscription); } @Override @@ -67,7 +68,7 @@ public String toString() { return new ToStringer(CreateSubscriptionRequest.class) .add("dashboardId", dashboardId) .add("scheduleId", scheduleId) - .add("subscriber", subscriber) + .add("subscription", subscription) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index c81ff268..4b8dd23c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -124,7 +124,7 @@ public Wait createMessage( public GenieGetMessageQueryResultResponse executeMessageQuery( String spaceId, String conversationId, String 
messageId) { return executeMessageQuery( - new ExecuteMessageQueryRequest() + new GenieExecuteMessageQueryRequest() .setSpaceId(spaceId) .setConversationId(conversationId) .setMessageId(messageId)); @@ -136,7 +136,7 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( *

Execute the SQL query in the message. */ public GenieGetMessageQueryResultResponse executeMessageQuery( - ExecuteMessageQueryRequest request) { + GenieExecuteMessageQueryRequest request) { return impl.executeMessageQuery(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java similarity index 78% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java index 9c47c9e6..4ad41a28 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java @@ -9,7 +9,7 @@ /** Execute SQL query in a conversation message */ @Generated -public class ExecuteMessageQueryRequest { +public class GenieExecuteMessageQueryRequest { /** Conversation ID */ @JsonIgnore private String conversationId; @@ -19,7 +19,7 @@ public class ExecuteMessageQueryRequest { /** Genie space ID */ @JsonIgnore private String spaceId; - public ExecuteMessageQueryRequest setConversationId(String conversationId) { + public GenieExecuteMessageQueryRequest setConversationId(String conversationId) { this.conversationId = conversationId; return this; } @@ -28,7 +28,7 @@ public String getConversationId() { return conversationId; } - public ExecuteMessageQueryRequest setMessageId(String messageId) { + public GenieExecuteMessageQueryRequest setMessageId(String messageId) { this.messageId = messageId; return this; } @@ -37,7 +37,7 @@ public String getMessageId() { return messageId; } - public ExecuteMessageQueryRequest setSpaceId(String spaceId) { + public GenieExecuteMessageQueryRequest 
setSpaceId(String spaceId) { this.spaceId = spaceId; return this; } @@ -50,7 +50,7 @@ public String getSpaceId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ExecuteMessageQueryRequest that = (ExecuteMessageQueryRequest) o; + GenieExecuteMessageQueryRequest that = (GenieExecuteMessageQueryRequest) o; return Objects.equals(conversationId, that.conversationId) && Objects.equals(messageId, that.messageId) && Objects.equals(spaceId, that.spaceId); @@ -63,7 +63,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ExecuteMessageQueryRequest.class) + return new ToStringer(GenieExecuteMessageQueryRequest.class) .add("conversationId", conversationId) .add("messageId", messageId) .add("spaceId", spaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index 7a282ccd..fc0c9236 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -29,7 +29,7 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) @Override public GenieGetMessageQueryResultResponse executeMessageQuery( - ExecuteMessageQueryRequest request) { + GenieExecuteMessageQueryRequest request) { String path = String.format( "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/execute-query", diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index 325c0df2..5dad69dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -30,7 +30,7 @@ GenieMessage createMessage( *

Execute the SQL query in the message. */ GenieGetMessageQueryResultResponse executeMessageQuery( - ExecuteMessageQueryRequest executeMessageQueryRequest); + GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest); /** * Get conversation message. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java index 8f7f6605..6f4978b0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java @@ -27,10 +27,6 @@ public LakeviewAPI(LakeviewService mock) { impl = mock; } - public Dashboard create(String displayName) { - return create(new CreateDashboardRequest().setDisplayName(displayName)); - } - /** * Create dashboard. * @@ -40,9 +36,8 @@ public Dashboard create(CreateDashboardRequest request) { return impl.create(request); } - public Schedule createSchedule(String dashboardId, CronSchedule cronSchedule) { - return createSchedule( - new CreateScheduleRequest().setDashboardId(dashboardId).setCronSchedule(cronSchedule)); + public Schedule createSchedule(String dashboardId) { + return createSchedule(new CreateScheduleRequest().setDashboardId(dashboardId)); } /** Create dashboard schedule. */ @@ -50,13 +45,9 @@ public Schedule createSchedule(CreateScheduleRequest request) { return impl.createSchedule(request); } - public Subscription createSubscription( - String dashboardId, String scheduleId, Subscriber subscriber) { + public Subscription createSubscription(String dashboardId, String scheduleId) { return createSubscription( - new CreateSubscriptionRequest() - .setDashboardId(dashboardId) - .setScheduleId(scheduleId) - .setSubscriber(subscriber)); + new CreateSubscriptionRequest().setDashboardId(dashboardId).setScheduleId(scheduleId)); } /** Create schedule subscription. 
*/ @@ -256,12 +247,9 @@ public Dashboard update(UpdateDashboardRequest request) { return impl.update(request); } - public Schedule updateSchedule(String dashboardId, String scheduleId, CronSchedule cronSchedule) { + public Schedule updateSchedule(String dashboardId, String scheduleId) { return updateSchedule( - new UpdateScheduleRequest() - .setDashboardId(dashboardId) - .setScheduleId(scheduleId) - .setCronSchedule(cronSchedule)); + new UpdateScheduleRequest().setDashboardId(dashboardId).setScheduleId(scheduleId)); } /** Update dashboard schedule. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java index f6b46852..3d9689b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java @@ -21,7 +21,7 @@ public Dashboard create(CreateDashboardRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Dashboard.class, headers); + return apiClient.POST(path, request.getDashboard(), Dashboard.class, headers); } @Override @@ -31,7 +31,7 @@ public Schedule createSchedule(CreateScheduleRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Schedule.class, headers); + return apiClient.POST(path, request.getSchedule(), Schedule.class, headers); } @Override @@ -43,7 +43,7 @@ public Subscription createSubscription(CreateSubscriptionRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Subscription.class, headers); + return 
apiClient.POST(path, request.getSubscription(), Subscription.class, headers); } @Override @@ -177,7 +177,7 @@ public Dashboard update(UpdateDashboardRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, Dashboard.class, headers); + return apiClient.PATCH(path, request.getDashboard(), Dashboard.class, headers); } @Override @@ -189,6 +189,6 @@ public Schedule updateSchedule(UpdateScheduleRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, Schedule.class, headers); + return apiClient.PUT(path, request.getSchedule(), Schedule.class, headers); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index 5e1c9458..12c0af08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -31,6 +31,7 @@ public enum MessageErrorType { LOCAL_CONTEXT_EXCEEDED_EXCEPTION, MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION, MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION, + NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE, NO_QUERY_TO_VISUALIZE_EXCEPTION, NO_TABLES_TO_QUERY_EXCEPTION, RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java index 8897fe16..0cf82ee0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java @@ -45,6 +45,10 @@ public class Schedule { 
@JsonProperty("update_time") private String updateTime; + /** The warehouse id to run the dashboard with for the schedule. */ + @JsonProperty("warehouse_id") + private String warehouseId; + public Schedule setCreateTime(String createTime) { this.createTime = createTime; return this; @@ -117,6 +121,15 @@ public String getUpdateTime() { return updateTime; } + public Schedule setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -129,7 +142,8 @@ public boolean equals(Object o) { && Objects.equals(etag, that.etag) && Objects.equals(pauseStatus, that.pauseStatus) && Objects.equals(scheduleId, that.scheduleId) - && Objects.equals(updateTime, that.updateTime); + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); } @Override @@ -142,7 +156,8 @@ public int hashCode() { etag, pauseStatus, scheduleId, - updateTime); + updateTime, + warehouseId); } @Override @@ -156,6 +171,7 @@ public String toString() { .add("pauseStatus", pauseStatus) .add("scheduleId", scheduleId) .add("updateTime", updateTime) + .add("warehouseId", warehouseId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java index f9821b02..84298ffd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java @@ -8,36 +8,24 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Update dashboard */ @Generated public class UpdateDashboardRequest { + /** */ + @JsonProperty("dashboard") + private Dashboard dashboard; + /** UUID identifying 
the dashboard. */ @JsonIgnore private String dashboardId; - /** The display name of the dashboard. */ - @JsonProperty("display_name") - private String displayName; - - /** - * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard - * has not been modified since the last read. This field is excluded in List Dashboards responses. - */ - @JsonProperty("etag") - private String etag; - - /** - * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which - * includes the `serialized_dashboard` field. This field provides the structure of the JSON string - * that represents the dashboard's layout and components. - * - *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - */ - @JsonProperty("serialized_dashboard") - private String serializedDashboard; + public UpdateDashboardRequest setDashboard(Dashboard dashboard) { + this.dashboard = dashboard; + return this; + } - /** The warehouse ID used to run the dashboard. */ - @JsonProperty("warehouse_id") - private String warehouseId; + public Dashboard getDashboard() { + return dashboard; + } public UpdateDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -48,67 +36,25 @@ public String getDashboardId() { return dashboardId; } - public UpdateDashboardRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public UpdateDashboardRequest setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - public UpdateDashboardRequest setSerializedDashboard(String serializedDashboard) { - this.serializedDashboard = serializedDashboard; - return this; - } - - public String getSerializedDashboard() { - return serializedDashboard; - } - - public UpdateDashboardRequest setWarehouseId(String warehouseId) { - this.warehouseId = warehouseId; - return this; - } - - public String getWarehouseId() { - return warehouseId; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateDashboardRequest that = (UpdateDashboardRequest) o; - return Objects.equals(dashboardId, that.dashboardId) - && Objects.equals(displayName, that.displayName) - && Objects.equals(etag, that.etag) - && Objects.equals(serializedDashboard, that.serializedDashboard) - && Objects.equals(warehouseId, that.warehouseId); + return Objects.equals(dashboard, that.dashboard) + && Objects.equals(dashboardId, that.dashboardId); } @Override public int hashCode() { - return 
Objects.hash(dashboardId, displayName, etag, serializedDashboard, warehouseId); + return Objects.hash(dashboard, dashboardId); } @Override public String toString() { return new ToStringer(UpdateDashboardRequest.class) + .add("dashboard", dashboard) .add("dashboardId", dashboardId) - .add("displayName", displayName) - .add("etag", etag) - .add("serializedDashboard", serializedDashboard) - .add("warehouseId", warehouseId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java index cfc48f11..cffdc637 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java @@ -8,43 +8,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Update dashboard schedule */ @Generated public class UpdateScheduleRequest { - /** The cron expression describing the frequency of the periodic refresh for this schedule. */ - @JsonProperty("cron_schedule") - private CronSchedule cronSchedule; - /** UUID identifying the dashboard to which the schedule belongs. */ @JsonIgnore private String dashboardId; - /** The display name for schedule. */ - @JsonProperty("display_name") - private String displayName; - - /** - * The etag for the schedule. Must be left empty on create, must be provided on updates to ensure - * that the schedule has not been modified since the last read, and can be optionally provided on - * delete. - */ - @JsonProperty("etag") - private String etag; - - /** The status indicates whether this schedule is paused or not. */ - @JsonProperty("pause_status") - private SchedulePauseStatus pauseStatus; + /** */ + @JsonProperty("schedule") + private Schedule schedule; /** UUID identifying the schedule. 
*/ @JsonIgnore private String scheduleId; - public UpdateScheduleRequest setCronSchedule(CronSchedule cronSchedule) { - this.cronSchedule = cronSchedule; - return this; - } - - public CronSchedule getCronSchedule() { - return cronSchedule; - } - public UpdateScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; return this; @@ -54,31 +30,13 @@ public String getDashboardId() { return dashboardId; } - public UpdateScheduleRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public UpdateScheduleRequest setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - public UpdateScheduleRequest setPauseStatus(SchedulePauseStatus pauseStatus) { - this.pauseStatus = pauseStatus; + public UpdateScheduleRequest setSchedule(Schedule schedule) { + this.schedule = schedule; return this; } - public SchedulePauseStatus getPauseStatus() { - return pauseStatus; + public Schedule getSchedule() { + return schedule; } public UpdateScheduleRequest setScheduleId(String scheduleId) { @@ -95,27 +53,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateScheduleRequest that = (UpdateScheduleRequest) o; - return Objects.equals(cronSchedule, that.cronSchedule) - && Objects.equals(dashboardId, that.dashboardId) - && Objects.equals(displayName, that.displayName) - && Objects.equals(etag, that.etag) - && Objects.equals(pauseStatus, that.pauseStatus) + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(schedule, that.schedule) && Objects.equals(scheduleId, that.scheduleId); } @Override public int hashCode() { - return Objects.hash(cronSchedule, dashboardId, displayName, etag, pauseStatus, scheduleId); + return Objects.hash(dashboardId, schedule, scheduleId); } @Override public String toString() { return 
new ToStringer(UpdateScheduleRequest.class) - .add("cronSchedule", cronSchedule) .add("dashboardId", dashboardId) - .add("displayName", displayName) - .add("etag", etag) - .add("pauseStatus", pauseStatus) + .add("schedule", schedule) .add("scheduleId", scheduleId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java index 2eb2601d..52182458 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java @@ -118,8 +118,9 @@ public ObjectPermissions set(String requestObjectType, String requestObjectId) { /** * Set object permissions. * - *

Sets permissions on an object. Objects can inherit permissions from their parent objects or - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ public ObjectPermissions set(PermissionsRequest request) { return impl.set(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java index e641809e..03ccea94 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java @@ -80,8 +80,9 @@ GetPermissionLevelsResponse getPermissionLevels( /** * Set object permissions. * - *

Sets permissions on an object. Objects can inherit permissions from their parent objects or - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ ObjectPermissions set(PermissionsRequest permissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java index e2764fd0..d079aba0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java @@ -132,7 +132,9 @@ public void patch(PartialUpdate request) { /** * Set password permissions. * - *

Sets permissions on all passwords. Passwords can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java index f922d831..83d9421e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java @@ -76,7 +76,9 @@ public interface UsersService { /** * Set password permissions. * - *

Sets permissions on all passwords. Passwords can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ PasswordPermissions setPermissions(PasswordPermissionsRequest passwordPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java index 88691b4d..bf870d3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java @@ -22,8 +22,8 @@ public class GetRunRequest { private Boolean includeResolvedValues; /** - * To list the next page or the previous page of job tasks, set this field to the value of the - * `next_page_token` or `prev_page_token` returned in the GetJob response. + * To list the next page of job tasks, set this field to the value of the `next_page_token` + * returned in the GetJob response. */ @JsonIgnore @QueryParam("page_token") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java index a5ec8c8b..5b0ce638 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java @@ -341,7 +341,9 @@ public JobPermissions setPermissions(String jobId) { /** * Set job permissions. * - *

Sets permissions on a job. Jobs can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public JobPermissions setPermissions(JobPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java index 57433b9a..46696459 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java @@ -156,7 +156,9 @@ GetJobPermissionLevelsResponse getPermissionLevels( /** * Set job permissions. * - *

Sets permissions on a job. Jobs can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ JobPermissions setPermissions(JobPermissionsRequest jobPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java index d79a7699..7fb9ace3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java @@ -25,8 +25,9 @@ public class RepairRun { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index 20ace3a6..f076ba72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -130,10 +130,6 @@ public class Run { @JsonProperty("overriding_parameters") private RunParameters overridingParameters; - /** A token that can be used to list the previous page of sub-resources. */ - @JsonProperty("prev_page_token") - private String prevPageToken; - /** The time in milliseconds that the run has spent in the queue. */ @JsonProperty("queue_duration") private Long queueDuration; @@ -386,15 +382,6 @@ public RunParameters getOverridingParameters() { return overridingParameters; } - public Run setPrevPageToken(String prevPageToken) { - this.prevPageToken = prevPageToken; - return this; - } - - public String getPrevPageToken() { - return prevPageToken; - } - public Run setQueueDuration(Long queueDuration) { this.queueDuration = queueDuration; return this; @@ -553,7 +540,6 @@ public boolean equals(Object o) { && Objects.equals(numberInJob, that.numberInJob) && Objects.equals(originalAttemptRunId, that.originalAttemptRunId) && Objects.equals(overridingParameters, that.overridingParameters) - && Objects.equals(prevPageToken, that.prevPageToken) && Objects.equals(queueDuration, that.queueDuration) && Objects.equals(repairHistory, that.repairHistory) && Objects.equals(runDuration, that.runDuration) @@ -592,7 +578,6 @@ public int hashCode() { numberInJob, originalAttemptRunId, overridingParameters, - prevPageToken, queueDuration, repairHistory, runDuration, @@ -631,7 +616,6 @@ public String toString() { .add("numberInJob", numberInJob) 
.add("originalAttemptRunId", originalAttemptRunId) .add("overridingParameters", overridingParameters) - .add("prevPageToken", prevPageToken) .add("queueDuration", queueDuration) .add("repairHistory", repairHistory) .add("runDuration", runDuration) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java index eb1f27f0..593a3dc9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java @@ -25,8 +25,9 @@ public class RunJobTask { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index fce617b4..d99cb5c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -42,8 +42,9 @@ public class RunNow { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; @@ -76,6 +77,13 @@ public class RunNow { @JsonProperty("notebook_params") private Map notebookParams; + /** + * A list of task keys to run inside of the job. If this field is not provided, all tasks in the + * job will be run. + */ + @JsonProperty("only") + private Collection only; + /** Controls whether the pipeline should perform a full refresh */ @JsonProperty("pipeline_params") private PipelineParams pipelineParams; @@ -188,6 +196,15 @@ public Map getNotebookParams() { return notebookParams; } + public RunNow setOnly(Collection only) { + this.only = only; + return this; + } + + public Collection getOnly() { + return only; + } + public RunNow setPipelineParams(PipelineParams pipelineParams) { this.pipelineParams = pipelineParams; return this; @@ -253,6 +270,7 @@ public boolean equals(Object o) { && Objects.equals(jobId, that.jobId) && Objects.equals(jobParameters, that.jobParameters) && Objects.equals(notebookParams, that.notebookParams) + && Objects.equals(only, that.only) && Objects.equals(pipelineParams, that.pipelineParams) && Objects.equals(pythonNamedParams, that.pythonNamedParams) && Objects.equals(pythonParams, that.pythonParams) @@ -270,6 +288,7 @@ public int hashCode() { jobId, jobParameters, notebookParams, + only, pipelineParams, pythonNamedParams, pythonParams, @@ -287,6 +306,7 @@ public String toString() { .add("jobId", jobId) .add("jobParameters", jobParameters) .add("notebookParams", notebookParams) + .add("only", only) .add("pipelineParams", pipelineParams) .add("pythonNamedParams", pythonNamedParams) .add("pythonParams", pythonParams) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java index b76cd839..be39a1c4 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java @@ -25,8 +25,9 @@ public class RunParameters { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index 4a4b436a..07c7e410 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -38,16 +38,16 @@ public class RunTask { private ClusterInstance clusterInstance; /** - * If condition_task, specifies a condition with an outcome that can be used to control the - * execution of other tasks. Does not require a cluster to execute and does not support retries or - * notifications. + * The task evaluates a condition that can be used to control the execution of other tasks when + * the `condition_task` field is present. The condition task does not require a cluster to execute + * and does not support retries or notifications. */ @JsonProperty("condition_task") private RunConditionTask conditionTask; /** - * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - * the ability to use a serverless or a pro SQL warehouse. + * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. */ @JsonProperty("dbt_task") private DbtTask dbtTask; @@ -103,7 +103,10 @@ public class RunTask { @JsonProperty("existing_cluster_id") private String existingClusterId; - /** If for_each_task, indicates that this task must execute the nested task within it. */ + /** + * The task executes a nested task for every input provided when the `for_each_task` field is + * present. 
+ */ @JsonProperty("for_each_task") private RunForEachTask forEachTask; @@ -136,10 +139,7 @@ public class RunTask { @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; - /** - * If notebook_task, indicates that this task must run a notebook. This field may not be specified - * in conjunction with spark_jar_task. - */ + /** The task runs a notebook when the `notebook_task` field is present. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; @@ -150,11 +150,14 @@ public class RunTask { @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; - /** If pipeline_task, indicates that this task must execute a Pipeline. */ + /** + * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + * configured to use triggered more are supported. + */ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; - /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -182,7 +185,7 @@ public class RunTask { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this task must execute another job. */ + /** The task triggers another job when the `run_job_task` field is present. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; @@ -200,17 +203,17 @@ public class RunTask { @JsonProperty("setup_duration") private Long setupDuration; - /** If spark_jar_task, indicates that this task must run a JAR. */ + /** The task runs a JAR when the `spark_jar_task` field is present. */ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this task must run a Python file. */ + /** The task runs a Python file when the `spark_python_task` field is present. 
*/ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** - * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - * This task can run only on new clusters. + * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + * This task can run only on new clusters and is not compatible with serverless compute. * *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -228,7 +231,10 @@ public class RunTask { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL task. */ + /** + * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + * the `sql_task` field is present. + */ @JsonProperty("sql_task") private SqlTask sqlTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 6f8a12c1..be1e7918 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -11,16 +11,16 @@ @Generated public class SubmitTask { /** - * If condition_task, specifies a condition with an outcome that can be used to control the - * execution of other tasks. Does not require a cluster to execute and does not support retries or - * notifications. + * The task evaluates a condition that can be used to control the execution of other tasks when + * the `condition_task` field is present. The condition task does not require a cluster to execute + * and does not support retries or notifications. */ @JsonProperty("condition_task") private ConditionTask conditionTask; /** - * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - * the ability to use a serverless or a pro SQL warehouse. + * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. 
*/ @JsonProperty("dbt_task") private DbtTask dbtTask; @@ -59,7 +59,10 @@ public class SubmitTask { @JsonProperty("existing_cluster_id") private String existingClusterId; - /** If for_each_task, indicates that this task must execute the nested task within it. */ + /** + * The task executes a nested task for every input provided when the `for_each_task` field is + * present. + */ @JsonProperty("for_each_task") private ForEachTask forEachTask; @@ -78,10 +81,7 @@ public class SubmitTask { @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; - /** - * If notebook_task, indicates that this task must run a notebook. This field may not be specified - * in conjunction with spark_jar_task. - */ + /** The task runs a notebook when the `notebook_task` field is present. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; @@ -92,11 +92,14 @@ public class SubmitTask { @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; - /** If pipeline_task, indicates that this task must execute a Pipeline. */ + /** + * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + * configured to use triggered more are supported. + */ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; - /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -108,21 +111,21 @@ public class SubmitTask { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this task must execute another job. */ + /** The task triggers another job when the `run_job_task` field is present. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; - /** If spark_jar_task, indicates that this task must run a JAR. 
*/ + /** The task runs a JAR when the `spark_jar_task` field is present. */ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this task must run a Python file. */ + /** The task runs a Python file when the `spark_python_task` field is present. */ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** - * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - * This task can run only on new clusters. + * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + * This task can run only on new clusters and is not compatible with serverless compute. * *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -140,7 +143,10 @@ public class SubmitTask { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL task. */ + /** + * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + * the `sql_task` field is present. + */ @JsonProperty("sql_task") private SqlTask sqlTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 0f902639..011b3ee3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -11,16 +11,16 @@ @Generated public class Task { /** - * If condition_task, specifies a condition with an outcome that can be used to control the - * execution of other tasks. Does not require a cluster to execute and does not support retries or - * notifications. + * The task evaluates a condition that can be used to control the execution of other tasks when + * the `condition_task` field is present. The condition task does not require a cluster to execute + * and does not support retries or notifications. */ @JsonProperty("condition_task") private ConditionTask conditionTask; /** - * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - * the ability to use a serverless or a pro SQL warehouse. + * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. 
 */ @JsonProperty("dbt_task") private DbtTask dbtTask; @@ -64,7 +64,10 @@ public class Task { @JsonProperty("existing_cluster_id") private String existingClusterId; - /** If for_each_task, indicates that this task must execute the nested task within it. */ + /** + * The task executes a nested task for every input provided when the `for_each_task` field is + * present. + */ @JsonProperty("for_each_task") private ForEachTask forEachTask; @@ -106,10 +109,7 @@ public class Task { @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; - /** - * If notebook_task, indicates that this task must run a notebook. This field may not be specified - * in conjunction with spark_jar_task. - */ + /** The task runs a notebook when the `notebook_task` field is present. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; @@ -120,11 +120,14 @@ public class Task { @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; - /** If pipeline_task, indicates that this task must execute a Pipeline. */ + /** + * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + * configured to use triggered mode are supported. + */ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; - /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -148,21 +151,21 @@ public class Task { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this task must execute another job. */ + /** The task triggers another job when the `run_job_task` field is present. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; - /** If spark_jar_task, indicates that this task must run a JAR. */ + /** The task runs a JAR when the `spark_jar_task` field is present. 
*/ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this task must run a Python file. */ + /** The task runs a Python file when the `spark_python_task` field is present. */ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** - * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - * This task can run only on new clusters. + * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + * This task can run only on new clusters and is not compatible with serverless compute. * *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -180,7 +183,10 @@ public class Task { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL task. */ + /** + * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + * the `sql_task` field is present. + */ @JsonProperty("sql_task") private SqlTask sqlTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java index e37118d9..69909494 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java @@ -11,4 +11,5 @@ public enum AssetType { ASSET_TYPE_MEDIA, ASSET_TYPE_MODEL, ASSET_TYPE_NOTEBOOK, + ASSET_TYPE_PARTNER_INTEGRATION, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 7f392335..8c4c27e8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -463,7 +463,8 @@ public ExperimentPermissions setPermissions(String experimentId) { /** * Set experiment permissions. * - *

Sets permissions on an experiment. Experiments can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ public ExperimentPermissions setPermissions(ExperimentPermissionsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index c4a64162..323c848c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -274,7 +274,8 @@ ExperimentPermissions getPermissions( /** * Set experiment permissions. * - *

Sets permissions on an experiment. Experiments can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ ExperimentPermissions setPermissions(ExperimentPermissionsRequest experimentPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java index e70d4204..9c4325eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java @@ -478,8 +478,9 @@ public RegisteredModelPermissions setPermissions(String registeredModelId) { /** * Set registered model permissions. * - *

Sets permissions on a registered model. Registered models can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public RegisteredModelPermissions setPermissions(RegisteredModelPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java index bf353561..3f9dfae1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java @@ -241,8 +241,9 @@ SearchModelVersionsResponse searchModelVersions( /** * Set registered model permissions. * - *

Sets permissions on a registered model. Registered models can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ RegisteredModelPermissions setPermissions( RegisteredModelPermissionsRequest registeredModelPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java index 3ac7226b..816fb09b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java @@ -3,6 +3,7 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; @@ -10,9 +11,31 @@ /** List service principal secrets */ @Generated public class ListServicePrincipalSecretsRequest { + /** + * An opaque page token which was the `next_page_token` in the response of the previous request to + * list the secrets for this service principal. Provide this token to retrieve the next page of + * secret entries. When providing a `page_token`, all other parameters provided to the request + * must match the previous request. To list all of the secrets for a service principal, it is + * necessary to continue requesting pages of entries until the response contains no + * `next_page_token`. Note that the number of entries returned must not be used to determine when + * the listing is complete. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + /** The service principal ID. 
*/ @JsonIgnore private Long servicePrincipalId; + public ListServicePrincipalSecretsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + public ListServicePrincipalSecretsRequest setServicePrincipalId(Long servicePrincipalId) { this.servicePrincipalId = servicePrincipalId; return this; @@ -27,17 +50,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListServicePrincipalSecretsRequest that = (ListServicePrincipalSecretsRequest) o; - return Objects.equals(servicePrincipalId, that.servicePrincipalId); + return Objects.equals(pageToken, that.pageToken) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); } @Override public int hashCode() { - return Objects.hash(servicePrincipalId); + return Objects.hash(pageToken, servicePrincipalId); } @Override public String toString() { return new ToStringer(ListServicePrincipalSecretsRequest.class) + .add("pageToken", pageToken) .add("servicePrincipalId", servicePrincipalId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java index 1b0d38c8..dd971e93 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java @@ -10,10 +10,23 @@ @Generated public class ListServicePrincipalSecretsResponse { + /** A token, which can be sent as `page_token` to retrieve the next page. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + /** List of the secrets */ @JsonProperty("secrets") private Collection secrets; + public ListServicePrincipalSecretsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + public ListServicePrincipalSecretsResponse setSecrets(Collection secrets) { this.secrets = secrets; return this; @@ -28,17 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListServicePrincipalSecretsResponse that = (ListServicePrincipalSecretsResponse) o; - return Objects.equals(secrets, that.secrets); + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(secrets, that.secrets); } @Override public int hashCode() { - return Objects.hash(secrets); + return Objects.hash(nextPageToken, secrets); } @Override public String toString() { return new ToStringer(ListServicePrincipalSecretsResponse.class) + .add("nextPageToken", nextPageToken) .add("secrets", secrets) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java index bbadb41e..534617ed 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java @@ -80,7 +80,16 @@ public Iterable list(long servicePrincipalId) { */ public Iterable list(ListServicePrincipalSecretsRequest request) { return new Paginator<>( - request, impl::list, ListServicePrincipalSecretsResponse::getSecrets, response -> null); + request, + impl::list, + ListServicePrincipalSecretsResponse::getSecrets, + response -> { + String token = response.getNextPageToken(); 
+ if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); } public ServicePrincipalSecretsService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index e51d9ff1..6a35d663 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -64,7 +64,7 @@ public class CreatePipeline { @JsonProperty("filters") private Filters filters; - /** The definition of a gateway pipeline to support CDC. */ + /** The definition of a gateway pipeline to support change data capture. */ @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; @@ -95,6 +95,10 @@ public class CreatePipeline { @JsonProperty("photon") private Boolean photon; + /** Restart window of this pipeline. */ + @JsonProperty("restart_window") + private RestartWindow restartWindow; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. 
@@ -293,6 +297,15 @@ public Boolean getPhoton() { return photon; } + public CreatePipeline setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + public CreatePipeline setSchema(String schema) { this.schema = schema; return this; @@ -362,6 +375,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) + && Objects.equals(restartWindow, that.restartWindow) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -391,6 +405,7 @@ public int hashCode() { name, notifications, photon, + restartWindow, schema, serverless, storage, @@ -420,6 +435,7 @@ public String toString() { .add("name", name) .add("notifications", notifications) .add("photon", photon) + .add("restartWindow", restartWindow) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 52cbcee7..878e76bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -69,7 +69,7 @@ public class EditPipeline { @JsonProperty("filters") private Filters filters; - /** The definition of a gateway pipeline to support CDC. */ + /** The definition of a gateway pipeline to support change data capture. */ @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; @@ -104,6 +104,10 @@ public class EditPipeline { @JsonProperty("pipeline_id") private String pipelineId; + /** Restart window of this pipeline. 
*/ + @JsonProperty("restart_window") + private RestartWindow restartWindow; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. @@ -311,6 +315,15 @@ public String getPipelineId() { return pipelineId; } + public EditPipeline setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + public EditPipeline setSchema(String schema) { this.schema = schema; return this; @@ -381,6 +394,7 @@ public boolean equals(Object o) { && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(restartWindow, that.restartWindow) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -411,6 +425,7 @@ public int hashCode() { notifications, photon, pipelineId, + restartWindow, schema, serverless, storage, @@ -441,6 +456,7 @@ public String toString() { .add("notifications", notifications) .add("photon", photon) .add("pipelineId", pipelineId) + .add("restartWindow", restartWindow) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java index 92f853ae..c1ed47fe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java @@ -9,15 +9,15 @@ @Generated public class IngestionConfig { - /** Select tables from a specific source report. */ + /** Select a specific source report. 
*/ @JsonProperty("report") private ReportSpec report; - /** Select tables from a specific source schema. */ + /** Select all tables from a specific source schema. */ @JsonProperty("schema") private SchemaSpec schema; - /** Select tables from a specific source table. */ + /** Select a specific source table. */ @JsonProperty("table") private TableSpec table; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java index e989241e..58142faf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java @@ -10,12 +10,19 @@ @Generated public class IngestionGatewayPipelineDefinition { /** - * Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the - * source. + * [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this + * gateway pipeline uses to communicate with the source. */ @JsonProperty("connection_id") private String connectionId; + /** + * Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the + * source. + */ + @JsonProperty("connection_name") + private String connectionName; + /** Required, Immutable. The name of the catalog for the gateway pipeline's storage location. 
*/ @JsonProperty("gateway_storage_catalog") private String gatewayStorageCatalog; @@ -41,6 +48,15 @@ public String getConnectionId() { return connectionId; } + public IngestionGatewayPipelineDefinition setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + public IngestionGatewayPipelineDefinition setGatewayStorageCatalog(String gatewayStorageCatalog) { this.gatewayStorageCatalog = gatewayStorageCatalog; return this; @@ -74,6 +90,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; IngestionGatewayPipelineDefinition that = (IngestionGatewayPipelineDefinition) o; return Objects.equals(connectionId, that.connectionId) + && Objects.equals(connectionName, that.connectionName) && Objects.equals(gatewayStorageCatalog, that.gatewayStorageCatalog) && Objects.equals(gatewayStorageName, that.gatewayStorageName) && Objects.equals(gatewayStorageSchema, that.gatewayStorageSchema); @@ -82,13 +99,18 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - connectionId, gatewayStorageCatalog, gatewayStorageName, gatewayStorageSchema); + connectionId, + connectionName, + gatewayStorageCatalog, + gatewayStorageName, + gatewayStorageSchema); } @Override public String toString() { return new ToStringer(IngestionGatewayPipelineDefinition.class) .add("connectionId", connectionId) + .add("connectionName", connectionName) .add("gatewayStorageCatalog", gatewayStorageCatalog) .add("gatewayStorageName", gatewayStorageName) .add("gatewayStorageSchema", gatewayStorageSchema) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java index 7fa3f4d9..8cd10563 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java @@ -11,15 +11,15 @@ @Generated public class IngestionPipelineDefinition { /** - * Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the - * source. Specify either ingestion_gateway_id or connection_name. + * Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with + * the source. This is used with connectors for applications like Salesforce, Workday, and so on. */ @JsonProperty("connection_name") private String connectionName; /** - * Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate - * with the source. Specify either ingestion_gateway_id or connection_name. + * Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate + * with the source database. This is used with connectors to databases like SQL Server. */ @JsonProperty("ingestion_gateway_id") private String ingestionGatewayId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index c880ee65..26aecddf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -56,7 +56,7 @@ public class PipelineSpec { @JsonProperty("filters") private Filters filters; - /** The definition of a gateway pipeline to support CDC. */ + /** The definition of a gateway pipeline to support change data capture. 
*/ @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; @@ -87,6 +87,10 @@ public class PipelineSpec { @JsonProperty("photon") private Boolean photon; + /** Restart window of this pipeline. */ + @JsonProperty("restart_window") + private RestartWindow restartWindow; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. @@ -267,6 +271,15 @@ public Boolean getPhoton() { return photon; } + public PipelineSpec setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + public PipelineSpec setSchema(String schema) { this.schema = schema; return this; @@ -334,6 +347,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) + && Objects.equals(restartWindow, that.restartWindow) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -361,6 +375,7 @@ public int hashCode() { name, notifications, photon, + restartWindow, schema, serverless, storage, @@ -388,6 +403,7 @@ public String toString() { .add("name", name) .add("notifications", notifications) .add("photon", photon) + .add("restartWindow", restartWindow) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index 9e00ea3c..92117d39 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -44,15 +44,15 @@ public 
PipelinesAPI(PipelinesService mock) { impl = mock; } - public GetPipelineResponse waitGetPipelineIdle(String pipelineId) throws TimeoutException { - return waitGetPipelineIdle(pipelineId, Duration.ofMinutes(20), null); + public GetPipelineResponse waitGetPipelineRunning(String pipelineId) throws TimeoutException { + return waitGetPipelineRunning(pipelineId, Duration.ofMinutes(20), null); } - public GetPipelineResponse waitGetPipelineIdle( + public GetPipelineResponse waitGetPipelineRunning( String pipelineId, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(PipelineState.IDLE); + java.util.List targetStates = Arrays.asList(PipelineState.RUNNING); java.util.List failureStates = Arrays.asList(PipelineState.FAILED); String statusMessage = "polling..."; int attempt = 1; @@ -67,7 +67,7 @@ public GetPipelineResponse waitGetPipelineIdle( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach IDLE, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } @@ -89,15 +89,15 @@ public GetPipelineResponse waitGetPipelineIdle( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public GetPipelineResponse waitGetPipelineRunning(String pipelineId) throws TimeoutException { - return waitGetPipelineRunning(pipelineId, Duration.ofMinutes(20), null); + public GetPipelineResponse waitGetPipelineIdle(String pipelineId) throws TimeoutException { + return waitGetPipelineIdle(pipelineId, Duration.ofMinutes(20), null); } - public GetPipelineResponse waitGetPipelineRunning( + public GetPipelineResponse waitGetPipelineIdle( String pipelineId, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + 
timeout.toMillis(); - java.util.List targetStates = Arrays.asList(PipelineState.RUNNING); + java.util.List targetStates = Arrays.asList(PipelineState.IDLE); java.util.List failureStates = Arrays.asList(PipelineState.FAILED); String statusMessage = "polling..."; int attempt = 1; @@ -112,7 +112,7 @@ public GetPipelineResponse waitGetPipelineRunning( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach IDLE, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } @@ -269,7 +269,9 @@ public PipelinePermissions setPermissions(String pipelineId) { /** * Set pipeline permissions. * - *

Sets permissions on a pipeline. Pipelines can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public PipelinePermissions setPermissions(PipelinePermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java index 127bbb06..332eabdc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java @@ -90,7 +90,9 @@ ListPipelineEventsResponse listPipelineEvents( /** * Set pipeline permissions. * - *

Sets permissions on a pipeline. Pipelines can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ PipelinePermissions setPermissions(PipelinePermissionsRequest pipelinePermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java new file mode 100755 index 00000000..3156277a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RestartWindow { + /** + * Days of week in which the restart is allowed to happen (within a five-hour window starting at + * start_hour). If not specified all days of the week will be used. + */ + @JsonProperty("days_of_week") + private RestartWindowDaysOfWeek daysOfWeek; + + /** + * An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day. + * Continuous pipeline restart is triggered only within a five-hour window starting at this hour. + */ + @JsonProperty("start_hour") + private Long startHour; + + /** + * Time zone id of restart window. See + * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html + * for details. If not specified, UTC will be used. 
+ */ + @JsonProperty("time_zone_id") + private String timeZoneId; + + public RestartWindow setDaysOfWeek(RestartWindowDaysOfWeek daysOfWeek) { + this.daysOfWeek = daysOfWeek; + return this; + } + + public RestartWindowDaysOfWeek getDaysOfWeek() { + return daysOfWeek; + } + + public RestartWindow setStartHour(Long startHour) { + this.startHour = startHour; + return this; + } + + public Long getStartHour() { + return startHour; + } + + public RestartWindow setTimeZoneId(String timeZoneId) { + this.timeZoneId = timeZoneId; + return this; + } + + public String getTimeZoneId() { + return timeZoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestartWindow that = (RestartWindow) o; + return Objects.equals(daysOfWeek, that.daysOfWeek) + && Objects.equals(startHour, that.startHour) + && Objects.equals(timeZoneId, that.timeZoneId); + } + + @Override + public int hashCode() { + return Objects.hash(daysOfWeek, startHour, timeZoneId); + } + + @Override + public String toString() { + return new ToStringer(RestartWindow.class) + .add("daysOfWeek", daysOfWeek) + .add("startHour", startHour) + .add("timeZoneId", timeZoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java new file mode 100755 index 00000000..37bf738a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** + * Days of week in which the restart is allowed to happen (within a five-hour window starting at + * start_hour). 
If not specified all days of the week will be used. + */ +@Generated +public enum RestartWindowDaysOfWeek { + FRIDAY, + MONDAY, + SATURDAY, + SUNDAY, + THURSDAY, + TUESDAY, + WEDNESDAY, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java index 1d928eaa..2cc6ec80 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java @@ -93,6 +93,10 @@ public class CreateWorkspaceRequest { @JsonProperty("gke_config") private GkeConfig gkeConfig; + /** Whether no public IP is enabled for the workspace. */ + @JsonProperty("is_no_public_ip_enabled") + private Boolean isNoPublicIpEnabled; + /** * The Google Cloud region of the workspace data plane in your Google account. For example, * `us-east4`. 
@@ -225,6 +229,15 @@ public GkeConfig getGkeConfig() { return gkeConfig; } + public CreateWorkspaceRequest setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { + this.isNoPublicIpEnabled = isNoPublicIpEnabled; + return this; + } + + public Boolean getIsNoPublicIpEnabled() { + return isNoPublicIpEnabled; + } + public CreateWorkspaceRequest setLocation(String location) { this.location = location; return this; @@ -311,6 +324,7 @@ public boolean equals(Object o) { && Objects.equals(deploymentName, that.deploymentName) && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) && Objects.equals(gkeConfig, that.gkeConfig) + && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) && Objects.equals(location, that.location) && Objects.equals( managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) @@ -333,6 +347,7 @@ public int hashCode() { deploymentName, gcpManagedNetworkConfig, gkeConfig, + isNoPublicIpEnabled, location, managedServicesCustomerManagedKeyId, networkId, @@ -354,6 +369,7 @@ public String toString() { .add("deploymentName", deploymentName) .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) .add("gkeConfig", gkeConfig) + .add("isNoPublicIpEnabled", isNoPublicIpEnabled) .add("location", location) .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkId", networkId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java new file mode 100755 index 00000000..7654c68e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExternalCustomerInfo { + /** Email of the authoritative user. */ + @JsonProperty("authoritative_user_email") + private String authoritativeUserEmail; + + /** The authoritative user full name. */ + @JsonProperty("authoritative_user_full_name") + private String authoritativeUserFullName; + + /** The legal entity name for the external workspace */ + @JsonProperty("customer_name") + private String customerName; + + public ExternalCustomerInfo setAuthoritativeUserEmail(String authoritativeUserEmail) { + this.authoritativeUserEmail = authoritativeUserEmail; + return this; + } + + public String getAuthoritativeUserEmail() { + return authoritativeUserEmail; + } + + public ExternalCustomerInfo setAuthoritativeUserFullName(String authoritativeUserFullName) { + this.authoritativeUserFullName = authoritativeUserFullName; + return this; + } + + public String getAuthoritativeUserFullName() { + return authoritativeUserFullName; + } + + public ExternalCustomerInfo setCustomerName(String customerName) { + this.customerName = customerName; + return this; + } + + public String getCustomerName() { + return customerName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalCustomerInfo that = (ExternalCustomerInfo) o; + return Objects.equals(authoritativeUserEmail, that.authoritativeUserEmail) + && Objects.equals(authoritativeUserFullName, that.authoritativeUserFullName) + && Objects.equals(customerName, that.customerName); + } + + @Override + public int hashCode() { + return Objects.hash(authoritativeUserEmail, authoritativeUserFullName, customerName); + } + + @Override + public String toString() { + return new 
ToStringer(ExternalCustomerInfo.class) + .add("authoritativeUserEmail", authoritativeUserEmail) + .add("authoritativeUserFullName", authoritativeUserFullName) + .add("customerName", customerName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java index cfb8817a..a690adac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java @@ -52,6 +52,13 @@ public class UpdateWorkspaceRequest { @JsonProperty("network_id") private String networkId; + /** + * The ID of the workspace's private access settings configuration object. This parameter is + * available only for updating failed workspaces. + */ + @JsonProperty("private_access_settings_id") + private String privateAccessSettingsId; + /** * The ID of the workspace's storage configuration object. This parameter is available only for * updating failed workspaces. 
@@ -124,6 +131,15 @@ public String getNetworkId() { return networkId; } + public UpdateWorkspaceRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + public UpdateWorkspaceRequest setStorageConfigurationId(String storageConfigurationId) { this.storageConfigurationId = storageConfigurationId; return this; @@ -163,6 +179,7 @@ public boolean equals(Object o) { managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(networkId, that.networkId) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) && Objects.equals(storageConfigurationId, that.storageConfigurationId) && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) && Objects.equals(workspaceId, that.workspaceId); @@ -177,6 +194,7 @@ public int hashCode() { managedServicesCustomerManagedKeyId, networkConnectivityConfigId, networkId, + privateAccessSettingsId, storageConfigurationId, storageCustomerManagedKeyId, workspaceId); @@ -191,6 +209,7 @@ public String toString() { .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("networkId", networkId) + .add("privateAccessSettingsId", privateAccessSettingsId) .add("storageConfigurationId", storageConfigurationId) .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) .add("workspaceId", workspaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java index b6abc5a8..4d6b61c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java @@ -55,6 +55,13 @@ public class Workspace { @JsonProperty("deployment_name") private String deploymentName; + /** + * If this workspace is for an external customer, then external_customer_info is populated. If this + * workspace is not for an external customer, then external_customer_info is empty. + */ + @JsonProperty("external_customer_info") + private ExternalCustomerInfo externalCustomerInfo; + /** * The network settings for the workspace. The configurations are only for Databricks-managed * VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the @@ -83,6 +90,10 @@ public class Workspace { @JsonProperty("gke_config") private GkeConfig gkeConfig; + /** Whether no public IP is enabled for the workspace. */ + @JsonProperty("is_no_public_ip_enabled") + private Boolean isNoPublicIpEnabled; + /** * The Google Cloud region of the workspace data plane in your Google account (for example, * `us-east4`). 
@@ -231,6 +242,15 @@ public String getDeploymentName() { return deploymentName; } + public Workspace setExternalCustomerInfo(ExternalCustomerInfo externalCustomerInfo) { + this.externalCustomerInfo = externalCustomerInfo; + return this; + } + + public ExternalCustomerInfo getExternalCustomerInfo() { + return externalCustomerInfo; + } + public Workspace setGcpManagedNetworkConfig(GcpManagedNetworkConfig gcpManagedNetworkConfig) { this.gcpManagedNetworkConfig = gcpManagedNetworkConfig; return this; @@ -249,6 +269,15 @@ public GkeConfig getGkeConfig() { return gkeConfig; } + public Workspace setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { + this.isNoPublicIpEnabled = isNoPublicIpEnabled; + return this; + } + + public Boolean getIsNoPublicIpEnabled() { + return isNoPublicIpEnabled; + } + public Workspace setLocation(String location) { this.location = location; return this; @@ -363,8 +392,10 @@ public boolean equals(Object o) { && Objects.equals(credentialsId, that.credentialsId) && Objects.equals(customTags, that.customTags) && Objects.equals(deploymentName, that.deploymentName) + && Objects.equals(externalCustomerInfo, that.externalCustomerInfo) && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) && Objects.equals(gkeConfig, that.gkeConfig) + && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) && Objects.equals(location, that.location) && Objects.equals( managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) @@ -391,8 +422,10 @@ public int hashCode() { credentialsId, customTags, deploymentName, + externalCustomerInfo, gcpManagedNetworkConfig, gkeConfig, + isNoPublicIpEnabled, location, managedServicesCustomerManagedKeyId, networkId, @@ -418,8 +451,10 @@ public String toString() { .add("credentialsId", credentialsId) .add("customTags", customTags) .add("deploymentName", deploymentName) + .add("externalCustomerInfo", externalCustomerInfo) .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) 
.add("gkeConfig", gkeConfig) + .add("isNoPublicIpEnabled", isNoPublicIpEnabled) .add("location", location) .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkId", networkId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java index 5ef50b90..37c8b27b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java @@ -276,8 +276,9 @@ public ServingEndpointPermissions setPermissions(String servingEndpointId) { /** * Set serving endpoint permissions. * - *

Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public ServingEndpointPermissions setPermissions(ServingEndpointPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java index f35cb2a7..5a42d11c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java @@ -114,8 +114,9 @@ ServingEndpointPermissions getPermissions( /** * Set serving endpoint permissions. * - *

Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ ServingEndpointPermissions setPermissions( ServingEndpointPermissionsRequest servingEndpointPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java new file mode 100755 index 00000000..698c7863 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingAccessPolicy { + /** */ + @JsonProperty("access_policy_type") + private AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType; + + public AibiDashboardEmbeddingAccessPolicy setAccessPolicyType( + AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType) { + this.accessPolicyType = accessPolicyType; + return this; + } + + public AibiDashboardEmbeddingAccessPolicyAccessPolicyType getAccessPolicyType() { + return accessPolicyType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingAccessPolicy that = (AibiDashboardEmbeddingAccessPolicy) o; + return Objects.equals(accessPolicyType, that.accessPolicyType); + } + + @Override + public int hashCode() { + return Objects.hash(accessPolicyType); + } + + @Override + public String toString() { + return new 
ToStringer(AibiDashboardEmbeddingAccessPolicy.class) + .add("accessPolicyType", accessPolicyType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java new file mode 100755 index 00000000..35af0c8f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or + * disabled at the workspace level. By default, this setting is conditionally enabled + * (ALLOW_APPROVED_DOMAINS). + */ +@Generated +public class AibiDashboardEmbeddingAccessPolicyAPI { + private static final Logger LOG = + LoggerFactory.getLogger(AibiDashboardEmbeddingAccessPolicyAPI.class); + + private final AibiDashboardEmbeddingAccessPolicyService impl; + + /** Regular-use constructor */ + public AibiDashboardEmbeddingAccessPolicyAPI(ApiClient apiClient) { + impl = new AibiDashboardEmbeddingAccessPolicyImpl(apiClient); + } + + /** Constructor for mocks */ + public AibiDashboardEmbeddingAccessPolicyAPI(AibiDashboardEmbeddingAccessPolicyService mock) { + impl = mock; + } + + /** + * Retrieve the AI/BI dashboard embedding access policy. + * + *

Retrieves the AI/BI dashboard embedding access policy. The default setting is + * ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. + */ + public AibiDashboardEmbeddingAccessPolicySetting get( + GetAibiDashboardEmbeddingAccessPolicySettingRequest request) { + return impl.get(request); + } + + public AibiDashboardEmbeddingAccessPolicySetting update( + boolean allowMissing, AibiDashboardEmbeddingAccessPolicySetting setting, String fieldMask) { + return update( + new UpdateAibiDashboardEmbeddingAccessPolicySettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the AI/BI dashboard embedding access policy. + * + *

Updates the AI/BI dashboard embedding access policy at the workspace level. + */ + public AibiDashboardEmbeddingAccessPolicySetting update( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest request) { + return impl.update(request); + } + + public AibiDashboardEmbeddingAccessPolicyService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java new file mode 100755 index 00000000..7fc964b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AibiDashboardEmbeddingAccessPolicyAccessPolicyType { + ALLOW_ALL_DOMAINS, + ALLOW_APPROVED_DOMAINS, + DENY_ALL_DOMAINS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java new file mode 100755 index 00000000..b2736799 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java @@ -0,0 +1,36 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of AibiDashboardEmbeddingAccessPolicy */ +@Generated +class AibiDashboardEmbeddingAccessPolicyImpl implements AibiDashboardEmbeddingAccessPolicyService { + private final ApiClient apiClient; + + public AibiDashboardEmbeddingAccessPolicyImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AibiDashboardEmbeddingAccessPolicySetting get( + GetAibiDashboardEmbeddingAccessPolicySettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, AibiDashboardEmbeddingAccessPolicySetting.class, headers); + } + + @Override + public AibiDashboardEmbeddingAccessPolicySetting update( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, AibiDashboardEmbeddingAccessPolicySetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java new file mode 100755 index 00000000..cf7a24f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java @@ -0,0 +1,35 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or + * disabled at the workspace level. By default, this setting is conditionally enabled + * (ALLOW_APPROVED_DOMAINS). + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface AibiDashboardEmbeddingAccessPolicyService { + /** + * Retrieve the AI/BI dashboard embedding access policy. + * + *

Retrieves the AI/BI dashboard embedding access policy. The default setting is + * ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. + */ + AibiDashboardEmbeddingAccessPolicySetting get( + GetAibiDashboardEmbeddingAccessPolicySettingRequest + getAibiDashboardEmbeddingAccessPolicySettingRequest); + + /** + * Update the AI/BI dashboard embedding access policy. + * + *

Updates the AI/BI dashboard embedding access policy at the workspace level. + */ + AibiDashboardEmbeddingAccessPolicySetting update( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest + updateAibiDashboardEmbeddingAccessPolicySettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java new file mode 100755 index 00000000..ead0e1b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingAccessPolicySetting { + /** */ + @JsonProperty("aibi_dashboard_embedding_access_policy") + private AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. 
The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public AibiDashboardEmbeddingAccessPolicySetting setAibiDashboardEmbeddingAccessPolicy( + AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy) { + this.aibiDashboardEmbeddingAccessPolicy = aibiDashboardEmbeddingAccessPolicy; + return this; + } + + public AibiDashboardEmbeddingAccessPolicy getAibiDashboardEmbeddingAccessPolicy() { + return aibiDashboardEmbeddingAccessPolicy; + } + + public AibiDashboardEmbeddingAccessPolicySetting setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AibiDashboardEmbeddingAccessPolicySetting setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingAccessPolicySetting that = (AibiDashboardEmbeddingAccessPolicySetting) o; + return Objects.equals( + aibiDashboardEmbeddingAccessPolicy, that.aibiDashboardEmbeddingAccessPolicy) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(aibiDashboardEmbeddingAccessPolicy, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingAccessPolicySetting.class) + .add("aibiDashboardEmbeddingAccessPolicy", aibiDashboardEmbeddingAccessPolicy) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java new file mode 100755 index 00000000..ecfa5097 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingApprovedDomains { + /** */ + @JsonProperty("approved_domains") + private Collection approvedDomains; + + public AibiDashboardEmbeddingApprovedDomains setApprovedDomains( + Collection approvedDomains) { + this.approvedDomains = approvedDomains; + return this; + } + + public Collection getApprovedDomains() { + return approvedDomains; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingApprovedDomains that = (AibiDashboardEmbeddingApprovedDomains) o; + return Objects.equals(approvedDomains, that.approvedDomains); + } + + @Override + public int hashCode() { + return Objects.hash(approvedDomains); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingApprovedDomains.class) + .add("approvedDomains", approvedDomains) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java new file mode 100755 index 00000000..6a096aca --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java @@ -0,0 +1,66 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains + * list can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS. + */ +@Generated +public class AibiDashboardEmbeddingApprovedDomainsAPI { + private static final Logger LOG = + LoggerFactory.getLogger(AibiDashboardEmbeddingApprovedDomainsAPI.class); + + private final AibiDashboardEmbeddingApprovedDomainsService impl; + + /** Regular-use constructor */ + public AibiDashboardEmbeddingApprovedDomainsAPI(ApiClient apiClient) { + impl = new AibiDashboardEmbeddingApprovedDomainsImpl(apiClient); + } + + /** Constructor for mocks */ + public AibiDashboardEmbeddingApprovedDomainsAPI( + AibiDashboardEmbeddingApprovedDomainsService mock) { + impl = mock; + } + + /** + * Retrieve the list of domains approved to host embedded AI/BI dashboards. + * + *

Retrieves the list of domains approved to host embedded AI/BI dashboards. + */ + public AibiDashboardEmbeddingApprovedDomainsSetting get( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + return impl.get(request); + } + + public AibiDashboardEmbeddingApprovedDomainsSetting update( + boolean allowMissing, + AibiDashboardEmbeddingApprovedDomainsSetting setting, + String fieldMask) { + return update( + new UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the list of domains approved to host embedded AI/BI dashboards. + * + *

Updates the list of domains approved to host embedded AI/BI dashboards. This update will + * fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. + */ + public AibiDashboardEmbeddingApprovedDomainsSetting update( + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + return impl.update(request); + } + + public AibiDashboardEmbeddingApprovedDomainsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java new file mode 100755 index 00000000..e026484e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java @@ -0,0 +1,39 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of AibiDashboardEmbeddingApprovedDomains */ +@Generated +class AibiDashboardEmbeddingApprovedDomainsImpl + implements AibiDashboardEmbeddingApprovedDomainsService { + private final ApiClient apiClient; + + public AibiDashboardEmbeddingApprovedDomainsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AibiDashboardEmbeddingApprovedDomainsSetting get( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET( + path, request, AibiDashboardEmbeddingApprovedDomainsSetting.class, headers); + } + + @Override + public AibiDashboardEmbeddingApprovedDomainsSetting update( + 
UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH( + path, request, AibiDashboardEmbeddingApprovedDomainsSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java new file mode 100755 index 00000000..ec55a9a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java @@ -0,0 +1,34 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains + * list can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface AibiDashboardEmbeddingApprovedDomainsService { + /** + * Retrieve the list of domains approved to host embedded AI/BI dashboards. + * + *

Retrieves the list of domains approved to host embedded AI/BI dashboards. + */ + AibiDashboardEmbeddingApprovedDomainsSetting get( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest + getAibiDashboardEmbeddingApprovedDomainsSettingRequest); + + /** + * Update the list of domains approved to host embedded AI/BI dashboards. + * + *

Updates the list of domains approved to host embedded AI/BI dashboards. This update will + * fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. + */ + AibiDashboardEmbeddingApprovedDomainsSetting update( + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest + updateAibiDashboardEmbeddingApprovedDomainsSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java new file mode 100755 index 00000000..14c06081 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingApprovedDomainsSetting { + /** */ + @JsonProperty("aibi_dashboard_embedding_approved_domains") + private AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. 
This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public AibiDashboardEmbeddingApprovedDomainsSetting setAibiDashboardEmbeddingApprovedDomains( + AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains) { + this.aibiDashboardEmbeddingApprovedDomains = aibiDashboardEmbeddingApprovedDomains; + return this; + } + + public AibiDashboardEmbeddingApprovedDomains getAibiDashboardEmbeddingApprovedDomains() { + return aibiDashboardEmbeddingApprovedDomains; + } + + public AibiDashboardEmbeddingApprovedDomainsSetting setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AibiDashboardEmbeddingApprovedDomainsSetting setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingApprovedDomainsSetting that = + (AibiDashboardEmbeddingApprovedDomainsSetting) o; + return Objects.equals( + aibiDashboardEmbeddingApprovedDomains, that.aibiDashboardEmbeddingApprovedDomains) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(aibiDashboardEmbeddingApprovedDomains, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingApprovedDomainsSetting.class) + .add("aibiDashboardEmbeddingApprovedDomains", aibiDashboardEmbeddingApprovedDomains) + .add("etag", etag) + .add("settingName", settingName) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java new file mode 100755 index 00000000..48a8c391 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve the AI/BI dashboard embedding access policy */ +@Generated +public class GetAibiDashboardEmbeddingAccessPolicySettingRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. 
+ */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAibiDashboardEmbeddingAccessPolicySettingRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAibiDashboardEmbeddingAccessPolicySettingRequest that = + (GetAibiDashboardEmbeddingAccessPolicySettingRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetAibiDashboardEmbeddingAccessPolicySettingRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java new file mode 100755 index 00000000..a9db24cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve the list of domains approved to host embedded AI/BI dashboards */ +@Generated +public class GetAibiDashboardEmbeddingApprovedDomainsSettingRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. + */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAibiDashboardEmbeddingApprovedDomainsSettingRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest that = + (GetAibiDashboardEmbeddingApprovedDomainsSettingRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index e7f1c92a..f1ba301d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -13,6 +13,10 @@ public class SettingsAPI { private final SettingsService impl; + private AibiDashboardEmbeddingAccessPolicyAPI aibiDashboardEmbeddingAccessPolicyAPI; + + private AibiDashboardEmbeddingApprovedDomainsAPI aibiDashboardEmbeddingApprovedDomainsAPI; + private AutomaticClusterUpdateAPI automaticClusterUpdateAPI; private 
ComplianceSecurityProfileAPI complianceSecurityProfileAPI; @@ -31,6 +35,11 @@ public class SettingsAPI { public SettingsAPI(ApiClient apiClient) { impl = new SettingsImpl(apiClient); + aibiDashboardEmbeddingAccessPolicyAPI = new AibiDashboardEmbeddingAccessPolicyAPI(apiClient); + + aibiDashboardEmbeddingApprovedDomainsAPI = + new AibiDashboardEmbeddingApprovedDomainsAPI(apiClient); + automaticClusterUpdateAPI = new AutomaticClusterUpdateAPI(apiClient); complianceSecurityProfileAPI = new ComplianceSecurityProfileAPI(apiClient); @@ -51,6 +60,19 @@ public SettingsAPI(SettingsService mock) { impl = mock; } + /** + * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or + * disabled at the workspace level. + */ + public AibiDashboardEmbeddingAccessPolicyAPI AibiDashboardEmbeddingAccessPolicy() { + return aibiDashboardEmbeddingAccessPolicyAPI; + } + + /** Controls the list of domains approved to host the embedded AI/BI dashboards. */ + public AibiDashboardEmbeddingApprovedDomainsAPI AibiDashboardEmbeddingApprovedDomains() { + return aibiDashboardEmbeddingApprovedDomainsAPI; + } + /** Controls whether automatic cluster update is enabled for the current workspace. */ public AutomaticClusterUpdateAPI AutomaticClusterUpdate() { return automaticClusterUpdateAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java index 20c4a6bf..08e22738 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java @@ -29,6 +29,10 @@ public class TokenInfo { @JsonProperty("expiry_time") private Long expiryTime; + /** Approximate timestamp for the day the token was last used. Accurate up to 1 day. 
*/ + @JsonProperty("last_used_day") + private Long lastUsedDay; + /** User ID of the user that owns the token. */ @JsonProperty("owner_id") private Long ownerId; @@ -86,6 +90,15 @@ public Long getExpiryTime() { return expiryTime; } + public TokenInfo setLastUsedDay(Long lastUsedDay) { + this.lastUsedDay = lastUsedDay; + return this; + } + + public Long getLastUsedDay() { + return lastUsedDay; + } + public TokenInfo setOwnerId(Long ownerId) { this.ownerId = ownerId; return this; @@ -123,6 +136,7 @@ public boolean equals(Object o) { && Objects.equals(createdByUsername, that.createdByUsername) && Objects.equals(creationTime, that.creationTime) && Objects.equals(expiryTime, that.expiryTime) + && Objects.equals(lastUsedDay, that.lastUsedDay) && Objects.equals(ownerId, that.ownerId) && Objects.equals(tokenId, that.tokenId) && Objects.equals(workspaceId, that.workspaceId); @@ -136,6 +150,7 @@ public int hashCode() { createdByUsername, creationTime, expiryTime, + lastUsedDay, ownerId, tokenId, workspaceId); @@ -149,6 +164,7 @@ public String toString() { .add("createdByUsername", createdByUsername) .add("creationTime", creationTime) .add("expiryTime", expiryTime) + .add("lastUsedDay", lastUsedDay) .add("ownerId", ownerId) .add("tokenId", tokenId) .add("workspaceId", workspaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java index f105eef5..5e531153 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java @@ -97,7 +97,9 @@ public Iterable list(ListTokenManagementRequest request) { /** * Set token permissions. * - *

Sets permissions on all tokens. Tokens can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public TokenPermissions setPermissions(TokenPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java index ab02b56a..ad46b6ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java @@ -58,7 +58,9 @@ public interface TokenManagementService { /** * Set token permissions. * - *

Sets permissions on all tokens. Tokens can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ TokenPermissions setPermissions(TokenPermissionsRequest tokenPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java new file mode 100755 index 00000000..9e8a2ff8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java @@ -0,0 +1,82 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * Field mask is required to be passed into the PATCH request. Field mask specifies which fields + * of the setting payload will be updated. The field mask needs to be supplied as single string. + * To specify multiple fields in the field mask, use comma as the separator (no space). 
+ */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private AibiDashboardEmbeddingAccessPolicySetting setting; + + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setAllowMissing( + Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setSetting( + AibiDashboardEmbeddingAccessPolicySetting setting) { + this.setting = setting; + return this; + } + + public AibiDashboardEmbeddingAccessPolicySetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest that = + (UpdateAibiDashboardEmbeddingAccessPolicySettingRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java new file mode 100755 index 00000000..a3e7de0d --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java @@ -0,0 +1,82 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Details required to update a setting. */ +@Generated +public class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest { + /** This should always be set to true for Settings API. Added for AIP compliance. */ + @JsonProperty("allow_missing") + private Boolean allowMissing; + + /** + * Field mask is required to be passed into the PATCH request. Field mask specifies which fields + * of the setting payload will be updated. The field mask needs to be supplied as single string. + * To specify multiple fields in the field mask, use comma as the separator (no space). 
+ */ + @JsonProperty("field_mask") + private String fieldMask; + + /** */ + @JsonProperty("setting") + private AibiDashboardEmbeddingApprovedDomainsSetting setting; + + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setAllowMissing( + Boolean allowMissing) { + this.allowMissing = allowMissing; + return this; + } + + public Boolean getAllowMissing() { + return allowMissing; + } + + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setFieldMask(String fieldMask) { + this.fieldMask = fieldMask; + return this; + } + + public String getFieldMask() { + return fieldMask; + } + + public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setSetting( + AibiDashboardEmbeddingApprovedDomainsSetting setting) { + this.setting = setting; + return this; + } + + public AibiDashboardEmbeddingApprovedDomainsSetting getSetting() { + return setting; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest that = + (UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest) o; + return Objects.equals(allowMissing, that.allowMissing) + && Objects.equals(fieldMask, that.fieldMask) + && Objects.equals(setting, that.setting); + } + + @Override + public int hashCode() { + return Objects.hash(allowMissing, fieldMask, setting); + } + + @Override + public String toString() { + return new ToStringer(UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.class) + .add("allowMissing", allowMissing) + .add("fieldMask", fieldMask) + .add("setting", setting) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java deleted file mode 100755 index b9bc1f6b..00000000 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java +++ /dev/null @@ -1,109 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CentralCleanRoomInfo { - /** - * All assets from all collaborators that are available in the clean room. Only one of table_info - * or notebook_info will be filled in. - */ - @JsonProperty("clean_room_assets") - private Collection cleanRoomAssets; - - /** All collaborators who are in the clean room. */ - @JsonProperty("collaborators") - private Collection collaborators; - - /** The collaborator who created the clean room. */ - @JsonProperty("creator") - private CleanRoomCollaboratorInfo creator; - - /** The cloud where clean room tasks will be run. */ - @JsonProperty("station_cloud") - private String stationCloud; - - /** The region where clean room tasks will be run. 
*/ - @JsonProperty("station_region") - private String stationRegion; - - public CentralCleanRoomInfo setCleanRoomAssets(Collection cleanRoomAssets) { - this.cleanRoomAssets = cleanRoomAssets; - return this; - } - - public Collection getCleanRoomAssets() { - return cleanRoomAssets; - } - - public CentralCleanRoomInfo setCollaborators( - Collection collaborators) { - this.collaborators = collaborators; - return this; - } - - public Collection getCollaborators() { - return collaborators; - } - - public CentralCleanRoomInfo setCreator(CleanRoomCollaboratorInfo creator) { - this.creator = creator; - return this; - } - - public CleanRoomCollaboratorInfo getCreator() { - return creator; - } - - public CentralCleanRoomInfo setStationCloud(String stationCloud) { - this.stationCloud = stationCloud; - return this; - } - - public String getStationCloud() { - return stationCloud; - } - - public CentralCleanRoomInfo setStationRegion(String stationRegion) { - this.stationRegion = stationRegion; - return this; - } - - public String getStationRegion() { - return stationRegion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CentralCleanRoomInfo that = (CentralCleanRoomInfo) o; - return Objects.equals(cleanRoomAssets, that.cleanRoomAssets) - && Objects.equals(collaborators, that.collaborators) - && Objects.equals(creator, that.creator) - && Objects.equals(stationCloud, that.stationCloud) - && Objects.equals(stationRegion, that.stationRegion); - } - - @Override - public int hashCode() { - return Objects.hash(cleanRoomAssets, collaborators, creator, stationCloud, stationRegion); - } - - @Override - public String toString() { - return new ToStringer(CentralCleanRoomInfo.class) - .add("cleanRoomAssets", cleanRoomAssets) - .add("collaborators", collaborators) - .add("creator", creator) - .add("stationCloud", stationCloud) - .add("stationRegion", stationRegion) - .toString(); - } -} diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java deleted file mode 100755 index bd09e02c..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java +++ /dev/null @@ -1,104 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomAssetInfo { - /** Time at which this asset was added, in epoch milliseconds. */ - @JsonProperty("added_at") - private Long addedAt; - - /** Details about the notebook asset. */ - @JsonProperty("notebook_info") - private CleanRoomNotebookInfo notebookInfo; - - /** The collaborator who owns the asset. */ - @JsonProperty("owner") - private CleanRoomCollaboratorInfo owner; - - /** Details about the table asset. */ - @JsonProperty("table_info") - private CleanRoomTableInfo tableInfo; - - /** Time at which this asset was updated, in epoch milliseconds. 
*/ - @JsonProperty("updated_at") - private Long updatedAt; - - public CleanRoomAssetInfo setAddedAt(Long addedAt) { - this.addedAt = addedAt; - return this; - } - - public Long getAddedAt() { - return addedAt; - } - - public CleanRoomAssetInfo setNotebookInfo(CleanRoomNotebookInfo notebookInfo) { - this.notebookInfo = notebookInfo; - return this; - } - - public CleanRoomNotebookInfo getNotebookInfo() { - return notebookInfo; - } - - public CleanRoomAssetInfo setOwner(CleanRoomCollaboratorInfo owner) { - this.owner = owner; - return this; - } - - public CleanRoomCollaboratorInfo getOwner() { - return owner; - } - - public CleanRoomAssetInfo setTableInfo(CleanRoomTableInfo tableInfo) { - this.tableInfo = tableInfo; - return this; - } - - public CleanRoomTableInfo getTableInfo() { - return tableInfo; - } - - public CleanRoomAssetInfo setUpdatedAt(Long updatedAt) { - this.updatedAt = updatedAt; - return this; - } - - public Long getUpdatedAt() { - return updatedAt; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomAssetInfo that = (CleanRoomAssetInfo) o; - return Objects.equals(addedAt, that.addedAt) - && Objects.equals(notebookInfo, that.notebookInfo) - && Objects.equals(owner, that.owner) - && Objects.equals(tableInfo, that.tableInfo) - && Objects.equals(updatedAt, that.updatedAt); - } - - @Override - public int hashCode() { - return Objects.hash(addedAt, notebookInfo, owner, tableInfo, updatedAt); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomAssetInfo.class) - .add("addedAt", addedAt) - .add("notebookInfo", notebookInfo) - .add("owner", owner) - .add("tableInfo", tableInfo) - .add("updatedAt", updatedAt) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java deleted 
file mode 100755 index a0f1a3d6..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java +++ /dev/null @@ -1,75 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CleanRoomCatalog { - /** Name of the catalog in the clean room station. Empty for notebooks. */ - @JsonProperty("catalog_name") - private String catalogName; - - /** The details of the shared notebook files. */ - @JsonProperty("notebook_files") - private Collection notebookFiles; - - /** The details of the shared tables. */ - @JsonProperty("tables") - private Collection tables; - - public CleanRoomCatalog setCatalogName(String catalogName) { - this.catalogName = catalogName; - return this; - } - - public String getCatalogName() { - return catalogName; - } - - public CleanRoomCatalog setNotebookFiles(Collection notebookFiles) { - this.notebookFiles = notebookFiles; - return this; - } - - public Collection getNotebookFiles() { - return notebookFiles; - } - - public CleanRoomCatalog setTables(Collection tables) { - this.tables = tables; - return this; - } - - public Collection getTables() { - return tables; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomCatalog that = (CleanRoomCatalog) o; - return Objects.equals(catalogName, that.catalogName) - && Objects.equals(notebookFiles, that.notebookFiles) - && Objects.equals(tables, that.tables); - } - - @Override - public int hashCode() { - return Objects.hash(catalogName, notebookFiles, tables); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomCatalog.class) - 
.add("catalogName", catalogName) - .add("notebookFiles", notebookFiles) - .add("tables", tables) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java deleted file mode 100755 index 1a72b29c..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java +++ /dev/null @@ -1,58 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomCatalogUpdate { - /** The name of the catalog to update assets. */ - @JsonProperty("catalog_name") - private String catalogName; - - /** The updates to the assets in the catalog. 
*/ - @JsonProperty("updates") - private SharedDataObjectUpdate updates; - - public CleanRoomCatalogUpdate setCatalogName(String catalogName) { - this.catalogName = catalogName; - return this; - } - - public String getCatalogName() { - return catalogName; - } - - public CleanRoomCatalogUpdate setUpdates(SharedDataObjectUpdate updates) { - this.updates = updates; - return this; - } - - public SharedDataObjectUpdate getUpdates() { - return updates; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomCatalogUpdate that = (CleanRoomCatalogUpdate) o; - return Objects.equals(catalogName, that.catalogName) && Objects.equals(updates, that.updates); - } - - @Override - public int hashCode() { - return Objects.hash(catalogName, updates); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomCatalogUpdate.class) - .add("catalogName", catalogName) - .add("updates", updates) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java deleted file mode 100755 index b484fd76..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java +++ /dev/null @@ -1,65 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomCollaboratorInfo { - /** - * The global Unity Catalog metastore id of the collaborator. Also known as the sharing - * identifier. The identifier is of format __cloud__:__region__:__metastore-uuid__. 
- */ - @JsonProperty("global_metastore_id") - private String globalMetastoreId; - - /** - * The organization name of the collaborator. This is configured in the metastore for Delta - * Sharing and is used to identify the organization to other collaborators. - */ - @JsonProperty("organization_name") - private String organizationName; - - public CleanRoomCollaboratorInfo setGlobalMetastoreId(String globalMetastoreId) { - this.globalMetastoreId = globalMetastoreId; - return this; - } - - public String getGlobalMetastoreId() { - return globalMetastoreId; - } - - public CleanRoomCollaboratorInfo setOrganizationName(String organizationName) { - this.organizationName = organizationName; - return this; - } - - public String getOrganizationName() { - return organizationName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomCollaboratorInfo that = (CleanRoomCollaboratorInfo) o; - return Objects.equals(globalMetastoreId, that.globalMetastoreId) - && Objects.equals(organizationName, that.organizationName); - } - - @Override - public int hashCode() { - return Objects.hash(globalMetastoreId, organizationName); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomCollaboratorInfo.class) - .add("globalMetastoreId", globalMetastoreId) - .add("organizationName", organizationName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java deleted file mode 100755 index 1f0c8628..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java +++ /dev/null @@ -1,174 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CleanRoomInfo { - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** Time at which this clean room was created, in epoch milliseconds. */ - @JsonProperty("created_at") - private Long createdAt; - - /** Username of clean room creator. */ - @JsonProperty("created_by") - private String createdBy; - - /** Catalog aliases shared by the current collaborator with asset details. */ - @JsonProperty("local_catalogs") - private Collection localCatalogs; - - /** Name of the clean room. */ - @JsonProperty("name") - private String name; - - /** Username of current owner of clean room. */ - @JsonProperty("owner") - private String owner; - - /** Central clean room details. */ - @JsonProperty("remote_detailed_info") - private CentralCleanRoomInfo remoteDetailedInfo; - - /** Time at which this clean room was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") - private Long updatedAt; - - /** Username of clean room updater. 
*/ - @JsonProperty("updated_by") - private String updatedBy; - - public CleanRoomInfo setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public CleanRoomInfo setCreatedAt(Long createdAt) { - this.createdAt = createdAt; - return this; - } - - public Long getCreatedAt() { - return createdAt; - } - - public CleanRoomInfo setCreatedBy(String createdBy) { - this.createdBy = createdBy; - return this; - } - - public String getCreatedBy() { - return createdBy; - } - - public CleanRoomInfo setLocalCatalogs(Collection localCatalogs) { - this.localCatalogs = localCatalogs; - return this; - } - - public Collection getLocalCatalogs() { - return localCatalogs; - } - - public CleanRoomInfo setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CleanRoomInfo setOwner(String owner) { - this.owner = owner; - return this; - } - - public String getOwner() { - return owner; - } - - public CleanRoomInfo setRemoteDetailedInfo(CentralCleanRoomInfo remoteDetailedInfo) { - this.remoteDetailedInfo = remoteDetailedInfo; - return this; - } - - public CentralCleanRoomInfo getRemoteDetailedInfo() { - return remoteDetailedInfo; - } - - public CleanRoomInfo setUpdatedAt(Long updatedAt) { - this.updatedAt = updatedAt; - return this; - } - - public Long getUpdatedAt() { - return updatedAt; - } - - public CleanRoomInfo setUpdatedBy(String updatedBy) { - this.updatedBy = updatedBy; - return this; - } - - public String getUpdatedBy() { - return updatedBy; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomInfo that = (CleanRoomInfo) o; - return Objects.equals(comment, that.comment) - && Objects.equals(createdAt, that.createdAt) - && Objects.equals(createdBy, that.createdBy) - && Objects.equals(localCatalogs, that.localCatalogs) - && Objects.equals(name, 
that.name) - && Objects.equals(owner, that.owner) - && Objects.equals(remoteDetailedInfo, that.remoteDetailedInfo) - && Objects.equals(updatedAt, that.updatedAt) - && Objects.equals(updatedBy, that.updatedBy); - } - - @Override - public int hashCode() { - return Objects.hash( - comment, - createdAt, - createdBy, - localCatalogs, - name, - owner, - remoteDetailedInfo, - updatedAt, - updatedBy); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomInfo.class) - .add("comment", comment) - .add("createdAt", createdAt) - .add("createdBy", createdBy) - .add("localCatalogs", localCatalogs) - .add("name", name) - .add("owner", owner) - .add("remoteDetailedInfo", remoteDetailedInfo) - .add("updatedAt", updatedAt) - .add("updatedBy", updatedBy) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java deleted file mode 100755 index eaf19d6a..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomNotebookInfo { - /** The base64 representation of the notebook content in HTML. */ - @JsonProperty("notebook_content") - private String notebookContent; - - /** The name of the notebook. 
*/ - @JsonProperty("notebook_name") - private String notebookName; - - public CleanRoomNotebookInfo setNotebookContent(String notebookContent) { - this.notebookContent = notebookContent; - return this; - } - - public String getNotebookContent() { - return notebookContent; - } - - public CleanRoomNotebookInfo setNotebookName(String notebookName) { - this.notebookName = notebookName; - return this; - } - - public String getNotebookName() { - return notebookName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomNotebookInfo that = (CleanRoomNotebookInfo) o; - return Objects.equals(notebookContent, that.notebookContent) - && Objects.equals(notebookName, that.notebookName); - } - - @Override - public int hashCode() { - return Objects.hash(notebookContent, notebookName); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomNotebookInfo.class) - .add("notebookContent", notebookContent) - .add("notebookName", notebookName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java deleted file mode 100755 index 6581fdd5..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CleanRoomTableInfo { - /** Name of parent catalog. 
*/ - @JsonProperty("catalog_name") - private String catalogName; - - /** The array of __ColumnInfo__ definitions of the table's columns. */ - @JsonProperty("columns") - private Collection columns; - - /** Full name of table, in form of __catalog_name__.__schema_name__.__table_name__ */ - @JsonProperty("full_name") - private String fullName; - - /** Name of table, relative to parent schema. */ - @JsonProperty("name") - private String name; - - /** Name of parent schema relative to its parent catalog. */ - @JsonProperty("schema_name") - private String schemaName; - - public CleanRoomTableInfo setCatalogName(String catalogName) { - this.catalogName = catalogName; - return this; - } - - public String getCatalogName() { - return catalogName; - } - - public CleanRoomTableInfo setColumns(Collection columns) { - this.columns = columns; - return this; - } - - public Collection getColumns() { - return columns; - } - - public CleanRoomTableInfo setFullName(String fullName) { - this.fullName = fullName; - return this; - } - - public String getFullName() { - return fullName; - } - - public CleanRoomTableInfo setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CleanRoomTableInfo setSchemaName(String schemaName) { - this.schemaName = schemaName; - return this; - } - - public String getSchemaName() { - return schemaName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomTableInfo that = (CleanRoomTableInfo) o; - return Objects.equals(catalogName, that.catalogName) - && Objects.equals(columns, that.columns) - && Objects.equals(fullName, that.fullName) - && Objects.equals(name, that.name) - && Objects.equals(schemaName, that.schemaName); - } - - @Override - public int hashCode() { - return Objects.hash(catalogName, columns, fullName, name, schemaName); - } - - @Override - public String toString() { - return 
new ToStringer(CleanRoomTableInfo.class) - .add("catalogName", catalogName) - .add("columns", columns) - .add("fullName", fullName) - .add("name", name) - .add("schemaName", schemaName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java deleted file mode 100755 index 09b1944a..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java +++ /dev/null @@ -1,125 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Paginator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A clean room is a secure, privacy-protecting environment where two or more parties can share - * sensitive enterprise data, including customer data, for measurements, insights, activation and - * other use cases. - * - *

To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** - * privilege. - */ -@Generated -public class CleanRoomsAPI { - private static final Logger LOG = LoggerFactory.getLogger(CleanRoomsAPI.class); - - private final CleanRoomsService impl; - - /** Regular-use constructor */ - public CleanRoomsAPI(ApiClient apiClient) { - impl = new CleanRoomsImpl(apiClient); - } - - /** Constructor for mocks */ - public CleanRoomsAPI(CleanRoomsService mock) { - impl = mock; - } - - public CleanRoomInfo create(String name, CentralCleanRoomInfo remoteDetailedInfo) { - return create(new CreateCleanRoom().setName(name).setRemoteDetailedInfo(remoteDetailedInfo)); - } - - /** - * Create a clean room. - * - *

Creates a new clean room with specified colaborators. The caller must be a metastore admin - * or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - */ - public CleanRoomInfo create(CreateCleanRoom request) { - return impl.create(request); - } - - public void delete(String name) { - delete(new DeleteCleanRoomRequest().setName(name)); - } - - /** - * Delete a clean room. - * - *

Deletes a data object clean room from the metastore. The caller must be an owner of the - * clean room. - */ - public void delete(DeleteCleanRoomRequest request) { - impl.delete(request); - } - - public CleanRoomInfo get(String name) { - return get(new GetCleanRoomRequest().setName(name)); - } - - /** - * Get a clean room. - * - *

Gets a data object clean room from the metastore. The caller must be a metastore admin or - * the owner of the clean room. - */ - public CleanRoomInfo get(GetCleanRoomRequest request) { - return impl.get(request); - } - - /** - * List clean rooms. - * - *

Gets an array of data object clean rooms from the metastore. The caller must be a metastore - * admin or the owner of the clean room. There is no guarantee of a specific ordering of the - * elements in the array. - */ - public Iterable list(ListCleanRoomsRequest request) { - return new Paginator<>( - request, - impl::list, - ListCleanRoomsResponse::getCleanRooms, - response -> { - String token = response.getNextPageToken(); - if (token == null || token.isEmpty()) { - return null; - } - return request.setPageToken(token); - }); - } - - public CleanRoomInfo update(String name) { - return update(new UpdateCleanRoom().setName(name)); - } - - /** - * Update a clean room. - * - *

Updates the clean room with the changes and data objects in the request. The caller must be - * the owner of the clean room or a metastore admin. - * - *

When the caller is a metastore admin, only the __owner__ field can be updated. - * - *

In the case that the clean room name is changed **updateCleanRoom** requires that the caller - * is both the clean room owner and a metastore admin. - * - *

For each table that is added through this method, the clean room owner must also have - * **SELECT** privilege on the table. The privilege must be maintained indefinitely for recipients - * to be able to access the table. Typically, you should use a group as the clean room owner. - * - *

Table removals through **update** do not require additional privileges. - */ - public CleanRoomInfo update(UpdateCleanRoom request) { - return impl.update(request); - } - - public CleanRoomsService impl() { - return impl; - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java deleted file mode 100755 index 8e0e85e6..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; - -/** Package-local implementation of CleanRooms */ -@Generated -class CleanRoomsImpl implements CleanRoomsService { - private final ApiClient apiClient; - - public CleanRoomsImpl(ApiClient apiClient) { - this.apiClient = apiClient; - } - - @Override - public CleanRoomInfo create(CreateCleanRoom request) { - String path = "/api/2.1/unity-catalog/clean-rooms"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CleanRoomInfo.class, headers); - } - - @Override - public void delete(DeleteCleanRoomRequest request) { - String path = String.format("/api/2.1/unity-catalog/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); - } - - @Override - public CleanRoomInfo get(GetCleanRoomRequest request) { - String path = String.format("/api/2.1/unity-catalog/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return 
apiClient.GET(path, request, CleanRoomInfo.class, headers); - } - - @Override - public ListCleanRoomsResponse list(ListCleanRoomsRequest request) { - String path = "/api/2.1/unity-catalog/clean-rooms"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCleanRoomsResponse.class, headers); - } - - @Override - public CleanRoomInfo update(UpdateCleanRoom request) { - String path = String.format("/api/2.1/unity-catalog/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CleanRoomInfo.class, headers); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java deleted file mode 100755 index fd3ef23a..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java +++ /dev/null @@ -1,71 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; - -/** - * A clean room is a secure, privacy-protecting environment where two or more parties can share - * sensitive enterprise data, including customer data, for measurements, insights, activation and - * other use cases. - * - *

To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** - * privilege. - * - *

This is the high-level interface, that contains generated methods. - * - *

Evolving: this interface is under development. Method signatures may change. - */ -@Generated -public interface CleanRoomsService { - /** - * Create a clean room. - * - *

Creates a new clean room with specified colaborators. The caller must be a metastore admin - * or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - */ - CleanRoomInfo create(CreateCleanRoom createCleanRoom); - - /** - * Delete a clean room. - * - *

Deletes a data object clean room from the metastore. The caller must be an owner of the - * clean room. - */ - void delete(DeleteCleanRoomRequest deleteCleanRoomRequest); - - /** - * Get a clean room. - * - *

Gets a data object clean room from the metastore. The caller must be a metastore admin or - * the owner of the clean room. - */ - CleanRoomInfo get(GetCleanRoomRequest getCleanRoomRequest); - - /** - * List clean rooms. - * - *

Gets an array of data object clean rooms from the metastore. The caller must be a metastore - * admin or the owner of the clean room. There is no guarantee of a specific ordering of the - * elements in the array. - */ - ListCleanRoomsResponse list(ListCleanRoomsRequest listCleanRoomsRequest); - - /** - * Update a clean room. - * - *

Updates the clean room with the changes and data objects in the request. The caller must be - * the owner of the clean room or a metastore admin. - * - *

When the caller is a metastore admin, only the __owner__ field can be updated. - * - *

In the case that the clean room name is changed **updateCleanRoom** requires that the caller - * is both the clean room owner and a metastore admin. - * - *

For each table that is added through this method, the clean room owner must also have - * **SELECT** privilege on the table. The privilege must be maintained indefinitely for recipients - * to be able to access the table. Typically, you should use a group as the clean room owner. - * - *

Table removals through **update** do not require additional privileges. - */ - CleanRoomInfo update(UpdateCleanRoom updateCleanRoom); -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java deleted file mode 100755 index 40abbd42..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java +++ /dev/null @@ -1,221 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class ColumnInfo { - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** */ - @JsonProperty("mask") - private ColumnMask mask; - - /** Name of Column. */ - @JsonProperty("name") - private String name; - - /** Whether field may be Null (default: true). */ - @JsonProperty("nullable") - private Boolean nullable; - - /** Partition index for column. */ - @JsonProperty("partition_index") - private Long partitionIndex; - - /** Ordinal position of column (starting at position 0). */ - @JsonProperty("position") - private Long position; - - /** Format of IntervalType. */ - @JsonProperty("type_interval_type") - private String typeIntervalType; - - /** Full data type specification, JSON-serialized. */ - @JsonProperty("type_json") - private String typeJson; - - /** Name of type (INT, STRUCT, MAP, etc.). */ - @JsonProperty("type_name") - private ColumnTypeName typeName; - - /** Digits of precision; required for DecimalTypes. */ - @JsonProperty("type_precision") - private Long typePrecision; - - /** Digits to right of decimal; Required for DecimalTypes. 
*/ - @JsonProperty("type_scale") - private Long typeScale; - - /** Full data type specification as SQL/catalogString text. */ - @JsonProperty("type_text") - private String typeText; - - public ColumnInfo setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public ColumnInfo setMask(ColumnMask mask) { - this.mask = mask; - return this; - } - - public ColumnMask getMask() { - return mask; - } - - public ColumnInfo setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public ColumnInfo setNullable(Boolean nullable) { - this.nullable = nullable; - return this; - } - - public Boolean getNullable() { - return nullable; - } - - public ColumnInfo setPartitionIndex(Long partitionIndex) { - this.partitionIndex = partitionIndex; - return this; - } - - public Long getPartitionIndex() { - return partitionIndex; - } - - public ColumnInfo setPosition(Long position) { - this.position = position; - return this; - } - - public Long getPosition() { - return position; - } - - public ColumnInfo setTypeIntervalType(String typeIntervalType) { - this.typeIntervalType = typeIntervalType; - return this; - } - - public String getTypeIntervalType() { - return typeIntervalType; - } - - public ColumnInfo setTypeJson(String typeJson) { - this.typeJson = typeJson; - return this; - } - - public String getTypeJson() { - return typeJson; - } - - public ColumnInfo setTypeName(ColumnTypeName typeName) { - this.typeName = typeName; - return this; - } - - public ColumnTypeName getTypeName() { - return typeName; - } - - public ColumnInfo setTypePrecision(Long typePrecision) { - this.typePrecision = typePrecision; - return this; - } - - public Long getTypePrecision() { - return typePrecision; - } - - public ColumnInfo setTypeScale(Long typeScale) { - this.typeScale = typeScale; - return this; - } - - public Long getTypeScale() { - return typeScale; - } - - public 
ColumnInfo setTypeText(String typeText) { - this.typeText = typeText; - return this; - } - - public String getTypeText() { - return typeText; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ColumnInfo that = (ColumnInfo) o; - return Objects.equals(comment, that.comment) - && Objects.equals(mask, that.mask) - && Objects.equals(name, that.name) - && Objects.equals(nullable, that.nullable) - && Objects.equals(partitionIndex, that.partitionIndex) - && Objects.equals(position, that.position) - && Objects.equals(typeIntervalType, that.typeIntervalType) - && Objects.equals(typeJson, that.typeJson) - && Objects.equals(typeName, that.typeName) - && Objects.equals(typePrecision, that.typePrecision) - && Objects.equals(typeScale, that.typeScale) - && Objects.equals(typeText, that.typeText); - } - - @Override - public int hashCode() { - return Objects.hash( - comment, - mask, - name, - nullable, - partitionIndex, - position, - typeIntervalType, - typeJson, - typeName, - typePrecision, - typeScale, - typeText); - } - - @Override - public String toString() { - return new ToStringer(ColumnInfo.class) - .add("comment", comment) - .add("mask", mask) - .add("name", name) - .add("nullable", nullable) - .add("partitionIndex", partitionIndex) - .add("position", position) - .add("typeIntervalType", typeIntervalType) - .add("typeJson", typeJson) - .add("typeName", typeName) - .add("typePrecision", typePrecision) - .add("typeScale", typeScale) - .add("typeText", typeText) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java deleted file mode 100755 index 9c0e3e84..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java +++ /dev/null @@ -1,64 +0,0 @@ -// Code generated from OpenAPI specs by 
Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class ColumnMask { - /** The full name of the column mask SQL UDF. */ - @JsonProperty("function_name") - private String functionName; - - /** - * The list of additional table columns to be passed as input to the column mask function. The - * first arg of the mask function should be of the type of the column being masked and the types - * of the rest of the args should match the types of columns in 'using_column_names'. - */ - @JsonProperty("using_column_names") - private Collection usingColumnNames; - - public ColumnMask setFunctionName(String functionName) { - this.functionName = functionName; - return this; - } - - public String getFunctionName() { - return functionName; - } - - public ColumnMask setUsingColumnNames(Collection usingColumnNames) { - this.usingColumnNames = usingColumnNames; - return this; - } - - public Collection getUsingColumnNames() { - return usingColumnNames; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ColumnMask that = (ColumnMask) o; - return Objects.equals(functionName, that.functionName) - && Objects.equals(usingColumnNames, that.usingColumnNames); - } - - @Override - public int hashCode() { - return Objects.hash(functionName, usingColumnNames); - } - - @Override - public String toString() { - return new ToStringer(ColumnMask.class) - .add("functionName", functionName) - .add("usingColumnNames", usingColumnNames) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java 
deleted file mode 100755 index 7586ecb3..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; - -/** Name of type (INT, STRUCT, MAP, etc.). */ -@Generated -public enum ColumnTypeName { - ARRAY, - BINARY, - BOOLEAN, - BYTE, - CHAR, - DATE, - DECIMAL, - DOUBLE, - FLOAT, - INT, - INTERVAL, - LONG, - MAP, - NULL, - SHORT, - STRING, - STRUCT, - TABLE_TYPE, - TIMESTAMP, - TIMESTAMP_NTZ, - USER_DEFINED_TYPE, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java deleted file mode 100755 index 3b769a75..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java +++ /dev/null @@ -1,74 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CreateCleanRoom { - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** Name of the clean room. */ - @JsonProperty("name") - private String name; - - /** Central clean room details. 
*/ - @JsonProperty("remote_detailed_info") - private CentralCleanRoomInfo remoteDetailedInfo; - - public CreateCleanRoom setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public CreateCleanRoom setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CreateCleanRoom setRemoteDetailedInfo(CentralCleanRoomInfo remoteDetailedInfo) { - this.remoteDetailedInfo = remoteDetailedInfo; - return this; - } - - public CentralCleanRoomInfo getRemoteDetailedInfo() { - return remoteDetailedInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CreateCleanRoom that = (CreateCleanRoom) o; - return Objects.equals(comment, that.comment) - && Objects.equals(name, that.name) - && Objects.equals(remoteDetailedInfo, that.remoteDetailedInfo); - } - - @Override - public int hashCode() { - return Objects.hash(comment, name, remoteDetailedInfo); - } - - @Override - public String toString() { - return new ToStringer(CreateCleanRoom.class) - .add("comment", comment) - .add("name", name) - .add("remoteDetailedInfo", remoteDetailedInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java deleted file mode 100755 index 1fd0c92e..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java +++ /dev/null @@ -1,42 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Delete a clean room */ -@Generated -public class DeleteCleanRoomRequest { - /** The name of the clean room. */ - @JsonIgnore private String name; - - public DeleteCleanRoomRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteCleanRoomRequest that = (DeleteCleanRoomRequest) o; - return Objects.equals(name, that.name); - } - - @Override - public int hashCode() { - return Objects.hash(name); - } - - @Override - public String toString() { - return new ToStringer(DeleteCleanRoomRequest.class).add("name", name).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java deleted file mode 100755 index 982732d1..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get a clean room */ -@Generated -public class GetCleanRoomRequest { - /** Whether to include remote details (central) on the clean room. */ - @JsonIgnore - @QueryParam("include_remote_details") - private Boolean includeRemoteDetails; - - /** The name of the clean room. 
*/ - @JsonIgnore private String name; - - public GetCleanRoomRequest setIncludeRemoteDetails(Boolean includeRemoteDetails) { - this.includeRemoteDetails = includeRemoteDetails; - return this; - } - - public Boolean getIncludeRemoteDetails() { - return includeRemoteDetails; - } - - public GetCleanRoomRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCleanRoomRequest that = (GetCleanRoomRequest) o; - return Objects.equals(includeRemoteDetails, that.includeRemoteDetails) - && Objects.equals(name, that.name); - } - - @Override - public int hashCode() { - return Objects.hash(includeRemoteDetails, name); - } - - @Override - public String toString() { - return new ToStringer(GetCleanRoomRequest.class) - .add("includeRemoteDetails", includeRemoteDetails) - .add("name", name) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java deleted file mode 100755 index c58abe94..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** List clean rooms */ -@Generated -public class ListCleanRoomsRequest { - /** - * Maximum number of clean rooms to return. If not set, all the clean rooms are returned (not - * recommended). 
- when set to a value greater than 0, the page length is the minimum of this - * value and a server configured value; - when set to 0, the page length is set to a server - * configured value (recommended); - when set to a value less than 0, an invalid parameter error - * is returned; - */ - @JsonIgnore - @QueryParam("max_results") - private Long maxResults; - - /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") - private String pageToken; - - public ListCleanRoomsRequest setMaxResults(Long maxResults) { - this.maxResults = maxResults; - return this; - } - - public Long getMaxResults() { - return maxResults; - } - - public ListCleanRoomsRequest setPageToken(String pageToken) { - this.pageToken = pageToken; - return this; - } - - public String getPageToken() { - return pageToken; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ListCleanRoomsRequest that = (ListCleanRoomsRequest) o; - return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); - } - - @Override - public int hashCode() { - return Objects.hash(maxResults, pageToken); - } - - @Override - public String toString() { - return new ToStringer(ListCleanRoomsRequest.class) - .add("maxResults", maxResults) - .add("pageToken", pageToken) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java deleted file mode 100755 index 033dc519..00000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java +++ /dev/null @@ -1,90 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class UpdateCleanRoom { - /** Array of shared data object updates. */ - @JsonProperty("catalog_updates") - private Collection catalogUpdates; - - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** The name of the clean room. */ - @JsonIgnore private String name; - - /** Username of current owner of clean room. */ - @JsonProperty("owner") - private String owner; - - public UpdateCleanRoom setCatalogUpdates(Collection catalogUpdates) { - this.catalogUpdates = catalogUpdates; - return this; - } - - public Collection getCatalogUpdates() { - return catalogUpdates; - } - - public UpdateCleanRoom setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public UpdateCleanRoom setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public UpdateCleanRoom setOwner(String owner) { - this.owner = owner; - return this; - } - - public String getOwner() { - return owner; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpdateCleanRoom that = (UpdateCleanRoom) o; - return Objects.equals(catalogUpdates, that.catalogUpdates) - && Objects.equals(comment, that.comment) - && Objects.equals(name, that.name) - && Objects.equals(owner, that.owner); - } - - @Override - public int hashCode() { - return Objects.hash(catalogUpdates, comment, name, owner); - } - - @Override - public String toString() { - return new ToStringer(UpdateCleanRoom.class) - .add("catalogUpdates", 
catalogUpdates) - .add("comment", comment) - .add("name", name) - .add("owner", owner) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java index 82eb48e5..a1900b66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java @@ -9,5 +9,5 @@ public enum ChannelName { CHANNEL_NAME_CURRENT, CHANNEL_NAME_CUSTOM, CHANNEL_NAME_PREVIEW, - CHANNEL_NAME_UNSPECIFIED, + CHANNEL_NAME_PREVIOUS, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java index 8e579b47..8d25fa97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java @@ -87,11 +87,11 @@ * completed execution when the cancel request arrives. Polling for status until a terminal state is * reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur * server-side, and cannot account for things such as caller delays and network latency from caller - * to service. - The system will auto-close a statement after one hour if the client stops polling - * and thus you must poll at least once an hour. - The results are only available for one hour after - * success; polling does not extend this. - The SQL Execution API must be used for the entire - * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and - * then the SQL Execution API to cancel it. + * to service. - To guarantee that the statement is kept alive, you must poll at least once every 15 + * minutes. 
- The results are only available for one hour after success; polling does not extend + * this. - The SQL Execution API must be used for the entire lifecycle of the statement. For + * example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to + * cancel it. * *

[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement * Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java index b22e9dbe..5132b035 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java @@ -84,11 +84,11 @@ * completed execution when the cancel request arrives. Polling for status until a terminal state is * reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur * server-side, and cannot account for things such as caller delays and network latency from caller - * to service. - The system will auto-close a statement after one hour if the client stops polling - * and thus you must poll at least once an hour. - The results are only available for one hour after - * success; polling does not extend this. - The SQL Execution API must be used for the entire - * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and - * then the SQL Execution API to cancel it. + * to service. - To guarantee that the statement is kept alive, you must poll at least once every 15 + * minutes. - The results are only available for one hour after success; polling does not extend + * this. - The SQL Execution API must be used for the entire lifecycle of the statement. For + * example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to + * cancel it. * *

[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement * Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java index 9564b525..00b0c394 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java @@ -233,7 +233,8 @@ public WarehousePermissions setPermissions(String warehouseId) { /** * Set SQL warehouse permissions. * - *

Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ public WarehousePermissions setPermissions(WarehousePermissionsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java index c1da3aed..8b18fcca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java @@ -76,7 +76,8 @@ WarehousePermissions getPermissions( /** * Set SQL warehouse permissions. * - *

Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ WarehousePermissions setPermissions(WarehousePermissionsRequest warehousePermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java index 0448adcf..6b3de442 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java @@ -23,6 +23,7 @@ public enum ImportFormat { // directories. HTML, // The notebook is imported as an HTML file. JUPYTER, // The notebook is imported as a Jupyter/IPython Notebook file. + RAW, R_MARKDOWN, // The notebook is imported from R Markdown format. SOURCE, // The notebook or directory is imported as source code. } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java index dab9e912..8a51b47a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java @@ -130,7 +130,9 @@ public RepoPermissions setPermissions(String repoId) { /** * Set repo permissions. * - *

Sets permissions on a repo. Repos can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public RepoPermissions setPermissions(RepoPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java index 31347754..188c2d30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java @@ -70,7 +70,9 @@ GetRepoPermissionLevelsResponse getPermissionLevels( /** * Set repo permissions. * - *

Sets permissions on a repo. Repos can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ RepoPermissions setPermissions(RepoPermissionsRequest repoPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java index 48682618..3933eeff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java @@ -175,8 +175,9 @@ public WorkspaceObjectPermissions setPermissions( /** * Set workspace object permissions. * - *

Sets permissions on a workspace object. Workspace objects can inherit permissions from their - * parent objects or root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ public WorkspaceObjectPermissions setPermissions(WorkspaceObjectPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java index e6e7be35..93dc9842 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java @@ -98,8 +98,9 @@ WorkspaceObjectPermissions getPermissions( /** * Set workspace object permissions. * - *

Sets permissions on a workspace object. Workspace objects can inherit permissions from their - * parent objects or root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ WorkspaceObjectPermissions setPermissions( WorkspaceObjectPermissionsRequest workspaceObjectPermissionsRequest); diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index 32731d87..89e86caf 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.34.0 + 0.35.0 diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index 4a89d103..b8a37bff 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.34.0 + 0.35.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index 789662ed..6b6116a7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.34.0 + 0.35.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for diff --git a/shaded/pom.xml b/shaded/pom.xml index 21713f8e..8c3d0966 100644 --- a/shaded/pom.xml +++ b/shaded/pom.xml @@ -4,7 +4,7 @@ 4.0.0 - 0.34.0 + 0.35.0 com.databricks From 4db2b8ba8591b7fe49dd04dc5501ac1733bf6650 Mon Sep 17 00:00:00 2001 From: Giorgi Kikolashvili <47174341+gkiko10@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:19:58 +0100 Subject: [PATCH 12/12] [Internal] Update Jobs GetRun API to support paginated responses for jobs and ForEach tasks (#386) ## What changes are proposed in this pull request? Introduces extension for jobs getRun call that paginates tasks and iterations arrays in the response and returns aggregated response to the caller. This change is necessary to prepare for jobs API 2.2 release that serves paginated response. 
Pagination is over once the next_page_token is absent from the response. The pagination logic is not exposed to the customer. ## How is this tested? Unit tests --- .../com/databricks/sdk/mixin/JobsExt.java | 60 +++++++++++++ .../com/databricks/sdk/mixin/JobsExtTest.java | 85 +++++++++++++++++++ 2 files changed, 145 insertions(+) create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/JobsExt.java create mode 100644 databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/JobsExtTest.java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/JobsExt.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/JobsExt.java new file mode 100644 index 00000000..f6f15f90 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/mixin/JobsExt.java @@ -0,0 +1,60 @@ +package com.databricks.sdk.mixin; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.service.jobs.*; +import java.util.Collection; + +public class JobsExt extends JobsAPI { + +  public JobsExt(ApiClient apiClient) { +    super(apiClient); +  } + +  public JobsExt(JobsService mock) { +    super(mock); +  } + +  /** +   * Wrap the {@code JobsAPI.getRun} operation to retrieve paginated content without breaking the +   * response contract. +   * +   *

Depending on the Jobs API version used under the hood, tasks or iteration runs retrieved by +   * the initial request may be truncated due to high cardinalities. Truncation can happen for job +   * runs over 100 task runs, as well as ForEach task runs with over 100 iteration runs. To avoid +   * returning an incomplete {@code Run} object to the user, this method performs all the requests +   * required to collect all task/iteration runs into a single {@code Run} object. +   */ +  @Override +  public Run getRun(GetRunRequest request) { +    Run run = super.getRun(request); + +    /* +     * fetch all additional pages (if any) and accumulate the result in a single response +     */ + +    Collection<RunTask> iterations = run.getIterations(); +    boolean paginatingIterations = iterations != null && !iterations.isEmpty(); + +    Run currRun = run; +    while (currRun.getNextPageToken() != null) { +      request.setPageToken(currRun.getNextPageToken()); +      currRun = super.getRun(request); +      if (paginatingIterations) { +        Collection<RunTask> newIterations = currRun.getIterations(); +        if (newIterations != null) { +          run.getIterations().addAll(newIterations); +        } +      } else { +        Collection<RunTask> newTasks = currRun.getTasks(); +        if (newTasks != null) { +          run.getTasks().addAll(newTasks); +        } +      } +    } + +    // now that we've added all pages to the Run, the tokens are useless +    run.setNextPageToken(null); + +    return run; +  } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/JobsExtTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/JobsExtTest.java new file mode 100644 index 00000000..73fb2233 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/mixin/JobsExtTest.java @@ -0,0 +1,85 @@ +package com.databricks.sdk.mixin; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import com.databricks.sdk.service.jobs.GetRunRequest; +import com.databricks.sdk.service.jobs.JobsService; +import
com.databricks.sdk.service.jobs.Run; +import com.databricks.sdk.service.jobs.RunTask; +import java.util.ArrayList; +import java.util.Collection; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class JobsExtTest { + + @Test + public void testGetRunPaginationWithTasks() { + JobsService service = Mockito.mock(JobsService.class); + + Run firstPage = new Run().setNextPageToken("tokenToSecondPage"); + addTasks(firstPage, 0L, 1L); + Run secondPage = new Run().setNextPageToken("tokenToThirdPage"); + addTasks(secondPage, 2L, 3L); + Run thirdPage = new Run(); + addTasks(thirdPage, 4L); + + when(service.getRun(any())).thenReturn(firstPage).thenReturn(secondPage).thenReturn(thirdPage); + + JobsExt jobsExt = new JobsExt(service); + + GetRunRequest request = new GetRunRequest(); + + Run run = jobsExt.getRun(request); + + Run expectedRun = new Run(); + addTasks(expectedRun, 0L, 1L, 2L, 3L, 4L); + + assertEquals(expectedRun, run); + verify(service, times(3)).getRun(any()); + } + + @Test + public void testGetRunPaginationWithIterations() { + JobsService service = Mockito.mock(JobsService.class); + + Run firstPage = new Run().setNextPageToken("tokenToSecondPage"); + addIterations(firstPage, 0L, 1L); + Run secondPage = new Run().setNextPageToken("tokenToThirdPage"); + addIterations(secondPage, 2L, 3L); + Run thirdPage = new Run(); + addIterations(thirdPage, 4L); + + when(service.getRun(any())).thenReturn(firstPage).thenReturn(secondPage).thenReturn(thirdPage); + + JobsExt jobsExt = new JobsExt(service); + + GetRunRequest request = new GetRunRequest(); + + Run run = jobsExt.getRun(request); + + Run expectedRun = new Run(); + addIterations(expectedRun, 0L, 1L, 2L, 3L, 4L); + + assertEquals(expectedRun, run); + verify(service, times(3)).getRun(any()); + } + + private void addTasks(Run run, long... 
taskRunIds) { +    Collection<RunTask> tasks = new ArrayList<>(); +    for (long runId : taskRunIds) { +      tasks.add(new RunTask().setRunId(runId)); +    } +    run.setTasks(tasks); +  } + +  private void addIterations(Run run, long... iterationRunIds) { +    Collection<RunTask> iterations = new ArrayList<>(); +    for (long runId : iterationRunIds) { +      iterations.add(new RunTask().setRunId(runId)); +    } +    run.setIterations(iterations); +  } +}