diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index 550f96c30..beaa7d122 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -309,14 +309,7 @@ public AccountMetastoresAPI metastores() {
/**
* These APIs provide configurations for the network connectivity of your workspaces for
- * serverless compute resources. This API provides stable subnets for your workspace so that you
- * can configure your firewalls on your Azure Storage accounts to allow access from Databricks.
- * You can also use the API to provision private endpoints for Databricks to privately connect
- * serverless compute resources to your Azure resources using Azure Private Link. See [configure
- * serverless secure connectivity].
- *
- * [configure serverless secure connectivity]:
- * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security
+ * serverless compute resources.
*/
public NetworkConnectivityAPI networkConnectivity() {
return networkConnectivityAPI;
@@ -384,16 +377,7 @@ public AccountServicePrincipalsAPI servicePrincipals() {
return servicePrincipalsAPI;
}
- /**
- * The Personal Compute enablement setting lets you control which users can use the Personal
- * Compute default policy to create compute resources. By default all users in all workspaces have
- * access (ON), but you can change the setting to instead let individual workspaces configure
- * access control (DELEGATE).
- *
- * <p>There is only one instance of this setting per account. Since this setting has a default
- * value, this setting is present on all accounts even though it's never set on a given account.
- * Deletion reverts the value of the setting back to the default value.
- */
+ /** Accounts Settings API allows users to manage settings at the account level. */
public AccountSettingsAPI settings() {
return settingsAPI;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 764ffd2f3..0f7b1ce1b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -70,6 +70,8 @@
import com.databricks.sdk.service.iam.CurrentUserService;
import com.databricks.sdk.service.iam.GroupsAPI;
import com.databricks.sdk.service.iam.GroupsService;
+import com.databricks.sdk.service.iam.PermissionMigrationAPI;
+import com.databricks.sdk.service.iam.PermissionMigrationService;
import com.databricks.sdk.service.iam.PermissionsAPI;
import com.databricks.sdk.service.iam.PermissionsService;
import com.databricks.sdk.service.iam.ServicePrincipalsAPI;
@@ -78,6 +80,30 @@
import com.databricks.sdk.service.iam.UsersService;
import com.databricks.sdk.service.jobs.JobsAPI;
import com.databricks.sdk.service.jobs.JobsService;
+import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsAPI;
+import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsService;
+import com.databricks.sdk.service.marketplace.ConsumerInstallationsAPI;
+import com.databricks.sdk.service.marketplace.ConsumerInstallationsService;
+import com.databricks.sdk.service.marketplace.ConsumerListingsAPI;
+import com.databricks.sdk.service.marketplace.ConsumerListingsService;
+import com.databricks.sdk.service.marketplace.ConsumerPersonalizationRequestsAPI;
+import com.databricks.sdk.service.marketplace.ConsumerPersonalizationRequestsService;
+import com.databricks.sdk.service.marketplace.ConsumerProvidersAPI;
+import com.databricks.sdk.service.marketplace.ConsumerProvidersService;
+import com.databricks.sdk.service.marketplace.ProviderExchangeFiltersAPI;
+import com.databricks.sdk.service.marketplace.ProviderExchangeFiltersService;
+import com.databricks.sdk.service.marketplace.ProviderExchangesAPI;
+import com.databricks.sdk.service.marketplace.ProviderExchangesService;
+import com.databricks.sdk.service.marketplace.ProviderFilesAPI;
+import com.databricks.sdk.service.marketplace.ProviderFilesService;
+import com.databricks.sdk.service.marketplace.ProviderListingsAPI;
+import com.databricks.sdk.service.marketplace.ProviderListingsService;
+import com.databricks.sdk.service.marketplace.ProviderPersonalizationRequestsAPI;
+import com.databricks.sdk.service.marketplace.ProviderPersonalizationRequestsService;
+import com.databricks.sdk.service.marketplace.ProviderProviderAnalyticsDashboardsAPI;
+import com.databricks.sdk.service.marketplace.ProviderProviderAnalyticsDashboardsService;
+import com.databricks.sdk.service.marketplace.ProviderProvidersAPI;
+import com.databricks.sdk.service.marketplace.ProviderProvidersService;
import com.databricks.sdk.service.ml.ExperimentsAPI;
import com.databricks.sdk.service.ml.ExperimentsService;
import com.databricks.sdk.service.ml.ModelRegistryAPI;
@@ -159,6 +185,11 @@ public class WorkspaceClient {
private ClustersExt clustersAPI;
private CommandExecutionAPI commandExecutionAPI;
private ConnectionsAPI connectionsAPI;
+ private ConsumerFulfillmentsAPI consumerFulfillmentsAPI;
+ private ConsumerInstallationsAPI consumerInstallationsAPI;
+ private ConsumerListingsAPI consumerListingsAPI;
+ private ConsumerPersonalizationRequestsAPI consumerPersonalizationRequestsAPI;
+ private ConsumerProvidersAPI consumerProvidersAPI;
private CredentialsManagerAPI credentialsManagerAPI;
private CurrentUserAPI currentUserAPI;
private DashboardWidgetsAPI dashboardWidgetsAPI;
@@ -185,9 +216,17 @@ public class WorkspaceClient {
private ModelRegistryAPI modelRegistryAPI;
private ModelVersionsAPI modelVersionsAPI;
private OnlineTablesAPI onlineTablesAPI;
+ private PermissionMigrationAPI permissionMigrationAPI;
private PermissionsAPI permissionsAPI;
private PipelinesAPI pipelinesAPI;
private PolicyFamiliesAPI policyFamiliesAPI;
+ private ProviderExchangeFiltersAPI providerExchangeFiltersAPI;
+ private ProviderExchangesAPI providerExchangesAPI;
+ private ProviderFilesAPI providerFilesAPI;
+ private ProviderListingsAPI providerListingsAPI;
+ private ProviderPersonalizationRequestsAPI providerPersonalizationRequestsAPI;
+ private ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboardsAPI;
+ private ProviderProvidersAPI providerProvidersAPI;
private ProvidersAPI providersAPI;
private QueriesAPI queriesAPI;
private QueryHistoryAPI queryHistoryAPI;
@@ -236,6 +275,11 @@ public WorkspaceClient(DatabricksConfig config) {
clustersAPI = new ClustersExt(apiClient);
commandExecutionAPI = new CommandExecutionAPI(apiClient);
connectionsAPI = new ConnectionsAPI(apiClient);
+ consumerFulfillmentsAPI = new ConsumerFulfillmentsAPI(apiClient);
+ consumerInstallationsAPI = new ConsumerInstallationsAPI(apiClient);
+ consumerListingsAPI = new ConsumerListingsAPI(apiClient);
+ consumerPersonalizationRequestsAPI = new ConsumerPersonalizationRequestsAPI(apiClient);
+ consumerProvidersAPI = new ConsumerProvidersAPI(apiClient);
credentialsManagerAPI = new CredentialsManagerAPI(apiClient);
currentUserAPI = new CurrentUserAPI(apiClient);
dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient);
@@ -262,9 +306,17 @@ public WorkspaceClient(DatabricksConfig config) {
modelRegistryAPI = new ModelRegistryAPI(apiClient);
modelVersionsAPI = new ModelVersionsAPI(apiClient);
onlineTablesAPI = new OnlineTablesAPI(apiClient);
+ permissionMigrationAPI = new PermissionMigrationAPI(apiClient);
permissionsAPI = new PermissionsAPI(apiClient);
pipelinesAPI = new PipelinesAPI(apiClient);
policyFamiliesAPI = new PolicyFamiliesAPI(apiClient);
+ providerExchangeFiltersAPI = new ProviderExchangeFiltersAPI(apiClient);
+ providerExchangesAPI = new ProviderExchangesAPI(apiClient);
+ providerFilesAPI = new ProviderFilesAPI(apiClient);
+ providerListingsAPI = new ProviderListingsAPI(apiClient);
+ providerPersonalizationRequestsAPI = new ProviderPersonalizationRequestsAPI(apiClient);
+ providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient);
+ providerProvidersAPI = new ProviderProvidersAPI(apiClient);
providersAPI = new ProvidersAPI(apiClient);
queriesAPI = new QueriesAPI(apiClient);
queryHistoryAPI = new QueryHistoryAPI(apiClient);
@@ -443,6 +495,40 @@ public ConnectionsAPI connections() {
return connectionsAPI;
}
+ /** Fulfillments are entities that allow consumers to preview installations. */
+ public ConsumerFulfillmentsAPI consumerFulfillments() {
+ return consumerFulfillmentsAPI;
+ }
+
+ /**
+ * Installations are entities that allow consumers to interact with Databricks Marketplace
+ * listings.
+ */
+ public ConsumerInstallationsAPI consumerInstallations() {
+ return consumerInstallationsAPI;
+ }
+
+ /**
+ * Listings are the core entities in the Marketplace. They represent the products that are
+ * available for consumption.
+ */
+ public ConsumerListingsAPI consumerListings() {
+ return consumerListingsAPI;
+ }
+
+ /**
+ * Personalization Requests allow customers to interact with the individualized Marketplace
+ * listing flow.
+ */
+ public ConsumerPersonalizationRequestsAPI consumerPersonalizationRequests() {
+ return consumerPersonalizationRequestsAPI;
+ }
+
+ /** Providers are the entities that publish listings to the Marketplace. */
+ public ConsumerProvidersAPI consumerProviders() {
+ return consumerProvidersAPI;
+ }
+
/**
* Credentials manager interacts with with Identity Providers to to perform token exchanges using
* stored credentials and refresh tokens.
@@ -741,17 +827,13 @@ public LakeviewAPI lakeview() {
*
* <p>To make third-party or custom code available to notebooks and jobs running on your clusters,
* you can install a library. Libraries can be written in Python, Java, Scala, and R. You can
- * upload Java, Scala, and Python libraries and point to external packages in PyPI, Maven, and
+ * upload Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and
* CRAN repositories.
*
* <p>Cluster libraries can be used by all notebooks running on a cluster. You can install a
* cluster library directly from a public repository such as PyPI or Maven, using a previously
* installed workspace library, or using an init script.
*
- * <p>When you install a library on a cluster, a notebook already attached to that cluster will
- * not immediately see the new library. You must first detach and then reattach the notebook to
- * the cluster.
- *
* <p>When you uninstall a library from a cluster, the library is removed only when you restart
* the cluster. Until you restart the cluster, the status of the uninstalled library appears as
* Uninstall pending restart.
@@ -809,6 +891,14 @@ public OnlineTablesAPI onlineTables() {
return onlineTablesAPI;
}
+ /**
+ * This spec contains undocumented permission migration APIs used in
+ * https://github.com/databrickslabs/ucx.
+ */
+ public PermissionMigrationAPI permissionMigration() {
+ return permissionMigrationAPI;
+ }
+
/**
* Permissions API are used to create read, write, edit, update and manage access for various
* users on different objects and endpoints.
@@ -854,6 +944,9 @@ public OnlineTablesAPI onlineTables() {
* <p>For the mapping of the required permissions for specific actions or abilities and other
* important information, see [Access Control].
*
+ * <p>Note that to manage access control on service principals, use **[Account Access Control
+ * Proxy](:service:accountaccesscontrolproxy)**.
+ *
* <p>[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
*/
public PermissionsAPI permissions() {
@@ -894,6 +987,52 @@ public PolicyFamiliesAPI policyFamilies() {
return policyFamiliesAPI;
}
+ /** Marketplace exchanges filters curate which groups can access an exchange. */
+ public ProviderExchangeFiltersAPI providerExchangeFilters() {
+ return providerExchangeFiltersAPI;
+ }
+
+ /**
+ * Marketplace exchanges allow providers to share their listings with a curated set of customers.
+ */
+ public ProviderExchangesAPI providerExchanges() {
+ return providerExchangesAPI;
+ }
+
+ /**
+ * Marketplace offers a set of file APIs for various purposes such as preview notebooks and
+ * provider icons.
+ */
+ public ProviderFilesAPI providerFiles() {
+ return providerFilesAPI;
+ }
+
+ /**
+ * Listings are the core entities in the Marketplace. They represent the products that are
+ * available for consumption.
+ */
+ public ProviderListingsAPI providerListings() {
+ return providerListingsAPI;
+ }
+
+ /**
+ * Personalization requests are an alternate to instantly available listings. Control the
+ * lifecycle of personalized solutions.
+ */
+ public ProviderPersonalizationRequestsAPI providerPersonalizationRequests() {
+ return providerPersonalizationRequestsAPI;
+ }
+
+ /** Manage templated analytics solution for providers. */
+ public ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboards() {
+ return providerProviderAnalyticsDashboardsAPI;
+ }
+
+ /** Providers are entities that manage assets in Marketplace. */
+ public ProviderProvidersAPI providerProviders() {
+ return providerProvidersAPI;
+ }
+
/**
* A data provider is an object representing the organization in the real world who shares the
* data. A provider contains shares which further contain the shared data.
@@ -1057,18 +1196,7 @@ public ServingEndpointsAPI servingEndpoints() {
return servingEndpointsAPI;
}
- /**
- * The default namespace setting API allows users to configure the default namespace for a
- * Databricks workspace.
- *
- * <p>Through this API, users can retrieve, set, or modify the default namespace used when queries
- * do not reference a fully qualified three-level name. For example, if you use the API to set
- * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the
- * object 'retail_prod.default.myTable' (the schema 'default' is always assumed).
- *
- * <p>This setting requires a restart of clusters and SQL warehouses to take effect. Additionally,
- * the default namespace only applies when using Unity Catalog-enabled compute.
- */
+ /** Workspace Settings API allows users to manage settings at the workspace level. */
public SettingsAPI settings() {
return settingsAPI;
}
@@ -1457,6 +1585,67 @@ public WorkspaceClient withConnectionsAPI(ConnectionsAPI connections) {
return this;
}
+ /** Replace the default ConsumerFulfillmentsService with a custom implementation. */
+ public WorkspaceClient withConsumerFulfillmentsImpl(
+ ConsumerFulfillmentsService consumerFulfillments) {
+ return this.withConsumerFulfillmentsAPI(new ConsumerFulfillmentsAPI(consumerFulfillments));
+ }
+
+ /** Replace the default ConsumerFulfillmentsAPI with a custom implementation. */
+ public WorkspaceClient withConsumerFulfillmentsAPI(ConsumerFulfillmentsAPI consumerFulfillments) {
+ this.consumerFulfillmentsAPI = consumerFulfillments;
+ return this;
+ }
+
+ /** Replace the default ConsumerInstallationsService with a custom implementation. */
+ public WorkspaceClient withConsumerInstallationsImpl(
+ ConsumerInstallationsService consumerInstallations) {
+ return this.withConsumerInstallationsAPI(new ConsumerInstallationsAPI(consumerInstallations));
+ }
+
+ /** Replace the default ConsumerInstallationsAPI with a custom implementation. */
+ public WorkspaceClient withConsumerInstallationsAPI(
+ ConsumerInstallationsAPI consumerInstallations) {
+ this.consumerInstallationsAPI = consumerInstallations;
+ return this;
+ }
+
+ /** Replace the default ConsumerListingsService with a custom implementation. */
+ public WorkspaceClient withConsumerListingsImpl(ConsumerListingsService consumerListings) {
+ return this.withConsumerListingsAPI(new ConsumerListingsAPI(consumerListings));
+ }
+
+ /** Replace the default ConsumerListingsAPI with a custom implementation. */
+ public WorkspaceClient withConsumerListingsAPI(ConsumerListingsAPI consumerListings) {
+ this.consumerListingsAPI = consumerListings;
+ return this;
+ }
+
+ /** Replace the default ConsumerPersonalizationRequestsService with a custom implementation. */
+ public WorkspaceClient withConsumerPersonalizationRequestsImpl(
+ ConsumerPersonalizationRequestsService consumerPersonalizationRequests) {
+ return this.withConsumerPersonalizationRequestsAPI(
+ new ConsumerPersonalizationRequestsAPI(consumerPersonalizationRequests));
+ }
+
+ /** Replace the default ConsumerPersonalizationRequestsAPI with a custom implementation. */
+ public WorkspaceClient withConsumerPersonalizationRequestsAPI(
+ ConsumerPersonalizationRequestsAPI consumerPersonalizationRequests) {
+ this.consumerPersonalizationRequestsAPI = consumerPersonalizationRequests;
+ return this;
+ }
+
+ /** Replace the default ConsumerProvidersService with a custom implementation. */
+ public WorkspaceClient withConsumerProvidersImpl(ConsumerProvidersService consumerProviders) {
+ return this.withConsumerProvidersAPI(new ConsumerProvidersAPI(consumerProviders));
+ }
+
+ /** Replace the default ConsumerProvidersAPI with a custom implementation. */
+ public WorkspaceClient withConsumerProvidersAPI(ConsumerProvidersAPI consumerProviders) {
+ this.consumerProvidersAPI = consumerProviders;
+ return this;
+ }
+
/** Replace the default CredentialsManagerService with a custom implementation. */
public WorkspaceClient withCredentialsManagerImpl(CredentialsManagerService credentialsManager) {
return this.withCredentialsManagerAPI(new CredentialsManagerAPI(credentialsManager));
@@ -1743,6 +1932,18 @@ public WorkspaceClient withOnlineTablesAPI(OnlineTablesAPI onlineTables) {
return this;
}
+ /** Replace the default PermissionMigrationService with a custom implementation. */
+ public WorkspaceClient withPermissionMigrationImpl(
+ PermissionMigrationService permissionMigration) {
+ return this.withPermissionMigrationAPI(new PermissionMigrationAPI(permissionMigration));
+ }
+
+ /** Replace the default PermissionMigrationAPI with a custom implementation. */
+ public WorkspaceClient withPermissionMigrationAPI(PermissionMigrationAPI permissionMigration) {
+ this.permissionMigrationAPI = permissionMigration;
+ return this;
+ }
+
/** Replace the default PermissionsService with a custom implementation. */
public WorkspaceClient withPermissionsImpl(PermissionsService permissions) {
return this.withPermissionsAPI(new PermissionsAPI(permissions));
@@ -1776,6 +1977,94 @@ public WorkspaceClient withPolicyFamiliesAPI(PolicyFamiliesAPI policyFamilies) {
return this;
}
+ /** Replace the default ProviderExchangeFiltersService with a custom implementation. */
+ public WorkspaceClient withProviderExchangeFiltersImpl(
+ ProviderExchangeFiltersService providerExchangeFilters) {
+ return this.withProviderExchangeFiltersAPI(
+ new ProviderExchangeFiltersAPI(providerExchangeFilters));
+ }
+
+ /** Replace the default ProviderExchangeFiltersAPI with a custom implementation. */
+ public WorkspaceClient withProviderExchangeFiltersAPI(
+ ProviderExchangeFiltersAPI providerExchangeFilters) {
+ this.providerExchangeFiltersAPI = providerExchangeFilters;
+ return this;
+ }
+
+ /** Replace the default ProviderExchangesService with a custom implementation. */
+ public WorkspaceClient withProviderExchangesImpl(ProviderExchangesService providerExchanges) {
+ return this.withProviderExchangesAPI(new ProviderExchangesAPI(providerExchanges));
+ }
+
+ /** Replace the default ProviderExchangesAPI with a custom implementation. */
+ public WorkspaceClient withProviderExchangesAPI(ProviderExchangesAPI providerExchanges) {
+ this.providerExchangesAPI = providerExchanges;
+ return this;
+ }
+
+ /** Replace the default ProviderFilesService with a custom implementation. */
+ public WorkspaceClient withProviderFilesImpl(ProviderFilesService providerFiles) {
+ return this.withProviderFilesAPI(new ProviderFilesAPI(providerFiles));
+ }
+
+ /** Replace the default ProviderFilesAPI with a custom implementation. */
+ public WorkspaceClient withProviderFilesAPI(ProviderFilesAPI providerFiles) {
+ this.providerFilesAPI = providerFiles;
+ return this;
+ }
+
+ /** Replace the default ProviderListingsService with a custom implementation. */
+ public WorkspaceClient withProviderListingsImpl(ProviderListingsService providerListings) {
+ return this.withProviderListingsAPI(new ProviderListingsAPI(providerListings));
+ }
+
+ /** Replace the default ProviderListingsAPI with a custom implementation. */
+ public WorkspaceClient withProviderListingsAPI(ProviderListingsAPI providerListings) {
+ this.providerListingsAPI = providerListings;
+ return this;
+ }
+
+ /** Replace the default ProviderPersonalizationRequestsService with a custom implementation. */
+ public WorkspaceClient withProviderPersonalizationRequestsImpl(
+ ProviderPersonalizationRequestsService providerPersonalizationRequests) {
+ return this.withProviderPersonalizationRequestsAPI(
+ new ProviderPersonalizationRequestsAPI(providerPersonalizationRequests));
+ }
+
+ /** Replace the default ProviderPersonalizationRequestsAPI with a custom implementation. */
+ public WorkspaceClient withProviderPersonalizationRequestsAPI(
+ ProviderPersonalizationRequestsAPI providerPersonalizationRequests) {
+ this.providerPersonalizationRequestsAPI = providerPersonalizationRequests;
+ return this;
+ }
+
+ /**
+ * Replace the default ProviderProviderAnalyticsDashboardsService with a custom implementation.
+ */
+ public WorkspaceClient withProviderProviderAnalyticsDashboardsImpl(
+ ProviderProviderAnalyticsDashboardsService providerProviderAnalyticsDashboards) {
+ return this.withProviderProviderAnalyticsDashboardsAPI(
+ new ProviderProviderAnalyticsDashboardsAPI(providerProviderAnalyticsDashboards));
+ }
+
+ /** Replace the default ProviderProviderAnalyticsDashboardsAPI with a custom implementation. */
+ public WorkspaceClient withProviderProviderAnalyticsDashboardsAPI(
+ ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboards) {
+ this.providerProviderAnalyticsDashboardsAPI = providerProviderAnalyticsDashboards;
+ return this;
+ }
+
+ /** Replace the default ProviderProvidersService with a custom implementation. */
+ public WorkspaceClient withProviderProvidersImpl(ProviderProvidersService providerProviders) {
+ return this.withProviderProvidersAPI(new ProviderProvidersAPI(providerProviders));
+ }
+
+ /** Replace the default ProviderProvidersAPI with a custom implementation. */
+ public WorkspaceClient withProviderProvidersAPI(ProviderProvidersAPI providerProviders) {
+ this.providerProvidersAPI = providerProviders;
+ return this;
+ }
+
/** Replace the default ProvidersService with a custom implementation. */
public WorkspaceClient withProvidersImpl(ProvidersService providers) {
return this.withProvidersAPI(new ProvidersAPI(providers));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java
index 95072a4b8..6660ce571 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java
@@ -4,6 +4,9 @@
import com.databricks.sdk.core.http.HttpClient;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.core.http.Response;
+import com.databricks.sdk.core.retry.RequestBasedRetryStrategyPicker;
+import com.databricks.sdk.core.retry.RetryStrategy;
+import com.databricks.sdk.core.retry.RetryStrategyPicker;
import com.databricks.sdk.core.utils.SerDeUtils;
import com.databricks.sdk.core.utils.SystemTimer;
import com.databricks.sdk.core.utils.Timer;
@@ -14,6 +17,8 @@
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,7 +40,9 @@ public class ApiClient {
private final HttpClient httpClient;
private final BodyLogger bodyLogger;
+ private final RetryStrategyPicker retryStrategyPicker;
private final Timer timer;
+ private static final String RETRY_AFTER_HEADER = "retry-after";
public ApiClient() {
this(ConfigLoader.getDefault());
@@ -63,11 +70,12 @@ public ApiClient(DatabricksConfig config, Timer timer) {
debugTruncateBytes = 96;
}
- maxAttempts = 3;
+ maxAttempts = 4;
mapper = SerDeUtils.createMapper();
random = new Random();
httpClient = config.getHttpClient();
bodyLogger = new BodyLogger(mapper, 1024, debugTruncateBytes);
+ retryStrategyPicker = new RequestBasedRetryStrategyPicker(this.config);
this.timer = timer;
}
@@ -142,6 +150,14 @@ public <I, O> O GET(String path, I in, Class<O> target, Map<String, String> head
}
}
+ public <O> O POST(String path, Class<O> target, Map<String, String> headers) {
+ try {
+ return execute(prepareRequest("POST", path, null, headers), target);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
public <I, O> O POST(String path, I in, Class<O> target, Map<String, String> headers) {
try {
return execute(prepareRequest("POST", path, in, headers), target);
@@ -220,6 +236,7 @@ private Response getResponse(Request in) {
}
private Response executeInner(Request in) {
+ RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in);
int attemptNumber = 0;
while (true) {
attemptNumber++;
@@ -249,18 +266,15 @@ private Response executeInner(Request in) {
LOG.debug("Request {} failed", in, e);
}
- // The request is not retried under three conditions:
- // 1. The request succeeded (err == null, out != null). In this case, the response is
- // returned.
- // 2. The request failed with a non-retriable error (err != null, out == null).
- // 3. The request failed with a retriable error, but the number of attempts exceeds
- // maxAttempts.
- DatabricksError res = ApiErrors.checkForRetry(out, err);
- if (!res.isRetriable()) {
- if (res.getErrorCode() == null) {
- return out;
- }
- throw res;
+ // Check if the request succeeded
+ if (isRequestSuccessful(out, err)) {
+ return out;
+ }
+ // The request did not succeed.
+ // Check if the request cannot be retried: if yes, retry after backoff, else throw the error.
+ DatabricksError databricksError = ApiErrors.getDatabricksError(out, err);
+ if (!retryStrategy.isRetriable(databricksError)) {
+ throw databricksError;
}
if (attemptNumber == maxAttempts) {
throw new DatabricksException(
@@ -268,24 +282,60 @@ private Response executeInner(Request in) {
}
// Retry after a backoff.
- int sleepMillis = getBackoffMillis(attemptNumber);
+ long sleepMillis = getBackoffMillis(out, attemptNumber);
LOG.debug(String.format("Retry %s in %dms", in.getRequestLine(), sleepMillis));
try {
- timer.wait(sleepMillis);
+ timer.sleep(sleepMillis);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
}
}
- private int getBackoffMillis(int attemptNumber) {
- int maxWait = 10000;
+ private boolean isRequestSuccessful(Response response, Exception e) {
+ return e == null && response.getStatusCode() >= 200 && response.getStatusCode() < 300;
+ }
+
+ public long getBackoffMillis(Response response, int attemptNumber) {
+ Optional<Long> backoffMillisInResponse = getBackoffFromRetryAfterHeader(response);
+ if (backoffMillisInResponse.isPresent()) {
+ return backoffMillisInResponse.get();
+ }
+ int minWait = 1000; // 1 second
+ int maxWait = 60000; // 1 minute
int minJitter = 50;
int maxJitter = 750;
- int wait = Math.min(maxWait, attemptNumber * 1000);
- wait += random.nextInt(maxJitter - minJitter + 1) + minJitter;
- return wait;
+ int wait = Math.min(maxWait, minWait * (1 << (attemptNumber - 1)));
+ int jitter = random.nextInt(maxJitter - minJitter + 1) + minJitter;
+ return wait + jitter;
+ }
+
+ public static Optional<Long> getBackoffFromRetryAfterHeader(Response response) {
+ if (response == null) return Optional.empty();
+ List<String> retryAfterHeader = response.getHeaders(RETRY_AFTER_HEADER);
+ if (retryAfterHeader == null) {
+ return Optional.empty();
+ }
+ long waitTime = 0;
+ for (String retryAfter : retryAfterHeader) {
+ try {
+ // Datetime in header is always in GMT
+ ZonedDateTime retryAfterDate =
+ ZonedDateTime.parse(retryAfter, DateTimeFormatter.RFC_1123_DATE_TIME);
+ ZonedDateTime now = ZonedDateTime.now();
+ waitTime = java.time.Duration.between(now, retryAfterDate).getSeconds();
+ } catch (Exception e) {
+ // If not a date, assume it is seconds
+ try {
+ waitTime = Long.parseLong(retryAfter);
+ } catch (NumberFormatException nfe) {
+ // Just fallback to using exponential backoff
+ return Optional.empty();
+ }
+ }
+ }
+ return Optional.of(waitTime * 1000);
}
private String makeLogRecord(Request in, Response out) {
@@ -349,19 +399,24 @@ private <T> void fillInHeaders(T target, Response response) {
if (firstHeader == null) {
continue;
}
- try {
- field.setAccessible(true);
- if (field.getType() == String.class) {
- field.set(target, firstHeader);
- } else if (field.getType() == Long.class) {
- field.set(target, Long.parseLong(firstHeader));
- } else {
- LOG.warn("Unsupported header type: " + field.getType());
+ // Synchronize on field across all methods which alter its accessibility to ensure
+ // multi threaded access of these objects (e.g. in the example of concurrent creation of
+ // workspace clients or config resolution) are safe
+ synchronized (field) {
+ try {
+ field.setAccessible(true);
+ if (field.getType() == String.class) {
+ field.set(target, firstHeader);
+ } else if (field.getType() == Long.class) {
+ field.set(target, Long.parseLong(firstHeader));
+ } else {
+ LOG.warn("Unsupported header type: " + field.getType());
+ }
+ } catch (IllegalAccessException e) {
+ throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e);
+ } finally {
+ field.setAccessible(false);
}
- } catch (IllegalAccessException e) {
- throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e);
- } finally {
- field.setAccessible(false);
}
}
}
@@ -380,13 +435,18 @@ public <T> void deserialize(Response response, T object) throws IOException {
Optional<Field> contentsField = getContentsField(object);
if (contentsField.isPresent()) {
Field field = contentsField.get();
- try {
- field.setAccessible(true);
- field.set(object, response.getBody());
- } catch (IllegalAccessException e) {
- throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e);
- } finally {
- field.setAccessible(false);
+ // Synchronize on field across all methods which alter its accessibility to ensure
+ // multi threaded access of these objects (e.g. in the example of concurrent creation of
+ // workspace clients or config resolution) are safe
+ synchronized (field) {
+ try {
+ field.setAccessible(true);
+ field.set(object, response.getBody());
+ } catch (IllegalAccessException e) {
+ throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e);
+ } finally {
+ field.setAccessible(false);
+ }
}
} else if (response.getBody() != null) {
mapper.readerForUpdating(object).readValue(response.getBody());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java
index f67bd15e7..73cb3cba2 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java
@@ -36,22 +36,33 @@ public String getEnv(Map<String, String> getEnv) {
}
public void setValueOnConfig(DatabricksConfig cfg, String value) throws IllegalAccessException {
- field.setAccessible(true);
- if (field.getType() == String.class) {
- field.set(cfg, value);
- } else if (field.getType() == int.class) {
- field.set(cfg, Integer.parseInt(value));
- } else if (field.getType() == boolean.class) {
- field.set(cfg, Boolean.parseBoolean(value));
+ // Synchronize on field across all methods which alter its accessibility to ensure
+ // multi threaded access of these objects (e.g. in the example of concurrent creation of
+ // workspace clients or config resolution) are safe
+ synchronized (field) {
+ field.setAccessible(true);
+ if (field.getType() == String.class) {
+ field.set(cfg, value);
+ } else if (field.getType() == int.class) {
+ field.set(cfg, Integer.parseInt(value));
+ } else if (field.getType() == boolean.class) {
+ field.set(cfg, Boolean.parseBoolean(value));
+ } else if (field.getType() == ProxyConfig.ProxyAuthType.class) {
+ if (value != null) {
+ field.set(cfg, ProxyConfig.ProxyAuthType.valueOf(value));
+ }
+ }
+ field.setAccessible(false);
}
- field.setAccessible(false);
}
public Object getValueFromConfig(DatabricksConfig cfg) throws IllegalAccessException {
- field.setAccessible(true);
- Object value = field.get(cfg);
- field.setAccessible(false);
- return value;
+ synchronized (field) {
+ field.setAccessible(true);
+ Object value = field.get(cfg);
+ field.setAccessible(false);
+ return value;
+ }
}
public String getAuthType() {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
index 35c7e2d35..1c4f06389 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
@@ -114,6 +114,24 @@ public class DatabricksConfig {
@ConfigAttribute(env = "DATABRICKS_RATE_LIMIT")
private Integer rateLimit;
+ @ConfigAttribute(env = "PROXY_HOST")
+ private String proxyHost;
+
+ @ConfigAttribute(env = "PROXY_PORT")
+ private Integer proxyPort;
+
+ @ConfigAttribute(env = "PROXY_USERNAME")
+ private String proxyUsername;
+
+ @ConfigAttribute(env = "PROXY_PASSWORD")
+ private String proxyPassword;
+
+ @ConfigAttribute(env = "PROXY_AUTH_TYPE")
+ private ProxyConfig.ProxyAuthType proxyAuthType;
+
+ @ConfigAttribute(env = "USE_SYSTEM_PROPERTIES_HTTP")
+ private Boolean useSystemPropertiesHttp;
+
private volatile boolean resolved;
private HeaderFactory headerFactory;
@@ -156,12 +174,8 @@ private void initHttp() {
if (httpClient != null) {
return;
}
- int timeout = 300;
- if (httpTimeoutSeconds != null) {
- timeout = httpTimeoutSeconds;
- }
// eventually it'll get decoupled from config.
- httpClient = new CommonsHttpClient(timeout);
+ httpClient = new CommonsHttpClient(this);
}
public synchronized Map<String, String> authenticate() throws DatabricksException {
@@ -462,6 +476,60 @@ public DatabricksConfig setHttpClient(HttpClient httpClient) {
return this;
}
+ public String getProxyHost() {
+ return proxyHost;
+ }
+
+ public DatabricksConfig setProxyHost(String proxyHost) {
+ this.proxyHost = proxyHost;
+ return this;
+ }
+
+ public Integer getProxyPort() {
+ return proxyPort;
+ }
+
+ public DatabricksConfig setProxyPort(Integer proxyPort) {
+ this.proxyPort = proxyPort;
+ return this;
+ }
+
+ public String getProxyUsername() {
+ return proxyUsername;
+ }
+
+ public DatabricksConfig setProxyUsername(String proxyUsername) {
+ this.proxyUsername = proxyUsername;
+ return this;
+ }
+
+ public String getProxyPassword() {
+ return proxyPassword;
+ }
+
+ public DatabricksConfig setProxyPassword(String proxyPassword) {
+ this.proxyPassword = proxyPassword;
+ return this;
+ }
+
+ public ProxyConfig.ProxyAuthType getProxyAuthType() {
+ return proxyAuthType;
+ }
+
+ public DatabricksConfig setProxyAuthType(ProxyConfig.ProxyAuthType proxyAuthType) {
+ this.proxyAuthType = proxyAuthType;
+ return this;
+ }
+
+ public Boolean getUseSystemPropertiesHttp() {
+ return useSystemPropertiesHttp;
+ }
+
+ public DatabricksConfig setUseSystemPropertiesHttp(Boolean useSystemPropertiesHttp) {
+ this.useSystemPropertiesHttp = useSystemPropertiesHttp;
+ return this;
+ }
+
public boolean isAzure() {
return this.getDatabricksEnvironment().getCloud() == Cloud.AZURE;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java
index 6ad5aeafc..6538b6719 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java
@@ -1,15 +1,9 @@
package com.databricks.sdk.core;
import com.databricks.sdk.core.error.ErrorDetail;
-import java.net.ConnectException;
-import java.net.SocketException;
-import java.net.SocketTimeoutException;
-import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* The result of checking whether {@code ApiClient} should retry a request.
@@ -21,24 +15,6 @@
*/
public class DatabricksError extends DatabricksException {
private static final String ERROR_INFO_TYPE = "type.googleapis.com/google.rpc.ErrorInfo";
- private final Logger LOG = LoggerFactory.getLogger(getClass().getName());
-
- /** Errors returned by Databricks services which are known to be retriable. */
- private static final List<String> TRANSIENT_ERROR_STRING_MATCHES =
- Arrays.asList(
- "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException",
- "does not have any associated worker environments",
- "There is no worker environment with id",
- "Unknown worker environment",
- "ClusterNotReadyException");
-
- /**
- * Exception classes thrown by Java and Java libraries in which case the request should be
- * retried.
- */
- private static final List<Class<? extends Throwable>> RETRYABLE_CLASSES =
- Arrays.asList(SocketException.class, SocketTimeoutException.class, ConnectException.class);
-
private final String message;
private final Throwable cause;
private final String errorCode;
@@ -89,48 +65,15 @@ public String getErrorCode() {
return errorCode;
}
- int getStatusCode() {
+ public int getStatusCode() {
return statusCode;
}
- public boolean isMissing() {
- return statusCode == 404;
- }
-
- public boolean isTooManyRequests() {
- return statusCode == 429;
- }
-
- public boolean isRetriable() {
- if (isTooManyRequests()) {
- return true;
- }
- for (String substring : TRANSIENT_ERROR_STRING_MATCHES) {
- if (message != null && message.contains(substring)) {
- LOG.debug("Attempting retry because of {}", substring);
- return true;
- }
- }
- for (Class<? extends Throwable> clazz : RETRYABLE_CLASSES) {
- if (isCausedBy(cause, clazz)) {
- LOG.debug("Attempting retry because cause or nested cause extends {}", clazz.getName());
- return true;
- }
- }
- return false;
+ public Throwable getCause() {
+ return cause;
}
List<ErrorDetail> getDetailsByType(String type) {
return this.details.stream().filter(e -> e.getType().equals(type)).collect(Collectors.toList());
}
-
- private static boolean isCausedBy(Throwable throwable, Class<? extends Throwable> clazz) {
- if (throwable == null) {
- return false;
- }
- if (clazz.isInstance(throwable)) {
- return true;
- }
- return isCausedBy(throwable.getCause(), clazz);
- }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ProxyConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ProxyConfig.java
new file mode 100644
index 000000000..c06985eba
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ProxyConfig.java
@@ -0,0 +1,81 @@
+package com.databricks.sdk.core;
+
+public class ProxyConfig {
+ private String host;
+ private Integer port;
+ private String username;
+ private String password;
+ private ProxyAuthType proxyAuthType;
+ private Boolean useSystemProperties;
+
+ public enum ProxyAuthType {
+ // Currently we only support BASIC and SPNEGO
+ NONE,
+ BASIC,
+ // We only support kerberos for negotiate
+ SPNEGO
+ }
+
+ public ProxyConfig(DatabricksConfig config) {
+ this.host = config.getProxyHost();
+ this.port = config.getProxyPort();
+ this.username = config.getProxyUsername();
+ this.password = config.getProxyPassword();
+ this.proxyAuthType = config.getProxyAuthType();
+ this.useSystemProperties = config.getUseSystemPropertiesHttp();
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public ProxyConfig setHost(String host) {
+ this.host = host;
+ return this;
+ }
+
+ public Integer getPort() {
+ return port;
+ }
+
+ public ProxyConfig setPort(Integer port) {
+ this.port = port;
+ return this;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public ProxyConfig setUsername(String username) {
+ this.username = username;
+ return this;
+ }
+
+ public String getPassword() {
+ return password;
+ }
+
+ public ProxyConfig setPassword(String password) {
+ this.password = password;
+ return this;
+ }
+
+ public ProxyAuthType getProxyAuthType() {
+ return proxyAuthType;
+ }
+
+ public ProxyConfig setProxyAuthType(ProxyAuthType proxyAuthType) {
+ this.proxyAuthType = proxyAuthType;
+ return this;
+ }
+
+ public Boolean getUseSystemProperties() {
+ return useSystemProperties;
+ }
+
+ public ProxyConfig setUseSystemProperties(Boolean useSystemProperties) {
+ this.useSystemProperties = useSystemProperties;
+ return this;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java
index c7d7d23b7..176051091 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java
@@ -13,7 +13,7 @@ public class UserAgent {
// TODO: check if reading from
// /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties
// or getClass().getPackage().getImplementationVersion() is enough.
- private static final String version = "0.19.0";
+ private static final String version = "0.23.0";
public static void withProduct(String product, String productVersion) {
UserAgent.product = product;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java
index b84a09aa4..c834e2c50 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java
@@ -2,11 +2,14 @@
import static org.apache.http.entity.ContentType.APPLICATION_JSON;
+import com.databricks.sdk.core.DatabricksConfig;
import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.ProxyConfig;
import com.databricks.sdk.core.http.HttpClient;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.core.http.Response;
import com.databricks.sdk.core.utils.CustomCloseInputStream;
+import com.databricks.sdk.core.utils.ProxyUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
@@ -41,6 +44,20 @@ public CommonsHttpClient(int timeoutSeconds) {
hc = makeClosableHttpClient();
}
+ public CommonsHttpClient(DatabricksConfig databricksConfig) {
+ this(
+ databricksConfig.getHttpTimeoutSeconds() == null
+ ? 300
+ : databricksConfig.getHttpTimeoutSeconds(),
+ new ProxyConfig(databricksConfig));
+ }
+
+ public CommonsHttpClient(int timeoutSeconds, ProxyConfig proxyConfig) {
+ timeout = timeoutSeconds * 1000;
+ connectionManager.setMaxTotal(100);
+ hc = makeClosableHttpClient(proxyConfig);
+ }
+
private RequestConfig makeRequestConfig() {
return RequestConfig.custom()
.setConnectionRequestTimeout(timeout)
@@ -56,6 +73,15 @@ private CloseableHttpClient makeClosableHttpClient() {
.build();
}
+ private CloseableHttpClient makeClosableHttpClient(ProxyConfig proxyConfig) {
+ HttpClientBuilder builder =
+ HttpClientBuilder.create()
+ .setConnectionManager(connectionManager)
+ .setDefaultRequestConfig(makeRequestConfig());
+ ProxyUtils.setupProxy(proxyConfig, builder);
+ return builder.build();
+ }
+
@Override
public Response execute(Request in) throws IOException {
HttpUriRequest request = transformRequest(in);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/AbstractErrorMapper.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/AbstractErrorMapper.java
new file mode 100644
index 000000000..4142281fe
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/AbstractErrorMapper.java
@@ -0,0 +1,58 @@
+package com.databricks.sdk.core.error;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.http.Response;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+abstract class AbstractErrorMapper {
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractErrorMapper.class);
+
+ @FunctionalInterface
+ protected interface ErrorCodeRule {
+ DatabricksError create(String message, List<ErrorDetail> details);
+ }
+
+ @FunctionalInterface
+ protected interface StatusCodeRule {
+ DatabricksError create(String errorCode, String message, List<ErrorDetail> details);
+ }
+
+ public DatabricksError apply(Response resp, ApiErrorBody errorBody) {
+ for (ErrorOverride<?> override : ErrorOverrides.ALL_OVERRIDES) {
+ if (override.matches(errorBody, resp)) {
+ LOG.debug(
+ "Overriding error with {} (original status code: {}, original error code: {})",
+ override.getDebugName(),
+ resp.getStatusCode(),
+ errorBody.getErrorCode());
+ return override.makeError(errorBody);
+ }
+ }
+ int code = resp.getStatusCode();
+ String message = errorBody.getMessage();
+ String errorCode = errorBody.getErrorCode();
+ List<ErrorDetail> details = errorBody.getErrorDetails();
+ if (errorCodeMapping.containsKey(errorCode)) {
+ return errorCodeMapping.get(errorCode).create(message, details);
+ }
+ if (statusCodeMapping.containsKey(code)) {
+ return statusCodeMapping.get(code).create(errorCode, message, details);
+ }
+ return new DatabricksError(errorCode, message, code, details);
+ }
+
+ private final Map<Integer, StatusCodeRule> statusCodeMapping = new HashMap<>();
+ private final Map<String, ErrorCodeRule> errorCodeMapping = new HashMap<>();
+
+ protected void statusCode(int code, StatusCodeRule rule) {
+ statusCodeMapping.put(code, rule);
+ }
+
+ protected void errorCode(String errorCode, ErrorCodeRule rule) {
+ errorCodeMapping.put(errorCode, rule);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java
index c284fbfcf..dd2962b68 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java
@@ -15,8 +15,9 @@
public class ApiErrors {
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final Pattern HTML_ERROR_REGEX = Pattern.compile("<pre>(.*)</pre>");
+ private static final ErrorMapper ERROR_MAPPER = new ErrorMapper();
- public static DatabricksError checkForRetry(Response out, Exception error) {
+ public static DatabricksError getDatabricksError(Response out, Exception error) {
if (error != null) {
// If the endpoint did not respond to the request, interpret the exception.
return new DatabricksError("IO_ERROR", 523, error);
@@ -51,11 +52,7 @@ private static DatabricksError readErrorFromResponse(Response response) {
if (errorBody.getErrorDetails() == null) {
errorBody.setErrorDetails(Collections.emptyList());
}
- return new DatabricksError(
- errorBody.getErrorCode(),
- errorBody.getMessage(),
- response.getStatusCode(),
- errorBody.getErrorDetails());
+ return ERROR_MAPPER.apply(response, errorBody);
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java
new file mode 100644
index 000000000..1f38ceb1b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java
@@ -0,0 +1,39 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+class ErrorMapper extends AbstractErrorMapper {
+ public ErrorMapper() {
+ statusCode(400, com.databricks.sdk.core.error.platform.BadRequest::new);
+ statusCode(401, com.databricks.sdk.core.error.platform.Unauthenticated::new);
+ statusCode(403, com.databricks.sdk.core.error.platform.PermissionDenied::new);
+ statusCode(404, com.databricks.sdk.core.error.platform.NotFound::new);
+ statusCode(409, com.databricks.sdk.core.error.platform.ResourceConflict::new);
+ statusCode(429, com.databricks.sdk.core.error.platform.TooManyRequests::new);
+ statusCode(499, com.databricks.sdk.core.error.platform.Cancelled::new);
+ statusCode(500, com.databricks.sdk.core.error.platform.InternalError::new);
+ statusCode(501, com.databricks.sdk.core.error.platform.NotImplemented::new);
+ statusCode(503, com.databricks.sdk.core.error.platform.TemporarilyUnavailable::new);
+ statusCode(504, com.databricks.sdk.core.error.platform.DeadlineExceeded::new);
+
+ errorCode(
+ "INVALID_PARAMETER_VALUE",
+ com.databricks.sdk.core.error.platform.InvalidParameterValue::new);
+ errorCode(
+ "RESOURCE_DOES_NOT_EXIST",
+ com.databricks.sdk.core.error.platform.ResourceDoesNotExist::new);
+ errorCode("ABORTED", com.databricks.sdk.core.error.platform.Aborted::new);
+ errorCode("ALREADY_EXISTS", com.databricks.sdk.core.error.platform.AlreadyExists::new);
+ errorCode(
+ "RESOURCE_ALREADY_EXISTS",
+ com.databricks.sdk.core.error.platform.ResourceAlreadyExists::new);
+ errorCode("RESOURCE_EXHAUSTED", com.databricks.sdk.core.error.platform.ResourceExhausted::new);
+ errorCode(
+ "REQUEST_LIMIT_EXCEEDED", com.databricks.sdk.core.error.platform.RequestLimitExceeded::new);
+ errorCode("UNKNOWN", com.databricks.sdk.core.error.platform.Unknown::new);
+ errorCode("DATA_LOSS", com.databricks.sdk.core.error.platform.DataLoss::new);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverride.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverride.java
new file mode 100644
index 000000000..cb35d0544
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverride.java
@@ -0,0 +1,90 @@
+package com.databricks.sdk.core.error;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Response;
+import java.lang.reflect.Constructor;
+import java.util.List;
+import java.util.regex.Pattern;
+
+public class ErrorOverride<T extends DatabricksError> {
+ private final String debugName;
+ private final Pattern pathRegex;
+ private final String verb;
+ private final Pattern statusCodeMatcher;
+ private final Pattern errorCodeMatcher;
+ private final Pattern messageMatcher;
+ private final Class<T> customError;
+
+ public ErrorOverride(
+ String debugName,
+ String pathRegex,
+ String verb,
+ String statusCodeMatcher,
+ String errorCodeMatcher,
+ String messageMatcher,
+ Class<T> customError) {
+ this.debugName = debugName;
+ this.pathRegex = ErrorOverride.compilePattern(pathRegex);
+ this.verb = verb;
+ this.statusCodeMatcher = ErrorOverride.compilePattern(statusCodeMatcher);
+ this.errorCodeMatcher = ErrorOverride.compilePattern(errorCodeMatcher);
+ this.messageMatcher = ErrorOverride.compilePattern(messageMatcher);
+ this.customError = customError;
+ }
+
+ public boolean matches(ApiErrorBody body, Response resp) {
+ if (!resp.getRequest().getMethod().equals(this.verb)) {
+ return false;
+ }
+
+ if (this.pathRegex != null
+ && !this.pathRegex.matcher(resp.getRequest().getUri().getPath()).matches()) {
+ return false;
+ }
+ String statusCode = Integer.toString(resp.getStatusCode());
+ if (this.statusCodeMatcher != null && !this.statusCodeMatcher.matcher(statusCode).matches()) {
+ return false;
+ }
+ if (this.errorCodeMatcher != null
+ && !this.errorCodeMatcher.matcher(body.getErrorCode()).matches()) {
+ return false;
+ }
+ // Allow matching substring of the error message.
+ if (this.messageMatcher != null && !this.messageMatcher.matcher(body.getMessage()).find()) {
+ return false;
+ }
+ return true;
+ }
+
+ public String getDebugName() {
+ return this.debugName;
+ }
+
+ public T makeError(ApiErrorBody body) {
+ Constructor<?>[] constructors = this.customError.getConstructors();
+ for (Constructor> constructor : constructors) {
+ Class<?>[] parameterTypes = constructor.getParameterTypes();
+ // All errors have a 2-argument constructor for the message and the error body.
+ if (parameterTypes.length == 2
+ && parameterTypes[0].equals(String.class)
+ && parameterTypes[1].equals(List.class)) {
+ try {
+ return (T) constructor.newInstance(body.getMessage(), body.getErrorDetails());
+ } catch (Exception e) {
+ throw new DatabricksException(
+ "Error creating custom error for error type " + this.customError.getName(), e);
+ }
+ }
+ }
+ throw new DatabricksException(
+ "No suitable constructor found for error type " + this.customError.getName());
+ }
+
+ private static Pattern compilePattern(String pattern) {
+ if (pattern == null || pattern.isEmpty()) {
+ return null;
+ }
+ return Pattern.compile(pattern);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java
new file mode 100755
index 000000000..e269a6d99
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java
@@ -0,0 +1,29 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error;
+
+import com.databricks.sdk.support.Generated;
+import java.util.Arrays;
+import java.util.List;
+
+@Generated
+class ErrorOverrides {
+ static final List<ErrorOverride<?>> ALL_OVERRIDES =
+ Arrays.asList(
+ new ErrorOverride<>(
+ "Clusters InvalidParameterValue=>ResourceDoesNotExist",
+ "^/api/2\\.\\d/clusters/get",
+ "GET",
+ "^400$",
+ "INVALID_PARAMETER_VALUE",
+ "Cluster .* does not exist",
+ com.databricks.sdk.core.error.platform.ResourceDoesNotExist.class),
+ new ErrorOverride<>(
+ "Jobs InvalidParameterValue=>ResourceDoesNotExist",
+ "^/api/2\\.\\d/jobs/get",
+ "GET",
+ "^400$",
+ "INVALID_PARAMETER_VALUE",
+ "Job .* does not exist",
+ com.databricks.sdk.core.error.platform.ResourceDoesNotExist.class));
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java
new file mode 100755
index 000000000..bac89a9ec
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/**
+ * the operation was aborted, typically due to a concurrency issue such as a sequencer check failure
+ */
+@Generated
+public class Aborted extends ResourceConflict {
+ public Aborted(String message, List<ErrorDetail> details) {
+ super("ABORTED", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java
new file mode 100755
index 000000000..6396f5a92
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** operation was rejected due a conflict with an existing resource */
+@Generated
+public class AlreadyExists extends ResourceConflict {
+ public AlreadyExists(String message, List<ErrorDetail> details) {
+ super("ALREADY_EXISTS", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java
new file mode 100755
index 000000000..12e2dd0e8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the request is invalid */
+@Generated
+public class BadRequest extends DatabricksError {
+ public BadRequest(String message, List<ErrorDetail> details) {
+ super("BAD_REQUEST", message, 400, details);
+ }
+
+ public BadRequest(String errorCode, String message, List<ErrorDetail> details) {
+ super(errorCode, message, 400, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java
new file mode 100755
index 000000000..03850d441
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the operation was explicitly canceled by the caller */
+@Generated
+public class Cancelled extends DatabricksError {
+ public Cancelled(String message, List<ErrorDetail> details) {
+ super("CANCELLED", message, 499, details);
+ }
+
+ public Cancelled(String errorCode, String message, List<ErrorDetail> details) {
+ super(errorCode, message, 499, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java
new file mode 100755
index 000000000..61b39e36f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** unrecoverable data loss or corruption */
+@Generated
+public class DataLoss extends InternalError {
+ public DataLoss(String message, List<ErrorDetail> details) {
+ super("DATA_LOSS", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java
new file mode 100755
index 000000000..79c011643
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the deadline expired before the operation could complete */
+@Generated
+public class DeadlineExceeded extends DatabricksError {
+ public DeadlineExceeded(String message, List<ErrorDetail> details) {
+ super("DEADLINE_EXCEEDED", message, 504, details);
+ }
+
+ public DeadlineExceeded(String errorCode, String message, List<ErrorDetail> details) {
+ super(errorCode, message, 504, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java
new file mode 100755
index 000000000..db70ccb69
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** some invariants expected by the underlying system have been broken */
+@Generated
+public class InternalError extends DatabricksError {
+ public InternalError(String message, List<ErrorDetail> details) {
+ super("INTERNAL_ERROR", message, 500, details);
+ }
+
+ public InternalError(String errorCode, String message, List<ErrorDetail> details) {
+ super(errorCode, message, 500, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java
new file mode 100755
index 000000000..4f4fb3757
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** supplied value for a parameter was invalid */
+@Generated
+public class InvalidParameterValue extends BadRequest {
+ public InvalidParameterValue(String message, List<ErrorDetail> details) {
+ super("INVALID_PARAMETER_VALUE", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java
new file mode 100755
index 000000000..30939c6d9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the operation was performed on a resource that does not exist */
+@Generated
+public class NotFound extends DatabricksError {
+ public NotFound(String message, List<ErrorDetail> details) {
+ super("NOT_FOUND", message, 404, details);
+ }
+
+ public NotFound(String errorCode, String message, List<ErrorDetail> details) {
+ super(errorCode, message, 404, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java
new file mode 100755
index 000000000..4761ee9ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the operation is not implemented or is not supported/enabled in this service */
+@Generated
+public class NotImplemented extends DatabricksError {
+  public NotImplemented(String message, List<ErrorDetail> details) {
+    super("NOT_IMPLEMENTED", message, 501, details);
+  }
+
+  public NotImplemented(String errorCode, String message, List<ErrorDetail> details) {
+    super(errorCode, message, 501, details);
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java
new file mode 100755
index 000000000..96dddf2f7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the caller does not have permission to execute the specified operation */
+@Generated
+public class PermissionDenied extends DatabricksError {
+  public PermissionDenied(String message, List<ErrorDetail> details) {
+    super("PERMISSION_DENIED", message, 403, details);
+  }
+
+  public PermissionDenied(String errorCode, String message, List<ErrorDetail> details) {
+    super(errorCode, message, 403, details);
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java
new file mode 100755
index 000000000..4ac0bd2b0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** cluster request was rejected because it would exceed a resource limit */
+@Generated
+public class RequestLimitExceeded extends TooManyRequests {
+  public RequestLimitExceeded(String message, List<ErrorDetail> details) {
+ super("REQUEST_LIMIT_EXCEEDED", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java
new file mode 100755
index 000000000..5ca274901
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** operation was rejected due to a conflict with an existing resource */
+@Generated
+public class ResourceAlreadyExists extends ResourceConflict {
+  public ResourceAlreadyExists(String message, List<ErrorDetail> details) {
+ super("RESOURCE_ALREADY_EXISTS", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java
new file mode 100755
index 000000000..8530091a8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** maps to all HTTP 409 (Conflict) responses */
+@Generated
+public class ResourceConflict extends DatabricksError {
+  public ResourceConflict(String message, List<ErrorDetail> details) {
+    super("RESOURCE_CONFLICT", message, 409, details);
+  }
+
+  public ResourceConflict(String errorCode, String message, List<ErrorDetail> details) {
+    super(errorCode, message, 409, details);
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java
new file mode 100755
index 000000000..23952007f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** operation was performed on a resource that does not exist */
+@Generated
+public class ResourceDoesNotExist extends NotFound {
+  public ResourceDoesNotExist(String message, List<ErrorDetail> details) {
+ super("RESOURCE_DOES_NOT_EXIST", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java
new file mode 100755
index 000000000..4652c9a3d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** operation is rejected due to per-user rate limiting */
+@Generated
+public class ResourceExhausted extends TooManyRequests {
+  public ResourceExhausted(String message, List<ErrorDetail> details) {
+ super("RESOURCE_EXHAUSTED", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java
new file mode 100755
index 000000000..111b909cc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the service is currently unavailable */
+@Generated
+public class TemporarilyUnavailable extends DatabricksError {
+  public TemporarilyUnavailable(String message, List<ErrorDetail> details) {
+    super("TEMPORARILY_UNAVAILABLE", message, 503, details);
+  }
+
+  public TemporarilyUnavailable(String errorCode, String message, List<ErrorDetail> details) {
+    super(errorCode, message, 503, details);
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java
new file mode 100755
index 000000000..d8b7bf611
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** maps to HTTP code: 429 Too Many Requests */
+@Generated
+public class TooManyRequests extends DatabricksError {
+  public TooManyRequests(String message, List<ErrorDetail> details) {
+    super("TOO_MANY_REQUESTS", message, 429, details);
+  }
+
+  public TooManyRequests(String errorCode, String message, List<ErrorDetail> details) {
+    super(errorCode, message, 429, details);
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java
new file mode 100755
index 000000000..5d191b3e6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.DatabricksError;
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** the request does not have valid authentication (AuthN) credentials for the operation */
+@Generated
+public class Unauthenticated extends DatabricksError {
+  public Unauthenticated(String message, List<ErrorDetail> details) {
+    super("UNAUTHENTICATED", message, 401, details);
+  }
+
+  public Unauthenticated(String errorCode, String message, List<ErrorDetail> details) {
+    super(errorCode, message, 401, details);
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java
new file mode 100755
index 000000000..8e5096bed
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.core.error.platform;
+
+import com.databricks.sdk.core.error.ErrorDetail;
+import com.databricks.sdk.support.Generated;
+import java.util.List;
+
+/** this error is used as a fallback if the platform-side mapping is missing for some reason */
+@Generated
+public class Unknown extends InternalError {
+  public Unknown(String message, List<ErrorDetail> details) {
+ super("UNKNOWN", message, details);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/Encoding.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/Encoding.java
new file mode 100644
index 000000000..fc2fea902
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/Encoding.java
@@ -0,0 +1,102 @@
+package com.databricks.sdk.core.http;
+
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.BitSet;
+
+/**
+ * Utility class for encoding strings for use in URLs.
+ *
+ * Adapted from URLEncodedUtils.java from Apache's HttpClient library.
+ */
+public class Encoding {
+
+ /**
+ * Unreserved characters, i.e. alphanumeric, plus: {@code _ - ! . ~ ' ( ) *}
+ *
+ *
+   * <p>This list is the same as the {@code unreserved} list in RFC 2396
+ */
+ private static final BitSet UNRESERVED = new BitSet(256);
+
+ /**
+ * Characters which are safe to use in a path, excluding /, i.e. {@link #UNRESERVED} plus
+ * punctuation plus @
+ */
+ private static final BitSet PATHSAFE = new BitSet(256);
+
+ /** Characters which are safe to use in a path, including /. */
+ private static final BitSet PATH_SPECIAL = new BitSet(256);
+
+ static {
+ // unreserved chars
+ // alpha characters
+ for (int i = 'a'; i <= 'z'; i++) {
+ UNRESERVED.set(i);
+ }
+ for (int i = 'A'; i <= 'Z'; i++) {
+ UNRESERVED.set(i);
+ }
+ // numeric characters
+ for (int i = '0'; i <= '9'; i++) {
+ UNRESERVED.set(i);
+ }
+ UNRESERVED.set('_'); // these are the characters of the "mark" list
+ UNRESERVED.set('-');
+ UNRESERVED.set('.');
+ UNRESERVED.set('*');
+ UNRESERVED.set('!');
+ UNRESERVED.set('~');
+ UNRESERVED.set('\'');
+ UNRESERVED.set('(');
+ UNRESERVED.set(')');
+
+ // URL path safe
+ PATHSAFE.or(UNRESERVED);
+ PATHSAFE.set(';'); // param separator
+ PATHSAFE.set(':'); // RFC 2396
+ PATHSAFE.set('@');
+ PATHSAFE.set('&');
+ PATHSAFE.set('=');
+ PATHSAFE.set('+');
+ PATHSAFE.set('$');
+ PATHSAFE.set(',');
+
+ PATH_SPECIAL.or(PATHSAFE);
+ PATH_SPECIAL.set('/');
+ }
+
+ private static final int RADIX = 16;
+
+ private static String urlEncode(
+ final String content,
+ final Charset charset,
+ final BitSet safechars,
+ final boolean blankAsPlus) {
+ if (content == null) {
+ return null;
+ }
+ final StringBuilder buf = new StringBuilder();
+ final ByteBuffer bb = charset.encode(content);
+ while (bb.hasRemaining()) {
+ final int b = bb.get() & 0xff;
+ if (safechars.get(b)) {
+ buf.append((char) b);
+ } else if (blankAsPlus && b == ' ') {
+ buf.append('+');
+ } else {
+ buf.append("%");
+ final char hex1 = Character.toUpperCase(Character.forDigit((b >> 4) & 0xF, RADIX));
+ final char hex2 = Character.toUpperCase(Character.forDigit(b & 0xF, RADIX));
+ buf.append(hex1);
+ buf.append(hex2);
+ }
+ }
+ return buf.toString();
+ }
+
+ public static String encodeMultiSegmentPathParameter(String param) {
+ return urlEncode(param, StandardCharsets.UTF_8, PATH_SPECIAL, false);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/IdempotentRequestRetryStrategy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/IdempotentRequestRetryStrategy.java
new file mode 100644
index 000000000..8f60bf04e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/IdempotentRequestRetryStrategy.java
@@ -0,0 +1,56 @@
+package com.databricks.sdk.core.retry;
+
+import com.databricks.sdk.core.DatabricksError;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * This class is used to determine if an idempotent request should be retried. An idempotent request
+ * should always be retried except if the error is non-recoverable.
+ */
+public class IdempotentRequestRetryStrategy implements RetryStrategy {
+
+  private static final Set<Class<? extends Throwable>> NON_RETRIABLE_EXCEPTIONS =
+ new HashSet<>(
+ Arrays.asList(
+ IllegalArgumentException.class,
+ IllegalStateException.class,
+ UnsupportedOperationException.class,
+ IndexOutOfBoundsException.class,
+ NullPointerException.class,
+ ClassCastException.class,
+ NumberFormatException.class,
+ ArrayIndexOutOfBoundsException.class,
+ ArrayStoreException.class,
+ ArithmeticException.class,
+ NegativeArraySizeException.class));
+
+  private static final Set<Integer> NON_RETRIABLE_HTTP_CODES =
+ new HashSet<>(Arrays.asList(400, 401, 403, 404, 405, 409, 410, 411, 412, 413, 414, 415, 416));
+
+ @Override
+ public boolean isRetriable(DatabricksError databricksError) {
+ if (RetryUtils.isCausedByTransientError(databricksError)) {
+ return true;
+ }
+ if (isNonRetriableException(databricksError)) {
+ return false;
+ }
+ if (isNonRetriableHttpCode(databricksError)) {
+ return false;
+ }
+ return true;
+ }
+
+ private boolean isNonRetriableException(DatabricksError databricksError) {
+ if (databricksError.getCause() == null) {
+ return false;
+ }
+ return NON_RETRIABLE_EXCEPTIONS.contains(databricksError.getCause().getClass());
+ }
+
+ private boolean isNonRetriableHttpCode(DatabricksError databricksError) {
+ return NON_RETRIABLE_HTTP_CODES.contains(databricksError.getStatusCode());
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategy.java
new file mode 100644
index 000000000..db4740700
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategy.java
@@ -0,0 +1,69 @@
+package com.databricks.sdk.core.retry;
+
+import com.databricks.sdk.core.DatabricksError;
+import java.net.*;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class is used to determine if a non-idempotent request should be retried. We essentially
+ * want to ensure that any request that could have potentially been processed by the server is not
+ * retried.
+ */
+public class NonIdempotentRequestRetryStrategy implements RetryStrategy {
+ private final Logger LOGGER = LoggerFactory.getLogger(getClass().getName());
+
+  private static final List<Class<? extends Throwable>> RETRIABLE_CLASSES =
+ Arrays.asList(
+ ConnectException.class,
+ UnknownHostException.class,
+ NoRouteToHostException.class,
+ PortUnreachableException.class);
+
+  private static final Set<Integer> RETRIABLE_HTTP_CODES =
+ new HashSet<>(
+ Arrays.asList(
+ /* Too many requests */ 429, /* Request not processed by server */ 501, 503));
+
+ @Override
+ public boolean isRetriable(DatabricksError databricksError) {
+ if (RetryUtils.isCausedByTransientError(databricksError)) {
+ return true;
+ }
+ if (isClientSideException(databricksError)) {
+ return true;
+ }
+ if (isRetriableHttpErrorCode(databricksError)) {
+ return true;
+ }
+ return false;
+ }
+
+ private boolean isRetriableHttpErrorCode(DatabricksError databricksError) {
+ return RETRIABLE_HTTP_CODES.contains(databricksError.getStatusCode());
+ }
+
+ private boolean isClientSideException(DatabricksError error) {
+    for (Class<? extends Throwable> clazz : RETRIABLE_CLASSES) {
+ if (isCausedBy(error.getCause(), clazz)) {
+ LOGGER.debug("Attempting retry because cause or nested cause extends {}", clazz.getName());
+ return true;
+ }
+ }
+ return false;
+ }
+
+  private static boolean isCausedBy(Throwable throwable, Class<? extends Throwable> clazz) {
+ if (throwable == null) {
+ return false;
+ }
+ if (clazz.isInstance(throwable)) {
+ return true;
+ }
+ return isCausedBy(throwable.getCause(), clazz);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPicker.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPicker.java
new file mode 100644
index 000000000..8b4105c05
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPicker.java
@@ -0,0 +1,83 @@
+package com.databricks.sdk.core.retry;
+
+import com.databricks.sdk.core.DatabricksConfig;
+import com.databricks.sdk.core.http.Request;
+import java.util.AbstractMap;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+/**
+ * A RetryStrategyPicker that selects a retry strategy based on whether the request is idempotent or
+ * not.
+ */
+public class RequestBasedRetryStrategyPicker implements RetryStrategyPicker {
+  private static final List<Request> IDEMPOTENT_REQUESTS =
+ Arrays.asList(
+ // Create a new session v1.0
+ new Request("POST", "/api/2.0/sql/statements/sessions/"),
+ // Create a new session v2.0
+ new Request("POST", "/api/2.0/sql/sessions/"),
+ // Delete an existing session v1.0
+ new Request("DELETE", "/api/2.0/sql/statements/sessions/.*"),
+ // Delete an existing session v2.0
+ new Request("DELETE", "/api/2.0/sql/sessions/.*"),
+ // Get status of a statement
+ new Request("GET", "/api/2.0/sql/statements/.*"),
+ // Close a statement
+ new Request("DELETE", "/api/2.0/sql/statements/.*"),
+ // Fetch a chunk of a statement result
+ new Request("GET", "/api/2.0/sql/statements/.*/result/chunks/.*"));
+
+  private final List<Map.Entry<String, Pattern>> idempotentRequestsPattern;
+ private static final NonIdempotentRequestRetryStrategy NON_IDEMPOTENT_RETRY_STRATEGY =
+ new NonIdempotentRequestRetryStrategy();
+ private static final IdempotentRequestRetryStrategy IDEMPOTENT_RETRY_STRATEGY =
+ new IdempotentRequestRetryStrategy();
+
+ public RequestBasedRetryStrategyPicker(DatabricksConfig config) {
+ this.idempotentRequestsPattern =
+ IDEMPOTENT_REQUESTS.stream()
+ .map(
+ request ->
+ new AbstractMap.SimpleEntry<>(
+ request.getMethod(),
+ Pattern.compile(
+ config.getHost() + request.getUrl(), Pattern.CASE_INSENSITIVE)))
+ .collect(Collectors.toList());
+ }
+
+ /**
+ * This function gets the retry strategy for a given request based on whether the request is
+ * idempotent or not.
+ *
+ * @param request to get the retry strategy for
+ * @return the retry strategy for the given request
+ */
+ @Override
+ public RetryStrategy getRetryStrategy(Request request) {
+ if (isIdempotentRequest(request)) {
+ return IDEMPOTENT_RETRY_STRATEGY;
+ } else {
+ return NON_IDEMPOTENT_RETRY_STRATEGY;
+ }
+ }
+
+ /**
+ * This function checks if a given request is idempotent.
+ *
+ * @param request to check if it is idempotent
+ * @return true if the request is idempotent, false otherwise
+ */
+ private boolean isIdempotentRequest(Request request) {
+    for (Map.Entry<String, Pattern> idempotentRequest : idempotentRequestsPattern) {
+ if (idempotentRequest.getKey().equals(request.getMethod())
+ && idempotentRequest.getValue().matcher(request.getUrl()).find()) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategy.java
new file mode 100644
index 000000000..0c80421f6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategy.java
@@ -0,0 +1,14 @@
+package com.databricks.sdk.core.retry;
+
+import com.databricks.sdk.core.DatabricksError;
+
+/** This interface is used to determine if a request should be retried. */
+public interface RetryStrategy {
+ /**
+ * This method is used to determine if a request should be retried.
+ *
+ * @param databricksError The DatabricksError wrapped response/error object.
+ * @return true if the request should be retried, false otherwise.
+ */
+ boolean isRetriable(DatabricksError databricksError);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategyPicker.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategyPicker.java
new file mode 100644
index 000000000..9b3cfaf27
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategyPicker.java
@@ -0,0 +1,14 @@
+package com.databricks.sdk.core.retry;
+
+import com.databricks.sdk.core.http.Request;
+
+/** This interface is used to pick the appropriate retry strategy for a given request. */
+public interface RetryStrategyPicker {
+ /**
+ * This method is used to get the retry strategy for a given request.
+ *
+ * @param request The request for which the retry strategy is needed.
+ * @return The retry strategy for the given request.
+ */
+ RetryStrategy getRetryStrategy(Request request);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryUtils.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryUtils.java
new file mode 100644
index 000000000..ce7358a9e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryUtils.java
@@ -0,0 +1,25 @@
+package com.databricks.sdk.core.retry;
+
+import com.databricks.sdk.core.DatabricksError;
+import java.util.Arrays;
+import java.util.List;
+
+public class RetryUtils {
+  private static final List<String> TRANSIENT_ERROR_STRING_MATCHES =
+ Arrays.asList(
+ "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException",
+ "does not have any associated worker environments",
+ "There is no worker environment with id",
+ "Unknown worker environment",
+ "ClusterNotReadyException");
+
+ public static boolean isCausedByTransientError(DatabricksError databricksError) {
+ String message = databricksError.getMessage();
+ for (String match : TRANSIENT_ERROR_STRING_MATCHES) {
+ if (message != null && message.contains(match)) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/ProxyUtils.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/ProxyUtils.java
new file mode 100644
index 000000000..57da54273
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/ProxyUtils.java
@@ -0,0 +1,143 @@
+package com.databricks.sdk.core.utils;
+
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.ProxyConfig;
+import java.security.Principal;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthSchemeProvider;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.Credentials;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.client.config.AuthSchemes;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.ProxyAuthenticationStrategy;
+
+/**
+ * This class is used to setup the proxy configs for the http client. This includes setting up the
+ * proxy host, port, and authentication.
+ */
+public class ProxyUtils {
+
+ /**
+ * Setup the proxy configuration in the http client builder.
+ *
+ * @param config the proxy configuration
+ * @param builder the http client builder
+ */
+ public static void setupProxy(ProxyConfig config, HttpClientBuilder builder) {
+ String proxyHost = null;
+ Integer proxyPort = null;
+ String proxyUser = null;
+ String proxyPassword = null;
+ if (config.getUseSystemProperties() != null && config.getUseSystemProperties()) {
+ builder.useSystemProperties();
+ String protocol = System.getProperty("https.proxyHost") != null ? "https" : "http";
+ proxyHost = System.getProperty(protocol + ".proxyHost");
+ proxyPort = Integer.parseInt(System.getProperty(protocol + ".proxyPort"));
+ proxyUser = System.getProperty(protocol + ".proxyUser");
+ proxyPassword = System.getProperty(protocol + ".proxyPassword");
+ }
+ // Override system properties if proxy configuration is explicitly set
+ if (config.getHost() != null) {
+ proxyHost = config.getHost();
+ proxyPort = config.getPort();
+ proxyUser = config.getUsername();
+ proxyPassword = config.getPassword();
+ builder.setProxy(new HttpHost(proxyHost, proxyPort));
+ }
+ setupProxyAuth(
+ proxyHost, proxyPort, config.getProxyAuthType(), proxyUser, proxyPassword, builder);
+ }
+
+ /**
+ * This method sets up the proxy authentication in the http client builder.
+ *
+ * @param proxyHost the proxy host
+ * @param proxyPort the proxy port
+ * @param proxyAuthType the proxy authentication type
+ * @param proxyUser the proxy user
+ * @param proxyPassword the proxy password
+ * @param builder the http client builder
+ */
+ public static void setupProxyAuth(
+ String proxyHost,
+ Integer proxyPort,
+ ProxyConfig.ProxyAuthType proxyAuthType,
+ String proxyUser,
+ String proxyPassword,
+ HttpClientBuilder builder) {
+ if (proxyAuthType == null) {
+ return;
+ }
+ AuthScope authScope = new AuthScope(proxyHost, proxyPort);
+ switch (proxyAuthType) {
+ case NONE:
+ break;
+ case BASIC:
+ setupBasicProxyAuth(builder, authScope, proxyUser, proxyPassword);
+ break;
+ case SPNEGO:
+ setupNegotiateProxyAuth(builder, authScope);
+ break;
+ default:
+ throw new DatabricksException("Unknown proxy auth type: " + proxyAuthType);
+ }
+ }
+
+ /**
+ * This method sets up the proxy authentication using the negotiate mechanism in the http client
+ * builder.
+ *
+ * @param builder the http client builder
+ * @param authScope the authentication scope
+ */
+ public static void setupNegotiateProxyAuth(HttpClientBuilder builder, AuthScope authScope) {
+ // We only support kerberos for negotiate as of now
+ System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
+ // "java.security.krb5.conf" system property needs to be set if krb5.conf is not in the default
+ // location
+ // Use "sun.security.krb5.debug" and "sun.security.jgss.debug" system properties for debugging
+ Credentials useJaasCreds =
+ new Credentials() {
+ public String getPassword() {
+ return null;
+ }
+
+ public Principal getUserPrincipal() {
+ return null;
+ }
+ };
+
+ CredentialsProvider credsProvider = new BasicCredentialsProvider();
+ credsProvider.setCredentials(authScope, useJaasCreds);
+ builder
+ .setDefaultCredentialsProvider(credsProvider)
+ .setDefaultAuthSchemeRegistry(
+            RegistryBuilder.<AuthSchemeProvider>create()
+ .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true))
+ .build());
+ }
+
+ /**
+ * This method sets up the proxy authentication using the basic mechanism credentials provided
+ * into the http client builder.
+ *
+ * @param builder the http client builder
+ * @param authScope the authentication scope
+ * @param proxyUser the proxy user
+ * @param proxyPassword the proxy password
+ */
+ public static void setupBasicProxyAuth(
+ HttpClientBuilder builder, AuthScope authScope, String proxyUser, String proxyPassword) {
+ CredentialsProvider credsProvider = new BasicCredentialsProvider();
+ credsProvider.setCredentials(
+ authScope, new UsernamePasswordCredentials(proxyUser, proxyPassword));
+ builder
+ .setDefaultCredentialsProvider(credsProvider)
+ .setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy());
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java
index 9187d18c6..728df59e4 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java
@@ -2,7 +2,7 @@
public class SystemTimer implements Timer {
@Override
- public void wait(int milliseconds) throws InterruptedException {
+ public void sleep(long milliseconds) throws InterruptedException {
Thread.sleep(milliseconds);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java
index d2a5c97bf..7e95872fa 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java
@@ -1,7 +1,7 @@
package com.databricks.sdk.core.utils;
public interface Timer {
- void wait(int milliseconds) throws InterruptedException;
+ void sleep(long milliseconds) throws InterruptedException;
long getCurrentTime();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
index 70add4e35..4de8a1314 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -73,7 +74,7 @@ public WrappedBudgetWithStatus get(GetBudgetRequest request) {
* that the budget is configured to include.
*/
   public Iterable<BudgetWithStatus> list() {
- return impl.list().getBudgets();
+ return new Paginator<>(null, (Void v) -> impl.list(), BudgetList::getBudgets, response -> null);
}
public void update(String budgetId, Budget budget) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
index 574be8ed5..d00bca784 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -128,7 +129,11 @@ public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) {
* Gets all Databricks log delivery configurations associated with an account specified by ID.
*/
public Iterable list(ListLogDeliveryRequest request) {
- return impl.list(request).getLogDeliveryConfigurations();
+ return new Paginator<>(
+ request,
+ impl::list,
+ WrappedLogDeliveryConfigurations::getLogDeliveryConfigurations,
+ response -> null);
}
public void patchStatus(String logDeliveryConfigurationId, LogDeliveryConfigStatus status) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
index 502048323..68a2c8e08 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,7 +81,11 @@ public Iterable list(String metastoreId) {
* Gets a list of all Databricks workspace IDs that have been assigned to given metastore.
*/
public Iterable list(ListAccountMetastoreAssignmentsRequest request) {
- return impl.list(request).getWorkspaceIds();
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListAccountMetastoreAssignmentsResponse::getWorkspaceIds,
+ response -> null);
}
public void update(long workspaceId, String metastoreId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
index bc97167b9..3ed7e76b4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -67,7 +68,8 @@ public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) {
* Gets all Unity Catalog metastores associated with an account specified by ID.
*/
public Iterable list() {
- return impl.list().getMetastores();
+ return new Paginator<>(
+ null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null);
}
public AccountsMetastoreInfo update(String metastoreId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java
new file mode 100755
index 000000000..2eac19555
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AwsIamRoleRequest {
+ /** The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access. */
+ @JsonProperty("role_arn")
+ private String roleArn;
+
+ public AwsIamRoleRequest setRoleArn(String roleArn) {
+ this.roleArn = roleArn;
+ return this;
+ }
+
+ public String getRoleArn() {
+ return roleArn;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AwsIamRoleRequest that = (AwsIamRoleRequest) o;
+ return Objects.equals(roleArn, that.roleArn);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(roleArn);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AwsIamRoleRequest.class).add("roleArn", roleArn).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java
similarity index 84%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java
index 8b07a85cb..50420d04a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java
@@ -8,7 +8,7 @@
import java.util.Objects;
@Generated
-public class AwsIamRole {
+public class AwsIamRoleResponse {
/** The external ID used in role assumption to prevent confused deputy problem.. */
@JsonProperty("external_id")
private String externalId;
@@ -24,7 +24,7 @@ public class AwsIamRole {
@JsonProperty("unity_catalog_iam_arn")
private String unityCatalogIamArn;
- public AwsIamRole setExternalId(String externalId) {
+ public AwsIamRoleResponse setExternalId(String externalId) {
this.externalId = externalId;
return this;
}
@@ -33,7 +33,7 @@ public String getExternalId() {
return externalId;
}
- public AwsIamRole setRoleArn(String roleArn) {
+ public AwsIamRoleResponse setRoleArn(String roleArn) {
this.roleArn = roleArn;
return this;
}
@@ -42,7 +42,7 @@ public String getRoleArn() {
return roleArn;
}
- public AwsIamRole setUnityCatalogIamArn(String unityCatalogIamArn) {
+ public AwsIamRoleResponse setUnityCatalogIamArn(String unityCatalogIamArn) {
this.unityCatalogIamArn = unityCatalogIamArn;
return this;
}
@@ -55,7 +55,7 @@ public String getUnityCatalogIamArn() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- AwsIamRole that = (AwsIamRole) o;
+ AwsIamRoleResponse that = (AwsIamRoleResponse) o;
return Objects.equals(externalId, that.externalId)
&& Objects.equals(roleArn, that.roleArn)
&& Objects.equals(unityCatalogIamArn, that.unityCatalogIamArn);
@@ -68,7 +68,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(AwsIamRole.class)
+ return new ToStringer(AwsIamRoleResponse.class)
.add("externalId", externalId)
.add("roleArn", roleArn)
.add("unityCatalogIamArn", unityCatalogIamArn)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java
new file mode 100755
index 000000000..36122a638
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AzureManagedIdentityRequest {
+ /**
+ * The Azure resource ID of the Azure Databricks Access Connector. Use the format
+ * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.
+ */
+ @JsonProperty("access_connector_id")
+ private String accessConnectorId;
+
+ /**
+ * The Azure resource ID of the managed identity. Use the format
+ * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}.
+ * This is only available for user-assigned identities. For system-assigned identities, the
+ * access_connector_id is used to identify the identity. If this field is not provided, then we
+ * assume the AzureManagedIdentity is for a system-assigned identity.
+ */
+ @JsonProperty("managed_identity_id")
+ private String managedIdentityId;
+
+ public AzureManagedIdentityRequest setAccessConnectorId(String accessConnectorId) {
+ this.accessConnectorId = accessConnectorId;
+ return this;
+ }
+
+ public String getAccessConnectorId() {
+ return accessConnectorId;
+ }
+
+ public AzureManagedIdentityRequest setManagedIdentityId(String managedIdentityId) {
+ this.managedIdentityId = managedIdentityId;
+ return this;
+ }
+
+ public String getManagedIdentityId() {
+ return managedIdentityId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AzureManagedIdentityRequest that = (AzureManagedIdentityRequest) o;
+ return Objects.equals(accessConnectorId, that.accessConnectorId)
+ && Objects.equals(managedIdentityId, that.managedIdentityId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessConnectorId, managedIdentityId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AzureManagedIdentityRequest.class)
+ .add("accessConnectorId", accessConnectorId)
+ .add("managedIdentityId", managedIdentityId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java
similarity index 84%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java
index 2ea7f739c..91fbf7d9c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java
@@ -8,7 +8,7 @@
import java.util.Objects;
@Generated
-public class AzureManagedIdentity {
+public class AzureManagedIdentityResponse {
/**
* The Azure resource ID of the Azure Databricks Access Connector. Use the format
* /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.
@@ -30,7 +30,7 @@ public class AzureManagedIdentity {
@JsonProperty("managed_identity_id")
private String managedIdentityId;
- public AzureManagedIdentity setAccessConnectorId(String accessConnectorId) {
+ public AzureManagedIdentityResponse setAccessConnectorId(String accessConnectorId) {
this.accessConnectorId = accessConnectorId;
return this;
}
@@ -39,7 +39,7 @@ public String getAccessConnectorId() {
return accessConnectorId;
}
- public AzureManagedIdentity setCredentialId(String credentialId) {
+ public AzureManagedIdentityResponse setCredentialId(String credentialId) {
this.credentialId = credentialId;
return this;
}
@@ -48,7 +48,7 @@ public String getCredentialId() {
return credentialId;
}
- public AzureManagedIdentity setManagedIdentityId(String managedIdentityId) {
+ public AzureManagedIdentityResponse setManagedIdentityId(String managedIdentityId) {
this.managedIdentityId = managedIdentityId;
return this;
}
@@ -61,7 +61,7 @@ public String getManagedIdentityId() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- AzureManagedIdentity that = (AzureManagedIdentity) o;
+ AzureManagedIdentityResponse that = (AzureManagedIdentityResponse) o;
return Objects.equals(accessConnectorId, that.accessConnectorId)
&& Objects.equals(credentialId, that.credentialId)
&& Objects.equals(managedIdentityId, that.managedIdentityId);
@@ -74,7 +74,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(AzureManagedIdentity.class)
+ return new ToStringer(AzureManagedIdentityResponse.class)
.add("accessConnectorId", accessConnectorId)
.add("credentialId", credentialId)
.add("managedIdentityId", managedIdentityId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java
index 308993366..edaee94b0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java
@@ -9,20 +9,11 @@
/** Cancel refresh */
@Generated
public class CancelRefreshRequest {
- /** Full name of the table. */
- private String fullName;
-
/** ID of the refresh. */
private String refreshId;
- public CancelRefreshRequest setFullName(String fullName) {
- this.fullName = fullName;
- return this;
- }
-
- public String getFullName() {
- return fullName;
- }
+ /** Full name of the table. */
+ private String tableName;
public CancelRefreshRequest setRefreshId(String refreshId) {
this.refreshId = refreshId;
@@ -33,24 +24,33 @@ public String getRefreshId() {
return refreshId;
}
+ public CancelRefreshRequest setTableName(String tableName) {
+ this.tableName = tableName;
+ return this;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CancelRefreshRequest that = (CancelRefreshRequest) o;
- return Objects.equals(fullName, that.fullName) && Objects.equals(refreshId, that.refreshId);
+ return Objects.equals(refreshId, that.refreshId) && Objects.equals(tableName, that.tableName);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, refreshId);
+ return Objects.hash(refreshId, tableName);
}
@Override
public String toString() {
return new ToStringer(CancelRefreshRequest.class)
- .add("fullName", fullName)
.add("refreshId", refreshId)
+ .add("tableName", tableName)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
index 313f31c88..c0cec6fcf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java
@@ -10,7 +10,10 @@
@Generated
public class CatalogInfo {
- /** Indicate whether or not the catalog info contains only browsable metadata. */
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
@JsonProperty("browse_only")
private Boolean browseOnly;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
index d9bae3a42..6b3ea1712 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -81,8 +82,9 @@ public CatalogInfo get(GetCatalogRequest request) {
* caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a
* specific ordering of the elements in the array.
*/
- public Iterable list() {
- return impl.list().getCatalogs();
+ public Iterable list(ListCatalogsRequest request) {
+ return new Paginator<>(
+ request, impl::list, ListCatalogsResponse::getCatalogs, response -> null);
}
public CatalogInfo update(String name) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java
index d0be97ff1..404904ace 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java
@@ -41,11 +41,11 @@ public CatalogInfo get(GetCatalogRequest request) {
}
@Override
- public ListCatalogsResponse list() {
+ public ListCatalogsResponse list(ListCatalogsRequest request) {
String path = "/api/2.1/unity-catalog/catalogs";
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
- return apiClient.GET(path, ListCatalogsResponse.class, headers);
+ return apiClient.GET(path, request, ListCatalogsResponse.class, headers);
}
@Override
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
index 9047ff738..86f3a6b7d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java
@@ -50,7 +50,7 @@ public interface CatalogsService {
* caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a
* specific ordering of the elements in the array.
*/
- ListCatalogsResponse list();
+ ListCatalogsResponse list(ListCatalogsRequest listCatalogsRequest);
/**
* Update a catalog.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
index eaa07f9fd..04c6eab69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -84,7 +85,8 @@ public ConnectionInfo get(GetConnectionRequest request) {
* List all connections.
*/
public Iterable list() {
- return impl.list().getConnections();
+ return new Paginator<>(
+ null, (Void v) -> impl.list(), ListConnectionsResponse::getConnections, response -> null);
}
public ConnectionInfo update(String name, Map options) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java
index 0beb7de43..d7aa83947 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java
@@ -27,22 +27,19 @@ public class CreateMonitor {
* time windows).
*/
@JsonProperty("custom_metrics")
- private Collection customMetrics;
+ private Collection customMetrics;
/** The data classification config for the monitor. */
@JsonProperty("data_classification_config")
private MonitorDataClassificationConfig dataClassificationConfig;
- /** Full name of the table. */
- private String fullName;
-
/** Configuration for monitoring inference logs. */
@JsonProperty("inference_log")
- private MonitorInferenceLogProfileType inferenceLog;
+ private MonitorInferenceLog inferenceLog;
/** The notification settings for the monitor. */
@JsonProperty("notifications")
- private Collection notifications;
+ private MonitorNotifications notifications;
/** Schema where output metric tables are created. */
@JsonProperty("output_schema_name")
@@ -67,11 +64,14 @@ public class CreateMonitor {
/** Configuration for monitoring snapshot tables. */
@JsonProperty("snapshot")
- private MonitorSnapshotProfileType snapshot;
+ private MonitorSnapshot snapshot;
+
+ /** Full name of the table. */
+ private String tableName;
/** Configuration for monitoring time series tables. */
@JsonProperty("time_series")
- private MonitorTimeSeriesProfileType timeSeries;
+ private MonitorTimeSeries timeSeries;
/**
* Optional argument to specify the warehouse for dashboard creation. If not specified, the first
@@ -98,12 +98,12 @@ public String getBaselineTableName() {
return baselineTableName;
}
- public CreateMonitor setCustomMetrics(Collection customMetrics) {
+ public CreateMonitor setCustomMetrics(Collection customMetrics) {
this.customMetrics = customMetrics;
return this;
}
- public Collection getCustomMetrics() {
+ public Collection getCustomMetrics() {
return customMetrics;
}
@@ -117,30 +117,21 @@ public MonitorDataClassificationConfig getDataClassificationConfig() {
return dataClassificationConfig;
}
- public CreateMonitor setFullName(String fullName) {
- this.fullName = fullName;
- return this;
- }
-
- public String getFullName() {
- return fullName;
- }
-
- public CreateMonitor setInferenceLog(MonitorInferenceLogProfileType inferenceLog) {
+ public CreateMonitor setInferenceLog(MonitorInferenceLog inferenceLog) {
this.inferenceLog = inferenceLog;
return this;
}
- public MonitorInferenceLogProfileType getInferenceLog() {
+ public MonitorInferenceLog getInferenceLog() {
return inferenceLog;
}
- public CreateMonitor setNotifications(Collection notifications) {
+ public CreateMonitor setNotifications(MonitorNotifications notifications) {
this.notifications = notifications;
return this;
}
- public Collection getNotifications() {
+ public MonitorNotifications getNotifications() {
return notifications;
}
@@ -180,21 +171,30 @@ public Collection getSlicingExprs() {
return slicingExprs;
}
- public CreateMonitor setSnapshot(MonitorSnapshotProfileType snapshot) {
+ public CreateMonitor setSnapshot(MonitorSnapshot snapshot) {
this.snapshot = snapshot;
return this;
}
- public MonitorSnapshotProfileType getSnapshot() {
+ public MonitorSnapshot getSnapshot() {
return snapshot;
}
- public CreateMonitor setTimeSeries(MonitorTimeSeriesProfileType timeSeries) {
+ public CreateMonitor setTableName(String tableName) {
+ this.tableName = tableName;
+ return this;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public CreateMonitor setTimeSeries(MonitorTimeSeries timeSeries) {
this.timeSeries = timeSeries;
return this;
}
- public MonitorTimeSeriesProfileType getTimeSeries() {
+ public MonitorTimeSeries getTimeSeries() {
return timeSeries;
}
@@ -216,7 +216,6 @@ public boolean equals(Object o) {
&& Objects.equals(baselineTableName, that.baselineTableName)
&& Objects.equals(customMetrics, that.customMetrics)
&& Objects.equals(dataClassificationConfig, that.dataClassificationConfig)
- && Objects.equals(fullName, that.fullName)
&& Objects.equals(inferenceLog, that.inferenceLog)
&& Objects.equals(notifications, that.notifications)
&& Objects.equals(outputSchemaName, that.outputSchemaName)
@@ -224,6 +223,7 @@ public boolean equals(Object o) {
&& Objects.equals(skipBuiltinDashboard, that.skipBuiltinDashboard)
&& Objects.equals(slicingExprs, that.slicingExprs)
&& Objects.equals(snapshot, that.snapshot)
+ && Objects.equals(tableName, that.tableName)
&& Objects.equals(timeSeries, that.timeSeries)
&& Objects.equals(warehouseId, that.warehouseId);
}
@@ -235,7 +235,6 @@ public int hashCode() {
baselineTableName,
customMetrics,
dataClassificationConfig,
- fullName,
inferenceLog,
notifications,
outputSchemaName,
@@ -243,6 +242,7 @@ public int hashCode() {
skipBuiltinDashboard,
slicingExprs,
snapshot,
+ tableName,
timeSeries,
warehouseId);
}
@@ -254,7 +254,6 @@ public String toString() {
.add("baselineTableName", baselineTableName)
.add("customMetrics", customMetrics)
.add("dataClassificationConfig", dataClassificationConfig)
- .add("fullName", fullName)
.add("inferenceLog", inferenceLog)
.add("notifications", notifications)
.add("outputSchemaName", outputSchemaName)
@@ -262,6 +261,7 @@ public String toString() {
.add("skipBuiltinDashboard", skipBuiltinDashboard)
.add("slicingExprs", slicingExprs)
.add("snapshot", snapshot)
+ .add("tableName", tableName)
.add("timeSeries", timeSeries)
.add("warehouseId", warehouseId)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java
similarity index 75%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java
index 9c78e4656..4b77e9b13 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java
@@ -9,7 +9,7 @@
/** Online Table information. */
@Generated
-public class ViewData {
+public class CreateOnlineTableRequest {
/** Full three-part (catalog, schema, table) name of the table. */
@JsonProperty("name")
private String name;
@@ -18,7 +18,7 @@ public class ViewData {
@JsonProperty("spec")
private OnlineTableSpec spec;
- public ViewData setName(String name) {
+ public CreateOnlineTableRequest setName(String name) {
this.name = name;
return this;
}
@@ -27,7 +27,7 @@ public String getName() {
return name;
}
- public ViewData setSpec(OnlineTableSpec spec) {
+ public CreateOnlineTableRequest setSpec(OnlineTableSpec spec) {
this.spec = spec;
return this;
}
@@ -40,7 +40,7 @@ public OnlineTableSpec getSpec() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- ViewData that = (ViewData) o;
+ CreateOnlineTableRequest that = (CreateOnlineTableRequest) o;
return Objects.equals(name, that.name) && Objects.equals(spec, that.spec);
}
@@ -51,6 +51,9 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(ViewData.class).add("name", name).add("spec", spec).toString();
+ return new ToStringer(CreateOnlineTableRequest.class)
+ .add("name", name)
+ .add("spec", spec)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java
index 0054ad672..23717f93d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java
@@ -11,11 +11,11 @@
public class CreateStorageCredential {
/** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
- private AwsIamRole awsIamRole;
+ private AwsIamRoleRequest awsIamRole;
/** The Azure managed identity configuration. */
@JsonProperty("azure_managed_identity")
- private AzureManagedIdentity azureManagedIdentity;
+ private AzureManagedIdentityRequest azureManagedIdentity;
/** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
@@ -45,22 +45,22 @@ public class CreateStorageCredential {
@JsonProperty("skip_validation")
private Boolean skipValidation;
- public CreateStorageCredential setAwsIamRole(AwsIamRole awsIamRole) {
+ public CreateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) {
this.awsIamRole = awsIamRole;
return this;
}
- public AwsIamRole getAwsIamRole() {
+ public AwsIamRoleRequest getAwsIamRole() {
return awsIamRole;
}
public CreateStorageCredential setAzureManagedIdentity(
- AzureManagedIdentity azureManagedIdentity) {
+ AzureManagedIdentityRequest azureManagedIdentity) {
this.azureManagedIdentity = azureManagedIdentity;
return this;
}
- public AzureManagedIdentity getAzureManagedIdentity() {
+ public AzureManagedIdentityRequest getAzureManagedIdentity() {
return azureManagedIdentity;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java
index ecd871b14..00de48b2f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java
@@ -10,15 +10,15 @@
@Generated
public class DeleteLakehouseMonitorRequest {
/** Full name of the table. */
- private String fullName;
+ private String tableName;
- public DeleteLakehouseMonitorRequest setFullName(String fullName) {
- this.fullName = fullName;
+ public DeleteLakehouseMonitorRequest setTableName(String tableName) {
+ this.tableName = tableName;
return this;
}
- public String getFullName() {
- return fullName;
+ public String getTableName() {
+ return tableName;
}
@Override
@@ -26,16 +26,18 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeleteLakehouseMonitorRequest that = (DeleteLakehouseMonitorRequest) o;
- return Objects.equals(fullName, that.fullName);
+ return Objects.equals(tableName, that.tableName);
}
@Override
public int hashCode() {
- return Objects.hash(fullName);
+ return Objects.hash(tableName);
}
@Override
public String toString() {
- return new ToStringer(DeleteLakehouseMonitorRequest.class).add("fullName", fullName).toString();
+ return new ToStringer(DeleteLakehouseMonitorRequest.class)
+ .add("tableName", tableName)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
index 801739d2c..ef3d12232 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
@@ -13,6 +13,13 @@ public class ExternalLocationInfo {
@JsonProperty("access_point")
private String accessPoint;
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** User-provided free-form text description. */
@JsonProperty("comment")
private String comment;
@@ -74,6 +81,15 @@ public String getAccessPoint() {
return accessPoint;
}
+ public ExternalLocationInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public ExternalLocationInfo setComment(String comment) {
this.comment = comment;
return this;
@@ -197,6 +213,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ExternalLocationInfo that = (ExternalLocationInfo) o;
return Objects.equals(accessPoint, that.accessPoint)
+ && Objects.equals(browseOnly, that.browseOnly)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
&& Objects.equals(createdBy, that.createdBy)
@@ -216,6 +233,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
accessPoint,
+ browseOnly,
comment,
createdAt,
createdBy,
@@ -235,6 +253,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(ExternalLocationInfo.class)
.add("accessPoint", accessPoint)
+ .add("browseOnly", browseOnly)
.add("comment", comment)
.add("createdAt", createdAt)
.add("createdBy", createdBy)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
index d9759ac31..361875fbe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
@@ -84,9 +84,8 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) {
*
* Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
* The caller must be a metastore admin, the owner of the external location, or a user that has
- * some privilege on the external location. For unpaginated request, there is no guarantee of a
- * specific ordering of the elements in the array. For paginated request, elements are ordered by
- * their name.
+ * some privilege on the external location. There is no guarantee of a specific ordering of the
+ * elements in the array.
*/
public Iterable list(ListExternalLocationsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
index 0cefbac94..ec6a4d48d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
@@ -51,9 +51,8 @@ public interface ExternalLocationsService {
*
* Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
* The caller must be a metastore admin, the owner of the external location, or a user that has
- * some privilege on the external location. For unpaginated request, there is no guarantee of a
- * specific ordering of the elements in the array. For paginated request, elements are ordered by
- * their name.
+ * some privilege on the external location. There is no guarantee of a specific ordering of the
+ * elements in the array.
*/
ListExternalLocationsResponse list(ListExternalLocationsRequest listExternalLocationsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java
index fe25a1bcf..02b5d835d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java
@@ -9,6 +9,13 @@
@Generated
public class FunctionInfo {
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** Name of parent catalog. */
@JsonProperty("catalog_name")
private String catalogName;
@@ -130,6 +137,15 @@ public class FunctionInfo {
@JsonProperty("updated_by")
private String updatedBy;
+ public FunctionInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public FunctionInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -396,7 +412,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FunctionInfo that = (FunctionInfo) o;
- return Objects.equals(catalogName, that.catalogName)
+ return Objects.equals(browseOnly, that.browseOnly)
+ && Objects.equals(catalogName, that.catalogName)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
&& Objects.equals(createdBy, that.createdBy)
@@ -430,6 +447,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ browseOnly,
catalogName,
comment,
createdAt,
@@ -464,6 +482,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(FunctionInfo.class)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("comment", comment)
.add("createdAt", createdAt)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
index e5e01a85e..d4e3e587b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
@@ -94,8 +94,8 @@ public Iterable list(String catalogName, String schemaName) {
* admin, all functions are returned in the output list. Otherwise, the user must have the
* **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and
* the output list contains only functions for which either the user has the **EXECUTE** privilege
- * or the user is the owner. For unpaginated request, there is no guarantee of a specific ordering
- * of the elements in the array. For paginated request, elements are ordered by their name.
+ * or the user is the owner. There is no guarantee of a specific ordering of the elements in the
+ * array.
*/
public Iterable list(ListFunctionsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
index f891bfd19..c47075d69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
@@ -58,8 +58,8 @@ public interface FunctionsService {
* admin, all functions are returned in the output list. Otherwise, the user must have the
* **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and
* the output list contains only functions for which either the user has the **EXECUTE** privilege
- * or the user is the owner. For unpaginated request, there is no guarantee of a specific ordering
- * of the elements in the array. For paginated request, elements are ordered by their name.
+ * or the user is the owner. There is no guarantee of a specific ordering of the elements in the
+ * array.
*/
ListFunctionsResponse list(ListFunctionsRequest listFunctionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java
index 6c8392670..f70ee0013 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java
@@ -3,15 +3,32 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
/** Get a catalog */
@Generated
public class GetCatalogRequest {
+ /**
+ * Whether to include catalogs in the response for which the principal can only access selective
+   * metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/** The name of the catalog. */
private String name;
+ public GetCatalogRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public GetCatalogRequest setName(String name) {
this.name = name;
return this;
@@ -26,16 +43,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetCatalogRequest that = (GetCatalogRequest) o;
- return Objects.equals(name, that.name);
+ return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(includeBrowse, name);
}
@Override
public String toString() {
- return new ToStringer(GetCatalogRequest.class).add("name", name).toString();
+ return new ToStringer(GetCatalogRequest.class)
+ .add("includeBrowse", includeBrowse)
+ .add("name", name)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java
index e206ae3f6..45bd02098 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java
@@ -3,15 +3,32 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
/** Get an external location */
@Generated
public class GetExternalLocationRequest {
+ /**
+ * Whether to include external locations in the response for which the principal can only access
+   * selective metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/** Name of the external location. */
private String name;
+ public GetExternalLocationRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public GetExternalLocationRequest setName(String name) {
this.name = name;
return this;
@@ -26,16 +43,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetExternalLocationRequest that = (GetExternalLocationRequest) o;
- return Objects.equals(name, that.name);
+ return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(includeBrowse, name);
}
@Override
public String toString() {
- return new ToStringer(GetExternalLocationRequest.class).add("name", name).toString();
+ return new ToStringer(GetExternalLocationRequest.class)
+ .add("includeBrowse", includeBrowse)
+ .add("name", name)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java
index d4e0cdc60..24e22c9cc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java
@@ -3,18 +3,35 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
/** Get a function */
@Generated
public class GetFunctionRequest {
+ /**
+ * Whether to include functions in the response for which the principal can only access selective
+   * metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/**
* The fully-qualified name of the function (of the form
* __catalog_name__.__schema_name__.__function__name__).
*/
private String name;
+ public GetFunctionRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public GetFunctionRequest setName(String name) {
this.name = name;
return this;
@@ -29,16 +46,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetFunctionRequest that = (GetFunctionRequest) o;
- return Objects.equals(name, that.name);
+ return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(includeBrowse, name);
}
@Override
public String toString() {
- return new ToStringer(GetFunctionRequest.class).add("name", name).toString();
+ return new ToStringer(GetFunctionRequest.class)
+ .add("includeBrowse", includeBrowse)
+ .add("name", name)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java
index f03af68cd..b8bd26b3b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java
@@ -10,15 +10,15 @@
@Generated
public class GetLakehouseMonitorRequest {
/** Full name of the table. */
- private String fullName;
+ private String tableName;
- public GetLakehouseMonitorRequest setFullName(String fullName) {
- this.fullName = fullName;
+ public GetLakehouseMonitorRequest setTableName(String tableName) {
+ this.tableName = tableName;
return this;
}
- public String getFullName() {
- return fullName;
+ public String getTableName() {
+ return tableName;
}
@Override
@@ -26,16 +26,16 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetLakehouseMonitorRequest that = (GetLakehouseMonitorRequest) o;
- return Objects.equals(fullName, that.fullName);
+ return Objects.equals(tableName, that.tableName);
}
@Override
public int hashCode() {
- return Objects.hash(fullName);
+ return Objects.hash(tableName);
}
@Override
public String toString() {
- return new ToStringer(GetLakehouseMonitorRequest.class).add("fullName", fullName).toString();
+ return new ToStringer(GetLakehouseMonitorRequest.class).add("tableName", tableName).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java
index 776b47df5..2d8466952 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java
@@ -3,6 +3,7 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
@@ -12,6 +13,13 @@ public class GetModelVersionRequest {
/** The three-level (fully qualified) name of the model version */
private String fullName;
+ /**
+ * Whether to include model versions in the response for which the principal can only access
+   * selective metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/** The integer version number of the model version */
private Long version;
@@ -24,6 +32,15 @@ public String getFullName() {
return fullName;
}
+ public GetModelVersionRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public GetModelVersionRequest setVersion(Long version) {
this.version = version;
return this;
@@ -38,18 +55,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetModelVersionRequest that = (GetModelVersionRequest) o;
- return Objects.equals(fullName, that.fullName) && Objects.equals(version, that.version);
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
+ && Objects.equals(version, that.version);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, version);
+ return Objects.hash(fullName, includeBrowse, version);
}
@Override
public String toString() {
return new ToStringer(GetModelVersionRequest.class)
.add("fullName", fullName)
+ .add("includeBrowse", includeBrowse)
.add("version", version)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java
index 1acf7ecd4..e94088dc8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java
@@ -9,20 +9,11 @@
/** Get refresh */
@Generated
public class GetRefreshRequest {
- /** Full name of the table. */
- private String fullName;
-
/** ID of the refresh. */
private String refreshId;
- public GetRefreshRequest setFullName(String fullName) {
- this.fullName = fullName;
- return this;
- }
-
- public String getFullName() {
- return fullName;
- }
+ /** Full name of the table. */
+ private String tableName;
public GetRefreshRequest setRefreshId(String refreshId) {
this.refreshId = refreshId;
@@ -33,24 +24,33 @@ public String getRefreshId() {
return refreshId;
}
+ public GetRefreshRequest setTableName(String tableName) {
+ this.tableName = tableName;
+ return this;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetRefreshRequest that = (GetRefreshRequest) o;
- return Objects.equals(fullName, that.fullName) && Objects.equals(refreshId, that.refreshId);
+ return Objects.equals(refreshId, that.refreshId) && Objects.equals(tableName, that.tableName);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, refreshId);
+ return Objects.hash(refreshId, tableName);
}
@Override
public String toString() {
return new ToStringer(GetRefreshRequest.class)
- .add("fullName", fullName)
.add("refreshId", refreshId)
+ .add("tableName", tableName)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java
index 0913ad906..764dda115 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java
@@ -3,6 +3,7 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
@@ -12,6 +13,13 @@ public class GetRegisteredModelRequest {
/** The three-level (fully qualified) name of the registered model */
private String fullName;
+ /**
+ * Whether to include registered models in the response for which the principal can only access
+   * selective metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
public GetRegisteredModelRequest setFullName(String fullName) {
this.fullName = fullName;
return this;
@@ -21,21 +29,34 @@ public String getFullName() {
return fullName;
}
+ public GetRegisteredModelRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetRegisteredModelRequest that = (GetRegisteredModelRequest) o;
- return Objects.equals(fullName, that.fullName);
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeBrowse, that.includeBrowse);
}
@Override
public int hashCode() {
- return Objects.hash(fullName);
+ return Objects.hash(fullName, includeBrowse);
}
@Override
public String toString() {
- return new ToStringer(GetRegisteredModelRequest.class).add("fullName", fullName).toString();
+ return new ToStringer(GetRegisteredModelRequest.class)
+ .add("fullName", fullName)
+ .add("includeBrowse", includeBrowse)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java
index 23e5b7b80..4d79abe5f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java
@@ -3,6 +3,7 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
@@ -12,6 +13,13 @@ public class GetSchemaRequest {
/** Full name of the schema. */
private String fullName;
+ /**
+ * Whether to include schemas in the response for which the principal can only access selective
+   * metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
public GetSchemaRequest setFullName(String fullName) {
this.fullName = fullName;
return this;
@@ -21,21 +29,34 @@ public String getFullName() {
return fullName;
}
+ public GetSchemaRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetSchemaRequest that = (GetSchemaRequest) o;
- return Objects.equals(fullName, that.fullName);
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeBrowse, that.includeBrowse);
}
@Override
public int hashCode() {
- return Objects.hash(fullName);
+ return Objects.hash(fullName, includeBrowse);
}
@Override
public String toString() {
- return new ToStringer(GetSchemaRequest.class).add("fullName", fullName).toString();
+ return new ToStringer(GetSchemaRequest.class)
+ .add("fullName", fullName)
+ .add("includeBrowse", includeBrowse)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
index cea52f229..4658fa3d3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
@@ -13,6 +13,13 @@ public class GetTableRequest {
/** Full name of the table. */
private String fullName;
+ /**
+ * Whether to include tables in the response for which the principal can only access selective
+   * metadata
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/** Whether delta metadata should be included in the response. */
@QueryParam("include_delta_metadata")
private Boolean includeDeltaMetadata;
@@ -26,6 +33,15 @@ public String getFullName() {
return fullName;
}
+ public GetTableRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public GetTableRequest setIncludeDeltaMetadata(Boolean includeDeltaMetadata) {
this.includeDeltaMetadata = includeDeltaMetadata;
return this;
@@ -41,18 +57,20 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GetTableRequest that = (GetTableRequest) o;
return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, includeDeltaMetadata);
+ return Objects.hash(fullName, includeBrowse, includeDeltaMetadata);
}
@Override
public String toString() {
return new ToStringer(GetTableRequest.class)
.add("fullName", fullName)
+ .add("includeBrowse", includeBrowse)
.add("includeDeltaMetadata", includeDeltaMetadata)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java
index 13a834477..f0f36d9e4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java
@@ -31,8 +31,8 @@ public LakehouseMonitorsAPI(LakehouseMonitorsService mock) {
impl = mock;
}
- public void cancelRefresh(String fullName, String refreshId) {
- cancelRefresh(new CancelRefreshRequest().setFullName(fullName).setRefreshId(refreshId));
+ public void cancelRefresh(String tableName, String refreshId) {
+ cancelRefresh(new CancelRefreshRequest().setTableName(tableName).setRefreshId(refreshId));
}
/**
@@ -51,10 +51,10 @@ public void cancelRefresh(CancelRefreshRequest request) {
impl.cancelRefresh(request);
}
- public MonitorInfo create(String fullName, String assetsDir, String outputSchemaName) {
+ public MonitorInfo create(String tableName, String assetsDir, String outputSchemaName) {
return create(
new CreateMonitor()
- .setFullName(fullName)
+ .setTableName(tableName)
.setAssetsDir(assetsDir)
.setOutputSchemaName(outputSchemaName));
}
@@ -77,8 +77,8 @@ public MonitorInfo create(CreateMonitor request) {
return impl.create(request);
}
- public void delete(String fullName) {
- delete(new DeleteLakehouseMonitorRequest().setFullName(fullName));
+ public void delete(String tableName) {
+ delete(new DeleteLakehouseMonitorRequest().setTableName(tableName));
}
/**
@@ -100,8 +100,8 @@ public void delete(DeleteLakehouseMonitorRequest request) {
impl.delete(request);
}
- public MonitorInfo get(String fullName) {
- return get(new GetLakehouseMonitorRequest().setFullName(fullName));
+ public MonitorInfo get(String tableName) {
+ return get(new GetLakehouseMonitorRequest().setTableName(tableName));
}
/**
@@ -122,8 +122,8 @@ public MonitorInfo get(GetLakehouseMonitorRequest request) {
return impl.get(request);
}
- public MonitorRefreshInfo getRefresh(String fullName, String refreshId) {
- return getRefresh(new GetRefreshRequest().setFullName(fullName).setRefreshId(refreshId));
+ public MonitorRefreshInfo getRefresh(String tableName, String refreshId) {
+ return getRefresh(new GetRefreshRequest().setTableName(tableName).setRefreshId(refreshId));
}
/**
@@ -142,8 +142,8 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) {
return impl.getRefresh(request);
}
- public Iterable listRefreshes(String fullName) {
- return listRefreshes(new ListRefreshesRequest().setFullName(fullName));
+ public Iterable listRefreshes(String tableName) {
+ return listRefreshes(new ListRefreshesRequest().setTableName(tableName));
}
/**
@@ -162,8 +162,8 @@ public Iterable listRefreshes(ListRefreshesRequest request)
return impl.listRefreshes(request);
}
- public MonitorRefreshInfo runRefresh(String fullName) {
- return runRefresh(new RunRefreshRequest().setFullName(fullName));
+ public MonitorRefreshInfo runRefresh(String tableName) {
+ return runRefresh(new RunRefreshRequest().setTableName(tableName));
}
/**
@@ -183,8 +183,9 @@ public MonitorRefreshInfo runRefresh(RunRefreshRequest request) {
return impl.runRefresh(request);
}
- public MonitorInfo update(String fullName, String outputSchemaName) {
- return update(new UpdateMonitor().setFullName(fullName).setOutputSchemaName(outputSchemaName));
+ public MonitorInfo update(String tableName, String outputSchemaName) {
+ return update(
+ new UpdateMonitor().setTableName(tableName).setOutputSchemaName(outputSchemaName));
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java
index cb89939b6..bfc51e5f3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java
@@ -21,14 +21,14 @@ public void cancelRefresh(CancelRefreshRequest request) {
String path =
String.format(
"/api/2.1/unity-catalog/tables/%s/monitor/refreshes/%s/cancel",
- request.getFullName(), request.getRefreshId());
+ request.getTableName(), request.getRefreshId());
Map headers = new HashMap<>();
apiClient.POST(path, null, CancelRefreshResponse.class, headers);
}
@Override
public MonitorInfo create(CreateMonitor request) {
- String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName());
+ String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
headers.put("Content-Type", "application/json");
@@ -37,14 +37,14 @@ public MonitorInfo create(CreateMonitor request) {
@Override
public void delete(DeleteLakehouseMonitorRequest request) {
- String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName());
+ String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName());
Map headers = new HashMap<>();
apiClient.DELETE(path, request, DeleteResponse.class, headers);
}
@Override
public MonitorInfo get(GetLakehouseMonitorRequest request) {
- String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName());
+ String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
return apiClient.GET(path, request, MonitorInfo.class, headers);
@@ -55,7 +55,7 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) {
String path =
String.format(
"/api/2.1/unity-catalog/tables/%s/monitor/refreshes/%s",
- request.getFullName(), request.getRefreshId());
+ request.getTableName(), request.getRefreshId());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
return apiClient.GET(path, request, MonitorRefreshInfo.class, headers);
@@ -64,7 +64,7 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) {
@Override
public Collection listRefreshes(ListRefreshesRequest request) {
String path =
- String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getFullName());
+ String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
return apiClient.getCollection(path, null, MonitorRefreshInfo.class, headers);
@@ -73,7 +73,7 @@ public Collection listRefreshes(ListRefreshesRequest request
@Override
public MonitorRefreshInfo runRefresh(RunRefreshRequest request) {
String path =
- String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getFullName());
+ String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
return apiClient.POST(path, null, MonitorRefreshInfo.class, headers);
@@ -81,7 +81,7 @@ public MonitorRefreshInfo runRefresh(RunRefreshRequest request) {
@Override
public MonitorInfo update(UpdateMonitor request) {
- String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName());
+ String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
headers.put("Content-Type", "application/json");
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
new file mode 100755
index 000000000..f61ff29f3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** List catalogs */
+@Generated
+public class ListCatalogsRequest {
+ /**
+ * Whether to include catalogs in the response for which the principal can only access selective
+ * metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
+ public ListCatalogsRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListCatalogsRequest that = (ListCatalogsRequest) o;
+ return Objects.equals(includeBrowse, that.includeBrowse);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(includeBrowse);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListCatalogsRequest.class).add("includeBrowse", includeBrowse).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
index 74ef5cbf3..a8a0e3729 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
@@ -10,6 +10,13 @@
/** List external locations */
@Generated
public class ListExternalLocationsRequest {
+ /**
+ * Whether to include external locations in the response for which the principal can only access
+ * selective metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/**
* Maximum number of external locations to return. If not set, all the external locations are
* returned (not recommended). - when set to a value greater than 0, the page length is the
@@ -24,6 +31,15 @@ public class ListExternalLocationsRequest {
@QueryParam("page_token")
private String pageToken;
+ public ListExternalLocationsRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListExternalLocationsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -47,17 +63,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListExternalLocationsRequest that = (ListExternalLocationsRequest) o;
- return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken);
+ return Objects.equals(includeBrowse, that.includeBrowse)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(maxResults, pageToken);
+ return Objects.hash(includeBrowse, maxResults, pageToken);
}
@Override
public String toString() {
return new ToStringer(ListExternalLocationsRequest.class)
+ .add("includeBrowse", includeBrowse)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
index 7b430e1a0..c33c3be8d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
@@ -14,6 +14,13 @@ public class ListFunctionsRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /**
+ * Whether to include functions in the response for which the principal can only access selective
+ * metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/**
* Maximum number of functions to return. If not set, all the functions are returned (not
* recommended). - when set to a value greater than 0, the page length is the minimum of this
@@ -41,6 +48,15 @@ public String getCatalogName() {
return catalogName;
}
+ public ListFunctionsRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListFunctionsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -74,6 +90,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListFunctionsRequest that = (ListFunctionsRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(schemaName, that.schemaName);
@@ -81,13 +98,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(catalogName, maxResults, pageToken, schemaName);
+ return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName);
}
@Override
public String toString() {
return new ToStringer(ListFunctionsRequest.class)
.add("catalogName", catalogName)
+ .add("includeBrowse", includeBrowse)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("schemaName", schemaName)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
index 74945e49d..2df9e27c7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
@@ -13,6 +13,13 @@ public class ListModelVersionsRequest {
/** The full three-level name of the registered model under which to list model versions */
private String fullName;
+ /**
+ * Whether to include model versions in the response for which the principal can only access
+ * selective metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/**
* Maximum number of model versions to return. If not set, the page length is set to a server
* configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length
@@ -36,6 +43,15 @@ public String getFullName() {
return fullName;
}
+ public ListModelVersionsRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListModelVersionsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -60,19 +76,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListModelVersionsRequest that = (ListModelVersionsRequest) o;
return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, maxResults, pageToken);
+ return Objects.hash(fullName, includeBrowse, maxResults, pageToken);
}
@Override
public String toString() {
return new ToStringer(ListModelVersionsRequest.class)
.add("fullName", fullName)
+ .add("includeBrowse", includeBrowse)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java
index 3b3f9433e..4edc8061d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java
@@ -10,15 +10,15 @@
@Generated
public class ListRefreshesRequest {
/** Full name of the table. */
- private String fullName;
+ private String tableName;
- public ListRefreshesRequest setFullName(String fullName) {
- this.fullName = fullName;
+ public ListRefreshesRequest setTableName(String tableName) {
+ this.tableName = tableName;
return this;
}
- public String getFullName() {
- return fullName;
+ public String getTableName() {
+ return tableName;
}
@Override
@@ -26,16 +26,16 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListRefreshesRequest that = (ListRefreshesRequest) o;
- return Objects.equals(fullName, that.fullName);
+ return Objects.equals(tableName, that.tableName);
}
@Override
public int hashCode() {
- return Objects.hash(fullName);
+ return Objects.hash(tableName);
}
@Override
public String toString() {
- return new ToStringer(ListRefreshesRequest.class).add("fullName", fullName).toString();
+ return new ToStringer(ListRefreshesRequest.class).add("tableName", tableName).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java
index 9f99fb3c4..3cc8fe9f4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java
@@ -18,9 +18,27 @@ public class ListRegisteredModelsRequest {
private String catalogName;
/**
- * Max number of registered models to return. If catalog and schema are unspecified, max_results
- * must be specified. If max_results is unspecified, we return all results, starting from the page
- * specified by page_token.
+ * Whether to include registered models in the response for which the principal can only access
+ * selective metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
+ /**
+ * Max number of registered models to return.
+ *
+ * <p>If both catalog and schema are specified: - when max_results is not specified, the page
+ * length is set to a server configured value (10000, as of 4/2/2024). - when set to a value
+ * greater than 0, the page length is the minimum of this value and a server configured value
+ * (10000, as of 4/2/2024); - when set to 0, the page length is set to a server configured value
+ * (10000, as of 4/2/2024); - when set to a value less than 0, an invalid parameter error is
+ * returned;
+ *
+ *
+ * <p>If neither schema nor catalog is specified: - when max_results is not specified, the page
+ * length is set to a server configured value (100, as of 4/2/2024). - when set to a value greater
+ * than 0, the page length is the minimum of this value and a server configured value (1000, as of
+ * 4/2/2024); - when set to 0, the page length is set to a server configured value (100, as of
+ * 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned;
*/
@QueryParam("max_results")
private Long maxResults;
@@ -45,6 +63,15 @@ public String getCatalogName() {
return catalogName;
}
+ public ListRegisteredModelsRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListRegisteredModelsRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -78,6 +105,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListRegisteredModelsRequest that = (ListRegisteredModelsRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(schemaName, that.schemaName);
@@ -85,13 +113,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(catalogName, maxResults, pageToken, schemaName);
+ return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName);
}
@Override
public String toString() {
return new ToStringer(ListRegisteredModelsRequest.class)
.add("catalogName", catalogName)
+ .add("includeBrowse", includeBrowse)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("schemaName", schemaName)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
index acae1f8b1..43cdd3816 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
@@ -14,6 +14,13 @@ public class ListSchemasRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /**
+ * Whether to include schemas in the response for which the principal can only access selective
+ * metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/**
* Maximum number of schemas to return. If not set, all the schemas are returned (not
* recommended). - when set to a value greater than 0, the page length is the minimum of this
@@ -37,6 +44,15 @@ public String getCatalogName() {
return catalogName;
}
+ public ListSchemasRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListSchemasRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -61,19 +77,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListSchemasRequest that = (ListSchemasRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(catalogName, maxResults, pageToken);
+ return Objects.hash(catalogName, includeBrowse, maxResults, pageToken);
}
@Override
public String toString() {
return new ToStringer(ListSchemasRequest.class)
.add("catalogName", catalogName)
+ .add("includeBrowse", includeBrowse)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
index 7ec16a4bf..f0667e5c5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
@@ -14,6 +14,13 @@ public class ListTablesRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /**
+ * Whether to include tables in the response for which the principal can only access selective
+ * metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/** Whether delta metadata should be included in the response. */
@QueryParam("include_delta_metadata")
private Boolean includeDeltaMetadata;
@@ -52,6 +59,15 @@ public String getCatalogName() {
return catalogName;
}
+ public ListTablesRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListTablesRequest setIncludeDeltaMetadata(Boolean includeDeltaMetadata) {
this.includeDeltaMetadata = includeDeltaMetadata;
return this;
@@ -112,6 +128,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListTablesRequest that = (ListTablesRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(omitColumns, that.omitColumns)
@@ -124,6 +141,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
catalogName,
+ includeBrowse,
includeDeltaMetadata,
maxResults,
omitColumns,
@@ -136,6 +154,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(ListTablesRequest.class)
.add("catalogName", catalogName)
+ .add("includeBrowse", includeBrowse)
.add("includeDeltaMetadata", includeDeltaMetadata)
.add("maxResults", maxResults)
.add("omitColumns", omitColumns)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java
index 15c6596b3..0f9095bc1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java
@@ -14,6 +14,13 @@ public class ListVolumesRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /**
+ * Whether to include volumes in the response for which the principal can only access selective
+ * metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/**
* Maximum number of volumes to return (page length).
*
@@ -50,6 +57,15 @@ public String getCatalogName() {
return catalogName;
}
+ public ListVolumesRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ListVolumesRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -83,6 +99,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListVolumesRequest that = (ListVolumesRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(schemaName, that.schemaName);
@@ -90,13 +107,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(catalogName, maxResults, pageToken, schemaName);
+ return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName);
}
@Override
public String toString() {
return new ToStringer(ListVolumesRequest.class)
.add("catalogName", catalogName)
+ .add("includeBrowse", includeBrowse)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("schemaName", schemaName)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
index 7e43d4347..c25f726bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -114,7 +115,8 @@ public MetastoreInfo get(GetMetastoreRequest request) {
* the array.
*/
public Iterable<MetastoreInfo> list() {
- return impl.list().getMetastores();
+ return new Paginator<>(
+ null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null);
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java
index a277049ab..87813f1c5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java
@@ -9,6 +9,13 @@
@Generated
public class ModelVersionInfo {
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** The name of the catalog containing the model version */
@JsonProperty("catalog_name")
private String catalogName;
@@ -88,6 +95,15 @@ public class ModelVersionInfo {
@JsonProperty("version")
private Long version;
+ public ModelVersionInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public ModelVersionInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -246,7 +262,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ModelVersionInfo that = (ModelVersionInfo) o;
- return Objects.equals(catalogName, that.catalogName)
+ return Objects.equals(browseOnly, that.browseOnly)
+ && Objects.equals(catalogName, that.catalogName)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
&& Objects.equals(createdBy, that.createdBy)
@@ -268,6 +285,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ browseOnly,
catalogName,
comment,
createdAt,
@@ -290,6 +308,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(ModelVersionInfo.class)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("comment", comment)
.add("createdAt", createdAt)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java
index 1c355bd46..c8135aa11 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java
@@ -9,15 +9,20 @@
@Generated
public class MonitorCronSchedule {
- /** Whether the schedule is paused or not */
+ /** Read only field that indicates whether a schedule is paused or not. */
@JsonProperty("pause_status")
private MonitorCronSchedulePauseStatus pauseStatus;
- /** A cron expression using quartz syntax that describes the schedule for a job. */
+ /**
+ * The expression that determines when to run the monitor. See [examples].
+ *
+ * [examples]:
+ * https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html
+ */
@JsonProperty("quartz_cron_expression")
private String quartzCronExpression;
- /** A Java timezone id. The schedule for a job will be resolved with respect to this timezone. */
+ /** The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression. */
@JsonProperty("timezone_id")
private String timezoneId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java
index 5b59f5385..742f75577 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Whether the schedule is paused or not */
+/** Read only field that indicates whether a schedule is paused or not. */
@Generated
public enum MonitorCronSchedulePauseStatus {
PAUSED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java
deleted file mode 100755
index 391515d79..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java
+++ /dev/null
@@ -1,15 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-
-/** The type of the custom metric. */
-@Generated
-public enum MonitorCustomMetricType {
- CUSTOM_METRIC_TYPE_AGGREGATE,
- CUSTOM_METRIC_TYPE_DERIVED,
- CUSTOM_METRIC_TYPE_DRIFT,
- MONITOR_STATUS_ERROR,
- MONITOR_STATUS_FAILED,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java
similarity index 73%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java
index 1354e0944..d34b42ae2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java
@@ -9,12 +9,15 @@
import java.util.Objects;
@Generated
-public class MonitorDestinations {
- /** The list of email addresses to send the notification to. */
+public class MonitorDestination {
+ /**
+ * The list of email addresses to send the notification to. A maximum of 5 email addresses is
+ * supported.
+ */
@JsonProperty("email_addresses")
private Collection<String> emailAddresses;
- public MonitorDestinations setEmailAddresses(Collection<String> emailAddresses) {
+ public MonitorDestination setEmailAddresses(Collection<String> emailAddresses) {
this.emailAddresses = emailAddresses;
return this;
}
@@ -27,7 +30,7 @@ public Collection<String> getEmailAddresses() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- MonitorDestinations that = (MonitorDestinations) o;
+ MonitorDestination that = (MonitorDestination) o;
return Objects.equals(emailAddresses, that.emailAddresses);
}
@@ -38,7 +41,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(MonitorDestinations.class)
+ return new ToStringer(MonitorDestination.class)
.add("emailAddresses", emailAddresses)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java
similarity index 56%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java
index 875aa0a3d..5ccc716a2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java
@@ -9,38 +9,57 @@
import java.util.Objects;
@Generated
-public class MonitorInferenceLogProfileType {
+public class MonitorInferenceLog {
/**
- * List of granularities to use when aggregating data into time windows based on their timestamp.
+ * Granularities for aggregating data into time windows based on their timestamp. Currently the
+ * following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``,
+ * ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.
*/
@JsonProperty("granularities")
private Collection<String> granularities;
- /** Column of the model label. */
+ /** Optional column that contains the ground truth for the prediction. */
@JsonProperty("label_col")
private String labelCol;
- /** Column of the model id or version. */
+ /**
+ * Column that contains the id of the model generating the predictions. Metrics will be computed
+ * per model id by default, and also across all model ids.
+ */
@JsonProperty("model_id_col")
private String modelIdCol;
- /** Column of the model prediction. */
+ /** Column that contains the output/prediction from the model. */
@JsonProperty("prediction_col")
private String predictionCol;
- /** Column of the model prediction probabilities. */
+ /**
+ * Optional column that contains the prediction probabilities for each class in a classification
+ * problem type. The values in this column should be a map, mapping each class label to the
+ * prediction probability for a given sample. The map should be of PySpark MapType().
+ */
@JsonProperty("prediction_proba_col")
private String predictionProbaCol;
- /** Problem type the model aims to solve. */
+ /**
+ * Problem type the model aims to solve. Determines the type of model-quality metrics that will be
+ * computed.
+ */
@JsonProperty("problem_type")
- private MonitorInferenceLogProfileTypeProblemType problemType;
+ private MonitorInferenceLogProblemType problemType;
- /** Column of the timestamp of predictions. */
+ /**
+ * Column that contains the timestamps of requests. The column must be one of the following: - A
+ * ``TimestampType`` column - A column whose values can be converted to timestamps through the
+ * pyspark ``to_timestamp`` [function].
+ *
+ * [function]:
+ * https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html
+ */
@JsonProperty("timestamp_col")
private String timestampCol;
- public MonitorInferenceLogProfileType setGranularities(Collection<String> granularities) {
+ public MonitorInferenceLog setGranularities(Collection<String> granularities) {
this.granularities = granularities;
return this;
}
@@ -49,7 +68,7 @@ public Collection<String> getGranularities() {
return granularities;
}
- public MonitorInferenceLogProfileType setLabelCol(String labelCol) {
+ public MonitorInferenceLog setLabelCol(String labelCol) {
this.labelCol = labelCol;
return this;
}
@@ -58,7 +77,7 @@ public String getLabelCol() {
return labelCol;
}
- public MonitorInferenceLogProfileType setModelIdCol(String modelIdCol) {
+ public MonitorInferenceLog setModelIdCol(String modelIdCol) {
this.modelIdCol = modelIdCol;
return this;
}
@@ -67,7 +86,7 @@ public String getModelIdCol() {
return modelIdCol;
}
- public MonitorInferenceLogProfileType setPredictionCol(String predictionCol) {
+ public MonitorInferenceLog setPredictionCol(String predictionCol) {
this.predictionCol = predictionCol;
return this;
}
@@ -76,7 +95,7 @@ public String getPredictionCol() {
return predictionCol;
}
- public MonitorInferenceLogProfileType setPredictionProbaCol(String predictionProbaCol) {
+ public MonitorInferenceLog setPredictionProbaCol(String predictionProbaCol) {
this.predictionProbaCol = predictionProbaCol;
return this;
}
@@ -85,17 +104,16 @@ public String getPredictionProbaCol() {
return predictionProbaCol;
}
- public MonitorInferenceLogProfileType setProblemType(
- MonitorInferenceLogProfileTypeProblemType problemType) {
+ public MonitorInferenceLog setProblemType(MonitorInferenceLogProblemType problemType) {
this.problemType = problemType;
return this;
}
- public MonitorInferenceLogProfileTypeProblemType getProblemType() {
+ public MonitorInferenceLogProblemType getProblemType() {
return problemType;
}
- public MonitorInferenceLogProfileType setTimestampCol(String timestampCol) {
+ public MonitorInferenceLog setTimestampCol(String timestampCol) {
this.timestampCol = timestampCol;
return this;
}
@@ -108,7 +126,7 @@ public String getTimestampCol() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- MonitorInferenceLogProfileType that = (MonitorInferenceLogProfileType) o;
+ MonitorInferenceLog that = (MonitorInferenceLog) o;
return Objects.equals(granularities, that.granularities)
&& Objects.equals(labelCol, that.labelCol)
&& Objects.equals(modelIdCol, that.modelIdCol)
@@ -132,7 +150,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(MonitorInferenceLogProfileType.class)
+ return new ToStringer(MonitorInferenceLog.class)
.add("granularities", granularities)
.add("labelCol", labelCol)
.add("modelIdCol", modelIdCol)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java
similarity index 59%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java
index 11a36da5e..81529bcbe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java
@@ -4,9 +4,12 @@
import com.databricks.sdk.support.Generated;
-/** Problem type the model aims to solve. */
+/**
+ * Problem type the model aims to solve. Determines the type of model-quality metrics that will be
+ * computed.
+ */
@Generated
-public enum MonitorInferenceLogProfileTypeProblemType {
+public enum MonitorInferenceLogProblemType {
PROBLEM_TYPE_CLASSIFICATION,
PROBLEM_TYPE_REGRESSION,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java
index cd8cfde76..aac4fa412 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java
@@ -27,9 +27,12 @@ public class MonitorInfo {
* time windows).
*/
@JsonProperty("custom_metrics")
- private Collection<MonitorCustomMetric> customMetrics;
+ private Collection<MonitorMetric> customMetrics;
- /** The ID of the generated dashboard. */
+ /**
+ * Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in
+ * PENDING state.
+ */
@JsonProperty("dashboard_id")
private String dashboardId;
@@ -46,7 +49,7 @@ public class MonitorInfo {
/** Configuration for monitoring inference logs. */
@JsonProperty("inference_log")
- private MonitorInferenceLogProfileType inferenceLog;
+ private MonitorInferenceLog inferenceLog;
/** The latest failure message of the monitor (if any). */
@JsonProperty("latest_monitor_failure_msg")
@@ -58,7 +61,7 @@ public class MonitorInfo {
/** The notification settings for the monitor. */
@JsonProperty("notifications")
- private Collection<MonitorNotificationsConfig> notifications;
+ private MonitorNotifications notifications;
/** Schema where output metric tables are created. */
@JsonProperty("output_schema_name")
@@ -86,7 +89,7 @@ public class MonitorInfo {
/** Configuration for monitoring snapshot tables. */
@JsonProperty("snapshot")
- private MonitorSnapshotProfileType snapshot;
+ private MonitorSnapshot snapshot;
/** The status of the monitor. */
@JsonProperty("status")
@@ -100,7 +103,7 @@ public class MonitorInfo {
/** Configuration for monitoring time series tables. */
@JsonProperty("time_series")
- private MonitorTimeSeriesProfileType timeSeries;
+ private MonitorTimeSeries timeSeries;
public MonitorInfo setAssetsDir(String assetsDir) {
this.assetsDir = assetsDir;
@@ -120,12 +123,12 @@ public String getBaselineTableName() {
return baselineTableName;
}
- public MonitorInfo setCustomMetrics(Collection<MonitorCustomMetric> customMetrics) {
+ public MonitorInfo setCustomMetrics(Collection<MonitorMetric> customMetrics) {
this.customMetrics = customMetrics;
return this;
}
- public Collection<MonitorCustomMetric> getCustomMetrics() {
+ public Collection<MonitorMetric> getCustomMetrics() {
return customMetrics;
}
@@ -157,12 +160,12 @@ public String getDriftMetricsTableName() {
return driftMetricsTableName;
}
- public MonitorInfo setInferenceLog(MonitorInferenceLogProfileType inferenceLog) {
+ public MonitorInfo setInferenceLog(MonitorInferenceLog inferenceLog) {
this.inferenceLog = inferenceLog;
return this;
}
- public MonitorInferenceLogProfileType getInferenceLog() {
+ public MonitorInferenceLog getInferenceLog() {
return inferenceLog;
}
@@ -184,12 +187,12 @@ public String getMonitorVersion() {
return monitorVersion;
}
- public MonitorInfo setNotifications(Collection<MonitorNotificationsConfig> notifications) {
+ public MonitorInfo setNotifications(MonitorNotifications notifications) {
this.notifications = notifications;
return this;
}
- public Collection<MonitorNotificationsConfig> getNotifications() {
+ public MonitorNotifications getNotifications() {
return notifications;
}
@@ -229,12 +232,12 @@ public Collection getSlicingExprs() {
return slicingExprs;
}
- public MonitorInfo setSnapshot(MonitorSnapshotProfileType snapshot) {
+ public MonitorInfo setSnapshot(MonitorSnapshot snapshot) {
this.snapshot = snapshot;
return this;
}
- public MonitorSnapshotProfileType getSnapshot() {
+ public MonitorSnapshot getSnapshot() {
return snapshot;
}
@@ -256,12 +259,12 @@ public String getTableName() {
return tableName;
}
- public MonitorInfo setTimeSeries(MonitorTimeSeriesProfileType timeSeries) {
+ public MonitorInfo setTimeSeries(MonitorTimeSeries timeSeries) {
this.timeSeries = timeSeries;
return this;
}
- public MonitorTimeSeriesProfileType getTimeSeries() {
+ public MonitorTimeSeries getTimeSeries() {
return timeSeries;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java
similarity index 62%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java
index 2167187ba..1f6cdd963 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java
@@ -9,7 +9,7 @@
import java.util.Objects;
@Generated
-public class MonitorCustomMetric {
+public class MonitorMetric {
/**
* Jinja template for a SQL expression that specifies how to compute the metric. See [create
* metric definition].
@@ -20,11 +20,14 @@ public class MonitorCustomMetric {
@JsonProperty("definition")
private String definition;
- /** Columns on the monitored table to apply the custom metrics to. */
+ /**
+ * A list of column names in the input table the metric should be computed for. Can use
+ * ``":table"`` to indicate that the metric needs information from multiple columns.
+ */
@JsonProperty("input_columns")
private Collection<String> inputColumns;
- /** Name of the custom metric. */
+ /** Name of the metric in the output tables. */
@JsonProperty("name")
private String name;
@@ -32,11 +35,19 @@ public class MonitorCustomMetric {
@JsonProperty("output_data_type")
private String outputDataType;
- /** The type of the custom metric. */
+ /**
+ * Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or
+ * ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and
+ * ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the
+ * ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the
+ * two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing
+ * columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate
+ * metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics
+ */
@JsonProperty("type")
- private MonitorCustomMetricType typeValue;
+ private MonitorMetricType typeValue;
- public MonitorCustomMetric setDefinition(String definition) {
+ public MonitorMetric setDefinition(String definition) {
this.definition = definition;
return this;
}
@@ -45,7 +56,7 @@ public String getDefinition() {
return definition;
}
- public MonitorCustomMetric setInputColumns(Collection<String> inputColumns) {
+ public MonitorMetric setInputColumns(Collection<String> inputColumns) {
this.inputColumns = inputColumns;
return this;
}
@@ -54,7 +65,7 @@ public Collection getInputColumns() {
return inputColumns;
}
- public MonitorCustomMetric setName(String name) {
+ public MonitorMetric setName(String name) {
this.name = name;
return this;
}
@@ -63,7 +74,7 @@ public String getName() {
return name;
}
- public MonitorCustomMetric setOutputDataType(String outputDataType) {
+ public MonitorMetric setOutputDataType(String outputDataType) {
this.outputDataType = outputDataType;
return this;
}
@@ -72,12 +83,12 @@ public String getOutputDataType() {
return outputDataType;
}
- public MonitorCustomMetric setType(MonitorCustomMetricType typeValue) {
+ public MonitorMetric setType(MonitorMetricType typeValue) {
this.typeValue = typeValue;
return this;
}
- public MonitorCustomMetricType getType() {
+ public MonitorMetricType getType() {
return typeValue;
}
@@ -85,7 +96,7 @@ public MonitorCustomMetricType getType() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- MonitorCustomMetric that = (MonitorCustomMetric) o;
+ MonitorMetric that = (MonitorMetric) o;
return Objects.equals(definition, that.definition)
&& Objects.equals(inputColumns, that.inputColumns)
&& Objects.equals(name, that.name)
@@ -100,7 +111,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(MonitorCustomMetric.class)
+ return new ToStringer(MonitorMetric.class)
.add("definition", definition)
.add("inputColumns", inputColumns)
.add("name", name)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java
new file mode 100755
index 000000000..e5020fecf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java
@@ -0,0 +1,21 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or
+ * ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and
+ * ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the
+ * ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the two
+ * consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in
+ * your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics -
+ * CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics
+ */
+@Generated
+public enum MonitorMetricType {
+ CUSTOM_METRIC_TYPE_AGGREGATE,
+ CUSTOM_METRIC_TYPE_DERIVED,
+ CUSTOM_METRIC_TYPE_DRIFT,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java
new file mode 100755
index 000000000..6586c8498
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class MonitorNotifications {
+ /** Who to send notifications to on monitor failure. */
+ @JsonProperty("on_failure")
+ private MonitorDestination onFailure;
+
+ /** Who to send notifications to when new data classification tags are detected. */
+ @JsonProperty("on_new_classification_tag_detected")
+ private MonitorDestination onNewClassificationTagDetected;
+
+ public MonitorNotifications setOnFailure(MonitorDestination onFailure) {
+ this.onFailure = onFailure;
+ return this;
+ }
+
+ public MonitorDestination getOnFailure() {
+ return onFailure;
+ }
+
+ public MonitorNotifications setOnNewClassificationTagDetected(
+ MonitorDestination onNewClassificationTagDetected) {
+ this.onNewClassificationTagDetected = onNewClassificationTagDetected;
+ return this;
+ }
+
+ public MonitorDestination getOnNewClassificationTagDetected() {
+ return onNewClassificationTagDetected;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MonitorNotifications that = (MonitorNotifications) o;
+ return Objects.equals(onFailure, that.onFailure)
+ && Objects.equals(onNewClassificationTagDetected, that.onNewClassificationTagDetected);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(onFailure, onNewClassificationTagDetected);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MonitorNotifications.class)
+ .add("onFailure", onFailure)
+ .add("onNewClassificationTagDetected", onNewClassificationTagDetected)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java
deleted file mode 100755
index 68666f166..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java
+++ /dev/null
@@ -1,42 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class MonitorNotificationsConfig {
- /** Who to send notifications to on monitor failure. */
- @JsonProperty("on_failure")
- private MonitorDestinations onFailure;
-
- public MonitorNotificationsConfig setOnFailure(MonitorDestinations onFailure) {
- this.onFailure = onFailure;
- return this;
- }
-
- public MonitorDestinations getOnFailure() {
- return onFailure;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- MonitorNotificationsConfig that = (MonitorNotificationsConfig) o;
- return Objects.equals(onFailure, that.onFailure);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(onFailure);
- }
-
- @Override
- public String toString() {
- return new ToStringer(MonitorNotificationsConfig.class).add("onFailure", onFailure).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java
index 42174fe21..15094c0fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java
@@ -9,7 +9,7 @@
@Generated
public class MonitorRefreshInfo {
- /** The time at which the refresh ended, in epoch milliseconds. */
+ /** Time at which refresh operation completed (milliseconds since 1/1/1970 UTC). */
@JsonProperty("end_time_ms")
private Long endTimeMs;
@@ -19,11 +19,11 @@ public class MonitorRefreshInfo {
@JsonProperty("message")
private String message;
- /** The ID of the refresh. */
+ /** Unique id of the refresh operation. */
@JsonProperty("refresh_id")
private Long refreshId;
- /** The time at which the refresh started, in epoch milliseconds. */
+ /** Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC). */
@JsonProperty("start_time_ms")
private Long startTimeMs;
@@ -31,6 +31,10 @@ public class MonitorRefreshInfo {
@JsonProperty("state")
private MonitorRefreshInfoState state;
+ /** The method by which the refresh was triggered. */
+ @JsonProperty("trigger")
+ private MonitorRefreshInfoTrigger trigger;
+
public MonitorRefreshInfo setEndTimeMs(Long endTimeMs) {
this.endTimeMs = endTimeMs;
return this;
@@ -76,6 +80,15 @@ public MonitorRefreshInfoState getState() {
return state;
}
+ public MonitorRefreshInfo setTrigger(MonitorRefreshInfoTrigger trigger) {
+ this.trigger = trigger;
+ return this;
+ }
+
+ public MonitorRefreshInfoTrigger getTrigger() {
+ return trigger;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -85,12 +98,13 @@ public boolean equals(Object o) {
&& Objects.equals(message, that.message)
&& Objects.equals(refreshId, that.refreshId)
&& Objects.equals(startTimeMs, that.startTimeMs)
- && Objects.equals(state, that.state);
+ && Objects.equals(state, that.state)
+ && Objects.equals(trigger, that.trigger);
}
@Override
public int hashCode() {
- return Objects.hash(endTimeMs, message, refreshId, startTimeMs, state);
+ return Objects.hash(endTimeMs, message, refreshId, startTimeMs, state, trigger);
}
@Override
@@ -101,6 +115,7 @@ public String toString() {
.add("refreshId", refreshId)
.add("startTimeMs", startTimeMs)
.add("state", state)
+ .add("trigger", trigger)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java
new file mode 100755
index 000000000..906a1c881
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/** The method by which the refresh was triggered. */
+@Generated
+public enum MonitorRefreshInfoTrigger {
+ MANUAL,
+ SCHEDULE,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java
similarity index 82%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java
index bd2e8f0e0..c2c63dd78 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java
@@ -7,7 +7,7 @@
import java.util.Objects;
@Generated
-public class MonitorSnapshotProfileType {
+public class MonitorSnapshot {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(MonitorSnapshotProfileType.class).toString();
+ return new ToStringer(MonitorSnapshot.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java
similarity index 58%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java
index ee757bd08..8ad8758fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java
@@ -9,21 +9,27 @@
import java.util.Objects;
@Generated
-public class MonitorTimeSeriesProfileType {
+public class MonitorTimeSeries {
/**
- * List of granularities to use when aggregating data into time windows based on their timestamp.
+ * Granularities for aggregating data into time windows based on their timestamp. Currently the
+ * following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``,
+ * ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.
*/
@JsonProperty("granularities")
private Collection<String> granularities;
/**
- * The timestamp column. This must be timestamp types or convertible to timestamp types using the
- * pyspark to_timestamp function.
+ * Column that contains the timestamps of requests. The column must be one of the following: - A
+ * ``TimestampType`` column - A column whose values can be converted to timestamps through the
+ * pyspark ``to_timestamp`` [function].
+ *
+ * [function]:
+ * https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html
*/
@JsonProperty("timestamp_col")
private String timestampCol;
- public MonitorTimeSeriesProfileType setGranularities(Collection<String> granularities) {
+ public MonitorTimeSeries setGranularities(Collection<String> granularities) {
this.granularities = granularities;
return this;
}
@@ -32,7 +38,7 @@ public Collection getGranularities() {
return granularities;
}
- public MonitorTimeSeriesProfileType setTimestampCol(String timestampCol) {
+ public MonitorTimeSeries setTimestampCol(String timestampCol) {
this.timestampCol = timestampCol;
return this;
}
@@ -45,7 +51,7 @@ public String getTimestampCol() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- MonitorTimeSeriesProfileType that = (MonitorTimeSeriesProfileType) o;
+ MonitorTimeSeries that = (MonitorTimeSeries) o;
return Objects.equals(granularities, that.granularities)
&& Objects.equals(timestampCol, that.timestampCol);
}
@@ -57,7 +63,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(MonitorTimeSeriesProfileType.class)
+ return new ToStringer(MonitorTimeSeries.class)
.add("granularities", granularities)
.add("timestampCol", timestampCol)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
index 98495b2eb..32f99a526 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
@@ -28,7 +28,7 @@ public OnlineTablesAPI(OnlineTablesService mock) {
*
* Create a new Online Table.
*/
- public OnlineTable create(ViewData request) {
+ public OnlineTable create(CreateOnlineTableRequest request) {
return impl.create(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java
index 3f300ec2c..3b29957f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java
@@ -16,7 +16,7 @@ public OnlineTablesImpl(ApiClient apiClient) {
}
@Override
- public OnlineTable create(ViewData request) {
+ public OnlineTable create(CreateOnlineTableRequest request) {
String path = "/api/2.0/online-tables";
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java
index 97bd017fa..e18d13cd0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java
@@ -17,7 +17,7 @@ public interface OnlineTablesService {
*
* Create a new Online Table.
*/
- OnlineTable create(ViewData viewData);
+ OnlineTable create(CreateOnlineTableRequest createOnlineTableRequest);
/**
* Delete an Online Table.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
index d76db37fb..df485f25e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
@@ -6,6 +6,7 @@
@Generated
public enum Privilege {
+ ACCESS,
ALL_PRIVILEGES,
APPLY_TAG,
CREATE,
@@ -22,6 +23,7 @@ public enum Privilege {
CREATE_PROVIDER,
CREATE_RECIPIENT,
CREATE_SCHEMA,
+ CREATE_SERVICE_CREDENTIAL,
CREATE_SHARE,
CREATE_STORAGE_CREDENTIAL,
CREATE_TABLE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java
index cfc998d09..bf7bf0f60 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java
@@ -3,15 +3,32 @@
package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
/** Get a Volume */
@Generated
public class ReadVolumeRequest {
+ /**
+ * Whether to include volumes in the response for which the principal can only access selective
+ * metadata for
+ */
+ @QueryParam("include_browse")
+ private Boolean includeBrowse;
+
/** The three-level (fully qualified) name of the volume */
private String name;
+ public ReadVolumeRequest setIncludeBrowse(Boolean includeBrowse) {
+ this.includeBrowse = includeBrowse;
+ return this;
+ }
+
+ public Boolean getIncludeBrowse() {
+ return includeBrowse;
+ }
+
public ReadVolumeRequest setName(String name) {
this.name = name;
return this;
@@ -26,16 +43,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ReadVolumeRequest that = (ReadVolumeRequest) o;
- return Objects.equals(name, that.name);
+ return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(includeBrowse, name);
}
@Override
public String toString() {
- return new ToStringer(ReadVolumeRequest.class).add("name", name).toString();
+ return new ToStringer(ReadVolumeRequest.class)
+ .add("includeBrowse", includeBrowse)
+ .add("name", name)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java
index 2437e9e84..072d58d13 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java
@@ -14,6 +14,13 @@ public class RegisteredModelInfo {
@JsonProperty("aliases")
private Collection<RegisteredModelAlias> aliases;
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** The name of the catalog where the schema and the registered model reside */
@JsonProperty("catalog_name")
private String catalogName;
@@ -71,6 +78,15 @@ public Collection getAliases() {
return aliases;
}
+ public RegisteredModelInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public RegisteredModelInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -185,6 +201,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
RegisteredModelInfo that = (RegisteredModelInfo) o;
return Objects.equals(aliases, that.aliases)
+ && Objects.equals(browseOnly, that.browseOnly)
&& Objects.equals(catalogName, that.catalogName)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
@@ -203,6 +220,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
aliases,
+ browseOnly,
catalogName,
comment,
createdAt,
@@ -221,6 +239,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(RegisteredModelInfo.class)
.add("aliases", aliases)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("comment", comment)
.add("createdAt", createdAt)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java
index 779d79d78..c39338c10 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java
@@ -10,15 +10,15 @@
@Generated
public class RunRefreshRequest {
/** Full name of the table. */
- private String fullName;
+ private String tableName;
- public RunRefreshRequest setFullName(String fullName) {
- this.fullName = fullName;
+ public RunRefreshRequest setTableName(String tableName) {
+ this.tableName = tableName;
return this;
}
- public String getFullName() {
- return fullName;
+ public String getTableName() {
+ return tableName;
}
@Override
@@ -26,16 +26,16 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RunRefreshRequest that = (RunRefreshRequest) o;
- return Objects.equals(fullName, that.fullName);
+ return Objects.equals(tableName, that.tableName);
}
@Override
public int hashCode() {
- return Objects.hash(fullName);
+ return Objects.hash(tableName);
}
@Override
public String toString() {
- return new ToStringer(RunRefreshRequest.class).add("fullName", fullName).toString();
+ return new ToStringer(RunRefreshRequest.class).add("tableName", tableName).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java
index 1d010259a..dc899512e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java
@@ -10,6 +10,13 @@
@Generated
public class SchemaInfo {
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** Name of parent catalog. */
@JsonProperty("catalog_name")
private String catalogName;
@@ -58,6 +65,10 @@ public class SchemaInfo {
@JsonProperty("properties")
private Map<String, String> properties;
+ /** The unique identifier of the schema. */
+ @JsonProperty("schema_id")
+ private String schemaId;
+
/** Storage location for managed tables within schema. */
@JsonProperty("storage_location")
private String storageLocation;
@@ -74,6 +85,15 @@ public class SchemaInfo {
@JsonProperty("updated_by")
private String updatedBy;
+ public SchemaInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public SchemaInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -184,6 +204,15 @@ public Map getProperties() {
return properties;
}
+ public SchemaInfo setSchemaId(String schemaId) {
+ this.schemaId = schemaId;
+ return this;
+ }
+
+ public String getSchemaId() {
+ return schemaId;
+ }
+
public SchemaInfo setStorageLocation(String storageLocation) {
this.storageLocation = storageLocation;
return this;
@@ -225,7 +254,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SchemaInfo that = (SchemaInfo) o;
- return Objects.equals(catalogName, that.catalogName)
+ return Objects.equals(browseOnly, that.browseOnly)
+ && Objects.equals(catalogName, that.catalogName)
&& Objects.equals(catalogType, that.catalogType)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
@@ -238,6 +268,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(owner, that.owner)
&& Objects.equals(properties, that.properties)
+ && Objects.equals(schemaId, that.schemaId)
&& Objects.equals(storageLocation, that.storageLocation)
&& Objects.equals(storageRoot, that.storageRoot)
&& Objects.equals(updatedAt, that.updatedAt)
@@ -247,6 +278,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ browseOnly,
catalogName,
catalogType,
comment,
@@ -259,6 +291,7 @@ public int hashCode() {
name,
owner,
properties,
+ schemaId,
storageLocation,
storageRoot,
updatedAt,
@@ -268,6 +301,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(SchemaInfo.class)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("catalogType", catalogType)
.add("comment", comment)
@@ -280,6 +314,7 @@ public String toString() {
.add("name", name)
.add("owner", owner)
.add("properties", properties)
+ .add("schemaId", schemaId)
.add("storageLocation", storageLocation)
.add("storageRoot", storageRoot)
.add("updatedAt", updatedAt)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index 64eb74554..441636890 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
@@ -81,9 +81,8 @@ public Iterable list(String catalogName) {
* Gets an array of schemas for a catalog in the metastore. If the caller is the metastore
* admin or the owner of the parent catalog, all schemas for the catalog will be retrieved.
* Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA**
- * privilege) will be retrieved. For unpaginated request, there is no guarantee of a specific
- * ordering of the elements in the array. For paginated request, elements are ordered by their
- * name.
+ * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in
+ * the array.
*/
public Iterable<SchemaInfo> list(ListSchemasRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
index 68f384c70..e18efa0d8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
@@ -45,9 +45,8 @@ public interface SchemasService {
* Gets an array of schemas for a catalog in the metastore. If the caller is the metastore
* admin or the owner of the parent catalog, all schemas for the catalog will be retrieved.
* Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA**
- * privilege) will be retrieved. For unpaginated request, there is no guarantee of a specific
- * ordering of the elements in the array. For paginated request, elements are ordered by their
- * name.
+ * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in
+ * the array.
*/
ListSchemasResponse list(ListSchemasRequest listSchemasRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java
index 2046f44ff..7a580ad73 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java
@@ -11,11 +11,11 @@
public class StorageCredentialInfo {
/** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
- private AwsIamRole awsIamRole;
+ private AwsIamRoleResponse awsIamRole;
/** The Azure managed identity configuration. */
@JsonProperty("azure_managed_identity")
- private AzureManagedIdentity azureManagedIdentity;
+ private AzureManagedIdentityResponse azureManagedIdentity;
/** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
@@ -73,21 +73,22 @@ public class StorageCredentialInfo {
@JsonProperty("used_for_managed_storage")
private Boolean usedForManagedStorage;
- public StorageCredentialInfo setAwsIamRole(AwsIamRole awsIamRole) {
+ public StorageCredentialInfo setAwsIamRole(AwsIamRoleResponse awsIamRole) {
this.awsIamRole = awsIamRole;
return this;
}
- public AwsIamRole getAwsIamRole() {
+ public AwsIamRoleResponse getAwsIamRole() {
return awsIamRole;
}
- public StorageCredentialInfo setAzureManagedIdentity(AzureManagedIdentity azureManagedIdentity) {
+ public StorageCredentialInfo setAzureManagedIdentity(
+ AzureManagedIdentityResponse azureManagedIdentity) {
this.azureManagedIdentity = azureManagedIdentity;
return this;
}
- public AzureManagedIdentity getAzureManagedIdentity() {
+ public AzureManagedIdentityResponse getAzureManagedIdentity() {
return azureManagedIdentity;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
index 1c20bfd60..69d3b4f87 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
@@ -82,9 +82,8 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) {
*
*
Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
* limited to only those storage credentials the caller has permission to access. If the caller is
- * a metastore admin, retrieval of credentials is unrestricted. For unpaginated request, there is
- * no guarantee of a specific ordering of the elements in the array. For paginated request,
- * elements are ordered by their name.
+ * a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a
+ * specific ordering of the elements in the array.
*/
public Iterable<StorageCredentialInfo> list(ListStorageCredentialsRequest request) {
return new Paginator<>(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
index d6de7f68f..23af9af76 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
@@ -50,9 +50,8 @@ public interface StorageCredentialsService {
*
* Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
* limited to only those storage credentials the caller has permission to access. If the caller is
- * a metastore admin, retrieval of credentials is unrestricted. For unpaginated request, there is
- * no guarantee of a specific ordering of the elements in the array. For paginated request,
- * elements are ordered by their name.
+ * a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a
+ * specific ordering of the elements in the array.
*/
ListStorageCredentialsResponse list(ListStorageCredentialsRequest listStorageCredentialsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
index e28ccc54b..626f204e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -66,7 +67,8 @@ public Iterable list(String metastoreId) {
* metastore admin.
*/
public Iterable<SystemSchemaInfo> list(ListSystemSchemasRequest request) {
- return impl.list(request).getSchemas();
+ return new Paginator<>(
+ request, impl::list, ListSystemSchemasResponse::getSchemas, response -> null);
}
public SystemSchemasService impl() {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java
index f84d5ca58..7208948cb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java
@@ -15,6 +15,13 @@ public class TableInfo {
@JsonProperty("access_point")
private String accessPoint;
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** Name of parent catalog. */
@JsonProperty("catalog_name")
private String catalogName;
@@ -122,7 +129,7 @@ public class TableInfo {
@JsonProperty("table_constraints")
private Collection<TableConstraint> tableConstraints;
- /** Name of table, relative to parent schema. */
+ /** The unique identifier of the table. */
@JsonProperty("table_id")
private String tableId;
@@ -163,6 +170,15 @@ public String getAccessPoint() {
return accessPoint;
}
+ public TableInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public TableInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -442,6 +458,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
TableInfo that = (TableInfo) o;
return Objects.equals(accessPoint, that.accessPoint)
+ && Objects.equals(browseOnly, that.browseOnly)
&& Objects.equals(catalogName, that.catalogName)
&& Objects.equals(columns, that.columns)
&& Objects.equals(comment, that.comment)
@@ -479,6 +496,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
accessPoint,
+ browseOnly,
catalogName,
columns,
comment,
@@ -515,6 +533,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(TableInfo.class)
.add("accessPoint", accessPoint)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("columns", columns)
.add("comment", comment)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java
index 70347fa79..bd53b8d98 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java
@@ -10,6 +10,10 @@
@Generated
public class TableRowFilter {
+ /** The full name of the row filter SQL UDF. */
+ @JsonProperty("function_name")
+ private String functionName;
+
/**
* The list of table columns to be passed as input to the row filter function. The column types
* should match the types of the filter function arguments.
@@ -17,26 +21,22 @@ public class TableRowFilter {
@JsonProperty("input_column_names")
private Collection<String> inputColumnNames;
- /** The full name of the row filter SQL UDF. */
- @JsonProperty("name")
- private String name;
-
- public TableRowFilter setInputColumnNames(Collection<String> inputColumnNames) {
- this.inputColumnNames = inputColumnNames;
+ public TableRowFilter setFunctionName(String functionName) {
+ this.functionName = functionName;
return this;
}
- public Collection<String> getInputColumnNames() {
- return inputColumnNames;
+ public String getFunctionName() {
+ return functionName;
}
- public TableRowFilter setName(String name) {
- this.name = name;
+ public TableRowFilter setInputColumnNames(Collection<String> inputColumnNames) {
+ this.inputColumnNames = inputColumnNames;
return this;
}
- public String getName() {
- return name;
+ public Collection<String> getInputColumnNames() {
+ return inputColumnNames;
}
@Override
@@ -44,20 +44,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TableRowFilter that = (TableRowFilter) o;
- return Objects.equals(inputColumnNames, that.inputColumnNames)
- && Objects.equals(name, that.name);
+ return Objects.equals(functionName, that.functionName)
+ && Objects.equals(inputColumnNames, that.inputColumnNames);
}
@Override
public int hashCode() {
- return Objects.hash(inputColumnNames, name);
+ return Objects.hash(functionName, inputColumnNames);
}
@Override
public String toString() {
return new ToStringer(TableRowFilter.class)
+ .add("functionName", functionName)
.add("inputColumnNames", inputColumnNames)
- .add("name", name)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java
index b8e73eca9..9e9130b8c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java
@@ -23,22 +23,19 @@ public class UpdateMonitor {
* time windows).
*/
@JsonProperty("custom_metrics")
- private Collection<MonitorCustomMetric> customMetrics;
+ private Collection<MonitorMetric> customMetrics;
/** The data classification config for the monitor. */
@JsonProperty("data_classification_config")
private MonitorDataClassificationConfig dataClassificationConfig;
- /** Full name of the table. */
- private String fullName;
-
/** Configuration for monitoring inference logs. */
@JsonProperty("inference_log")
- private MonitorInferenceLogProfileType inferenceLog;
+ private MonitorInferenceLog inferenceLog;
/** The notification settings for the monitor. */
@JsonProperty("notifications")
- private Collection<MonitorNotificationsConfig> notifications;
+ private MonitorNotifications notifications;
/** Schema where output metric tables are created. */
@JsonProperty("output_schema_name")
@@ -59,11 +56,14 @@ public class UpdateMonitor {
/** Configuration for monitoring snapshot tables. */
@JsonProperty("snapshot")
- private MonitorSnapshotProfileType snapshot;
+ private MonitorSnapshot snapshot;
+
+ /** Full name of the table. */
+ private String tableName;
/** Configuration for monitoring time series tables. */
@JsonProperty("time_series")
- private MonitorTimeSeriesProfileType timeSeries;
+ private MonitorTimeSeries timeSeries;
public UpdateMonitor setBaselineTableName(String baselineTableName) {
this.baselineTableName = baselineTableName;
@@ -74,12 +74,12 @@ public String getBaselineTableName() {
return baselineTableName;
}
- public UpdateMonitor setCustomMetrics(Collection<MonitorCustomMetric> customMetrics) {
+ public UpdateMonitor setCustomMetrics(Collection<MonitorMetric> customMetrics) {
this.customMetrics = customMetrics;
return this;
}
- public Collection<MonitorCustomMetric> getCustomMetrics() {
+ public Collection<MonitorMetric> getCustomMetrics() {
return customMetrics;
}
@@ -93,30 +93,21 @@ public MonitorDataClassificationConfig getDataClassificationConfig() {
return dataClassificationConfig;
}
- public UpdateMonitor setFullName(String fullName) {
- this.fullName = fullName;
- return this;
- }
-
- public String getFullName() {
- return fullName;
- }
-
- public UpdateMonitor setInferenceLog(MonitorInferenceLogProfileType inferenceLog) {
+ public UpdateMonitor setInferenceLog(MonitorInferenceLog inferenceLog) {
this.inferenceLog = inferenceLog;
return this;
}
- public MonitorInferenceLogProfileType getInferenceLog() {
+ public MonitorInferenceLog getInferenceLog() {
return inferenceLog;
}
- public UpdateMonitor setNotifications(Collection<MonitorNotificationsConfig> notifications) {
+ public UpdateMonitor setNotifications(MonitorNotifications notifications) {
this.notifications = notifications;
return this;
}
- public Collection<MonitorNotificationsConfig> getNotifications() {
+ public MonitorNotifications getNotifications() {
return notifications;
}
@@ -147,21 +138,30 @@ public Collection getSlicingExprs() {
return slicingExprs;
}
- public UpdateMonitor setSnapshot(MonitorSnapshotProfileType snapshot) {
+ public UpdateMonitor setSnapshot(MonitorSnapshot snapshot) {
this.snapshot = snapshot;
return this;
}
- public MonitorSnapshotProfileType getSnapshot() {
+ public MonitorSnapshot getSnapshot() {
return snapshot;
}
- public UpdateMonitor setTimeSeries(MonitorTimeSeriesProfileType timeSeries) {
+ public UpdateMonitor setTableName(String tableName) {
+ this.tableName = tableName;
+ return this;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public UpdateMonitor setTimeSeries(MonitorTimeSeries timeSeries) {
this.timeSeries = timeSeries;
return this;
}
- public MonitorTimeSeriesProfileType getTimeSeries() {
+ public MonitorTimeSeries getTimeSeries() {
return timeSeries;
}
@@ -173,13 +173,13 @@ public boolean equals(Object o) {
return Objects.equals(baselineTableName, that.baselineTableName)
&& Objects.equals(customMetrics, that.customMetrics)
&& Objects.equals(dataClassificationConfig, that.dataClassificationConfig)
- && Objects.equals(fullName, that.fullName)
&& Objects.equals(inferenceLog, that.inferenceLog)
&& Objects.equals(notifications, that.notifications)
&& Objects.equals(outputSchemaName, that.outputSchemaName)
&& Objects.equals(schedule, that.schedule)
&& Objects.equals(slicingExprs, that.slicingExprs)
&& Objects.equals(snapshot, that.snapshot)
+ && Objects.equals(tableName, that.tableName)
&& Objects.equals(timeSeries, that.timeSeries);
}
@@ -189,13 +189,13 @@ public int hashCode() {
baselineTableName,
customMetrics,
dataClassificationConfig,
- fullName,
inferenceLog,
notifications,
outputSchemaName,
schedule,
slicingExprs,
snapshot,
+ tableName,
timeSeries);
}
@@ -205,13 +205,13 @@ public String toString() {
.add("baselineTableName", baselineTableName)
.add("customMetrics", customMetrics)
.add("dataClassificationConfig", dataClassificationConfig)
- .add("fullName", fullName)
.add("inferenceLog", inferenceLog)
.add("notifications", notifications)
.add("outputSchemaName", outputSchemaName)
.add("schedule", schedule)
.add("slicingExprs", slicingExprs)
.add("snapshot", snapshot)
+ .add("tableName", tableName)
.add("timeSeries", timeSeries)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java
index 873d551f5..29c98b451 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java
@@ -11,11 +11,11 @@
public class UpdateStorageCredential {
/** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
- private AwsIamRole awsIamRole;
+ private AwsIamRoleRequest awsIamRole;
/** The Azure managed identity configuration. */
@JsonProperty("azure_managed_identity")
- private AzureManagedIdentity azureManagedIdentity;
+ private AzureManagedIdentityResponse azureManagedIdentity;
/** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
@@ -56,22 +56,22 @@ public class UpdateStorageCredential {
@JsonProperty("skip_validation")
private Boolean skipValidation;
- public UpdateStorageCredential setAwsIamRole(AwsIamRole awsIamRole) {
+ public UpdateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) {
this.awsIamRole = awsIamRole;
return this;
}
- public AwsIamRole getAwsIamRole() {
+ public AwsIamRoleRequest getAwsIamRole() {
return awsIamRole;
}
public UpdateStorageCredential setAzureManagedIdentity(
- AzureManagedIdentity azureManagedIdentity) {
+ AzureManagedIdentityResponse azureManagedIdentity) {
this.azureManagedIdentity = azureManagedIdentity;
return this;
}
- public AzureManagedIdentity getAzureManagedIdentity() {
+ public AzureManagedIdentityResponse getAzureManagedIdentity() {
return azureManagedIdentity;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java
index 405cdd428..23fb6866a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java
@@ -11,11 +11,11 @@
public class ValidateStorageCredential {
/** The AWS IAM role configuration. */
@JsonProperty("aws_iam_role")
- private AwsIamRole awsIamRole;
+ private AwsIamRoleRequest awsIamRole;
/** The Azure managed identity configuration. */
@JsonProperty("azure_managed_identity")
- private AzureManagedIdentity azureManagedIdentity;
+ private AzureManagedIdentityRequest azureManagedIdentity;
/** The Azure service principal configuration. */
@JsonProperty("azure_service_principal")
@@ -45,22 +45,22 @@ public class ValidateStorageCredential {
@JsonProperty("url")
private String url;
- public ValidateStorageCredential setAwsIamRole(AwsIamRole awsIamRole) {
+ public ValidateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) {
this.awsIamRole = awsIamRole;
return this;
}
- public AwsIamRole getAwsIamRole() {
+ public AwsIamRoleRequest getAwsIamRole() {
return awsIamRole;
}
public ValidateStorageCredential setAzureManagedIdentity(
- AzureManagedIdentity azureManagedIdentity) {
+ AzureManagedIdentityRequest azureManagedIdentity) {
this.azureManagedIdentity = azureManagedIdentity;
return this;
}
- public AzureManagedIdentity getAzureManagedIdentity() {
+ public AzureManagedIdentityRequest getAzureManagedIdentity() {
return azureManagedIdentity;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java
index 7337f8c88..d0a625941 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java
@@ -9,6 +9,7 @@
public enum ValidationResultOperation {
DELETE,
LIST,
+ PATH_EXISTS,
READ,
WRITE,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java
index ff2d58683..d28cf4e10 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java
@@ -13,6 +13,13 @@ public class VolumeInfo {
@JsonProperty("access_point")
private String accessPoint;
+ /**
+ * Indicates whether the principal is limited to retrieving metadata for the associated object
+ * through the BROWSE privilege when include_browse is enabled in the request.
+ */
+ @JsonProperty("browse_only")
+ private Boolean browseOnly;
+
/** The name of the catalog where the schema and the volume are */
@JsonProperty("catalog_name")
private String catalogName;
@@ -82,6 +89,15 @@ public String getAccessPoint() {
return accessPoint;
}
+ public VolumeInfo setBrowseOnly(Boolean browseOnly) {
+ this.browseOnly = browseOnly;
+ return this;
+ }
+
+ public Boolean getBrowseOnly() {
+ return browseOnly;
+ }
+
public VolumeInfo setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -223,6 +239,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
VolumeInfo that = (VolumeInfo) o;
return Objects.equals(accessPoint, that.accessPoint)
+ && Objects.equals(browseOnly, that.browseOnly)
&& Objects.equals(catalogName, that.catalogName)
&& Objects.equals(comment, that.comment)
&& Objects.equals(createdAt, that.createdAt)
@@ -244,6 +261,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
accessPoint,
+ browseOnly,
catalogName,
comment,
createdAt,
@@ -265,6 +283,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(VolumeInfo.class)
.add("accessPoint", accessPoint)
+ .add("browseOnly", browseOnly)
.add("catalogName", catalogName)
.add("comment", comment)
.add("createdAt", createdAt)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
index 72263ebee..505b70166 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
@@ -38,7 +38,10 @@ public class AwsAttributes {
@JsonProperty("ebs_volume_count")
private Long ebsVolumeCount;
- /** */
+ /**
+ * If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum
+ * performance of a gp2 volume with the same volume size will be used.
+ */
@JsonProperty("ebs_volume_iops")
private Long ebsVolumeIops;
@@ -50,7 +53,10 @@ public class AwsAttributes {
@JsonProperty("ebs_volume_size")
private Long ebsVolumeSize;
- /** */
+ /**
+ * If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum
+ * performance of a gp2 volume with the same volume size will be used.
+ */
@JsonProperty("ebs_volume_throughput")
private Long ebsVolumeThroughput;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java
new file mode 100755
index 000000000..8846cf8aa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CloneCluster {
+ /** The cluster that is being cloned. */
+ @JsonProperty("source_cluster_id")
+ private String sourceClusterId;
+
+ public CloneCluster setSourceClusterId(String sourceClusterId) {
+ this.sourceClusterId = sourceClusterId;
+ return this;
+ }
+
+ public String getSourceClusterId() {
+ return sourceClusterId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CloneCluster that = (CloneCluster) o;
+ return Objects.equals(sourceClusterId, that.sourceClusterId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(sourceClusterId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CloneCluster.class).add("sourceClusterId", sourceClusterId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java
index ff89273c2..5d97be737 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -133,7 +134,8 @@ public ClusterPolicyPermissions getPermissions(GetClusterPolicyPermissionsReques
* Returns a list of policies accessible by the requesting user.
*/
public Iterable<Policy> list(ListClusterPoliciesRequest request) {
- return impl.list(request).getPolicies();
+ return new Paginator<>(
+ request, impl::list, ListPoliciesResponse::getPolicies, response -> null);
}
public ClusterPolicyPermissions setPermissions(String clusterPolicyId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
index d64e22ab1..ef834b220 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
@@ -45,6 +45,13 @@ public class ClusterSpec {
@JsonProperty("azure_attributes")
private AzureAttributes azureAttributes;
+ /**
+ * When specified, this clones libraries from a source cluster during the creation of a new
+ * cluster.
+ */
+ @JsonProperty("clone_from")
+ private CloneCluster cloneFrom;
+
/**
* The configuration for delivering spark logs to a long-term storage destination. Two kinds of
* destinations (dbfs and s3) are supported. Only one destination can be specified for one
@@ -274,6 +281,15 @@ public AzureAttributes getAzureAttributes() {
return azureAttributes;
}
+ public ClusterSpec setCloneFrom(CloneCluster cloneFrom) {
+ this.cloneFrom = cloneFrom;
+ return this;
+ }
+
+ public CloneCluster getCloneFrom() {
+ return cloneFrom;
+ }
+
public ClusterSpec setClusterLogConf(ClusterLogConf clusterLogConf) {
this.clusterLogConf = clusterLogConf;
return this;
@@ -491,6 +507,7 @@ public boolean equals(Object o) {
&& Objects.equals(autoterminationMinutes, that.autoterminationMinutes)
&& Objects.equals(awsAttributes, that.awsAttributes)
&& Objects.equals(azureAttributes, that.azureAttributes)
+ && Objects.equals(cloneFrom, that.cloneFrom)
&& Objects.equals(clusterLogConf, that.clusterLogConf)
&& Objects.equals(clusterName, that.clusterName)
&& Objects.equals(clusterSource, that.clusterSource)
@@ -524,6 +541,7 @@ public int hashCode() {
autoterminationMinutes,
awsAttributes,
azureAttributes,
+ cloneFrom,
clusterLogConf,
clusterName,
clusterSource,
@@ -557,6 +575,7 @@ public String toString() {
.add("autoterminationMinutes", autoterminationMinutes)
.add("awsAttributes", awsAttributes)
.add("azureAttributes", azureAttributes)
+ .add("cloneFrom", cloneFrom)
.add("clusterLogConf", clusterLogConf)
.add("clusterName", clusterName)
.add("clusterSource", clusterSource)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java
similarity index 78%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java
index 53fa286a2..7e4c40e41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java
@@ -9,12 +9,12 @@
/** Get status */
@Generated
-public class ClusterStatusRequest {
+public class ClusterStatus {
/** Unique identifier of the cluster whose status should be retrieved. */
@QueryParam("cluster_id")
private String clusterId;
- public ClusterStatusRequest setClusterId(String clusterId) {
+ public ClusterStatus setClusterId(String clusterId) {
this.clusterId = clusterId;
return this;
}
@@ -27,7 +27,7 @@ public String getClusterId() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- ClusterStatusRequest that = (ClusterStatusRequest) o;
+ ClusterStatus that = (ClusterStatus) o;
return Objects.equals(clusterId, that.clusterId);
}
@@ -38,6 +38,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(ClusterStatusRequest.class).add("clusterId", clusterId).toString();
+ return new ToStringer(ClusterStatus.class).add("clusterId", clusterId).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java
new file mode 100755
index 000000000..6f62c8cb5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ClusterStatusResponse {
+ /** Unique identifier for the cluster. */
+ @JsonProperty("cluster_id")
+ private String clusterId;
+
+ /** Status of all libraries on the cluster. */
+ @JsonProperty("library_statuses")
+ private Collection<LibraryFullStatus> libraryStatuses;
+
+ public ClusterStatusResponse setClusterId(String clusterId) {
+ this.clusterId = clusterId;
+ return this;
+ }
+
+ public String getClusterId() {
+ return clusterId;
+ }
+
+ public ClusterStatusResponse setLibraryStatuses(Collection<LibraryFullStatus> libraryStatuses) {
+ this.libraryStatuses = libraryStatuses;
+ return this;
+ }
+
+ public Collection<LibraryFullStatus> getLibraryStatuses() {
+ return libraryStatuses;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ClusterStatusResponse that = (ClusterStatusResponse) o;
+ return Objects.equals(clusterId, that.clusterId)
+ && Objects.equals(libraryStatuses, that.libraryStatuses);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(clusterId, libraryStatuses);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ClusterStatusResponse.class)
+ .add("clusterId", clusterId)
+ .add("libraryStatuses", libraryStatuses)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
index 6bf0d8e66..f4681eb47 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
@@ -232,12 +232,7 @@ public Iterable events(String clusterId) {
*/
public Iterable events(GetEvents request) {
return new Paginator<>(
- request,
- impl::events,
- GetEventsResponse::getEvents,
- response -> {
- return response.getNextPage();
- });
+ request, impl::events, GetEventsResponse::getEvents, response -> response.getNextPage());
}
public ClusterDetails get(String clusterId) {
@@ -294,7 +289,8 @@ public ClusterPermissions getPermissions(GetClusterPermissionsRequest request) {
* the 30 most recently terminated job clusters.
*/
public Iterable list(ListClustersRequest request) {
- return impl.list(request).getClusters();
+ return new Paginator<>(
+ request, impl::list, ListClustersResponse::getClusters, response -> null);
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java
deleted file mode 100755
index 28521670c..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-
-/** The kind of compute described by this compute specification. */
-@Generated
-public enum ComputeSpecKind {
- SERVERLESS_PREVIEW,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
index dc294c91b..01824b261 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
@@ -45,6 +45,13 @@ public class CreateCluster {
@JsonProperty("azure_attributes")
private AzureAttributes azureAttributes;
+ /**
+ * When specified, this clones libraries from a source cluster during the creation of a new
+ * cluster.
+ */
+ @JsonProperty("clone_from")
+ private CloneCluster cloneFrom;
+
/**
* The configuration for delivering spark logs to a long-term storage destination. Two kinds of
* destinations (dbfs and s3) are supported. Only one destination can be specified for one
@@ -274,6 +281,15 @@ public AzureAttributes getAzureAttributes() {
return azureAttributes;
}
+ public CreateCluster setCloneFrom(CloneCluster cloneFrom) {
+ this.cloneFrom = cloneFrom;
+ return this;
+ }
+
+ public CloneCluster getCloneFrom() {
+ return cloneFrom;
+ }
+
public CreateCluster setClusterLogConf(ClusterLogConf clusterLogConf) {
this.clusterLogConf = clusterLogConf;
return this;
@@ -491,6 +507,7 @@ public boolean equals(Object o) {
&& Objects.equals(autoterminationMinutes, that.autoterminationMinutes)
&& Objects.equals(awsAttributes, that.awsAttributes)
&& Objects.equals(azureAttributes, that.azureAttributes)
+ && Objects.equals(cloneFrom, that.cloneFrom)
&& Objects.equals(clusterLogConf, that.clusterLogConf)
&& Objects.equals(clusterName, that.clusterName)
&& Objects.equals(clusterSource, that.clusterSource)
@@ -524,6 +541,7 @@ public int hashCode() {
autoterminationMinutes,
awsAttributes,
azureAttributes,
+ cloneFrom,
clusterLogConf,
clusterName,
clusterSource,
@@ -557,6 +575,7 @@ public String toString() {
.add("autoterminationMinutes", autoterminationMinutes)
.add("awsAttributes", awsAttributes)
.add("azureAttributes", azureAttributes)
+ .add("cloneFrom", cloneFrom)
.add("clusterLogConf", clusterLogConf)
.add("clusterName", clusterName)
.add("clusterSource", clusterSource)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
index 1113ae904..3da5b88c7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
@@ -45,6 +45,13 @@ public class EditCluster {
@JsonProperty("azure_attributes")
private AzureAttributes azureAttributes;
+ /**
+ * When specified, this clones libraries from a source cluster during the creation of a new
+ * cluster.
+ */
+ @JsonProperty("clone_from")
+ private CloneCluster cloneFrom;
+
/** ID of the cluser */
@JsonProperty("cluster_id")
private String clusterId;
@@ -278,6 +285,15 @@ public AzureAttributes getAzureAttributes() {
return azureAttributes;
}
+ public EditCluster setCloneFrom(CloneCluster cloneFrom) {
+ this.cloneFrom = cloneFrom;
+ return this;
+ }
+
+ public CloneCluster getCloneFrom() {
+ return cloneFrom;
+ }
+
public EditCluster setClusterId(String clusterId) {
this.clusterId = clusterId;
return this;
@@ -504,6 +520,7 @@ public boolean equals(Object o) {
&& Objects.equals(autoterminationMinutes, that.autoterminationMinutes)
&& Objects.equals(awsAttributes, that.awsAttributes)
&& Objects.equals(azureAttributes, that.azureAttributes)
+ && Objects.equals(cloneFrom, that.cloneFrom)
&& Objects.equals(clusterId, that.clusterId)
&& Objects.equals(clusterLogConf, that.clusterLogConf)
&& Objects.equals(clusterName, that.clusterName)
@@ -538,6 +555,7 @@ public int hashCode() {
autoterminationMinutes,
awsAttributes,
azureAttributes,
+ cloneFrom,
clusterId,
clusterLogConf,
clusterName,
@@ -572,6 +590,7 @@ public String toString() {
.add("autoterminationMinutes", autoterminationMinutes)
.add("awsAttributes", awsAttributes)
.add("azureAttributes", azureAttributes)
+ .add("cloneFrom", cloneFrom)
.add("clusterId", clusterId)
.add("clusterLogConf", clusterLogConf)
.add("clusterName", clusterName)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
new file mode 100755
index 000000000..e46010d44
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * The environment entity used to preserve serverless environment side panel and jobs' environment
+ * for non-notebook task. In this minimal environment spec, only pip dependencies are supported.
+ * Next ID: 5
+ */
+@Generated
+public class Environment {
+ /**
+ * Client version used by the environment The client is the user-facing environment of the
+ * runtime. Each client comes with a specific set of pre-installed libraries. The version is a
+ * string, consisting of the major client version.
+ */
+ @JsonProperty("client")
+ private String client;
+
+ /**
+ * List of pip dependencies, as supported by the version of pip in this environment. Each
+ * dependency is a pip requirement file line
+ * https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be
+ * , , (WSFS or Volumes in
+ * Databricks), E.g. dependencies: ["foo==0.0.1", "-r
+ * /Workspace/test/requirements.txt"]
+ */
+ @JsonProperty("dependencies")
+ private Collection<String> dependencies;
+
+ public Environment setClient(String client) {
+ this.client = client;
+ return this;
+ }
+
+ public String getClient() {
+ return client;
+ }
+
+ public Environment setDependencies(Collection<String> dependencies) {
+ this.dependencies = dependencies;
+ return this;
+ }
+
+ public Collection<String> getDependencies() {
+ return dependencies;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Environment that = (Environment) o;
+ return Objects.equals(client, that.client) && Objects.equals(dependencies, that.dependencies);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(client, dependencies);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Environment.class)
+ .add("client", client)
+ .add("dependencies", dependencies)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java
index 54d1829f3..28f450f8e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -78,7 +79,8 @@ public GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest request
* a global init script](:method:globalinitscripts/get) operation.
*/
public Iterable list() {
- return impl.list().getScripts();
+ return new Paginator<>(
+ null, (Void v) -> impl.list(), ListGlobalInitScriptsResponse::getScripts, response -> null);
}
public void update(String scriptId, String name, String script) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java
index 3cece3f78..b989f56de 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java
@@ -33,6 +33,13 @@ public class InstancePoolGcpAttributes {
* the Databricks workspace. For example, "us-west1-a" is not a valid zone id if the Databricks
* workspace resides in the "us-east1" region. This is an optional field at instance pool
* creation, and if not specified, a default zone will be used.
+ *
+ * <p>This field can be one of the following: - "HA" => High availability, spread nodes across
+ * availability zones for a Databricks deployment region - A GCP availability zone => Pick One of
+ * the available zones for (machine type + region) from
+ * https://cloud.google.com/compute/docs/regions-zones (e.g. "us-west1-a").
+ *
+ * <p>If empty, Databricks picks an availability zone to schedule the cluster on.
*/
@JsonProperty("zone_id")
private String zoneId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java
index fa2da924f..11431a3c3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -133,7 +134,8 @@ public InstancePoolPermissions getPermissions(GetInstancePoolPermissionsRequest
*
Gets a list of instance pools with their statistics.
*/
public Iterable list() {
- return impl.list().getInstancePools();
+ return new Paginator<>(
+ null, (Void v) -> impl.list(), ListInstancePools::getInstancePools, response -> null);
}
public InstancePoolPermissions setPermissions(String instancePoolId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
index edc9d1921..2408fad89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -77,7 +78,11 @@ public void edit(InstanceProfile request) {
* This API is available to all users.
*/
public Iterable list() {
- return impl.list().getInstanceProfiles();
+ return new Paginator<>(
+ null,
+ (Void v) -> impl.list(),
+ ListInstanceProfilesResponse::getInstanceProfiles,
+ response -> null);
}
public void remove(String instanceProfileArn) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
index a1cf72255..2de225d87 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -13,17 +14,13 @@
*
* To make third-party or custom code available to notebooks and jobs running on your clusters,
* you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload
- * Java, Scala, and Python libraries and point to external packages in PyPI, Maven, and CRAN
+ * Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN
* repositories.
*
*
Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster
* library directly from a public repository such as PyPI or Maven, using a previously installed
* workspace library, or using an init script.
*
- *
When you install a library on a cluster, a notebook already attached to that cluster will not
- * immediately see the new library. You must first detach and then reattach the notebook to the
- * cluster.
- *
*
When you uninstall a library from a cluster, the library is removed only when you restart the
* cluster. Until you restart the cluster, the status of the uninstalled library appears as
* Uninstall pending restart.
@@ -47,37 +44,29 @@ public LibrariesAPI(LibrariesService mock) {
/**
* Get all statuses.
*
- *
Get the status of all libraries on all clusters. A status will be available for all
- * libraries installed on this cluster via the API or the libraries UI as well as libraries set to
- * be installed on all clusters via the libraries UI.
+ *
Get the status of all libraries on all clusters. A status is returned for all libraries
+ * installed on this cluster via the API or the libraries UI.
*/
public ListAllClusterLibraryStatusesResponse allClusterStatuses() {
return impl.allClusterStatuses();
}
public Iterable clusterStatus(String clusterId) {
- return clusterStatus(new ClusterStatusRequest().setClusterId(clusterId));
+ return clusterStatus(new ClusterStatus().setClusterId(clusterId));
}
/**
* Get status.
*
- * Get the status of libraries on a cluster. A status will be available for all libraries
- * installed on this cluster via the API or the libraries UI as well as libraries set to be
- * installed on all clusters via the libraries UI. The order of returned libraries will be as
- * follows.
- *
- *
1. Libraries set to be installed on this cluster will be returned first. Within this group,
- * the final order will be order in which the libraries were added to the cluster.
- *
- *
2. Libraries set to be installed on all clusters are returned next. Within this group there
- * is no order guarantee.
- *
- *
3. Libraries that were previously requested on this cluster or on all clusters, but now
- * marked for removal. Within this group there is no order guarantee.
+ *
Get the status of libraries on a cluster. A status is returned for all libraries installed
+ * on this cluster via the API or the libraries UI. The order of returned libraries is as follows:
+ * 1. Libraries set to be installed on this cluster, in the order that the libraries were added to
+ * the cluster, are returned first. 2. Libraries that were previously requested to be installed on
+ * this cluster, but are now marked for removal, in no particular order, are returned last.
*/
- public Iterable clusterStatus(ClusterStatusRequest request) {
- return impl.clusterStatus(request).getLibraryStatuses();
+ public Iterable clusterStatus(ClusterStatus request) {
+ return new Paginator<>(
+ request, impl::clusterStatus, ClusterStatusResponse::getLibraryStatuses, response -> null);
}
public void install(String clusterId, Collection libraries) {
@@ -87,12 +76,8 @@ public void install(String clusterId, Collection libraries) {
/**
* Add a library.
*
- * Add libraries to be installed on a cluster. The installation is asynchronous; it happens in
- * the background after the completion of this request.
- *
- *
**Note**: The actual set of libraries to be installed on a cluster is the union of the
- * libraries specified via this method and the libraries set to be installed on all clusters via
- * the libraries UI.
+ *
Add libraries to install on a cluster. The installation is asynchronous; it happens in the
+ * background after the completion of this request.
*/
public void install(InstallLibraries request) {
impl.install(request);
@@ -105,9 +90,9 @@ public void uninstall(String clusterId, Collection libraries) {
/**
* Uninstall libraries.
*
- * Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the
- * cluster is restarted. Uninstalling libraries that are not installed on the cluster will have no
- * impact but is not an error.
+ *
Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the
+ * cluster is restarted. A request to uninstall a library that is not currently installed is
+ * ignored.
*/
public void uninstall(UninstallLibraries request) {
impl.uninstall(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
index 1485f7688..8c8124d6a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
@@ -24,11 +24,11 @@ public ListAllClusterLibraryStatusesResponse allClusterStatuses() {
}
@Override
- public ClusterLibraryStatuses clusterStatus(ClusterStatusRequest request) {
+ public ClusterStatusResponse clusterStatus(ClusterStatus request) {
String path = "/api/2.0/libraries/cluster-status";
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
- return apiClient.GET(path, request, ClusterLibraryStatuses.class, headers);
+ return apiClient.GET(path, request, ClusterStatusResponse.class, headers);
}
@Override
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
index 99119dc20..2e9773ff5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
@@ -9,17 +9,13 @@
*
* To make third-party or custom code available to notebooks and jobs running on your clusters,
* you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload
- * Java, Scala, and Python libraries and point to external packages in PyPI, Maven, and CRAN
+ * Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN
* repositories.
*
*
Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster
* library directly from a public repository such as PyPI or Maven, using a previously installed
* workspace library, or using an init script.
*
- *
When you install a library on a cluster, a notebook already attached to that cluster will not
- * immediately see the new library. You must first detach and then reattach the notebook to the
- * cluster.
- *
*
When you uninstall a library from a cluster, the library is removed only when you restart the
* cluster. Until you restart the cluster, the status of the uninstalled library appears as
* Uninstall pending restart.
@@ -33,49 +29,36 @@ public interface LibrariesService {
/**
* Get all statuses.
*
- *
Get the status of all libraries on all clusters. A status will be available for all
- * libraries installed on this cluster via the API or the libraries UI as well as libraries set to
- * be installed on all clusters via the libraries UI.
+ *
Get the status of all libraries on all clusters. A status is returned for all libraries
+ * installed on this cluster via the API or the libraries UI.
*/
ListAllClusterLibraryStatusesResponse allClusterStatuses();
/**
* Get status.
*
- *
Get the status of libraries on a cluster. A status will be available for all libraries
- * installed on this cluster via the API or the libraries UI as well as libraries set to be
- * installed on all clusters via the libraries UI. The order of returned libraries will be as
- * follows.
- *
- *
1. Libraries set to be installed on this cluster will be returned first. Within this group,
- * the final order will be order in which the libraries were added to the cluster.
- *
- *
2. Libraries set to be installed on all clusters are returned next. Within this group there
- * is no order guarantee.
- *
- *
3. Libraries that were previously requested on this cluster or on all clusters, but now
- * marked for removal. Within this group there is no order guarantee.
+ *
Get the status of libraries on a cluster. A status is returned for all libraries installed
+ * on this cluster via the API or the libraries UI. The order of returned libraries is as follows:
+ * 1. Libraries set to be installed on this cluster, in the order that the libraries were added to
+ * the cluster, are returned first. 2. Libraries that were previously requested to be installed on
+ * this cluster, but are now marked for removal, in no particular order, are returned last.
*/
- ClusterLibraryStatuses clusterStatus(ClusterStatusRequest clusterStatusRequest);
+ ClusterStatusResponse clusterStatus(ClusterStatus clusterStatus);
/**
* Add a library.
*
- *
Add libraries to be installed on a cluster. The installation is asynchronous; it happens in
- * the background after the completion of this request.
- *
- *
**Note**: The actual set of libraries to be installed on a cluster is the union of the
- * libraries specified via this method and the libraries set to be installed on all clusters via
- * the libraries UI.
+ *
Add libraries to install on a cluster. The installation is asynchronous; it happens in the
+ * background after the completion of this request.
*/
void install(InstallLibraries installLibraries);
/**
* Uninstall libraries.
*
- *
Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the
- * cluster is restarted. Uninstalling libraries that are not installed on the cluster will have no
- * impact but is not an error.
+ *
Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the
+ * cluster is restarted. A request to uninstall a library that is not currently installed is
+ * ignored.
*/
void uninstall(UninstallLibraries uninstallLibraries);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java
index 85f8a1724..7f9f10961 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java
@@ -14,18 +14,20 @@ public class Library {
private RCranLibrary cran;
/**
- * URI of the egg to be installed. Currently only DBFS and S3 URIs are supported. For example: `{
- * "egg": "dbfs:/my/egg" }` or `{ "egg": "s3://my-bucket/egg" }`. If S3 is used, please make sure
- * the cluster has read access on the library. You may need to launch the cluster with an IAM role
- * to access the S3 URI.
+ * URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog
+ * Volumes paths, and S3 URIs. For example: `{ "egg": "/Workspace/path/to/library.egg" }`, `{
+ * "egg" : "/Volumes/path/to/library.egg" }` or `{ "egg": "s3://my-bucket/library.egg" }`. If S3
+ * is used, please make sure the cluster has read access on the library. You may need to launch
+ * the cluster with an IAM role to access the S3 URI.
*/
@JsonProperty("egg")
private String egg;
/**
- * URI of the jar to be installed. Currently only DBFS and S3 URIs are supported. For example: `{
- * "jar": "dbfs:/mnt/databricks/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If
- * S3 is used, please make sure the cluster has read access on the library. You may need to launch
+ * URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog
+ * Volumes paths, and S3 URIs. For example: `{ "jar": "/Workspace/path/to/library.jar" }`, `{
+ * "jar" : "/Volumes/path/to/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If S3
+ * is used, please make sure the cluster has read access on the library. You may need to launch
* the cluster with an IAM role to access the S3 URI.
*/
@JsonProperty("jar")
@@ -43,9 +45,19 @@ public class Library {
private PythonPyPiLibrary pypi;
/**
- * URI of the wheel to be installed. For example: `{ "whl": "dbfs:/my/whl" }` or `{ "whl":
- * "s3://my-bucket/whl" }`. If S3 is used, please make sure the cluster has read access on the
- * library. You may need to launch the cluster with an IAM role to access the S3 URI.
+ * URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes
+ * paths are supported. For example: `{ "requirements": "/Workspace/path/to/requirements.txt" }`
+ * or `{ "requirements" : "/Volumes/path/to/requirements.txt" }`
+ */
+ @JsonProperty("requirements")
+ private String requirements;
+
+ /**
+ * URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog
+ * Volumes paths, and S3 URIs. For example: `{ "whl": "/Workspace/path/to/library.whl" }`, `{
+ * "whl" : "/Volumes/path/to/library.whl" }` or `{ "whl": "s3://my-bucket/library.whl" }`. If S3
+ * is used, please make sure the cluster has read access on the library. You may need to launch
+ * the cluster with an IAM role to access the S3 URI.
*/
@JsonProperty("whl")
private String whl;
@@ -95,6 +107,15 @@ public PythonPyPiLibrary getPypi() {
return pypi;
}
+ public Library setRequirements(String requirements) {
+ this.requirements = requirements;
+ return this;
+ }
+
+ public String getRequirements() {
+ return requirements;
+ }
+
public Library setWhl(String whl) {
this.whl = whl;
return this;
@@ -114,12 +135,13 @@ public boolean equals(Object o) {
&& Objects.equals(jar, that.jar)
&& Objects.equals(maven, that.maven)
&& Objects.equals(pypi, that.pypi)
+ && Objects.equals(requirements, that.requirements)
&& Objects.equals(whl, that.whl);
}
@Override
public int hashCode() {
- return Objects.hash(cran, egg, jar, maven, pypi, whl);
+ return Objects.hash(cran, egg, jar, maven, pypi, requirements, whl);
}
@Override
@@ -130,6 +152,7 @@ public String toString() {
.add("jar", jar)
.add("maven", maven)
.add("pypi", pypi)
+ .add("requirements", requirements)
.add("whl", whl)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java
index bdab295df..8a4a0b6c0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java
@@ -8,6 +8,7 @@
import java.util.Collection;
import java.util.Objects;
+/** The status of the library on a specific cluster. */
@Generated
public class LibraryFullStatus {
/** Whether the library was set to be installed on all clusters via the libraries UI. */
@@ -24,7 +25,7 @@ public class LibraryFullStatus {
/** Status of installing the library on the cluster. */
@JsonProperty("status")
- private LibraryFullStatusStatus status;
+ private LibraryInstallStatus status;
public LibraryFullStatus setIsLibraryForAllClusters(Boolean isLibraryForAllClusters) {
this.isLibraryForAllClusters = isLibraryForAllClusters;
@@ -53,12 +54,12 @@ public Collection getMessages() {
return messages;
}
- public LibraryFullStatus setStatus(LibraryFullStatusStatus status) {
+ public LibraryFullStatus setStatus(LibraryInstallStatus status) {
this.status = status;
return this;
}
- public LibraryFullStatusStatus getStatus() {
+ public LibraryInstallStatus getStatus() {
return status;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java
similarity index 73%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java
index 484f2fc83..51c743797 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java
@@ -4,14 +4,15 @@
import com.databricks.sdk.support.Generated;
-/** Status of installing the library on the cluster. */
+/** The status of a library on a specific cluster. */
@Generated
-public enum LibraryFullStatusStatus {
+public enum LibraryInstallStatus {
FAILED,
INSTALLED,
INSTALLING,
PENDING,
RESOLVING,
+ RESTORED,
SKIPPED,
UNINSTALL_ON_RESTART,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java
new file mode 100755
index 000000000..5977206de
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java
@@ -0,0 +1,92 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDashboardRequest {
+ /** The display name of the dashboard. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /**
+ * The workspace path of the folder containing the dashboard. Includes leading slash and no
+ * trailing slash.
+ */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
+ /** The contents of the dashboard in serialized string form. */
+ @JsonProperty("serialized_dashboard")
+ private String serializedDashboard;
+
+ /** The warehouse ID used to run the dashboard. */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public CreateDashboardRequest setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public CreateDashboardRequest setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public CreateDashboardRequest setSerializedDashboard(String serializedDashboard) {
+ this.serializedDashboard = serializedDashboard;
+ return this;
+ }
+
+ public String getSerializedDashboard() {
+ return serializedDashboard;
+ }
+
+ public CreateDashboardRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDashboardRequest that = (CreateDashboardRequest) o;
+ return Objects.equals(displayName, that.displayName)
+ && Objects.equals(parentPath, that.parentPath)
+ && Objects.equals(serializedDashboard, that.serializedDashboard)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(displayName, parentPath, serializedDashboard, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDashboardRequest.class)
+ .add("displayName", displayName)
+ .add("parentPath", parentPath)
+ .add("serializedDashboard", serializedDashboard)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java
new file mode 100755
index 000000000..8353ab585
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java
@@ -0,0 +1,195 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Dashboard {
+ /** The timestamp of when the dashboard was created. */
+ @JsonProperty("create_time")
+ private String createTime;
+
+ /** UUID identifying the dashboard. */
+ @JsonProperty("dashboard_id")
+ private String dashboardId;
+
+ /** The display name of the dashboard. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /**
+ * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
+ * has not been modified since the last read.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /** The state of the dashboard resource. Used for tracking trashed status. */
+ @JsonProperty("lifecycle_state")
+ private LifecycleState lifecycleState;
+
+ /**
+ * The workspace path of the folder containing the dashboard. Includes leading slash and no
+ * trailing slash.
+ */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
+ /** The workspace path of the dashboard asset, including the file name. */
+ @JsonProperty("path")
+ private String path;
+
+ /** The contents of the dashboard in serialized string form. */
+ @JsonProperty("serialized_dashboard")
+ private String serializedDashboard;
+
+ /** The timestamp of when the dashboard was last updated by the user. */
+ @JsonProperty("update_time")
+ private String updateTime;
+
+ /** The warehouse ID used to run the dashboard. */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public Dashboard setCreateTime(String createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public String getCreateTime() {
+ return createTime;
+ }
+
+ public Dashboard setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ public Dashboard setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public Dashboard setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public Dashboard setLifecycleState(LifecycleState lifecycleState) {
+ this.lifecycleState = lifecycleState;
+ return this;
+ }
+
+ public LifecycleState getLifecycleState() {
+ return lifecycleState;
+ }
+
+ public Dashboard setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public Dashboard setPath(String path) {
+ this.path = path;
+ return this;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public Dashboard setSerializedDashboard(String serializedDashboard) {
+ this.serializedDashboard = serializedDashboard;
+ return this;
+ }
+
+ public String getSerializedDashboard() {
+ return serializedDashboard;
+ }
+
+ public Dashboard setUpdateTime(String updateTime) {
+ this.updateTime = updateTime;
+ return this;
+ }
+
+ public String getUpdateTime() {
+ return updateTime;
+ }
+
+ public Dashboard setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Dashboard that = (Dashboard) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(dashboardId, that.dashboardId)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(lifecycleState, that.lifecycleState)
+ && Objects.equals(parentPath, that.parentPath)
+ && Objects.equals(path, that.path)
+ && Objects.equals(serializedDashboard, that.serializedDashboard)
+ && Objects.equals(updateTime, that.updateTime)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ dashboardId,
+ displayName,
+ etag,
+ lifecycleState,
+ parentPath,
+ path,
+ serializedDashboard,
+ updateTime,
+ warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Dashboard.class)
+ .add("createTime", createTime)
+ .add("dashboardId", dashboardId)
+ .add("displayName", displayName)
+ .add("etag", etag)
+ .add("lifecycleState", lifecycleState)
+ .add("parentPath", parentPath)
+ .add("path", path)
+ .add("serializedDashboard", serializedDashboard)
+ .add("updateTime", updateTime)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java
new file mode 100755
index 000000000..bbdb5b13a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Get dashboard */
+@Generated
+public class GetDashboardRequest {
+ /** UUID identifying the dashboard. */
+ private String dashboardId;
+
+ public GetDashboardRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDashboardRequest that = (GetDashboardRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDashboardRequest.class).add("dashboardId", dashboardId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java
new file mode 100755
index 000000000..6f29da065
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Get published dashboard */
+@Generated
+public class GetPublishedDashboardRequest {
+ /** UUID identifying the dashboard to be published. */
+ private String dashboardId;
+
+ public GetPublishedDashboardRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPublishedDashboardRequest that = (GetPublishedDashboardRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublishedDashboardRequest.class)
+ .add("dashboardId", dashboardId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
index bc55fbe21..ff62385cf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
@@ -26,8 +26,60 @@ public LakeviewAPI(LakeviewService mock) {
impl = mock;
}
- public void publish(String dashboardId) {
- publish(new PublishRequest().setDashboardId(dashboardId));
+ public Dashboard create(String displayName) {
+ return create(new CreateDashboardRequest().setDisplayName(displayName));
+ }
+
+ /**
+ * Create dashboard.
+ *
+ * Create a draft dashboard.
+ */
+ public Dashboard create(CreateDashboardRequest request) {
+ return impl.create(request);
+ }
+
+ public Dashboard get(String dashboardId) {
+ return get(new GetDashboardRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Get dashboard.
+ *
+   * <p>Get a draft dashboard.
+ */
+ public Dashboard get(GetDashboardRequest request) {
+ return impl.get(request);
+ }
+
+ public PublishedDashboard getPublished(String dashboardId) {
+ return getPublished(new GetPublishedDashboardRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Get published dashboard.
+ *
+   * <p>Get the current published dashboard.
+ */
+ public PublishedDashboard getPublished(GetPublishedDashboardRequest request) {
+ return impl.getPublished(request);
+ }
+
+ public Dashboard migrate(String sourceDashboardId) {
+ return migrate(new MigrateDashboardRequest().setSourceDashboardId(sourceDashboardId));
+ }
+
+ /**
+ * Migrate dashboard.
+ *
+   * <p>Migrates a classic SQL dashboard to Lakeview.
+ */
+ public Dashboard migrate(MigrateDashboardRequest request) {
+ return impl.migrate(request);
+ }
+
+ public PublishedDashboard publish(String dashboardId) {
+ return publish(new PublishRequest().setDashboardId(dashboardId));
}
/**
@@ -35,8 +87,47 @@ public void publish(String dashboardId) {
*
   * <p>Publish the current draft dashboard.
*/
- public void publish(PublishRequest request) {
- impl.publish(request);
+ public PublishedDashboard publish(PublishRequest request) {
+ return impl.publish(request);
+ }
+
+ public void trash(String dashboardId) {
+ trash(new TrashDashboardRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Trash dashboard.
+ *
+   * <p>Trash a dashboard.
+ */
+ public void trash(TrashDashboardRequest request) {
+ impl.trash(request);
+ }
+
+ public void unpublish(String dashboardId) {
+ unpublish(new UnpublishDashboardRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Unpublish dashboard.
+ *
+   * <p>Unpublish the dashboard.
+ */
+ public void unpublish(UnpublishDashboardRequest request) {
+ impl.unpublish(request);
+ }
+
+ public Dashboard update(String dashboardId) {
+ return update(new UpdateDashboardRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Update dashboard.
+ *
+   * <p>Update a draft dashboard.
+ */
+ public Dashboard update(UpdateDashboardRequest request) {
+ return impl.update(request);
}
public LakeviewService impl() {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
index 8386295ce..10a926490 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
@@ -16,12 +16,73 @@ public LakeviewImpl(ApiClient apiClient) {
}
@Override
- public void publish(PublishRequest request) {
+ public Dashboard create(CreateDashboardRequest request) {
+ String path = "/api/2.0/lakeview/dashboards";
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ return apiClient.POST(path, request, Dashboard.class, headers);
+ }
+
+ @Override
+ public Dashboard get(GetDashboardRequest request) {
+ String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId());
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ return apiClient.GET(path, request, Dashboard.class, headers);
+ }
+
+ @Override
+ public PublishedDashboard getPublished(GetPublishedDashboardRequest request) {
+ String path =
+ String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId());
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ return apiClient.GET(path, request, PublishedDashboard.class, headers);
+ }
+
+ @Override
+ public Dashboard migrate(MigrateDashboardRequest request) {
+ String path = "/api/2.0/lakeview/dashboards/migrate";
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ return apiClient.POST(path, request, Dashboard.class, headers);
+ }
+
+ @Override
+ public PublishedDashboard publish(PublishRequest request) {
String path =
String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId());
Map headers = new HashMap<>();
headers.put("Accept", "application/json");
headers.put("Content-Type", "application/json");
- apiClient.POST(path, request, PublishResponse.class, headers);
+ return apiClient.POST(path, request, PublishedDashboard.class, headers);
+ }
+
+ @Override
+ public void trash(TrashDashboardRequest request) {
+ String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId());
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ apiClient.DELETE(path, request, TrashDashboardResponse.class, headers);
+ }
+
+ @Override
+ public void unpublish(UnpublishDashboardRequest request) {
+ String path =
+ String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId());
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ apiClient.DELETE(path, request, UnpublishDashboardResponse.class, headers);
+ }
+
+ @Override
+ public Dashboard update(UpdateDashboardRequest request) {
+ String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId());
+ Map headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ return apiClient.PATCH(path, request, Dashboard.class, headers);
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
index c283a65ae..d5d713404 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
@@ -13,10 +13,59 @@
*/
@Generated
public interface LakeviewService {
+ /**
+ * Create dashboard.
+ *
+ * Create a draft dashboard.
+ */
+ Dashboard create(CreateDashboardRequest createDashboardRequest);
+
+ /**
+ * Get dashboard.
+ *
+   * <p>Get a draft dashboard.
+ */
+ Dashboard get(GetDashboardRequest getDashboardRequest);
+
+ /**
+ * Get published dashboard.
+ *
+   * <p>Get the current published dashboard.
+ */
+ PublishedDashboard getPublished(GetPublishedDashboardRequest getPublishedDashboardRequest);
+
+ /**
+ * Migrate dashboard.
+ *
+   * <p>Migrates a classic SQL dashboard to Lakeview.
+ */
+ Dashboard migrate(MigrateDashboardRequest migrateDashboardRequest);
+
/**
* Publish dashboard.
*
   * <p>Publish the current draft dashboard.
*/
- void publish(PublishRequest publishRequest);
+ PublishedDashboard publish(PublishRequest publishRequest);
+
+ /**
+ * Trash dashboard.
+ *
+   * <p>Trash a dashboard.
+ */
+ void trash(TrashDashboardRequest trashDashboardRequest);
+
+ /**
+ * Unpublish dashboard.
+ *
+   * <p>Unpublish the dashboard.
+ */
+ void unpublish(UnpublishDashboardRequest unpublishDashboardRequest);
+
+ /**
+ * Update dashboard.
+ *
+   * <p>Update a draft dashboard.
+ */
+ Dashboard update(UpdateDashboardRequest updateDashboardRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java
new file mode 100755
index 000000000..37abfd2a8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum LifecycleState {
+ ACTIVE,
+ TRASHED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java
new file mode 100755
index 000000000..360c202ec
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class MigrateDashboardRequest {
+ /** Display name for the new Lakeview dashboard. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** The workspace path of the folder to contain the migrated Lakeview dashboard. */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
+ /** UUID of the dashboard to be migrated. */
+ @JsonProperty("source_dashboard_id")
+ private String sourceDashboardId;
+
+ public MigrateDashboardRequest setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public MigrateDashboardRequest setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public MigrateDashboardRequest setSourceDashboardId(String sourceDashboardId) {
+ this.sourceDashboardId = sourceDashboardId;
+ return this;
+ }
+
+ public String getSourceDashboardId() {
+ return sourceDashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MigrateDashboardRequest that = (MigrateDashboardRequest) o;
+ return Objects.equals(displayName, that.displayName)
+ && Objects.equals(parentPath, that.parentPath)
+ && Objects.equals(sourceDashboardId, that.sourceDashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(displayName, parentPath, sourceDashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MigrateDashboardRequest.class)
+ .add("displayName", displayName)
+ .add("parentPath", parentPath)
+ .add("sourceDashboardId", sourceDashboardId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java
new file mode 100755
index 000000000..c8133c4f2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java
@@ -0,0 +1,89 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PublishedDashboard {
+ /** The display name of the published dashboard. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** Indicates whether credentials are embedded in the published dashboard. */
+ @JsonProperty("embed_credentials")
+ private Boolean embedCredentials;
+
+ /** The timestamp of when the published dashboard was last revised. */
+ @JsonProperty("revision_create_time")
+ private String revisionCreateTime;
+
+ /** The warehouse ID used to run the published dashboard. */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public PublishedDashboard setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public PublishedDashboard setEmbedCredentials(Boolean embedCredentials) {
+ this.embedCredentials = embedCredentials;
+ return this;
+ }
+
+ public Boolean getEmbedCredentials() {
+ return embedCredentials;
+ }
+
+ public PublishedDashboard setRevisionCreateTime(String revisionCreateTime) {
+ this.revisionCreateTime = revisionCreateTime;
+ return this;
+ }
+
+ public String getRevisionCreateTime() {
+ return revisionCreateTime;
+ }
+
+ public PublishedDashboard setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PublishedDashboard that = (PublishedDashboard) o;
+ return Objects.equals(displayName, that.displayName)
+ && Objects.equals(embedCredentials, that.embedCredentials)
+ && Objects.equals(revisionCreateTime, that.revisionCreateTime)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(displayName, embedCredentials, revisionCreateTime, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PublishedDashboard.class)
+ .add("displayName", displayName)
+ .add("embedCredentials", embedCredentials)
+ .add("revisionCreateTime", revisionCreateTime)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java
new file mode 100755
index 000000000..b346cd139
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Trash dashboard */
+@Generated
+public class TrashDashboardRequest {
+ /** UUID identifying the dashboard. */
+ private String dashboardId;
+
+ public TrashDashboardRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TrashDashboardRequest that = (TrashDashboardRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(TrashDashboardRequest.class).add("dashboardId", dashboardId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java
similarity index 83%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java
index 9f953cd06..0f43a3a49 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java
@@ -7,7 +7,7 @@
import java.util.Objects;
@Generated
-public class PublishResponse {
+public class TrashDashboardResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(PublishResponse.class).toString();
+ return new ToStringer(TrashDashboardResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java
new file mode 100755
index 000000000..6e18e5e72
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Unpublish dashboard */
+@Generated
+public class UnpublishDashboardRequest {
+  /** UUID identifying the dashboard to be unpublished. */
+ private String dashboardId;
+
+ public UnpublishDashboardRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UnpublishDashboardRequest that = (UnpublishDashboardRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UnpublishDashboardRequest.class)
+ .add("dashboardId", dashboardId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java
new file mode 100755
index 000000000..211e9c010
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class UnpublishDashboardResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UnpublishDashboardResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
new file mode 100755
index 000000000..46a384eec
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
@@ -0,0 +1,106 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDashboardRequest {
+ /** UUID identifying the dashboard. */
+ private String dashboardId;
+
+ /** The display name of the dashboard. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /**
+ * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
+ * has not been modified since the last read.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /** The contents of the dashboard in serialized string form. */
+ @JsonProperty("serialized_dashboard")
+ private String serializedDashboard;
+
+ /** The warehouse ID used to run the dashboard. */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public UpdateDashboardRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ public UpdateDashboardRequest setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public UpdateDashboardRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public UpdateDashboardRequest setSerializedDashboard(String serializedDashboard) {
+ this.serializedDashboard = serializedDashboard;
+ return this;
+ }
+
+ public String getSerializedDashboard() {
+ return serializedDashboard;
+ }
+
+ public UpdateDashboardRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDashboardRequest that = (UpdateDashboardRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(serializedDashboard, that.serializedDashboard)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId, displayName, etag, serializedDashboard, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDashboardRequest.class)
+ .add("dashboardId", dashboardId)
+ .add("displayName", displayName)
+ .add("etag", etag)
+ .add("serializedDashboard", serializedDashboard)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
index be15a4ff9..f1909f28f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -137,7 +138,7 @@ public Iterable<FileInfo> list(String path) {
* same functionality without timing out.
*/
public Iterable<FileInfo> list(ListDbfsRequest request) {
- return impl.list(request).getFiles();
+ return new Paginator<>(request, impl::list, ListStatusResponse::getFiles, response -> null);
}
public void mkdirs(String path) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java
index ad4022f8b..95adf6bef 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java
@@ -2,6 +2,7 @@
package com.databricks.sdk.service.files;
import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.http.Encoding;
import com.databricks.sdk.support.Generated;
import java.util.HashMap;
import java.util.Map;
@@ -17,28 +18,38 @@ public FilesImpl(ApiClient apiClient) {
@Override
public void createDirectory(CreateDirectoryRequest request) {
- String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath());
+ String path =
+ String.format(
+ "/api/2.0/fs/directories%s",
+ Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath()));
Map<String, String> headers = new HashMap<>();
apiClient.PUT(path, null, CreateDirectoryResponse.class, headers);
}
@Override
public void delete(DeleteFileRequest request) {
- String path = String.format("/api/2.0/fs/files%s", request.getFilePath());
+ String path =
+ String.format(
+ "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath()));
Map<String, String> headers = new HashMap<>();
apiClient.DELETE(path, request, DeleteResponse.class, headers);
}
@Override
public void deleteDirectory(DeleteDirectoryRequest request) {
- String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath());
+ String path =
+ String.format(
+ "/api/2.0/fs/directories%s",
+ Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath()));
Map<String, String> headers = new HashMap<>();
apiClient.DELETE(path, request, DeleteDirectoryResponse.class, headers);
}
@Override
public DownloadResponse download(DownloadRequest request) {
- String path = String.format("/api/2.0/fs/files%s", request.getFilePath());
+ String path =
+ String.format(
+ "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath()));
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/octet-stream");
return apiClient.GET(path, request, DownloadResponse.class, headers);
@@ -46,21 +57,29 @@ public DownloadResponse download(DownloadRequest request) {
@Override
public void getDirectoryMetadata(GetDirectoryMetadataRequest request) {
- String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath());
+ String path =
+ String.format(
+ "/api/2.0/fs/directories%s",
+ Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath()));
Map<String, String> headers = new HashMap<>();
apiClient.HEAD(path, request, GetDirectoryMetadataResponse.class, headers);
}
@Override
public GetMetadataResponse getMetadata(GetMetadataRequest request) {
- String path = String.format("/api/2.0/fs/files%s", request.getFilePath());
+ String path =
+ String.format(
+ "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath()));
Map<String, String> headers = new HashMap<>();
return apiClient.HEAD(path, request, GetMetadataResponse.class, headers);
}
@Override
public ListDirectoryResponse listDirectoryContents(ListDirectoryContentsRequest request) {
- String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath());
+ String path =
+ String.format(
+ "/api/2.0/fs/directories%s",
+ Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath()));
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
return apiClient.GET(path, request, ListDirectoryResponse.class, headers);
@@ -68,7 +87,9 @@ public ListDirectoryResponse listDirectoryContents(ListDirectoryContentsRequest
@Override
public void upload(UploadRequest request) {
- String path = String.format("/api/2.0/fs/files%s", request.getFilePath());
+ String path =
+ String.format(
+ "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath()));
Map<String, String> headers = new HashMap<>();
headers.put("Content-Type", "application/octet-stream");
apiClient.PUT(path, request.getContents(), UploadResponse.class, headers);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
index 7a25723e7..dca4a1bdb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java
@@ -15,7 +15,7 @@ public class GetPermissionRequest {
/**
* The type of the request object. Can be one of the following: authorization, clusters,
* cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
- * registered-models, repos, serving-endpoints, or sql-warehouses.
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java
index 18408b80b..fff729d4c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java
@@ -15,6 +15,7 @@ public enum PermissionLevel {
CAN_MANAGE_PRODUCTION_VERSIONS,
CAN_MANAGE_RUN,
CAN_MANAGE_STAGING_VERSIONS,
+ CAN_QUERY,
CAN_READ,
CAN_RESTART,
CAN_RUN,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java
new file mode 100755
index 000000000..88bb7fa66
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This spec contains undocumented permission migration APIs used in
+ * https://github.com/databrickslabs/ucx.
+ */
+@Generated
+public class PermissionMigrationAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(PermissionMigrationAPI.class);
+
+ private final PermissionMigrationService impl;
+
+ /** Regular-use constructor */
+ public PermissionMigrationAPI(ApiClient apiClient) {
+ impl = new PermissionMigrationImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public PermissionMigrationAPI(PermissionMigrationService mock) {
+ impl = mock;
+ }
+
+ public PermissionMigrationResponse migratePermissions(
+ long workspaceId, String fromWorkspaceGroupName, String toAccountGroupName) {
+ return migratePermissions(
+ new PermissionMigrationRequest()
+ .setWorkspaceId(workspaceId)
+ .setFromWorkspaceGroupName(fromWorkspaceGroupName)
+ .setToAccountGroupName(toAccountGroupName));
+ }
+
+ /**
+ * Migrate Permissions.
+ *
+ * <p>Migrate a batch of permissions from a workspace local group to an account group.
+ */
+ public PermissionMigrationResponse migratePermissions(PermissionMigrationRequest request) {
+ return impl.migratePermissions(request);
+ }
+
+ public PermissionMigrationService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java
new file mode 100755
index 000000000..d4e939816
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java
@@ -0,0 +1,26 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of PermissionMigration */
+@Generated
+class PermissionMigrationImpl implements PermissionMigrationService {
+ private final ApiClient apiClient;
+
+ public PermissionMigrationImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public PermissionMigrationResponse migratePermissions(PermissionMigrationRequest request) {
+ String path = "/api/2.0/permissionmigration";
+ Map<String, String> headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ return apiClient.POST(path, request, PermissionMigrationResponse.class, headers);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java
new file mode 100755
index 000000000..3042a30bf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java
@@ -0,0 +1,92 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PermissionMigrationRequest {
+ /** The name of the workspace group that permissions will be migrated from. */
+ @JsonProperty("from_workspace_group_name")
+ private String fromWorkspaceGroupName;
+
+ /** The maximum number of permissions that will be migrated. */
+ @JsonProperty("size")
+ private Long size;
+
+ /** The name of the account group that permissions will be migrated to. */
+ @JsonProperty("to_account_group_name")
+ private String toAccountGroupName;
+
+ /**
+ * WorkspaceId of the associated workspace where the permission migration will occur. Both
+ * workspace group and account group must be in this workspace.
+ */
+ @JsonProperty("workspace_id")
+ private Long workspaceId;
+
+ public PermissionMigrationRequest setFromWorkspaceGroupName(String fromWorkspaceGroupName) {
+ this.fromWorkspaceGroupName = fromWorkspaceGroupName;
+ return this;
+ }
+
+ public String getFromWorkspaceGroupName() {
+ return fromWorkspaceGroupName;
+ }
+
+ public PermissionMigrationRequest setSize(Long size) {
+ this.size = size;
+ return this;
+ }
+
+ public Long getSize() {
+ return size;
+ }
+
+ public PermissionMigrationRequest setToAccountGroupName(String toAccountGroupName) {
+ this.toAccountGroupName = toAccountGroupName;
+ return this;
+ }
+
+ public String getToAccountGroupName() {
+ return toAccountGroupName;
+ }
+
+ public PermissionMigrationRequest setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ return this;
+ }
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PermissionMigrationRequest that = (PermissionMigrationRequest) o;
+ return Objects.equals(fromWorkspaceGroupName, that.fromWorkspaceGroupName)
+ && Objects.equals(size, that.size)
+ && Objects.equals(toAccountGroupName, that.toAccountGroupName)
+ && Objects.equals(workspaceId, that.workspaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fromWorkspaceGroupName, size, toAccountGroupName, workspaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PermissionMigrationRequest.class)
+ .add("fromWorkspaceGroupName", fromWorkspaceGroupName)
+ .add("size", size)
+ .add("toAccountGroupName", toAccountGroupName)
+ .add("workspaceId", workspaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java
new file mode 100755
index 000000000..24ab24f9f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PermissionMigrationResponse {
+ /** Number of permissions migrated. */
+ @JsonProperty("permissions_migrated")
+ private Long permissionsMigrated;
+
+ public PermissionMigrationResponse setPermissionsMigrated(Long permissionsMigrated) {
+ this.permissionsMigrated = permissionsMigrated;
+ return this;
+ }
+
+ public Long getPermissionsMigrated() {
+ return permissionsMigrated;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PermissionMigrationResponse that = (PermissionMigrationResponse) o;
+ return Objects.equals(permissionsMigrated, that.permissionsMigrated);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(permissionsMigrated);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PermissionMigrationResponse.class)
+ .add("permissionsMigrated", permissionsMigrated)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java
new file mode 100755
index 000000000..61f8956ce
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java
@@ -0,0 +1,23 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.iam;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * This spec contains undocumented permission migration APIs used in
+ * https://github.com/databrickslabs/ucx.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface PermissionMigrationService {
+ /**
+ * Migrate Permissions.
+ *
+ * <p>Migrate a batch of permissions from a workspace local group to an account group.
+ */
+ PermissionMigrationResponse migratePermissions(
+ PermissionMigrationRequest permissionMigrationRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
index dc90904be..55eb37ad0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
@@ -51,6 +51,9 @@
* <p>For the mapping of the required permissions for specific actions or abilities and other
* important information, see [Access Control].
*
+ * <p>Note that to manage access control on service principals, use **[Account Access Control
+ * Proxy](:service:accountaccesscontrolproxy)**.
+ *
* <p>[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
*/
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java
index f559eb51d..6643d1063 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java
@@ -20,7 +20,7 @@ public class PermissionsRequest {
/**
* The type of the request object. Can be one of the following: authorization, clusters,
* cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
- * registered-models, repos, serving-endpoints, or sql-warehouses.
+ * registered-models, repos, serving-endpoints, or warehouses.
*/
private String requestObjectType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
index ee7def206..b78dbb72d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
@@ -48,6 +48,9 @@
* <p>For the mapping of the required permissions for specific actions or abilities and other
* important information, see [Access Control].
*
+ * <p>Note that to manage access control on service principals, use **[Account Access Control
+ * Proxy](:service:accountaccesscontrolproxy)**.
+ *
* <p>[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
*
* <p>This is the high-level interface, that contains generated methods.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java
index ce5380f71..e3810f4d8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java
@@ -13,7 +13,7 @@ public class PrincipalOutput {
@JsonProperty("display_name")
private String displayName;
- /** The group name of the groupl. Present only if the principal is a group. */
+ /** The group name of the group. Present only if the principal is a group. */
@JsonProperty("group_name")
private String groupName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java
index 3089c6e8b..8a0c4bd90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java
@@ -10,7 +10,11 @@
@Generated
public class UpdateWorkspaceAssignments {
- /** Array of permissions assignments to update on the workspace. */
+ /**
+ * Array of permissions assignments to update on the workspace. Note that excluding this field
+ * will have the same effect as providing an empty list which will result in the deletion of all
+ * permissions for the principal.
+ */
@JsonProperty("permissions")
private Collection<WorkspacePermission> permissions;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java
index 7b7e56a80..c04150b74 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -68,12 +69,13 @@ public Iterable<PermissionAssignment> list(long workspaceId) {
* workspace.
*/
public Iterable<PermissionAssignment> list(ListWorkspaceAssignmentRequest request) {
- return impl.list(request).getPermissionAssignments();
+ return new Paginator<>(
+ request, impl::list, PermissionAssignments::getPermissionAssignments, response -> null);
}
- public void update(
+ public PermissionAssignment update(
long workspaceId, long principalId, Collection<WorkspacePermission> permissions) {
- update(
+ return update(
new UpdateWorkspaceAssignments()
.setWorkspaceId(workspaceId)
.setPrincipalId(principalId)
@@ -86,8 +88,8 @@ public void update(
* Creates or updates the workspace permissions assignment in a given account and workspace for
* the specified principal.
*/
- public void update(UpdateWorkspaceAssignments request) {
- impl.update(request);
+ public PermissionAssignment update(UpdateWorkspaceAssignments request) {
+ return impl.update(request);
}
public WorkspaceAssignmentService impl() {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java
index 4372b5dd9..49ffc9ca8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java
@@ -49,7 +49,7 @@ public PermissionAssignments list(ListWorkspaceAssignmentRequest request) {
}
@Override
- public void update(UpdateWorkspaceAssignments request) {
+ public PermissionAssignment update(UpdateWorkspaceAssignments request) {
String path =
String.format(
"/api/2.0/accounts/%s/workspaces/%s/permissionassignments/principals/%s",
@@ -57,6 +57,6 @@ public void update(UpdateWorkspaceAssignments request) {
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
headers.put("Content-Type", "application/json");
- apiClient.PUT(path, request, WorkspaceAssignmentsUpdated.class, headers);
+ return apiClient.PUT(path, request, PermissionAssignment.class, headers);
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java
index 45bf51b24..4ec0dca6d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java
@@ -42,5 +42,5 @@ public interface WorkspaceAssignmentService {
* Creates or updates the workspace permissions assignment in a given account and workspace for
* the specified principal.
*/
- void update(UpdateWorkspaceAssignments updateWorkspaceAssignments);
+ PermissionAssignment update(UpdateWorkspaceAssignments updateWorkspaceAssignments);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
index 6367acafb..a4f4827aa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
@@ -47,6 +47,10 @@ public class BaseRun {
@JsonProperty("creator_user_name")
private String creatorUserName;
+ /** Description of the run */
+ @JsonProperty("description")
+ private String description;
+
/**
* The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
* field is set to 0 if the job is still running.
@@ -109,6 +113,14 @@ public class BaseRun {
@JsonProperty("overriding_parameters")
private RunParameters overridingParameters;
+ /** The time in milliseconds that the run has spent in the queue. */
+ @JsonProperty("queue_duration")
+ private Long queueDuration;
+
+ /** The repair history of the run. */
+ @JsonProperty("repair_history")
+ private Collection<RepairHistoryItem> repairHistory;
+
/** The time in milliseconds it took the job run and all of its repairs to finish. */
@JsonProperty("run_duration")
private Long runDuration;
@@ -126,9 +138,9 @@ public class BaseRun {
private String runPageUrl;
/**
- * * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow
- * run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with
- * :method:jobs/submit.
+ * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
+ * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
+ * run. A run created with :method:jobs/submit.
*
*