diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 7c42f6dc2..4343d612b 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -e7b127cb07af8dd4d8c61c7cc045c8910cdbb02a \ No newline at end of file +22f09783eb8a84d52026f856be3b2068f9498db3 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 438ff6f6d..2c2d21e9c 100755 --- a/.gitattributes +++ b/.gitattributes @@ -259,8 +259,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetas databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimization.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimizationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java linguist-generated=true @@ -1189,6 +1187,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index e6b3f42a4..339704ee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -998,7 +998,18 @@ public ServingEndpointsAPI servingEndpoints() { return servingEndpointsAPI; } - /** // TODO(yuyuan.tang) to add the description for the setting */ + /** + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. + * + *

Through this API, users can retrieve, set, or modify the default namespace used when queries + * do not reference a fully qualified three-level name. For example, if you use the API to set + * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the + * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). + * + *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, + * the default namespace only applies when using Unity Catalog-enabled compute. + */ public SettingsAPI settings() { return settingsAPI; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java index cd8c6e8a6..7e43d4347 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java @@ -92,22 +92,6 @@ public void delete(DeleteMetastoreRequest request) { impl.delete(request); } - public UpdatePredictiveOptimizationResponse enableOptimization( - String metastoreId, boolean enable) { - return enableOptimization( - new UpdatePredictiveOptimization().setMetastoreId(metastoreId).setEnable(enable)); - } - - /** - * Toggle predictive optimization on the metastore. - * - *

Enables or disables predictive optimization on the metastore. - */ - public UpdatePredictiveOptimizationResponse enableOptimization( - UpdatePredictiveOptimization request) { - return impl.enableOptimization(request); - } - public MetastoreInfo get(String id) { return get(new GetMetastoreRequest().setId(id)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java index b533753bc..7fb32ce50 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java @@ -50,16 +50,6 @@ public void delete(DeleteMetastoreRequest request) { apiClient.DELETE(path, request, Void.class, headers); } - @Override - public UpdatePredictiveOptimizationResponse enableOptimization( - UpdatePredictiveOptimization request) { - String path = "/api/2.0/predictive-optimization/service"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, UpdatePredictiveOptimizationResponse.class, headers); - } - @Override public MetastoreInfo get(GetMetastoreRequest request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java index 9bcbdd5dc..ab8ed1a27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java @@ -55,14 +55,6 @@ public interface MetastoresService { */ void delete(DeleteMetastoreRequest deleteMetastoreRequest); - /** - * Toggle predictive 
optimization on the metastore. - * - *

Enables or disables predictive optimization on the metastore. - */ - UpdatePredictiveOptimizationResponse enableOptimization( - UpdatePredictiveOptimization updatePredictiveOptimization); - /** * Get a metastore. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java index b725c599a..73a6dadaf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java @@ -10,7 +10,9 @@ @Generated public class TableConstraintList { - /** List of table constraints. */ + /** + * List of table constraints. Note: this field is not set in the output of the __listTables__ API. + */ @JsonProperty("table_constraints") private Collection tableConstraints; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java index 01c2e14e1..d88439983 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -82,6 +82,13 @@ public class TableInfo { @JsonProperty("owner") private String owner; + /** + * The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, + * Streaming Table, etc.). + */ + @JsonProperty("pipeline_id") + private String pipelineId; + /** A map of key-value properties attached to the securable. 
*/ @JsonProperty("properties") private Map properties; @@ -301,6 +308,15 @@ public String getOwner() { return owner; } + public TableInfo setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + public TableInfo setProperties(Map properties) { this.properties = properties; return this; @@ -441,6 +457,7 @@ public boolean equals(Object o) { && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) && Objects.equals(owner, that.owner) + && Objects.equals(pipelineId, that.pipelineId) && Objects.equals(properties, that.properties) && Objects.equals(rowFilter, that.rowFilter) && Objects.equals(schemaName, that.schemaName) @@ -476,6 +493,7 @@ public int hashCode() { metastoreId, name, owner, + pipelineId, properties, rowFilter, schemaName, @@ -511,6 +529,7 @@ public String toString() { .add("metastoreId", metastoreId) .add("name", name) .add("owner", owner) + .add("pipelineId", pipelineId) .add("properties", properties) .add("rowFilter", rowFilter) .add("schemaName", schemaName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java index c73cd182d..847120306 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java @@ -14,6 +14,10 @@ public class UpdateCatalog { @JsonProperty("comment") private String comment; + /** Whether predictive optimization should be enabled for this object and objects under it. */ + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + /** * Whether the current securable is accessible from all workspaces or a specific set of * workspaces. 
@@ -42,6 +46,16 @@ public String getComment() { return comment; } + public UpdateCatalog setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + public UpdateCatalog setIsolationMode(IsolationMode isolationMode) { this.isolationMode = isolationMode; return this; @@ -84,6 +98,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; UpdateCatalog that = (UpdateCatalog) o; return Objects.equals(comment, that.comment) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(name, that.name) && Objects.equals(owner, that.owner) @@ -92,13 +107,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(comment, isolationMode, name, owner, properties); + return Objects.hash( + comment, enablePredictiveOptimization, isolationMode, name, owner, properties); } @Override public String toString() { return new ToStringer(UpdateCatalog.class) .add("comment", comment) + .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("isolationMode", isolationMode) .add("name", name) .add("owner", owner) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimization.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimization.java deleted file mode 100755 index b4fd46f00..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimization.java +++ /dev/null @@ -1,58 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class UpdatePredictiveOptimization { - /** Whether to enable predictive optimization on the metastore. */ - @JsonProperty("enable") - private Boolean enable; - - /** Unique identifier of metastore. */ - @JsonProperty("metastore_id") - private String metastoreId; - - public UpdatePredictiveOptimization setEnable(Boolean enable) { - this.enable = enable; - return this; - } - - public Boolean getEnable() { - return enable; - } - - public UpdatePredictiveOptimization setMetastoreId(String metastoreId) { - this.metastoreId = metastoreId; - return this; - } - - public String getMetastoreId() { - return metastoreId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpdatePredictiveOptimization that = (UpdatePredictiveOptimization) o; - return Objects.equals(enable, that.enable) && Objects.equals(metastoreId, that.metastoreId); - } - - @Override - public int hashCode() { - return Objects.hash(enable, metastoreId); - } - - @Override - public String toString() { - return new ToStringer(UpdatePredictiveOptimization.class) - .add("enable", enable) - .add("metastoreId", metastoreId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimizationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimizationResponse.java deleted file mode 100755 index a370f66b3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimizationResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class UpdatePredictiveOptimizationResponse { - /** Whether predictive optimization is enabled on the metastore. */ - @JsonProperty("state") - private Boolean state; - - /** - * Id of the predictive optimization service principal. This will be the user used to run - * optimization tasks. - */ - @JsonProperty("user_id") - private Long userId; - - /** Name of the predictive optimization service principal. */ - @JsonProperty("username") - private String username; - - public UpdatePredictiveOptimizationResponse setState(Boolean state) { - this.state = state; - return this; - } - - public Boolean getState() { - return state; - } - - public UpdatePredictiveOptimizationResponse setUserId(Long userId) { - this.userId = userId; - return this; - } - - public Long getUserId() { - return userId; - } - - public UpdatePredictiveOptimizationResponse setUsername(String username) { - this.username = username; - return this; - } - - public String getUsername() { - return username; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpdatePredictiveOptimizationResponse that = (UpdatePredictiveOptimizationResponse) o; - return Objects.equals(state, that.state) - && Objects.equals(userId, that.userId) - && Objects.equals(username, that.username); - } - - @Override - public int hashCode() { - return Objects.hash(state, userId, username); - } - - @Override - public String toString() { - return new ToStringer(UpdatePredictiveOptimizationResponse.class) - .add("state", state) - .add("userId", userId) - .add("username", username) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java index 35f9c008a..ae705eadf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java @@ -14,6 +14,10 @@ public class UpdateSchema { @JsonProperty("comment") private String comment; + /** Whether predictive optimization should be enabled for this object and objects under it. */ + @JsonProperty("enable_predictive_optimization") + private EnablePredictiveOptimization enablePredictiveOptimization; + /** Full name of the schema. */ private String fullName; @@ -38,6 +42,16 @@ public String getComment() { return comment; } + public UpdateSchema setEnablePredictiveOptimization( + EnablePredictiveOptimization enablePredictiveOptimization) { + this.enablePredictiveOptimization = enablePredictiveOptimization; + return this; + } + + public EnablePredictiveOptimization getEnablePredictiveOptimization() { + return enablePredictiveOptimization; + } + public UpdateSchema setFullName(String fullName) { this.fullName = fullName; return this; @@ -80,6 +94,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; UpdateSchema that = (UpdateSchema) o; return Objects.equals(comment, that.comment) + && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(fullName, that.fullName) && Objects.equals(name, that.name) && Objects.equals(owner, that.owner) @@ -88,13 +103,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(comment, fullName, name, owner, properties); + return Objects.hash(comment, enablePredictiveOptimization, fullName, name, owner, properties); } @Override public String toString() { return new ToStringer(UpdateSchema.class) .add("comment", comment) + .add("enablePredictiveOptimization", enablePredictiveOptimization) 
.add("fullName", fullName) .add("name", name) .add("owner", owner) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java index d621fce8c..72263ebee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java @@ -106,8 +106,8 @@ public class AwsAttributes { * deployment resides in the "us-east-1" region. This is an optional field at cluster creation, * and if not specified, a default zone will be used. If the zone specified is "auto", will try to * place cluster in a zone with high availability, and will retry placement in a different AZ if - * there is not enough capacity. See [[AutoAZHelper.scala]] for more details. The list of - * available zones as well as the default value can be found by using the `List Zones`_ method. + * there is not enough capacity. The list of available zones as well as the default value can be + * found by using the `List Zones` method. */ @JsonProperty("zone_id") private String zoneId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java index 4c9c5e094..934047642 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java @@ -17,7 +17,10 @@ public class ClusterAccessControlRequest { @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. 
+ */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java index 831e59320..aef055912 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java @@ -17,7 +17,10 @@ public class ClusterPolicyAccessControlRequest { @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java index 0e581cff5..8de3541cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java @@ -10,7 +10,12 @@ @Generated public class CreatePolicy { - /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */ + /** + * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. + * + *

[Databricks Cluster Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html + */ @JsonProperty("definition") private String definition; @@ -37,11 +42,14 @@ public class CreatePolicy { private String name; /** - * Policy definition JSON document expressed in Databricks Policy Definition Language. The JSON + * Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON * document must be passed as a string and cannot be embedded in the requests. * *

You can use this to customize the policy definition inherited from the policy family. Policy * rules specified here are merged into the inherited policy definition. + * + *

[Databricks Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ @JsonProperty("policy_family_definition_overrides") private String policyFamilyDefinitionOverrides; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java index efc04bfe8..6dd048db9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java @@ -10,7 +10,12 @@ @Generated public class EditPolicy { - /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */ + /** + * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. + * + *

[Databricks Cluster Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html + */ @JsonProperty("definition") private String definition; @@ -37,11 +42,14 @@ public class EditPolicy { private String name; /** - * Policy definition JSON document expressed in Databricks Policy Definition Language. The JSON + * Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON * document must be passed as a string and cannot be embedded in the requests. * *

You can use this to customize the policy definition inherited from the policy family. Policy * rules specified here are merged into the inherited policy definition. + * + *

[Databricks Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ @JsonProperty("policy_family_definition_overrides") private String policyFamilyDefinitionOverrides; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java index 0a56ec462..79c7449ad 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java @@ -17,7 +17,10 @@ public class InstancePoolAccessControlRequest { @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java index bd8d5545d..5858ba750 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java @@ -39,7 +39,7 @@ public class InstancePoolAwsAttributes { * the Databricks deployment. For example, "us-west-2a" is not a valid zone id if the Databricks * deployment resides in the "us-east-1" region. This is an optional field at cluster creation, * and if not specified, a default zone will be used. The list of available zones as well as the - * default value can be found by using the `List Zones`_ method. 
+ * default value can be found by using the `List Zones` method. */ @JsonProperty("zone_id") private String zoneId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java index b61c2e6d4..fff3fb236 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java @@ -21,7 +21,12 @@ public class Policy { @JsonProperty("creator_user_name") private String creatorUserName; - /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */ + /** + * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. + * + *

[Databricks Cluster Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html + */ @JsonProperty("definition") private String definition; @@ -55,11 +60,14 @@ public class Policy { private String name; /** - * Policy definition JSON document expressed in Databricks Policy Definition Language. The JSON + * Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON * document must be passed as a string and cannot be embedded in the requests. * *

You can use this to customize the policy definition inherited from the policy family. Policy * rules specified here are merged into the inherited policy definition. + * + *

[Databricks Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html */ @JsonProperty("policy_family_definition_overrides") private String policyFamilyDefinitionOverrides; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java index 9676f19c3..c40033e13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java @@ -9,7 +9,12 @@ @Generated public class PolicyFamily { - /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */ + /** + * Policy definition document expressed in [Databricks Cluster Policy Definition Language]. + * + *

[Databricks Cluster Policy Definition Language]: + * https://docs.databricks.com/administration-guide/clusters/policy-definition.html + */ @JsonProperty("definition") private String definition; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java index 59f3b9fc0..7db3e07d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java @@ -17,7 +17,10 @@ public class AccessControlRequest { @JsonProperty("permission_level") private PermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java index 3d52179b1..be8ce4b90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,7 +73,20 @@ public Group get(GetAccountGroupRequest request) { *

Gets all details of the groups associated with the Databricks account. */ public Iterable list(ListAccountGroupsRequest request) { - return impl.list(request).getResources(); + request.setStartIndex(0L); + return new Paginator<>( + request, + impl::list, + ListGroupsResponse::getResources, + response -> { + Long offset = request.getStartIndex(); + if (offset == null) { + offset = 0L; + } + offset += response.getResources().size(); + return request.setStartIndex(offset); + }) + .withDedupe(Group::getId); } public void patch(String id) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java index 793f0ede7..1d43f5497 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -70,7 +71,20 @@ public ServicePrincipal get(GetAccountServicePrincipalRequest request) { *

Gets the set of service principals associated with a Databricks account. */ public Iterable list(ListAccountServicePrincipalsRequest request) { - return impl.list(request).getResources(); + request.setStartIndex(0L); + return new Paginator<>( + request, + impl::list, + ListServicePrincipalResponse::getResources, + response -> { + Long offset = request.getStartIndex(); + if (offset == null) { + offset = 0L; + } + offset += response.getResources().size(); + return request.setStartIndex(offset); + }) + .withDedupe(ServicePrincipal::getId); } public void patch(String id) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java index c7086dd8f..2b0bc68e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -76,7 +77,20 @@ public User get(GetAccountUserRequest request) { *

Gets details for all the users associated with a Databricks account. */ public Iterable list(ListAccountUsersRequest request) { - return impl.list(request).getResources(); + request.setStartIndex(0L); + return new Paginator<>( + request, + impl::list, + ListUsersResponse::getResources, + response -> { + Long offset = request.getStartIndex(); + if (offset == null) { + offset = 0L; + } + offset += response.getResources().size(); + return request.setStartIndex(offset); + }) + .withDedupe(User::getId); } public void patch(String id) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java index a854c1569..89f132dc5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,7 +73,20 @@ public Group get(GetGroupRequest request) { *

Gets all details of the groups associated with the Databricks workspace. */ public Iterable list(ListGroupsRequest request) { - return impl.list(request).getResources(); + request.setStartIndex(0L); + return new Paginator<>( + request, + impl::list, + ListGroupsResponse::getResources, + response -> { + Long offset = request.getStartIndex(); + if (offset == null) { + offset = 0L; + } + offset += response.getResources().size(); + return request.setStartIndex(offset); + }) + .withDedupe(Group::getId); } public void patch(String id) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java index 0cbf882d8..c0508f67c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java @@ -17,7 +17,10 @@ public class PasswordAccessControlRequest { @JsonProperty("permission_level") private PasswordPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. 
+ */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java index d3f7b9b20..12f27d29b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -70,7 +71,20 @@ public ServicePrincipal get(GetServicePrincipalRequest request) { *

Gets the set of service principals associated with a Databricks workspace. */ public Iterable list(ListServicePrincipalsRequest request) { - return impl.list(request).getResources(); + request.setStartIndex(0L); + return new Paginator<>( + request, + impl::list, + ListServicePrincipalResponse::getResources, + response -> { + Long offset = request.getStartIndex(); + if (offset == null) { + offset = 0L; + } + offset += response.getResources().size(); + return request.setStartIndex(offset); + }) + .withDedupe(ServicePrincipal::getId); } public void patch(String id) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java index 6fd832f44..ca36ba4cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -95,7 +96,20 @@ public PasswordPermissions getPermissions() { *

Gets details for all the users associated with a Databricks workspace. */ public Iterable list(ListUsersRequest request) { - return impl.list(request).getResources(); + request.setStartIndex(0L); + return new Paginator<>( + request, + impl::list, + ListUsersResponse::getResources, + response -> { + Long offset = request.getStartIndex(); + if (offset == null) { + offset = 0L; + } + offset += response.getResources().size(); + return request.setStartIndex(offset); + }) + .withDedupe(User::getId); } public void patch(String id) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index e5cef35f1..3115f4c5b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -30,6 +30,12 @@ public class CreateJob { @JsonProperty("deployment") private JobDeployment deployment; + /** + * An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding. + */ + @JsonProperty("description") + private String description; + /** * Edit mode of the job. * @@ -92,7 +98,7 @@ public class CreateJob { * active runs. However, from then on, new runs are skipped unless there are fewer than 3 active * runs. * - *

This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped. + *

This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. */ @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; @@ -199,6 +205,15 @@ public JobDeployment getDeployment() { return deployment; } + public CreateJob setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + public CreateJob setEditMode(CreateJobEditMode editMode) { this.editMode = editMode; return this; @@ -370,6 +385,7 @@ public boolean equals(Object o) { && Objects.equals(compute, that.compute) && Objects.equals(continuous, that.continuous) && Objects.equals(deployment, that.deployment) + && Objects.equals(description, that.description) && Objects.equals(editMode, that.editMode) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(format, that.format) @@ -397,6 +413,7 @@ public int hashCode() { compute, continuous, deployment, + description, editMode, emailNotifications, format, @@ -424,6 +441,7 @@ public String toString() { .add("compute", compute) .add("continuous", continuous) .add("deployment", deployment) + .add("description", description) .add("editMode", editMode) .add("emailNotifications", emailNotifications) .add("format", format) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java index 7a60d9919..3bce29c82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java @@ -17,7 +17,10 @@ public class JobAccessControlRequest { @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. 
Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index 5ef99a058..72dda500d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -26,6 +26,12 @@ public class JobSettings { @JsonProperty("deployment") private JobDeployment deployment; + /** + * An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding. + */ + @JsonProperty("description") + private String description; + /** * Edit mode of the job. * @@ -88,7 +94,7 @@ public class JobSettings { * active runs. However, from then on, new runs are skipped unless there are fewer than 3 active * runs. * - *

This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped. + *

This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. */ @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; @@ -185,6 +191,15 @@ public JobDeployment getDeployment() { return deployment; } + public JobSettings setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + public JobSettings setEditMode(JobSettingsEditMode editMode) { this.editMode = editMode; return this; @@ -355,6 +370,7 @@ public boolean equals(Object o) { return Objects.equals(compute, that.compute) && Objects.equals(continuous, that.continuous) && Objects.equals(deployment, that.deployment) + && Objects.equals(description, that.description) && Objects.equals(editMode, that.editMode) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(format, that.format) @@ -381,6 +397,7 @@ public int hashCode() { compute, continuous, deployment, + description, editMode, emailNotifications, format, @@ -407,6 +424,7 @@ public String toString() { .add("compute", compute) .add("continuous", continuous) .add("deployment", deployment) + .add("description", description) .add("editMode", editMode) .add("emailNotifications", emailNotifications) .add("format", format) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java index 700c97ed1..906deda04 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java @@ -17,7 +17,10 @@ public class ExperimentAccessControlRequest { @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active 
service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java index 22f47014e..bc6ba6c45 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java @@ -17,7 +17,10 @@ public class RegisteredModelAccessControlRequest { @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java index aa7160397..1f421fb13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java @@ -17,7 +17,10 @@ public class PipelineAccessControlRequest { @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. 
+ */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java index ac4bb3b81..63893df68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java @@ -17,7 +17,10 @@ public class ServingEndpointAccessControlRequest { @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java index f0ad5974e..eb12d3fd3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java @@ -7,7 +7,15 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Default namespace setting. */ +/** + * This represents the setting configuration for the default namespace in the Databricks workspace. + * Setting the default catalog for the workspace determines the catalog that is used when queries do + * not reference a fully qualified 3 level name. 
For example, if the default catalog is set to + * 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object + * 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a + * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only + * applies when using Unity Catalog-enabled compute. + */ @Generated public class DefaultNamespaceSetting { /** @@ -28,7 +36,8 @@ public class DefaultNamespaceSetting { /** * Name of the corresponding setting. This field is populated in the response, but it will not be * respected even if it's set in the request body. The setting name in the path parameter will be - * respected instead. + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. */ @JsonProperty("setting_name") private String settingName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java index 2356849af..6580a24bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java @@ -7,7 +7,7 @@ import com.databricks.sdk.support.ToStringer; import java.util.Objects; -/** Delete the default namespace */ +/** Delete the default namespace setting */ @Generated public class DeleteDefaultWorkspaceNamespaceRequest { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java new file mode 100755 index 000000000..23094c9fa --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListNccAzurePrivateEndpointRulesResponse { + /** */ + @JsonProperty("items") + private Collection items; + + /** + * A token that can be used to get the next page of results. If null, there are no more results to + * show. + */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListNccAzurePrivateEndpointRulesResponse setItems( + Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + public ListNccAzurePrivateEndpointRulesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNccAzurePrivateEndpointRulesResponse that = (ListNccAzurePrivateEndpointRulesResponse) o; + return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(items, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNccAzurePrivateEndpointRulesResponse.class) + .add("items", items) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java new file mode 100755 index 000000000..85f8ff2ac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List network connectivity configurations */ +@Generated +public class ListNetworkConnectivityConfigurationsRequest { + /** Pagination token to go to next page based on previous query. */ + @QueryParam("page_token") + private String pageToken; + + public ListNetworkConnectivityConfigurationsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkConnectivityConfigurationsRequest that = + (ListNetworkConnectivityConfigurationsRequest) o; + return Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkConnectivityConfigurationsRequest.class) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java new file mode 100755 index 000000000..ac6100830 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListNetworkConnectivityConfigurationsResponse { + /** */ + @JsonProperty("items") + private Collection items; + + /** + * A token that can be used to get the next page of results. If null, there are no more results to + * show. + */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListNetworkConnectivityConfigurationsResponse setItems( + Collection items) { + this.items = items; + return this; + } + + public Collection getItems() { + return items; + } + + public ListNetworkConnectivityConfigurationsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListNetworkConnectivityConfigurationsResponse that = + (ListNetworkConnectivityConfigurationsResponse) o; + return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(items, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListNetworkConnectivityConfigurationsResponse.class) + .add("items", items) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java new file mode 100755 index 000000000..913bb35d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List private endpoint rules */ +@Generated +public class ListPrivateEndpointRulesRequest { + /** Your Network Connectivity Configuration ID. */ + private String networkConnectivityConfigId; + + /** Pagination token to go to next page based on previous query. */ + @QueryParam("page_token") + private String pageToken; + + public ListPrivateEndpointRulesRequest setNetworkConnectivityConfigId( + String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public ListPrivateEndpointRulesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListPrivateEndpointRulesRequest that = (ListPrivateEndpointRulesRequest) o; + return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(networkConnectivityConfigId, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListPrivateEndpointRulesRequest.class) + 
.add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java index 9c8e6c176..88175b208 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -161,6 +162,53 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRule return impl.getPrivateEndpointRule(request); } + /** + * List network connectivity configurations. + * + *

Gets an array of network connectivity configurations. + */ + public Iterable listNetworkConnectivityConfigurations( + ListNetworkConnectivityConfigurationsRequest request) { + return new Paginator<>( + request, + impl::listNetworkConnectivityConfigurations, + ListNetworkConnectivityConfigurationsResponse::getItems, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listPrivateEndpointRules( + String networkConnectivityConfigId) { + return listPrivateEndpointRules( + new ListPrivateEndpointRulesRequest() + .setNetworkConnectivityConfigId(networkConnectivityConfigId)); + } + + /** + * List private endpoint rules. + * + *

Gets an array of private endpoint rules. + */ + public Iterable listPrivateEndpointRules( + ListPrivateEndpointRulesRequest request) { + return new Paginator<>( + request, + impl::listPrivateEndpointRules, + ListNccAzurePrivateEndpointRulesResponse::getItems, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + public NetworkConnectivityService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java index 1639c2205..250b3f12d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java @@ -90,4 +90,28 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRule headers.put("Accept", "application/json"); return apiClient.GET(path, request, NccAzurePrivateEndpointRule.class, headers); } + + @Override + public ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurations( + ListNetworkConnectivityConfigurationsRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/network-connectivity-configs", apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET( + path, request, ListNetworkConnectivityConfigurationsResponse.class, headers); + } + + @Override + public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( + ListPrivateEndpointRulesRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules", + apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); + Map headers = new HashMap<>(); + 
headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListNccAzurePrivateEndpointRulesResponse.class, headers); + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java index 62cda409b..e5ed0e673 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java @@ -90,4 +90,20 @@ NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( */ NccAzurePrivateEndpointRule getPrivateEndpointRule( GetPrivateEndpointRuleRequest getPrivateEndpointRuleRequest); + + /** + * List network connectivity configurations. + * + *

Gets an array of network connectivity configurations. + */ + ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurations( + ListNetworkConnectivityConfigurationsRequest listNetworkConnectivityConfigurationsRequest); + + /** + * List private endpoint rules. + * + *

Gets an array of private endpoint rules. + */ + ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( + ListPrivateEndpointRulesRequest listPrivateEndpointRulesRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java index ba2da921a..80069946d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java @@ -7,7 +7,7 @@ import com.databricks.sdk.support.ToStringer; import java.util.Objects; -/** Get the default namespace */ +/** Get the default namespace setting */ @Generated public class ReadDefaultWorkspaceNamespaceRequest { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index cf0073d7d..3bfc62016 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -6,7 +6,18 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** // TODO(yuyuan.tang) to add the description for the setting */ +/** + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. + * + *

Through this API, users can retrieve, set, or modify the default namespace used when queries + * do not reference a fully qualified three-level name. For example, if you use the API to set + * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the + * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). + * + *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, + * the default namespace only applies when using Unity Catalog-enabled compute. + */ @Generated public class SettingsAPI { private static final Logger LOG = LoggerFactory.getLogger(SettingsAPI.class); @@ -29,9 +40,12 @@ public DeleteDefaultWorkspaceNamespaceResponse deleteDefaultWorkspaceNamespace(S } /** - * Delete the default namespace. + * Delete the default namespace setting. * - *

Deletes the default namespace. + *

Deletes the default namespace setting for the workspace. A fresh etag needs to be provided + * in DELETE requests (as a query parameter). The etag can be retrieved by making a GET request + * before the DELETE request. If the setting is updated/deleted concurrently, DELETE will fail + * with 409 and the request will need to be retried by using the fresh etag in the 409 response. */ public DeleteDefaultWorkspaceNamespaceResponse deleteDefaultWorkspaceNamespace( DeleteDefaultWorkspaceNamespaceRequest request) { @@ -43,9 +57,9 @@ public DefaultNamespaceSetting readDefaultWorkspaceNamespace(String etag) { } /** - * Get the default namespace. + * Get the default namespace setting. * - *

Gets the default namespace. + *

Gets the default namespace setting. */ public DefaultNamespaceSetting readDefaultWorkspaceNamespace( ReadDefaultWorkspaceNamespaceRequest request) { @@ -53,13 +67,14 @@ public DefaultNamespaceSetting readDefaultWorkspaceNamespace( } /** - * Updates the default namespace setting. + * Update the default namespace setting. * *

Updates the default namespace setting for the workspace. A fresh etag needs to be provided - * in PATCH requests (as part the setting field). The etag can be retrieved by making a GET + * in PATCH requests (as part of the setting field). The etag can be retrieved by making a GET * request before the PATCH request. Note that if the setting does not exist, GET will return a * NOT_FOUND error and the etag will be present in the error response, which should be set in the - * PATCH request. + * PATCH request. If the setting is updated concurrently, PATCH will fail with 409 and the request + * will need to be retried by using the fresh etag in the 409 response. */ public DefaultNamespaceSetting updateDefaultWorkspaceNamespace( UpdateDefaultWorkspaceNamespaceRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java index 4775132d8..bfc401bae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java @@ -4,7 +4,16 @@ import com.databricks.sdk.support.Generated; /** - * // TODO(yuyuan.tang) to add the description for the setting + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. + * + *

Through this API, users can retrieve, set, or modify the default namespace used when queries + * do not reference a fully qualified three-level name. For example, if you use the API to set + * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the + * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). + * + *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, + * the default namespace only applies when using Unity Catalog-enabled compute. * *

This is the high-level interface, that contains generated methods. * @@ -13,29 +22,33 @@ @Generated public interface SettingsService { /** - * Delete the default namespace. + * Delete the default namespace setting. * - *

Deletes the default namespace. + *

Deletes the default namespace setting for the workspace. A fresh etag needs to be provided + * in DELETE requests (as a query parameter). The etag can be retrieved by making a GET request + * before the DELETE request. If the setting is updated/deleted concurrently, DELETE will fail + * with 409 and the request will need to be retried by using the fresh etag in the 409 response. */ DeleteDefaultWorkspaceNamespaceResponse deleteDefaultWorkspaceNamespace( DeleteDefaultWorkspaceNamespaceRequest deleteDefaultWorkspaceNamespaceRequest); /** - * Get the default namespace. + * Get the default namespace setting. * - *

Gets the default namespace. + *

Gets the default namespace setting. */ DefaultNamespaceSetting readDefaultWorkspaceNamespace( ReadDefaultWorkspaceNamespaceRequest readDefaultWorkspaceNamespaceRequest); /** - * Updates the default namespace setting. + * Update the default namespace setting. * *

Updates the default namespace setting for the workspace. A fresh etag needs to be provided - * in PATCH requests (as part the setting field). The etag can be retrieved by making a GET + * in PATCH requests (as part of the setting field). The etag can be retrieved by making a GET * request before the PATCH request. Note that if the setting does not exist, GET will return a * NOT_FOUND error and the etag will be present in the error response, which should be set in the - * PATCH request. + * PATCH request. If the setting is updated concurrently, PATCH will fail with 409 and the request + * will need to be retried by using the fresh etag in the 409 response. */ DefaultNamespaceSetting updateDefaultWorkspaceNamespace( UpdateDefaultWorkspaceNamespaceRequest updateDefaultWorkspaceNamespaceRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java index 93992ed80..baf5d6388 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java @@ -17,7 +17,10 @@ public class TokenAccessControlRequest { @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. 
+ */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java index d3128a794..5b104f7ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java @@ -7,25 +7,33 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Updates the default namespace setting */ +/** Update the default namespace setting */ @Generated public class UpdateDefaultWorkspaceNamespaceRequest { - /** This should always be set to true for Settings RPCs. Added for AIP compliance. */ + /** This should always be set to true for Settings API. Added for AIP compliance. */ @JsonProperty("allow_missing") private Boolean allowMissing; /** - * Field mask required to be passed into the PATCH request. Field mask specifies which fields of - * the setting payload will be updated. For example, for Default Namespace setting, the field mask - * is supposed to contain fields from the DefaultNamespaceSetting.namespace schema. + * Field mask is required to be passed into the PATCH request. Field mask specifies which fields + * of the setting payload will be updated. For example, for Default Namespace setting, the field + * mask is supposed to contain fields from the DefaultNamespaceSetting.namespace schema. * - *

The field mask needs to supplied as single string. To specify multiple fields in the field - * mask, use comma as the seperator (no space). + *

The field mask needs to be supplied as a single string. To specify multiple fields in the + field mask, use comma as the separator (no space). */ @JsonProperty("field_mask") private String fieldMask; - /** Default namespace setting. */ + /** + * This represents the setting configuration for the default namespace in the Databricks + * workspace. Setting the default catalog for the workspace determines the catalog that is used + * when queries do not reference a fully qualified 3 level name. For example, if the default + * catalog is set to 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object + * 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a + * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only + * applies when using Unity Catalog-enabled compute. + */ @JsonProperty("setting") private DefaultNamespaceSetting setting; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java index 645815abb..381139da8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java @@ -74,6 +74,15 @@ public class SharedDataObject { @JsonProperty("status") private SharedDataObjectStatus status; + /** + * A user-provided new name for the data object within the share. If this new name is not + * provided, the object's original name will be used as the `string_shared_as` name. The + * `string_shared_as` name must be unique within a share. For notebooks, the new name should be + * the new notebook file name. 
+ */ + @JsonProperty("string_shared_as") + private String stringSharedAs; + public SharedDataObject setAddedAt(Long addedAt) { this.addedAt = addedAt; return this; @@ -174,6 +183,15 @@ public SharedDataObjectStatus getStatus() { return status; } + public SharedDataObject setStringSharedAs(String stringSharedAs) { + this.stringSharedAs = stringSharedAs; + return this; + } + + public String getStringSharedAs() { + return stringSharedAs; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -189,7 +207,8 @@ public boolean equals(Object o) { && Objects.equals(partitions, that.partitions) && Objects.equals(sharedAs, that.sharedAs) && Objects.equals(startVersion, that.startVersion) - && Objects.equals(status, that.status); + && Objects.equals(status, that.status) + && Objects.equals(stringSharedAs, that.stringSharedAs); } @Override @@ -205,7 +224,8 @@ public int hashCode() { partitions, sharedAs, startVersion, - status); + status, + stringSharedAs); } @Override @@ -222,6 +242,7 @@ public String toString() { .add("sharedAs", sharedAs) .add("startVersion", startVersion) .add("status", status) + .add("stringSharedAs", stringSharedAs) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java index 261715dde..594238f8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java @@ -17,7 +17,10 @@ public class WarehouseAccessControlRequest { @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. 
+ */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java index 947d58587..07720c444 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java @@ -6,6 +6,7 @@ @Generated public enum ExportFormat { + AUTO, DBC, HTML, JUPYTER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java index b20a6cf9a..78a1cf9c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java @@ -15,17 +15,20 @@ public class ExportRequest { * *

The value is case sensitive. * - *

- `SOURCE`: The notebook is exported as source code. - `HTML`: The notebook is exported as - * an HTML file. - `JUPYTER`: The notebook is exported as a Jupyter/IPython Notebook file. - - * `DBC`: The notebook is exported in Databricks archive format. - `R_MARKDOWN`: The notebook is - * exported to R Markdown format. + *

- `SOURCE`: The notebook is exported as source code. Directory exports will not include + non-notebook entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The + notebook is exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in + Databricks archive format. Directory exports will not include non-notebook entries. - + `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory + is exported depending on the object's type. Directory exports will include notebooks and + workspace files. */ @QueryParam("format") private ExportFormat format; /** * The absolute path of the object or directory. Exporting a directory is only supported for the - `DBC` and `SOURCE` format. + `DBC`, `SOURCE`, and `AUTO` format. */ @QueryParam("path") private String path; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java index c306654e7..9a34d3307 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java @@ -17,7 +17,10 @@ public class RepoAccessControlRequest { @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. 
+ */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java index 9aaaff0c5..3d6832dd3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java @@ -17,7 +17,10 @@ public class WorkspaceObjectAccessControlRequest { @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel; - /** name of the service principal */ + /** + * Application ID of an active service principal. Setting this field requires the + * `servicePrincipal/user` role. + */ @JsonProperty("service_principal_name") private String servicePrincipalName;